exports.seed = async function (knex) {
// Deletes ALL existing entries
await knex("items").del();
await knex("items").insert([
{
item_id: 1,
item_name: "Shroom Variant One",
item_blurb:
"This is a blurb describing the scroom variant, a little bit of info to get the custy clicking.",
item_description:
"This is a unique mushroom strain only available on spore stash. It was created in our labs by cross pollenating classic psycilibin cubensis with the ever popular penis envy. The girthy stem and bulbous cap are not to be missed. The spores look very interesting under a microscope, especially after eating some of the fully grown mushrooms. But definitely don’t do that because it is illegal. One time I did this and ran around the house naked until my friends pleaded with me to put my clothes back on because I was “ruining their high”.",
item_category: "cubensis",
item_quantity: 20,
item_price: 22.0,
units_available: 20,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1570977042406-8fba157deca0?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1470&q=80",
created_date: "2022-03-17",
is_active: true,
swab_price: 22.0,
print_price: 22.0,
syringe_price: 23.0,
swab_available: true,
print_available: true,
syringe_available: true,
swab_quantity: 30,
print_quantity: 35,
syringe_quantity: 40,
},
{
item_id: 2,
item_name: "Another Kind of Shroom",
item_blurb:
"This is a blurb describing the scroom variant, a little bit of info to get the custy clicking.",
item_description:
"This is a unique mushroom strain only available on spore stash. It was created in our labs by cross pollenating classic psycilibin cubensis with the ever popular penis envy. The girthy stem and bulbous cap are not to be missed. The spores look very interesting under a microscope, especially after eating some of the fully grown mushrooms. But definitely don’t do that because it is illegal. One time I did this and ran around the house naked until my friends pleaded with me to put my clothes back on because I was “ruining their high”.",
item_category: "cubensis",
item_quantity: 30,
item_price: 18.0,
units_available: 30,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1542417938-a39dba3afeb4?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1471&q=80",
created_date: "2022-03-10",
is_active: true,
swab_price: 18.0,
syringe_price: 20.0,
swab_available: true,
print_available: false,
syringe_available: true,
swab_quantity: 20,
syringe_quantity: 25,
},
{
item_id: 3,
item_name: "Yet Another Shroom",
item_blurb:
"This is a blurb describing the scroom variant, a little bit of info to get the custy clicking.",
item_description:
"This is a unique mushroom strain only available on spore stash. It was created in our labs by cross pollenating classic psycilibin cubensis with the ever popular penis envy. The girthy stem and bulbous cap are not to be missed. The spores look very interesting under a microscope, especially after eating some of the fully grown mushrooms. But definitely don’t do that because it is illegal. One time I did this and ran around the house naked until my friends pleaded with me to put my clothes back on because I was “ruining their high”.",
item_category: "cubensis",
item_quantity: 40,
item_price: 24.0,
units_available: 40,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1570977042406-8fba157deca0?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1470&q=80",
created_date: "2022-03-03",
is_active: true,
swab_price: 24.0,
swab_available: true,
print_available: false,
syringe_available: false,
swab_quantity: 50,
},
{
item_id: 4,
item_name: "Exotic Shroom One",
item_blurb:
"This is a blurb describing the scroom variant, a little bit of info to get the custy clicking.",
item_description:
"This is a unique mushroom strain only available on spore stash. It was created in our labs by cross pollenating classic psycilibin cubensis with the ever popular penis envy. The girthy stem and bulbous cap are not to be missed. The spores look very interesting under a microscope, especially after eating some of the fully grown mushrooms. But definitely don’t do that because it is illegal. One time I did this and ran around the house naked until my friends pleaded with me to put my clothes back on because I was “ruining their high”.",
item_category: "exotic",
item_quantity: 20,
item_price: 25.0,
units_available: 20,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1519305124423-5ccccff55da9?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1738&q=80",
created_date: "2022-03-17",
is_active: true,
swab_price: 25.0,
print_price: 27.0,
syringe_price: 27.0,
swab_available: true,
print_available: true,
syringe_available: true,
swab_quantity: 30,
print_quantity: 35,
syringe_quantity: 40,
},
{
item_id: 5,
item_name: "Exotic Shroom Two",
item_blurb:
"This is a blurb describing the scroom variant, a little bit of info to get the custy clicking.",
item_description:
"This is a unique mushroom strain only available on spore stash. It was created in our labs by cross pollenating classic psycilibin cubensis with the ever popular penis envy. The girthy stem and bulbous cap are not to be missed. The spores look very interesting under a microscope, especially after eating some of the fully grown mushrooms. But definitely don’t do that because it is illegal. One time I did this and ran around the house naked until my friends pleaded with me to put my clothes back on because I was “ruining their high”.",
item_category: "exotic",
item_quantity: 20,
item_price: 20.0,
units_available: 20,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1505820013142-f86a3439c5b2?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1742&q=80",
created_date: "2022-03-17",
is_active: true,
swab_price: 20.0,
print_price: 24.0,
syringe_price: 24.0,
swab_available: true,
print_available: true,
syringe_available: true,
swab_quantity: 30,
print_quantity: 35,
syringe_quantity: 40,
},
{
item_id: 6,
item_name: "Sporestash Stickers",
item_blurb:
"This is a blurb describing the strickers, a little bit of info to get the custy clicking.",
item_description:
"This is some details about the stickers I'm trying to make it long so tha tit fills up space. This is some details about the stickers I'm trying to make it long so tha tit fills up space. This is some details about the stickers I'm trying to make it long so tha tit fills up space. This is some details about the stickers I'm trying to make it long so tha tit fills up space. This is some details about the stickers I'm trying to make it long so tha tit fills up space. ",
item_category: "merch",
item_quantity: 100,
item_price: 5.0,
units_available: 100,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1619646081160-033d1d793388?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=774&q=80",
created_date: "2022-02-27",
is_active: true,
hide_type: true,
},
{
item_id: 7,
item_name: "Sporestash Hoody",
item_blurb:
"This is a blurb describing the hoodies, a little bit of info to get the custy clicking.",
item_description:
"This is some details about the Hoodies I'm trying to make it long so tha tit fills up space. This is some details about the Hoodies I'm trying to make it long so tha tit fills up space. This is some details about the Hoodies I'm trying to make it long so tha tit fills up space. This is some details about the Hoodies I'm trying to make it long so tha tit fills up space. This is some details about the Hoodies I'm trying to make it long so tha tit fills up space. ",
item_category: "merch",
item_quantity: 10,
item_price: 45.0,
units_available: 10,
is_available: true,
image_url:
"https://images.unsplash.com/photo-1499971856191-1a420a42b498?ixlib=rb-1.2.1&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=762&q=80",
created_date: "2022-02-22",
is_active: true,
display_size: true,
sizes_available: '["XS", "SM", "MD", "LG", "XL", "XXL"]',
},
]);
};
|
mycallback( {"CONTRIBUTOR OCCUPATION": "", "CONTRIBUTION AMOUNT (F3L Bundled)": "2500.00", "ELECTION CODE": "G2010", "MEMO CODE": "", "CONTRIBUTOR EMPLOYER": "", "DONOR CANDIDATE STATE": "", "CONTRIBUTOR STREET 1": "Post Office Box 1254", "CONTRIBUTOR MIDDLE NAME": "", "DONOR CANDIDATE FEC ID": "", "DONOR CANDIDATE MIDDLE NAME": "", "CONTRIBUTOR STATE": "CA", "DONOR CANDIDATE FIRST NAME": "", "CONTRIBUTOR FIRST NAME": "", "BACK REFERENCE SCHED NAME": "", "DONOR CANDIDATE DISTRICT": "", "CONTRIBUTION DATE": "20101201", "DONOR COMMITTEE NAME": "", "MEMO TEXT/DESCRIPTION": "", "Reference to SI or SL system code that identifies the Account": "", "FILER COMMITTEE ID NUMBER": "C00461061", "DONOR CANDIDATE LAST NAME": "", "CONTRIBUTOR LAST NAME": "", "_record_type": "fec.version.v7_0.SA", "CONDUIT STREET2": "", "CONDUIT STREET1": "", "DONOR COMMITTEE FEC ID": "", "CONTRIBUTION PURPOSE DESCRIP": "Uncashed check", "CONTRIBUTOR ZIP": "93406", "CONTRIBUTOR STREET 2": "", "CONDUIT CITY": "", "ENTITY TYPE": "ORG", "CONTRIBUTOR CITY": "San Luis Obispo", "CONTRIBUTOR SUFFIX": "", "TRANSACTION ID": "INCA8326", "DONOR CANDIDATE SUFFIX": "", "DONOR CANDIDATE OFFICE": "", "CONTRIBUTION PURPOSE CODE": "17U", "ELECTION OTHER DESCRIPTION": "", "_src_file": "2011/20110504/727409.fec_1.yml", "CONDUIT STATE": "", "CONTRIBUTOR ORGANIZATION NAME": "Public Policy Solutions", "BACK REFERENCE TRAN ID NUMBER": "", "DONOR CANDIDATE PREFIX": "", "CONTRIBUTOR PREFIX": "", "CONDUIT ZIP": "", "CONDUIT NAME": "", "CONTRIBUTION AGGREGATE F3L Semi-annual Bundled": "2500.00", "FORM TYPE": "SA14"});
|
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need
# fine tuning.
buildOptions = dict(packages = [], excludes = [])
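# Fine-tuning sketch (module names are illustrative, not requirements of this
# project): add hidden imports to packages and unused stdlib modules to
# excludes, e.g. buildOptions = dict(packages=['tkinter'], excludes=['unittest']).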
import sys
base = 'Win32GUI' if sys.platform=='win32' else None
executables = [
Executable('WordGuesserHelper.py', base=base)
]
setup(name='Word Guesser Helper',
version = '2.0',
description = 'Helps guess an unknown word, Wheel of Fortune style',
options = dict(build_exe = buildOptions),
executables = executables)
|
#The MIT License (MIT)
#Copyright (c) 2015 bpyamasinn.
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import argparse
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
from xml.dom import minidom
import json
import math
CAMERA_SENSOR_TYPES = ['Color Camera']
SENSOR_TYPES = ['GPS Device','GPS Odometry','IMU','Lidar','Color Camera']
class UrdfGenerator:
def __init__(self,json_path,output_file,base_link_offset_x,base_link_offset_y,base_link_offset_z,robot_name,dae_path):
self.json_path = json_path
self.output_file = output_file
self.base_link_offset_x = base_link_offset_x
self.base_link_offset_y = base_link_offset_y
self.base_link_offset_z = base_link_offset_z
self.robot_name = robot_name
self.dae_path = dae_path
try:
with open(self.json_path, 'r') as f:
self.json_data = json.load(f)
except (OSError, json.JSONDecodeError) as err:
print("json path is invalid: " + str(err))
return
self.generate()
def parse_json(self):
self.non_camera_frames = {}
self.camera_frames = {}
for data in self.json_data:
if data['type'] in SENSOR_TYPES and data['params']['Frame'] not in self.non_camera_frames and data['params']['Frame'] not in self.camera_frames:
if data['type'] in CAMERA_SENSOR_TYPES:
self.camera_frames[data['params']['Frame']] = data['transform']
else:
self.non_camera_frames[data['params']['Frame']] = data['transform']
def generate_base_urdf(self):
base = Element('robot')
base.set('name',self.robot_name)
base_link = SubElement(base,'link')
base_link.set('name','base_link')
base_link_visual = SubElement(base_link,'visual')
origin = SubElement(base_link_visual,'origin')
origin.set('rpy', "0 0 1.57079632679")
xyz_str = str(self.base_link_offset_x) + " " + str(self.base_link_offset_y) + " " + str(self.base_link_offset_z)
origin.set('xyz', xyz_str)
base_link_geometry = SubElement(base_link_visual,'geometry')
base_link_mesh = SubElement(base_link_geometry,'mesh')
base_link_mesh.set('filename',self.dae_path)
return base
def reshape_urdf(self,urdf_tree):
reparsed = minidom.parseString(tostring(urdf_tree))
return reparsed.toprettyxml(indent=" ")
def add_frame(self,urdf,frame_id,transform):
link = SubElement(urdf,'link')
link.set('name',frame_id+'_link')
joint = SubElement(urdf,'joint')
joint.set('name',frame_id+'_joint')
joint.set('type','fixed')
parent = SubElement(joint,'parent')
parent.set('link','base_link')
child = SubElement(joint,'child')
child.set('link',frame_id+'_link')
origin = SubElement(joint,'origin')
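# LGSVL/Unity uses a left-handed x-right/y-up/z-forward frame while ROS uses
# right-handed x-forward/y-left/z-up, hence the axis remap below
# (ROS x <- z, ROS y <- -x, ROS z <- y) and the negated pitch.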
rpy_str = str(transform["roll"]/180.0*math.pi) + " " + str(-1*transform["pitch"]/180.0*math.pi) + " " + str(transform["yaw"]/180.0*math.pi)
origin.set('rpy', rpy_str)
xyz_str = str(transform["z"]+self.base_link_offset_x) + " " + str(-1*(transform["x"]+self.base_link_offset_y)) + " " + str((transform["y"]+self.base_link_offset_z))
origin.set('xyz', xyz_str)
return urdf
def add_optical_frame(self,urdf,frame_id):
optical_link = SubElement(urdf,'link')
optical_link.set('name',frame_id+"_optical_link")
optical_joint = SubElement(urdf,'joint')
optical_joint.set('name',frame_id+'_optical_joint')
optical_joint.set('type','fixed')
parent = SubElement(optical_joint,'parent')
parent.set('link',frame_id+'_link')
child = SubElement(optical_joint,'child')
child.set('link',frame_id+"_optical_link")
origin = SubElement(optical_joint,'origin')
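# ROS camera optical-frame convention (REP 103): z forward, x right, y down;
# the fixed rpy below rotates the sensor link into that frame.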
origin.set('rpy', "-1.57079632679 0 -1.57079632679")
origin.set('xyz', "0 0 0")
return urdf
def output_urdf(self,urdf_string):
with open(self.output_file, mode='w') as f:
f.write(urdf_string)
print("urdf saved to "+self.output_file)
def generate(self):
self.parse_json()
urdf = self.generate_base_urdf()
for frame_id in self.non_camera_frames.keys():
self.add_frame(urdf,frame_id,self.non_camera_frames[frame_id])
for frame_id in self.camera_frames.keys():
self.add_frame(urdf,frame_id,self.camera_frames[frame_id])
self.add_optical_frame(urdf,frame_id)
urdf_string = self.reshape_urdf(urdf)
self.output_urdf(urdf_string)
def main():
parser = argparse.ArgumentParser(description='URDF generation tool for LGSVL Simulator')
parser.add_argument('json_path', help='json_file_path for the LGSVL simulator vehicle configuration')
parser.add_argument('output_file', help='output_path of the URDF')
parser.add_argument('base_link_offset_x', help='offset value of the base_link (in ROS coordinate)',type=float)
parser.add_argument('base_link_offset_y', help='offset value of the base_link (in ROS coordinate)',type=float)
parser.add_argument('base_link_offset_z', help='offset value of the base_link (in ROS coordinate)',type=float)
parser.add_argument('robot_name', help='name of the robot')
parser.add_argument('dae_path',help='path of the .dae files')
args = parser.parse_args()
generator = UrdfGenerator(args.json_path,args.output_file,args.base_link_offset_x,args.base_link_offset_y,args.base_link_offset_z,args.robot_name,args.dae_path)
if __name__ == "__main__":
main()
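# Example invocation (file names are illustrative):
#   python3 urdf_generator.py vehicle_conf.json robot.urdf 0.0 0.0 0.3 lgsvl_robot meshes/body.dae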
|
import pytest
from pytest import approx
import subprocess
import igrf
time = "2010-07-12"
def test_bad_date():
with pytest.raises(RuntimeError):
igrf.igrf("2500-01-01", 65, 80, 0)
def test_igrf13():
mag = igrf.igrf(time, 65, 85, 0)
assert mag.north.item() == approx(9295.415460)
assert mag.east.item() == approx(2559.7889298)
assert mag.down.item() == approx(59670.379598)
assert mag.total.item() == approx(60444.284008)
assert mag.incl.item() == approx(80.821575)
assert mag.decl.item() == approx(15.396590)
def test_cli():
subprocess.check_call(["igrf", time, "-c", "65", "85", "-a", "0"])
# def test_igrf12():
# mag = igrf.igrf(time, 65, 85, 0, model=12)
# assert mag.north.item() == approx(9295.100256)
# assert mag.east.item() == approx(2560.199706)
# assert mag.down.item() == approx(59670.251893)
# assert mag.total.item() == approx(60444.126863)
# assert mag.incl.item() == approx(80.821738)
# assert mag.decl.item() == approx(15.399442)
# def test_igrf11():
# mag = igrf.igrf(time, 65, 85, 0, model=11)
# assert mag.north.item() == approx(9301.523160)
# assert mag.east.item() == approx(2563.450424)
# assert mag.down.item() == approx(59666.132881)
# assert mag.total.item() == approx(60441.186489)
# assert mag.incl.item() == approx(80.814513)
# assert mag.decl.item() == approx(15.407924)
|
#!/usr/bin/env python3
# Copyright 2019 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os.path
import sys
import xmlrpc.client
from netaddr import IPNetwork
from time import time, sleep
from lib.config import Config
from lib.inventory import Inventory
import lib.genesis as gen
import lib.utilities as util
from set_power_clients import set_power_clients
from lib.exception import UserException
import lib.logger as logger
import lib.bmc as _bmc
DNSMASQ_TEMPLATE = '/etc/cobbler/dnsmasq.template'
COBBLER_USER = gen.get_cobbler_user()
COBBLER_PASS = gen.get_cobbler_pass()
WAIT_TIME = 1200
POWER_WAIT = gen.get_power_wait()
SLEEP_TIME = gen.get_power_sleep_time()
class IPManager(object):
"""Manage IP address assignments from a given network
Args:
network (IPNetwork): netaddr IPNetwork object
start_offset (int): Starting IP address offset
"""
def __init__(self, network, start_offset):
self.log = logger.getlogger()
self.network = network
self.next_offset = start_offset
self.next_ip = network.network + self.next_offset
def get_next_ip(self, reserve=True):
"""Get next available sequential IP address
Args:
reserve (bool): If true the IP will be considered reserved
Returns:
ip_address (str): Next IP address
Raises:
UserException: No more IP addresses available
"""
if self.next_ip >= self.network.network + self.network.size:
raise UserException('Not enough IP addresses in network \'%s\'' %
str(self.network.cidr))
ip_address = str(self.next_ip)
if reserve:
self.next_ip += 1
return ip_address
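# Usage sketch (network and offset values are illustrative):
#   pool = IPManager(IPNetwork('192.168.3.0/24'), 20)
#   pool.get_next_ip()               # '192.168.3.20', reserved
#   pool.get_next_ip(reserve=False)  # '192.168.3.21', peek only
#   pool.get_next_ip()               # '192.168.3.21', now reserved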
def inv_set_ipmi_pxe_ip(config_path):
"""Configure DHCP IP reservations for IPMI and PXE interfaces
IP addresses are assigned sequentially within the appropriate
client networks starting with the DHCP pool start offset defined
in 'lib.genesis'.
Raises:
UserException: - No IPMI or PXE client networks defined within
the 'config.yml'
- Unable to connect to BMC at new IPMI IP address
"""
log = logger.getlogger()
cfg = Config(config_path)
inv = Inventory(cfg_file=config_path)
ipmiNetwork = None
pxeNetwork = None
nodes_list = []
# All nodes should be powered off before starting
set_power_clients('off', config_path, wait=POWER_WAIT)
# Create IPManager object for IPMI and/or PXE networks
start_offset = gen.get_dhcp_pool_start()
for index, netw_type in enumerate(cfg.yield_depl_netw_client_type()):
ip = cfg.get_depl_netw_client_cont_ip(index)
netmask = cfg.get_depl_netw_client_netmask(index)
if netw_type == 'ipmi':
ipmiNetwork = IPManager(IPNetwork(ip + '/' + netmask), start_offset)
elif netw_type == 'pxe':
pxeNetwork = IPManager(IPNetwork(ip + '/' + netmask), start_offset)
# If only one network is defined use the same IPManager for both
if ipmiNetwork is None and pxeNetwork is not None:
ipmiNetwork = pxeNetwork
elif ipmiNetwork is not None and pxeNetwork is None:
pxeNetwork = ipmiNetwork
elif ipmiNetwork is None and pxeNetwork is None:
raise UserException('No IPMI or PXE client network found')
# Modify IP addresses for each node
dhcp_lease_time = cfg.get_globals_dhcp_lease_time()
for index, hostname in enumerate(inv.yield_nodes_hostname()):
# IPMI reservations are written directly to the dnsmasq template
ipmi_ipaddr = inv.get_nodes_ipmi_ipaddr(0, index)
ipmi_mac = inv.get_nodes_ipmi_mac(0, index)
ipmi_new_ipaddr = ipmiNetwork.get_next_ip()
util.remove_line(DNSMASQ_TEMPLATE, "^dhcp-host=" + ipmi_mac + ".*")
util.append_line(DNSMASQ_TEMPLATE, 'dhcp-host=%s,%s-bmc,%s,%s\n' %
(ipmi_mac, hostname, ipmi_new_ipaddr,
dhcp_lease_time))
_adjust_dhcp_pool(ipmiNetwork.network,
ipmiNetwork.get_next_ip(reserve=False),
dhcp_lease_time)
# PXE reservations are handled by Cobbler
pxe_ipaddr = inv.get_nodes_pxe_ipaddr(0, index)
pxe_mac = inv.get_nodes_pxe_mac(0, index)
pxe_new_ipaddr = pxeNetwork.get_next_ip()
log.info('Modifying Inventory PXE IP - Node: %s MAC: %s '
'Original IP: %s New IP: %s' %
(hostname, pxe_mac, pxe_ipaddr, pxe_new_ipaddr))
inv.set_nodes_pxe_ipaddr(0, index, pxe_new_ipaddr)
_adjust_dhcp_pool(pxeNetwork.network,
pxeNetwork.get_next_ip(reserve=False),
dhcp_lease_time)
# Run Cobbler sync to process DNSMASQ template
cobbler_server = xmlrpc.client.Server("http://127.0.0.1/cobbler_api")
token = cobbler_server.login(COBBLER_USER, COBBLER_PASS)
cobbler_server.sync(token)
log.debug("Running Cobbler sync")
# Save info to verify connection come back up
ipmi_userid = inv.get_nodes_ipmi_userid(index)
ipmi_password = inv.get_nodes_ipmi_password(index)
bmc_type = inv.get_nodes_bmc_type(index)
# No need to reset and check if the IP does not change
if ipmi_new_ipaddr != ipmi_ipaddr:
nodes_list.append({'hostname': hostname,
'index': index,
'ipmi_userid': ipmi_userid,
'ipmi_password': ipmi_password,
'ipmi_new_ipaddr': ipmi_new_ipaddr,
'ipmi_ipaddr': ipmi_ipaddr,
'ipmi_mac': ipmi_mac,
'bmc_type': bmc_type})
# Issue MC cold reset to force refresh of IPMI interfaces
for node in nodes_list:
ipmi_userid = node['ipmi_userid']
ipmi_password = node['ipmi_password']
ipmi_ipaddr = node['ipmi_ipaddr']
bmc_type = node['bmc_type']
bmc = _bmc.Bmc(ipmi_ipaddr, ipmi_userid, ipmi_password, bmc_type)
if bmc.is_connected():
log.debug(f'Issuing BMC Cold Reset - Node: {node["hostname"]} '
f'- IP: {ipmi_ipaddr}')
if not bmc.bmc_reset('cold'):
log.error(f'Failed attempting BMC reset on {node["ipmi_ipaddr"]}')
bmc.logout()
log.info('Pausing 1 minute for BMCs to begin reset')
sleep(60)
# Check connections for set amount of time
end_time = time() + WAIT_TIME
while time() < end_time and len(nodes_list) > 0:
print(f'\rTimeout count down: {int(end_time - time())} ', end='')
sys.stdout.flush()
success_list = []
sleep(2)
for list_index, node in enumerate(nodes_list):
hostname = node['hostname']
index = node['index']
ipmi_userid = node['ipmi_userid']
ipmi_password = node['ipmi_password']
ipmi_new_ipaddr = node['ipmi_new_ipaddr']
ipmi_ipaddr = node['ipmi_ipaddr']
ipmi_mac = node['ipmi_mac']
bmc_type = node['bmc_type']
# Attempt to connect to new IPMI IP address
bmc = _bmc.Bmc(ipmi_new_ipaddr, ipmi_userid, ipmi_password, bmc_type)
if bmc.is_connected():
if bmc.chassis_power('status') in ('on', 'off'):
log.debug(f'BMC connection success - Node: {hostname} '
f'IP: {ipmi_ipaddr}')
else:
log.debug(f'BMC communication failed - Node: {hostname} '
f'IP: {ipmi_ipaddr}')
continue
log.info(f'Modifying Inventory IPMI IP - Node: {hostname} MAC: '
f'{ipmi_mac} Original IP: {ipmi_ipaddr} New IP: '
f'{ipmi_new_ipaddr}')
inv.set_nodes_ipmi_ipaddr(0, index, ipmi_new_ipaddr)
success_list.append(list_index)
else:
log.debug(f'BMC connection failed - Node: {hostname} '
f'IP: {ipmi_ipaddr}')
continue
# Remove nodes that connected successfully
for remove_index in sorted(success_list, reverse=True):
del nodes_list[remove_index]
for node in nodes_list:
log.error('Unable to connect to BMC at new IPMI IP address - Node: %s '
'MAC: %s Original IP: %s New IP: %s' %
(node['hostname'], node['ipmi_mac'], node['ipmi_ipaddr'],
node['ipmi_new_ipaddr']))
if len(nodes_list) > 0:
raise UserException('%d BMC(s) not responding after IP modification' %
len(nodes_list))
def _adjust_dhcp_pool(network, dhcp_pool_start, dhcp_lease_time):
dhcp_range = 'dhcp-range=%s,%s,%s # %s'
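# Resulting template line (values are illustrative):
#   dhcp-range=192.168.3.21,192.168.3.255,1h # 192.168.3.0/24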
new_entry = dhcp_range % (dhcp_pool_start,
str(network.network + network.size - 1),
str(dhcp_lease_time),
str(network.cidr))
entry = "^dhcp-range=.* # " + str(network.cidr)
util.replace_regex(DNSMASQ_TEMPLATE, entry, new_entry)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('config_path', default='config.yml',
help='Config file path. Absolute path or relative '
'to power-up/')
parser.add_argument('--print', '-p', dest='log_lvl_print',
help='print log level', default='info')
parser.add_argument('--file', '-f', dest='log_lvl_file',
help='file log level', default='info')
args = parser.parse_args()
if not os.path.isfile(args.config_path):
args.config_path = gen.GEN_PATH + args.config_path
print('Using config path: {}'.format(args.config_path))
if not os.path.isfile(args.config_path):
sys.exit('{} does not exist'.format(args.config_path))
logger.create(args.log_lvl_print, args.log_lvl_file)
inv_set_ipmi_pxe_ip(args.config_path)
|
#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
import sys
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/{}'.format(FOLDER_GREP)
CMD_GREP_ARGS = r"egrep -r -I '(map(Multi)?Args(\.count\(|\[)|Get(Bool)?Arg\()\"\-[^\"]+?\"' {} | grep -v '{}'".format(CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_DOCS = r"egrep -r -I 'HelpMessageOpt\(\"\-[^\"=]+?(=|\")' {}".format(CMD_ROOT_DIR)
REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"')
REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")')
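# Illustrative matches (source lines are examples, not from this repo):
#   GetBoolArg("-testnet", false)       -> REGEX_ARG captures '-testnet'
#   HelpMessageOpt("-testnet=<n>", ...) -> REGEX_DOC captures '-testnet'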
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-prematurewitness', '-walletprematurewitness', '-promiscuousmempoolflags', '-blockminsize', '-sendfreetransactions', '-checklevel', '-liquidityprovider', '-anonymizekabberryamount'])
def main():
used = check_output(CMD_GREP_ARGS, shell=True, universal_newlines=True)
docd = check_output(CMD_GREP_DOCS, shell=True, universal_newlines=True)
args_used = set(REGEX_ARG.findall(used))
args_docd = set(REGEX_DOC.findall(docd)).union(SET_DOC_OPTIONAL)
args_need_doc = args_used.difference(args_docd)
args_unknown = args_docd.difference(args_used)
print("Args used : {}".format(len(args_used)))
print("Args documented : {}".format(len(args_docd)))
print("Args undocumented: {}".format(len(args_need_doc)))
print(args_need_doc)
print("Args unknown : {}".format(len(args_unknown)))
print(args_unknown)
sys.exit(len(args_need_doc))
if __name__ == "__main__":
main()
|
"""A task_utils package.
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='task_utils',
version='0.1',
description='Utilities for synchronization between "tasks"',
long_description='Various utilities related to synchronization between "tasks" (processes or threads)',
# The project's main homepage.
url='https://github.com/vporton/task-utils',
# Author details
author='Victor Porton',
author_email='porton@narod.ru',
# Choose your license
license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: BSD License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='tasks threads processes synchronization',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['test*']),
)
|
module.exports = {
port: 8888,
env: 'dev'
}
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-04 04:33
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, unique=True, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.')], verbose_name='username'),
),
]
|
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <aws/elasticbeanstalk/ElasticBeanstalk_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSStreamFwd.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/elasticbeanstalk/model/ApplicationVersionLifecycleConfig.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Xml
{
class XmlNode;
} // namespace Xml
} // namespace Utils
namespace ElasticBeanstalk
{
namespace Model
{
/**
* <p>The resource lifecycle configuration for an application. Defines lifecycle
* settings for resources that belong to the application, and the service role that
* Elastic Beanstalk assumes in order to apply lifecycle settings. The version
* lifecycle configuration defines lifecycle settings for application
* versions.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/elasticbeanstalk-2010-12-01/ApplicationResourceLifecycleConfig">AWS
* API Reference</a></p>
*/
class AWS_ELASTICBEANSTALK_API ApplicationResourceLifecycleConfig
{
public:
ApplicationResourceLifecycleConfig();
ApplicationResourceLifecycleConfig(const Aws::Utils::Xml::XmlNode& xmlNode);
ApplicationResourceLifecycleConfig& operator=(const Aws::Utils::Xml::XmlNode& xmlNode);
void OutputToStream(Aws::OStream& ostream, const char* location, unsigned index, const char* locationValue) const;
void OutputToStream(Aws::OStream& oStream, const char* location) const;
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline const Aws::String& GetServiceRole() const{ return m_serviceRole; }
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline bool ServiceRoleHasBeenSet() const { return m_serviceRoleHasBeenSet; }
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline void SetServiceRole(const Aws::String& value) { m_serviceRoleHasBeenSet = true; m_serviceRole = value; }
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline void SetServiceRole(Aws::String&& value) { m_serviceRoleHasBeenSet = true; m_serviceRole = std::move(value); }
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline void SetServiceRole(const char* value) { m_serviceRoleHasBeenSet = true; m_serviceRole.assign(value); }
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline ApplicationResourceLifecycleConfig& WithServiceRole(const Aws::String& value) { SetServiceRole(value); return *this;}
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline ApplicationResourceLifecycleConfig& WithServiceRole(Aws::String&& value) { SetServiceRole(std::move(value)); return *this;}
/**
* <p>The ARN of an IAM service role that Elastic Beanstalk has permission to
* assume.</p> <p>The <code>ServiceRole</code> property is required the first time
* that you provide a <code>VersionLifecycleConfig</code> for the application in
* one of the supporting calls (<code>CreateApplication</code> or
* <code>UpdateApplicationResourceLifecycle</code>). After you provide it once, in
* either one of the calls, Elastic Beanstalk persists the Service Role with the
* application, and you don't need to specify it again in subsequent
* <code>UpdateApplicationResourceLifecycle</code> calls. You can, however, specify
* it in subsequent calls to change the Service Role to another value.</p>
*/
inline ApplicationResourceLifecycleConfig& WithServiceRole(const char* value) { SetServiceRole(value); return *this;}
/**
* <p>The application version lifecycle configuration.</p>
*/
inline const ApplicationVersionLifecycleConfig& GetVersionLifecycleConfig() const{ return m_versionLifecycleConfig; }
/**
* <p>The application version lifecycle configuration.</p>
*/
inline bool VersionLifecycleConfigHasBeenSet() const { return m_versionLifecycleConfigHasBeenSet; }
/**
* <p>The application version lifecycle configuration.</p>
*/
inline void SetVersionLifecycleConfig(const ApplicationVersionLifecycleConfig& value) { m_versionLifecycleConfigHasBeenSet = true; m_versionLifecycleConfig = value; }
/**
* <p>The application version lifecycle configuration.</p>
*/
inline void SetVersionLifecycleConfig(ApplicationVersionLifecycleConfig&& value) { m_versionLifecycleConfigHasBeenSet = true; m_versionLifecycleConfig = std::move(value); }
/**
* <p>The application version lifecycle configuration.</p>
*/
inline ApplicationResourceLifecycleConfig& WithVersionLifecycleConfig(const ApplicationVersionLifecycleConfig& value) { SetVersionLifecycleConfig(value); return *this;}
/**
* <p>The application version lifecycle configuration.</p>
*/
inline ApplicationResourceLifecycleConfig& WithVersionLifecycleConfig(ApplicationVersionLifecycleConfig&& value) { SetVersionLifecycleConfig(std::move(value)); return *this;}
private:
Aws::String m_serviceRole;
bool m_serviceRoleHasBeenSet;
ApplicationVersionLifecycleConfig m_versionLifecycleConfig;
bool m_versionLifecycleConfigHasBeenSet;
};
} // namespace Model
} // namespace ElasticBeanstalk
} // namespace Aws
|
/* Copyright 2007-2015 QReal Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#pragma once
#include <twoDModel/robotModel/parts/rangeSensor.h>
#include "trikKitInterpreterCommon/declSpec.h"
namespace trik {
namespace robotModel {
namespace twoD {
namespace parts {
class ROBOTS_TRIK_KIT_INTERPRETER_COMMON_EXPORT TwoDInfraredSensor : public twoDModel::robotModel::parts::RangeSensor
{
Q_OBJECT
public:
TwoDInfraredSensor(const kitBase::robotModel::DeviceInfo &info
, const kitBase::robotModel::PortInfo &port
, twoDModel::engine::TwoDModelEngineInterface &engine);
void read() override;
};
}
}
}
}
|
/**
* Auto-generated action file for "Microsoft Graph API" API.
*
* Generated at: 2019-08-07T14:53:12.910Z
* Mass generator version: 1.1.0
*
* flowground :- Telekom iPaaS / microsoft-graph-api-connector
* Copyright © 2019, Deutsche Telekom AG
* contact: flowground@telekom.de
*
* All files of this connector are licensed under the Apache 2.0 License. For details
* see the file LICENSE on the toplevel directory.
*
*
* Operation: 'workbooks.workbook.worksheets.tables.GetRows'
* Endpoint Path: '/workbooks/{driveItem-id}/workbook/worksheets/{workbookWorksheet-id}/tables/{workbookTable-id}/rows/{workbookTableRow-id}'
* Method: 'get'
*
*/
const Swagger = require('swagger-client');
const processWrapper = require('../services/process-wrapper');
const spec = require('../spec.json');
// this wrapper offers a simplified emitData(data) function
module.exports.process = processWrapper(processAction);
// parameter names for this call
const PARAMETERS = [
"driveItem-id",
"workbookWorksheet-id",
"workbookTable-id",
"workbookTableRow-id",
"$select",
"$expand"
];
// mappings from connector field names to API field names
const FIELD_MAP = {
"driveItem_id": "driveItem-id",
"workbookWorksheet_id": "workbookWorksheet-id",
"workbookTable_id": "workbookTable-id",
"workbookTableRow_id": "workbookTableRow-id",
"_select": "$select",
"_expand": "$expand"
};
function processAction(msg, cfg) {
var isVerbose = process.env.debug || cfg.verbose;
if (isVerbose) {
console.log(`---MSG: ${JSON.stringify(msg)}`);
console.log(`---CFG: ${JSON.stringify(cfg)}`);
console.log(`---ENV: ${JSON.stringify(process.env)}`);
}
const contentType = undefined;
const body = msg.body;
mapFieldNames(body);
let parameters = {};
for(let param of PARAMETERS) {
parameters[param] = body[param];
}
// credentials for this operation
let securities = {};
let callParams = {
spec: spec,
operationId: 'workbooks.workbook.worksheets.tables.GetRows',
pathName: '/workbooks/{driveItem-id}/workbook/worksheets/{workbookWorksheet-id}/tables/{workbookTable-id}/rows/{workbookTableRow-id}',
method: 'get',
parameters: parameters,
requestContentType: contentType,
requestBody: body.requestBody,
securities: {authorized: securities},
server: spec.servers[cfg.server] || cfg.otherServer,
};
if (isVerbose) {
let out = Object.assign({}, callParams);
out.spec = '[omitted]';
console.log(`--SWAGGER CALL: ${JSON.stringify(out)}`);
}
// Call operation via Swagger client
return Swagger.execute(callParams).then(data => {
// emit a single message with data
this.emitData(data);
// if the response contains an array of entities, you can emit them one by one:
// data.obj.someItems.forEach((item) => {
// this.emitData(item);
// });
});
}
function mapFieldNames(obj) {
if(Array.isArray(obj)) {
obj.forEach(mapFieldNames);
}
else if(typeof obj === 'object' && obj) {
Object.keys(obj).forEach(key => {
mapFieldNames(obj[key]);
let goodKey = FIELD_MAP[key];
if(goodKey && goodKey !== key) {
obj[goodKey] = obj[key];
delete obj[key];
}
});
}
}
|
var group__l3gd20h__link__driver =
[
[ "DRIVER_L3GD20H_LINK_DEBUG_PRINT", "group__l3gd20h__link__driver.html#gad894c8654ccab82dfa3cce2188793c44", null ],
[ "DRIVER_L3GD20H_LINK_DELAY_MS", "group__l3gd20h__link__driver.html#ga2456ba670a8c970abc730b7d7272f529", null ],
[ "DRIVER_L3GD20H_LINK_IIC_DEINIT", "group__l3gd20h__link__driver.html#ga0060bf24e23d4668e24d64702abdb026", null ],
[ "DRIVER_L3GD20H_LINK_IIC_INIT", "group__l3gd20h__link__driver.html#ga1ea224e70e513a7959b2739e36cea019", null ],
[ "DRIVER_L3GD20H_LINK_IIC_READ", "group__l3gd20h__link__driver.html#ga73261ffaacf7914a699ae21f24b8da14", null ],
[ "DRIVER_L3GD20H_LINK_IIC_WRITE", "group__l3gd20h__link__driver.html#gae40f5c6fee921d5468d0430f147f23cf", null ],
[ "DRIVER_L3GD20H_LINK_INIT", "group__l3gd20h__link__driver.html#ga864aefa2a17a22246397403cedb16d49", null ],
[ "DRIVER_L3GD20H_LINK_RECEIVE_CALLBACK", "group__l3gd20h__link__driver.html#ga8ba1ba58fe85a370e548c778f97d09d6", null ],
[ "DRIVER_L3GD20H_LINK_SPI_DEINIT", "group__l3gd20h__link__driver.html#gac82df37c974b397802743cc355b9e8d6", null ],
[ "DRIVER_L3GD20H_LINK_SPI_INIT", "group__l3gd20h__link__driver.html#ga20a44147a9fef5c4d806cd2586a3f975", null ],
[ "DRIVER_L3GD20H_LINK_SPI_READ", "group__l3gd20h__link__driver.html#ga8f2ccc6e7762da4877df3cc2198d98cd", null ],
[ "DRIVER_L3GD20H_LINK_SPI_WRITE", "group__l3gd20h__link__driver.html#ga38063e2797bcfeedb4846b1154822cd5", null ]
];
|
#!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.18.2
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1HostConnection(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'url': 'str',
'insecure': 'bool'
}
attribute_map = {
'url': 'url',
'insecure': 'insecure'
}
def __init__(self, url=None, insecure=None, local_vars_configuration=None): # noqa: E501
"""V1HostConnection - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._url = None
self._insecure = None
self.discriminator = None
if url is not None:
self.url = url
if insecure is not None:
self.insecure = insecure
@property
def url(self):
"""Gets the url of this V1HostConnection. # noqa: E501
:return: The url of this V1HostConnection. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this V1HostConnection.
:param url: The url of this V1HostConnection. # noqa: E501
:type url: str
"""
self._url = url
@property
def insecure(self):
"""Gets the insecure of this V1HostConnection. # noqa: E501
:return: The insecure of this V1HostConnection. # noqa: E501
:rtype: bool
"""
return self._insecure
@insecure.setter
def insecure(self, insecure):
"""Sets the insecure of this V1HostConnection.
:param insecure: The insecure of this V1HostConnection. # noqa: E501
:type insecure: bool
"""
self._insecure = insecure
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1HostConnection):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1HostConnection):
return True
return self.to_dict() != other.to_dict()
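# Usage sketch (values are illustrative):
#   conn = V1HostConnection(url='https://hub.example.com', insecure=True)
#   conn.to_dict()  # {'url': 'https://hub.example.com', 'insecure': True}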
|
from django.apps import AppConfig
class PostConfig(AppConfig):
name = 'post'
|
"""Utilities for emulation_system tests."""
import os
import pytest
from emulation_system.opentrons_emulation_configuration import (
OpentronsEmulationConfiguration,
SourceDownloadLocations,
)
def get_test_configuration_file_path() -> str:
"""Returns path to test file."""
return os.path.join(
os.path.dirname(os.path.abspath(__file__)), "test_configuration.json"
)
def get_test_conf() -> OpentronsEmulationConfiguration:
"""Returns configuration settings from test config file."""
return OpentronsEmulationConfiguration.from_file_path(
get_test_configuration_file_path()
)
def get_default_folder_path(name: str) -> str:
"""Gets default folder path from test config file."""
return get_test_conf().global_settings.default_folder_paths.__getattribute__(
name.replace("-", "_")
)
def get_source_download_locations() -> SourceDownloadLocations:
"""Get source download locations from test config file."""
return get_test_conf().emulation_settings.source_download_locations
def get_head(name: str) -> str:
"""Get head download location for repo from test config file."""
return get_source_download_locations().heads.__getattribute__(
name.replace("-", "_")
)
def get_commit(name: str) -> str:
"""Get commit format string download location for repo from test config file."""
return get_source_download_locations().commits.__getattribute__(
name.replace("-", "_")
)
TEST_CONF_OPENTRONS_PATH = get_default_folder_path("opentrons")
TEST_CONF_FIRMWARE_PATH = get_default_folder_path("ot3-firmware")
TEST_CONF_MODULES_PATH = get_default_folder_path("modules")
TEST_CONF_OPENTRONS_HEAD = get_head("opentrons")
TEST_CONF_FIRMWARE_HEAD = get_head("ot3-firmware")
TEST_CONF_MODULES_HEAD = get_head("modules")
TEST_CONF_OPENTRONS_EXPECTED_COMMIT = get_commit("opentrons")
TEST_CONF_FIRMWARE_EXPECTED_COMMIT = get_commit("ot3-firmware")
TEST_CONF_MODULES_EXPECTED_COMMIT = get_commit("modules")
@pytest.fixture
def testing_opentrons_emulation_configuration() -> OpentronsEmulationConfiguration:
"""Get test configuration of OpentronsEmulationConfiguration."""
return get_test_conf()
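# Usage sketch in a test module (test body is illustrative):
#   def test_global_settings(testing_opentrons_emulation_configuration):
#       conf = testing_opentrons_emulation_configuration
#       assert conf.global_settings.default_folder_paths is not None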
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
host interface (1.1 extension).
"""
from novaclient import base
class Host(base.Resource):
def __repr__(self):
return "<Host: %s>" % self.host
def _add_details(self, info):
dico = info['resource'] if 'resource' in info else info
for (k, v) in dico.items():
setattr(self, k, v)
class HostManager(base.ManagerWithFind):
resource_class = Host
def get(self, host):
"""
Describes cpu/memory/hdd info for host.
:param host: destination host name.
"""
return self._list("/os-hosts/%s" % (host), "host")
|
/*
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
module.exports = {
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/private/var/folders/kt/h4qk7x817y5bj_s5r7ld1njm0000gn/T/jest_dx",
// Automatically clear mock calls and instances between every test
clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files
// coverageDirectory: undefined,
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// Indicates which provider should be used to instrument code for coverage
// coverageProvider: "babel",
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {},
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "jsx",
// "ts",
// "tsx",
// "json",
// "node"
// ],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
// moduleNameMapper: {},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
// preset: undefined,
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state between every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state between every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,
// A list of paths to directories that Jest should use to search for files in
// roots: [
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: "jsdom",
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
testMatch: [
"**/__tests__/**/*.[jt]s?(x)",
"**/?(*.)+(spec|test).[tj]s?(x)"
],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
testPathIgnorePatterns: [
"/node_modules/",
"react-app/"
],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
// transform: undefined,
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/",
// "\\.pnp\\.[^\\/]+$"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};
|
from typing import Container, Sequence
import string
#==============================================================================
""" Implementation of the phone number mnemonics. - [EPI: 6.7]. """
def phone_mnemonic(phn_num: str) -> Container[Sequence]:
def _phone_mnemonics(phn_num: str,
k: int,
MAPPING: tuple,
result,
prefix=""):
if k == len(phn_num):
result.append(prefix)
else:
for i in MAPPING[string.digits.index(phn_num[k])]:
_phone_mnemonics(phn_num, k + 1, MAPPING, result, prefix + i)
MAPPING = ('0', '1', 'ABC', 'DEF', 'GHI', 'JKL', 'MNO', 'PQRS', 'TUV',
'WXYZ')
result = []
_phone_mnemonics(phn_num, 0, MAPPING, result)
return result
def main():
    # Illustrative example: enumerate all mnemonics for the digits "23".
    print(phone_mnemonic("23"))  # ['AD', 'AE', 'AF', 'BD', 'BE', 'BF', 'CD', 'CE', 'CF']
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from ..utils import walk
from ..scan import Dispatcher
from ..spec.v2_0.objects import (
Schema,
Parameter,
Response,
PathItem,
)
from ..spec.v2_0.parser import (
SchemaContext,
ParameterContext,
ResponseContext,
PathItemContext,
)
import functools
import six
def _out(app, parser, path):
obj = app.resolve(path, parser=parser)
r = getattr(obj, '$ref')
return [r] if r else []
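# Collect every outgoing '$ref' reachable from a Schema object, recursing
# through properties, allOf, additionalProperties and items.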
def _schema_out_obj(obj, out=None):
    out = [] if out is None else out
for o in six.itervalues(obj.properties):
out = _schema_out_obj(o, out)
for o in obj.allOf:
out = _schema_out_obj(o, out)
if isinstance(obj.additionalProperties, Schema):
out = _schema_out_obj(obj.additionalProperties, out)
if obj.items:
out = _schema_out_obj(obj.items, out)
r = getattr(obj, '$ref')
if r:
out.append(r)
return out
def _schema_out(app, path):
obj = app.resolve(path, parser=SchemaContext)
    return [] if obj is None else _schema_out_obj(obj)
class CycleDetector(object):
""" circular detector """
class Disp(Dispatcher): pass
def __init__(self):
self.cycles = {
'schema':[],
'parameter':[],
'response':[],
'path_item':[]
}
@Disp.register([Schema])
def _schema(self, path, _, app):
self.cycles['schema'] = walk(
path,
functools.partial(_schema_out, app),
self.cycles['schema']
)
@Disp.register([Parameter])
def _parameter(self, path, _, app):
self.cycles['parameter'] = walk(
path,
functools.partial(_out, app, ParameterContext),
self.cycles['parameter']
)
@Disp.register([Response])
def _response(self, path, _, app):
self.cycles['response'] = walk(
path,
functools.partial(_out, app, ResponseContext),
self.cycles['response']
)
@Disp.register([PathItem])
def _path_item(self, path, _, app):
self.cycles['path_item'] = walk(
path,
functools.partial(_out, app, PathItemContext),
self.cycles['path_item']
)
|
const Storage = require('minio')
require("dotenv").config({ path: '../.env' })
const config = require('./config')
// console.log( config.storage )
config.storage.endPoint = 'localhost'
var storage = new Storage.Client( config.storage )
// File that needs to be uploaded.
// var file = './config.js'
// var file = '/Users/vg/Movies/vsms-ht-logo.mp4'
var file = '/Users/vg/Movies/MBfin.mp4'
async function start() {
  // Make a bucket called 'users'.
try {
await storage.makeBucket('users', 'Orenburg');
  } catch (err) {
    // The bucket may already exist; ignore the error in that case.
    // console.log(err)
  }
var metaData = {
// 'Content-Type': 'video/mov',
'Content-Type': 'video/mp4',
// 'Content-Type': 'application/octet-stream',
'X-Amz-Meta-Testing': 1234,
'example': 5678
}
  // Use the fPutObject API to upload the file to the 'users' bucket.
const etag = await storage.fPutObject('users', 'video/81fc512a-4236-11eb-96d1-975577b0463c', file, metaData)
console.log('File uploaded successfully. etag=', etag)
}
start()
|
from entityfx.linpack import Linpack
from entityfx.benchmark_base import BenchmarkBase
from entityfx.writer import Writer
class ParallelLinpackBenchmark(BenchmarkBase):
def __init__(self, writer: Writer, print_to_console : bool=True, is_enabled : bool=True) -> None:
super().__init__(writer, print_to_console, is_enabled)
self.is_parallel = True
self.ratio = 10
def benchImplementation(self) -> list:
return self.bench_in_parallel(lambda : Linpack(False), lambda a: a.bench(1000), lambda a, r : self.setBenchResultFunc(a, r))
def warmup(self, aspect : float=.05) -> None:
pass
def populateResult(self, bench_result, results : list):
result = self._buildParallelResult(bench_result, results)
result["Result"] = sum(map(lambda x : x["Result"], results))
result["Units"] = "MFLOPS"
result["Output"] = "".join(map(lambda x : x["Output"], results))
return result
def setBenchResultFunc(self, a, r):
r["Points"] = a["MFLOPS"] * self.ratio
r["Result"] = a["MFLOPS"]
r["Output"] = a["Output"]
|
/** \file
* \brief iup_pplot Binding for Lua.
*
* See Copyright Notice in iup.h
*/
#ifndef __IUPLUA_PPLOT_H
#define __IUPLUA_PPLOT_H
#ifdef __cplusplus
extern "C"
{
#endif
#ifdef LUA_NOOBJECT /* Lua 3 */
int iup_pplotlua_open(void);
#endif
#ifdef LUA_TNONE /* Lua 5 */
int iup_pplotlua_open(lua_State* L);
#endif
#ifdef __cplusplus
}
#endif
#endif
|
from typing import Any, Dict, Union
import os
import sys
import time
import torch
import logging
from omegaconf import DictConfig
from .checkpointer import Checkpointer
from ..events import Events
from ..callback import Callback, handle_event
from ....distributed import get_rank
logger = logging.getLogger("checkpointer")
__all__ = ["Checkpoint"]
Trainer = Any
@Callback.register("checkpoint")
class Checkpoint(Callback):
"""
Callback that handles Checkpointing
"""
def __init__(self, config: DictConfig) -> None:
super().__init__(config)
# Check distributed
if get_rank() != 0:
raise NotImplementedError("Checkpoint callback can only be called for rank 0!")
# Initialize Checkpointer
self.checkpointer = Checkpointer(
sync_every_save=True,
async_save=self.config.checkpointing.async_save,
num_checkpoints_to_keep=self.config.checkpointing.num_checkpoints_to_keep,
keep_checkpoint_every_num_seconds=(self.config.checkpointing.keep_checkpoint_every_num_seconds),
storage_dir=self.config.checkpointing.directory)
self.last_save_time = time.time()
@handle_event(Events.INITIALIZE, priority=199)
def setup_checkpointer(self, trainer: Trainer):
# Checkpoint in epochs or steps
if self.config.checkpointing.steps_interval < 0 and self.config.checkpointing.seconds_interval < 0:
self.checkpoint_in_epoch = True
else:
self.checkpoint_in_epoch = False
# Checkpoint in seconds or steps
if self.config.checkpointing.steps_interval > 0 and self.config.checkpointing.seconds_interval > 0:
raise ValueError(
"Either `checkpointing.steps_interval` or `checkpointing.seconds_interval` can be set greater than 0!")
elif self.config.checkpointing.steps_interval < 0 and self.config.checkpointing.seconds_interval > 0:
self.checkpoint_in_seconds = True
elif self.config.checkpointing.steps_interval > 0 and self.config.checkpointing.seconds_interval < 0:
self.checkpoint_in_seconds = False
else:
self.checkpoint_in_seconds = False
@handle_event(Events.BATCH_END)
def save_checkpoint(self, trainer: Trainer):
# Checkpointing
if not self.checkpoint_in_epoch:
if self.checkpoint_in_seconds:
current_time = time.time()
# the elapsed time is longer than the seconds
if (current_time - self.last_save_time) > self.config.checkpointing.seconds_interval:
self._save_trainer_state(trainer)
self.last_save_time = current_time
else:
if (trainer.global_step_count + 1) % self.config.checkpointing.steps_interval == 0:
self._save_trainer_state(trainer)
@handle_event(Events.EPOCH_END)
def save_checkpoint_epoch(self, trainer: Trainer):
# Checkpointing
if self.checkpoint_in_epoch:
self._save_trainer_state(trainer)
def _save_trainer_state(self, trainer: Trainer):
trainer_state_dict = trainer.get_trainer_state()
self.checkpointer.save_checkpoint("iter_" + str(trainer.global_step_count), trainer.get_model_state(),
trainer_state_dict)
logger.info(f"Saved Checkpoint for Epoch {trainer.epochs_trained + 1} Iteration {trainer.global_step_count}!")
def state_dict(self):
return self.checkpointer.state_dict()
def load_state_dict(self, state_dict):
self.checkpointer.load_state_dict(state_dict)
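# A minimal sketch of the `config.checkpointing` node this callback reads
# (keys inferred from the attribute accesses above; values are illustrative):
#
#   checkpointing:
#     async_save: true
#     num_checkpoints_to_keep: 5
#     keep_checkpoint_every_num_seconds: 3600
#     directory: ./checkpoints
#     steps_interval: -1     # < 0 disables step-based checkpointing
#     seconds_interval: 600  # > 0 enables time-based checkpointing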
|
const fse = require('fs-extra');
const { Observable } = require('rx');
const { commonREs, titleify, info, readDir, pagesDir } = require('../utils');
const { httpsRE, isAStubRE, markdownLinkRE } = commonREs;
function appendStub(path) {
const pathArr = path.split('/');
const filePath = pathArr
.slice(pathArr.indexOf('pages') + 1)
.join('/')
.toLowerCase();
const title = path
.split('/')
.slice(-1)
.join('');
const pageTitle = titleify(title);
const newMeta = (
`---
title: ${pageTitle}
---`);
/* eslint-disable max-len */
return `${newMeta}
## ${pageTitle}
This is a stub. [Help our community expand it](https://github.com/freecodecamp/guides/tree/master/src/pages/${filePath}/index.md).
[This quick style guide will help ensure your pull request gets accepted](https://github.com/freecodecamp/guides/blob/master/README.md).
<!-- The article goes here, in GitHub-flavored Markdown. Feel free to add YouTube videos, images, and CodePen/JSBin embeds -->
#### More Information:
<!-- Please add any articles you think might be helpful to read before writing the article -->
`;
}
/* eslint-enable max-len */
function normaliseLinks(content) {
let anchored = content.slice(0);
const links = content.match(markdownLinkRE);
if (links) {
links
.filter(x => !x.startsWith('!'))
.filter(x => x.match(httpsRE))
      .forEach(str => {
// raw will look like:
// [ '[guides website', 'https://guide.freecodecamp.org)' ]
const raw = str.slice(0).split('](');
const formatted = [ raw[0].replace('[', ''), raw[1].replace(')', '') ];
const [ childText, url ] = formatted;
const anchor = (
`<a href='${url}' target='_blank' rel='nofollow'>${childText}</a>`
);
anchored = anchored.replace(str, anchor);
});
}
return anchored;
}
function normalise(dirLevel) {
const filePath = `${dirLevel}/index.md`;
fse.open(filePath, 'r', (err) => {
if (err) {
if (err.code === 'ENOENT') {
console.error(
'index.md does not exist in %s',
filePath.replace(/index\.md$/, '')
);
return fse.ensureFile(filePath)
.then(() => {
console.log('%s created', filePath);
return normalise(dirLevel);
})
.catch(err => {
console.error(err);
});
}
throw err;
}
fse.readFile(filePath, 'utf-8')
.then(content => {
let normalised = content;
if (
normalised.length < 30 ||
isAStubRE.test(content)
) {
normalised = appendStub(dirLevel);
}
const finalNormalised = normaliseLinks(normalised);
fse.writeFile(filePath, finalNormalised);
})
.catch(err => {
console.error('something went wrong', err);
});
return null;
});
}
function applyNormaliser(dirLevel) {
return Observable.from(readDir(dirLevel))
.flatMap(dir => {
const dirPath = `${dirLevel}/${dir}`;
const subDirs = readDir(dirPath);
if (!subDirs) {
normalise(dirPath);
return Observable.of(null);
}
normalise(dirPath);
return applyNormaliser(dirPath);
});
}
applyNormaliser(pagesDir)
.subscribe((dir)=> {
if (dir) {
applyNormaliser(dir);
}
},
err => {
throw err;
},
() => {
info('\n\nNormalisation Completed\n\n', 'greenBright');
    info('Please check for uncommitted changes before pushing\n', 'yellow');
});
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .interface import *
|
# -*- coding: utf-8 -*-
'''
This is a PyTorch implementation of CURL: Neural Curve Layers for Global Image Enhancement
https://arxiv.org/pdf/1911.13175.pdf
Please cite paper if you use this code.
Tested with Pytorch 1.7.1, Python 3.7.9
Authors: Sean Moran (sean.j.moran@gmail.com), 2020
'''
import matplotlib
matplotlib.use('agg')
import numpy as np
import sys
import torch
from abc import ABCMeta, abstractmethod
from collections import defaultdict
import logging
import os
import util
import torchvision.transforms.functional as TF
import random
import matplotlib.pyplot as plt
from PIL import Image
import rawpy  # required by SamsungDataLoader.load_data below
np.set_printoptions(threshold=sys.maxsize)
class SamsungDataset(torch.utils.data.Dataset):
def __init__(self, data_dict, transform=None, normaliser=2 ** 8 - 1, is_valid=False):
"""Initialisation for the Dataset object
:param data_dict: dictionary of dictionaries containing images
:param transform: PyTorch image transformations to apply to the images
:returns: N/A
:rtype: N/A
"""
self.transform = transform
self.data_dict = data_dict
self.normaliser = normaliser # normaliser for groundtruth data
self.is_valid = is_valid
def __len__(self):
"""Returns the number of images in the dataset
:returns: number of images in the dataset
:rtype: Integer
"""
        return len(self.data_dict)
def __getitem__(self, idx):
"""Returns a pair of images with the given identifier. This is lazy loading
of data into memory. Only those image pairs needed for the current batch
are loaded.
:param idx: image pair identifier
:returns: dictionary containing input and output images and their identifier
:rtype: dictionary
"""
while True:
if idx in self.data_dict:
output_img = util.ImageProcessing.load_image(
self.data_dict[idx]['output_img'], normaliser=self.normaliser)
input_img = np.load(self.data_dict[idx]['input_img'])
input_img = input_img / (2**10-1) # change this normalisation
# factor for your data
shape = input_img.shape
input_img = np.clip(input_img, 0, 1)
input_img[np.isnan(input_img)] = 0
seed = random.uniform(0, 10000)
if not self.is_valid:
                    random.seed(seed)  # seed the RNG before sampling the crop origin below
i = random.randint(0, input_img.shape[0]-512) # patch size
# of 512 pixels
j = random.randint(0, input_img.shape[1]-512)
i = i-(i % 2) # ensure on Bayer pattern boundary
j = j-(j % 2)
input_img = input_img[i:(i+512), j:(j+512)]
output_img = output_img[i:(i+512), j:(j+512), :]
return {'input_img': input_img, 'output_img': output_img,
'name': self.data_dict[idx]['input_img'].split("/")[-1]}
class Dataset(torch.utils.data.Dataset):
def __init__(self, data_dict, transform=None, normaliser=2 ** 8 - 1, is_valid=False, is_inference=False):
"""Initialisation for the Dataset object
:param data_dict: dictionary of dictionaries containing images
:param transform: PyTorch image transformations to apply to the images
:returns: N/A
:rtype: N/A
"""
self.transform = transform
self.data_dict = data_dict
self.normaliser = normaliser
self.is_valid = is_valid
self.is_inference = is_inference
def __len__(self):
"""Returns the number of images in the dataset
:returns: number of images in the dataset
:rtype: Integer
"""
        return len(self.data_dict)
def __getitem__(self, idx):
"""Returns a pair of images with the given identifier. This is lazy loading
of data into memory. Only those image pairs needed for the current batch
are loaded.
:param idx: image pair identifier
:returns: dictionary containing input and output images and their identifier
:rtype: dictionary
"""
while True:
if (self.is_inference) or (self.is_valid):
input_img = util.ImageProcessing.load_image(
self.data_dict[idx]['input_img'], normaliser=self.normaliser)
output_img = util.ImageProcessing.load_image(
self.data_dict[idx]['output_img'], normaliser=self.normaliser)
if self.normaliser==1:
input_img = input_img.astype(np.uint8)
output_img = output_img.astype(np.uint8)
input_img = TF.to_pil_image(input_img)
input_img = TF.to_tensor(input_img)
output_img = TF.to_pil_image(output_img)
output_img = TF.to_tensor(output_img)
                # If the target appears transposed relative to the input, swap its H and W
                if input_img.shape[1] == output_img.shape[2]:
                    output_img = output_img.permute(0, 2, 1)
return {'input_img': input_img, 'output_img': output_img,
'name': self.data_dict[idx]['input_img'].split("/")[-1]}
else:
output_img = util.ImageProcessing.load_image(
self.data_dict[idx]['output_img'], normaliser=self.normaliser)
input_img = util.ImageProcessing.load_image(
self.data_dict[idx]['input_img'], normaliser=self.normaliser)
if self.normaliser==1:
input_img = input_img.astype(np.uint8)
output_img = output_img.astype(np.uint8)
input_img = TF.to_pil_image(input_img)
output_img = TF.to_pil_image(output_img)
if not self.is_valid:
# Random horizontal flipping
if random.random() > 0.5:
input_img = TF.hflip(input_img)
output_img = TF.hflip(output_img)
# Random vertical flipping
if random.random() > 0.5:
input_img = TF.vflip(input_img)
output_img = TF.vflip(output_img)
# Random rotation +90
if random.random() > 0.5:
input_img=TF.rotate(input_img,90,expand=True)
output_img=TF.rotate(output_img,90,expand=True)
# Random rotation -90
if random.random() > 0.5:
input_img=TF.rotate(input_img,-90, expand=True)
output_img=TF.rotate(output_img,-90, expand=True)
                    # Random rotation 180
if random.random() > 0.5:
input_img=TF.rotate(input_img, 180, expand=True)
output_img=TF.rotate(output_img, 180, expand=True)
# Transform to tensor
input_img = TF.to_tensor(input_img)
output_img = TF.to_tensor(output_img)
return {'input_img': input_img, 'output_img': output_img,
'name': self.data_dict[idx]['input_img'].split("/")[-1]}
class DataLoader(metaclass=ABCMeta):  # ABCMeta makes the @abstractmethod decorators below effective
def __init__(self, data_dirpath, img_ids_filepath):
"""Initialisation function for the data loader
:param data_dirpath: directory containing the data
:param img_ids_filepath: file containing the ids of the images to load
:returns: N/A
:rtype: N/A
"""
self.data_dirpath = data_dirpath
self.img_ids_filepath = img_ids_filepath
@abstractmethod
def load_data(self):
"""Abstract function for the data loader class
:returns: N/A
:rtype: N/A
"""
pass
@abstractmethod
def perform_inference(self, net, data_dirpath):
"""Abstract function for the data loader class
:returns: N/A
:rtype: N/A
"""
pass
class Adobe5kDataLoader(DataLoader):
def __init__(self, data_dirpath, img_ids_filepath):
"""Initialisation function for the data loader
:param data_dirpath: directory containing the data
:param img_ids_filepath: file containing the ids of the images to load
:returns: N/A
:rtype: N/A
"""
super().__init__(data_dirpath, img_ids_filepath)
self.data_dict = defaultdict(dict)
def load_data(self):
""" Loads the Samsung image data into a Python dictionary
:returns: Python two-level dictionary containing the images
:rtype: Dictionary of dictionaries
"""
logging.info("Loading Adobe5k dataset ...")
with open(self.img_ids_filepath) as f:
'''
Load the image ids into a list data structure
'''
image_ids = f.readlines()
# you may also want to remove whitespace characters like `\n` at the end of each line
image_ids_list = [x.rstrip() for x in image_ids]
idx = 0
idx_tmp = 0
img_id_to_idx_dict = {}
for root, dirs, files in os.walk(self.data_dirpath):
for file in files:
img_id = file.split("-")[0]
is_id_in_list = False
for img_id_test in image_ids_list:
if img_id_test == img_id:
is_id_in_list = True
break
if is_id_in_list: # check that the image is a member of the appropriate training/test/validation split
                    if img_id not in img_id_to_idx_dict:
img_id_to_idx_dict[img_id] = idx
self.data_dict[idx] = {}
self.data_dict[idx]['input_img'] = None
self.data_dict[idx]['output_img'] = None
idx_tmp = idx
idx += 1
else:
idx_tmp = img_id_to_idx_dict[img_id]
if "input" in root: # change this to the name of your
# input data folder
input_img_filepath = file
self.data_dict[idx_tmp]['input_img'] = root + \
"/" + input_img_filepath
elif ("output" in root): # change this to the name of your
# output data folder
output_img_filepath = file
self.data_dict[idx_tmp]['output_img'] = root + \
"/" + output_img_filepath
else:
logging.debug("Excluding file with id: " + str(img_id))
for idx, imgs in self.data_dict.items():
assert ('input_img' in imgs)
assert ('output_img' in imgs)
return self.data_dict
'''
This data loading class only works for the Samsung S7 dataset. You will need to
edit this class to handle a new dataset.
'''
class SamsungDataLoader(DataLoader):
def __init__(self, data_dirpath, img_ids_filepath):
"""Initialisation function for the data loader
:param data_dirpath: directory containing the data
:param img_ids_filepath: file containing the ids of the images to load
:returns: N/A
:rtype: N/A
"""
super().__init__(data_dirpath, img_ids_filepath)
self.data_dict = defaultdict(dict)
def load_data(self):
""" Loads the Samsung image data into a Python dictionary
:returns: Python two-level dictionary containing the images
:rtype: Dictionary of dictionaries
"""
logging.info("Loading Samsung dataset ...")
with open(self.img_ids_filepath) as f:
'''
Load the image ids into a list data structure
'''
image_ids = f.readlines()
# you may also want to remove whitespace characters like `\n` at the end of each line
image_ids_list = [x.rstrip() for x in image_ids]
idx = 0
idx_tmp = 0
img_id_to_idx_dict = {}
for root, dirs, files in os.walk(self.data_dirpath):
for file in files:
if "medium" in file:
img_id = file.split("-medium")[0]
else:
img_id = file.split("-short")[0]
is_id_in_list = False
for img_id_test in image_ids_list:
if img_id_test == img_id:
is_id_in_list = True
break
if is_id_in_list: # check that the image is a member of the appropriate training/test/validation split
                    if img_id not in img_id_to_idx_dict:
img_id_to_idx_dict[img_id] = idx
self.data_dict[idx] = {}
self.data_dict[idx]['input_img'] = None
self.data_dict[idx]['output_img'] = None
idx_tmp = idx
idx += 1
else:
idx_tmp = img_id_to_idx_dict[img_id]
if "medium_input" in root: # change medium_input to match
# name of your data input subdirectory
input_img_filepath = file
if file.endswith(".dng"):
if not os.path.isfile(root+"/"+input_img_filepath.split(".")[0]+".npy"):
raw_img = rawpy.imread(
root+"/"+input_img_filepath)
np.save(root+"/"+input_img_filepath.split(".")
[0]+".npy", raw_img.raw_image)
self.data_dict[idx_tmp]['input_img'] = root + \
"/"+input_img_filepath.split(".")[0]+".npy"
elif ("output" in root): # change output to match name of
# your data groundtruth subdirectory
if (file.endswith(".jpg")) and (not file.endswith(".proc.jpg")):
'''
The target images are rgb format.
'''
output_img_filepath = root + "/" + file
if not os.path.isfile(output_img_filepath+".proc.jpg"):
                                output_img = util.ImageProcessing.load_image(
                                    output_img_filepath, normaliser=2**8-1)
plt.imsave(output_img_filepath +
".proc.jpg", output_img)
self.data_dict[idx_tmp]['output_img'] = output_img_filepath+".proc.jpg"
else:
logging.debug("Excluding file with id: " + str(img_id))
for idx, imgs in self.data_dict.items():
assert('input_img' in imgs)
assert('output_img' in imgs)
return self.data_dict
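# A minimal usage sketch (paths and batch size are illustrative assumptions):
#
#   loader = Adobe5kDataLoader("./adobe5k", "./images_train.txt")
#   data_dict = loader.load_data()
#   train_set = Dataset(data_dict, normaliser=2 ** 8 - 1, is_valid=False)
#   train_loader = torch.utils.data.DataLoader(train_set, batch_size=1, shuffle=True)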
|
import express from 'express';
import bodyParser from 'body-parser';
import morgan from 'morgan';
import http from 'http';
import dbmodels from 'bt-mongodb';
import cors from 'cors';
import router from './router';
import websocket from './websocket';
import mobileN from './mobile-notifier';
const conf = require('./config.json');
const app = express();
const server = http.createServer(app);
app.use(cors());
// app.use(express.static('lib/public'));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(morgan('combined', { immediate: true }));
app.set('port', (process.env.PORT || conf.port));
app.post('/echo', (req, res) => {
res.send(req.body);
});
app.use('/api', router);
app.all('*', express.static('lib/public'));
websocket.createApplication(server);
mobileN.startProcess();
dbmodels.connect(conf.mongodb, (err) => {
if (err) {
console.log('Unable to connect to MongoDB');
process.exit(1);
} else {
server.listen(app.get('port'), () => {
console.log(`Server is listening on port ${app.get('port')}`);
});
}
});
module.exports = app;
|
/*
* Copyright (c) 2001 by David Brownell
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/* this file is part of ehci-hcd.c */
/*-------------------------------------------------------------------------*/
/*
* There's basically three types of memory:
* - data used only by the HCD ... kmalloc is fine
* - async and periodic schedules, shared by HC and HCD ... these
* need to use pci_pool or pci_alloc_consistent
* - driver buffers, read/written by HC ... single shot DMA mapped
*
* There's also PCI "register" data, which is memory mapped.
* No memory seen by this driver is pagable.
*/
/*-------------------------------------------------------------------------*/
/*
* Allocator / cleanup for the per device structure
* Called by hcd init / removal code
*/
static struct usb_hcd *ehci_hcd_alloc (void)
{
struct ehci_hcd *ehci;
ehci = (struct ehci_hcd *)
kmalloc (sizeof (struct ehci_hcd), GFP_KERNEL);
if (ehci != 0) {
memset (ehci, 0, sizeof (struct ehci_hcd));
ehci->hcd.product_desc = "EHCI Host Controller";
return &ehci->hcd;
}
return 0;
}
static void ehci_hcd_free (struct usb_hcd *hcd)
{
kfree (hcd_to_ehci (hcd));
}
/*-------------------------------------------------------------------------*/
/* Allocate the key transfer structures from the previously allocated pool */
static inline void ehci_qtd_init (struct ehci_qtd *qtd, dma_addr_t dma)
{
memset (qtd, 0, sizeof *qtd);
qtd->qtd_dma = dma;
qtd->hw_token = cpu_to_le32 (QTD_STS_HALT);
qtd->hw_next = EHCI_LIST_END;
qtd->hw_alt_next = EHCI_LIST_END;
INIT_LIST_HEAD (&qtd->qtd_list);
}
static struct ehci_qtd *ehci_qtd_alloc (struct ehci_hcd *ehci, int flags)
{
struct ehci_qtd *qtd;
dma_addr_t dma;
qtd = pci_pool_alloc (ehci->qtd_pool, flags, &dma);
if (qtd != 0) {
ehci_qtd_init (qtd, dma);
}
return qtd;
}
static inline void ehci_qtd_free (struct ehci_hcd *ehci, struct ehci_qtd *qtd)
{
pci_pool_free (ehci->qtd_pool, qtd, qtd->qtd_dma);
}
static struct ehci_qh *ehci_qh_alloc (struct ehci_hcd *ehci, int flags)
{
struct ehci_qh *qh;
dma_addr_t dma;
qh = (struct ehci_qh *)
pci_pool_alloc (ehci->qh_pool, flags, &dma);
if (!qh)
return qh;
memset (qh, 0, sizeof *qh);
atomic_set (&qh->refcount, 1);
qh->qh_dma = dma;
// INIT_LIST_HEAD (&qh->qh_list);
INIT_LIST_HEAD (&qh->qtd_list);
/* dummy td enables safe urb queuing */
qh->dummy = ehci_qtd_alloc (ehci, flags);
if (qh->dummy == 0) {
ehci_dbg (ehci, "no dummy td\n");
pci_pool_free (ehci->qh_pool, qh, qh->qh_dma);
qh = 0;
}
return qh;
}
/* to share a qh (cpu threads, or hc) */
static inline struct ehci_qh *qh_get (/* ehci, */ struct ehci_qh *qh)
{
atomic_inc (&qh->refcount);
return qh;
}
static void qh_put (struct ehci_hcd *ehci, struct ehci_qh *qh)
{
if (!atomic_dec_and_test (&qh->refcount))
return;
/* clean qtds first, and know this is not linked */
if (!list_empty (&qh->qtd_list) || qh->qh_next.ptr) {
ehci_dbg (ehci, "unused qh not empty!\n");
BUG ();
}
if (qh->dummy)
ehci_qtd_free (ehci, qh->dummy);
pci_pool_free (ehci->qh_pool, qh, qh->qh_dma);
}
/*-------------------------------------------------------------------------*/
/* The queue heads and transfer descriptors are managed from pools tied
* to each of the "per device" structures.
* This is the initialisation and cleanup code.
*/
static void ehci_mem_cleanup (struct ehci_hcd *ehci)
{
if (ehci->async)
qh_put (ehci, ehci->async);
ehci->async = 0;
/* PCI consistent memory and pools */
if (ehci->qtd_pool)
pci_pool_destroy (ehci->qtd_pool);
ehci->qtd_pool = 0;
if (ehci->qh_pool) {
pci_pool_destroy (ehci->qh_pool);
ehci->qh_pool = 0;
}
if (ehci->itd_pool)
pci_pool_destroy (ehci->itd_pool);
ehci->itd_pool = 0;
if (ehci->sitd_pool)
pci_pool_destroy (ehci->sitd_pool);
ehci->sitd_pool = 0;
if (ehci->periodic)
pci_free_consistent (ehci->hcd.pdev,
ehci->periodic_size * sizeof (u32),
ehci->periodic, ehci->periodic_dma);
ehci->periodic = 0;
/* shadow periodic table */
if (ehci->pshadow)
kfree (ehci->pshadow);
ehci->pshadow = 0;
}
/* remember to add cleanup code (above) if you add anything here */
static int ehci_mem_init (struct ehci_hcd *ehci, int flags)
{
int i;
/* QTDs for control/bulk/intr transfers */
ehci->qtd_pool = pci_pool_create ("ehci_qtd", ehci->hcd.pdev,
sizeof (struct ehci_qtd),
32 /* byte alignment (for hw parts) */,
4096 /* can't cross 4K */);
if (!ehci->qtd_pool) {
goto fail;
}
/* QHs for control/bulk/intr transfers */
ehci->qh_pool = pci_pool_create ("ehci_qh", ehci->hcd.pdev,
sizeof (struct ehci_qh),
32 /* byte alignment (for hw parts) */,
4096 /* can't cross 4K */);
if (!ehci->qh_pool) {
goto fail;
}
ehci->async = ehci_qh_alloc (ehci, flags);
if (!ehci->async) {
goto fail;
}
/* ITD for high speed ISO transfers */
ehci->itd_pool = pci_pool_create ("ehci_itd", ehci->hcd.pdev,
sizeof (struct ehci_itd),
32 /* byte alignment (for hw parts) */,
4096 /* can't cross 4K */);
if (!ehci->itd_pool) {
goto fail;
}
/* SITD for full/low speed split ISO transfers */
ehci->sitd_pool = pci_pool_create ("ehci_sitd", ehci->hcd.pdev,
sizeof (struct ehci_sitd),
32 /* byte alignment (for hw parts) */,
4096 /* can't cross 4K */);
if (!ehci->sitd_pool) {
goto fail;
}
/* Hardware periodic table */
ehci->periodic = (u32 *)
pci_alloc_consistent (ehci->hcd.pdev,
ehci->periodic_size * sizeof (u32),
&ehci->periodic_dma);
if (ehci->periodic == 0) {
goto fail;
}
for (i = 0; i < ehci->periodic_size; i++)
ehci->periodic [i] = EHCI_LIST_END;
/* software shadow of hardware table */
ehci->pshadow = kmalloc (ehci->periodic_size * sizeof (void *), flags);
if (ehci->pshadow == 0) {
goto fail;
}
memset (ehci->pshadow, 0, ehci->periodic_size * sizeof (void *));
return 0;
fail:
ehci_dbg (ehci, "couldn't init memory\n");
ehci_mem_cleanup (ehci);
return -ENOMEM;
}
|
#pragma once
#include <opencv2/opencv.hpp>
#include "Poco/URIStreamOpener.h"
#include "Poco/StreamCopier.h"
#include "Poco/Path.h"
#include "Poco/URI.h"
#include "Poco/Exception.h"
#include "Poco/SharedPtr.h"
#include "Poco/Net/HTTPStreamFactory.h"
#include "Poco/Net/HTTPSStreamFactory.h"
#include "Poco/Net/FTPStreamFactory.h"
#include "Poco/Net/SSLManager.h"
#include "Poco/Net/AcceptCertificateHandler.h"
#include "Poco/Net/PrivateKeyPassphraseHandler.h"
#include <memory>
#include <iostream>
#include <fstream>
using Poco::Exception;
using Poco::Path;
using Poco::SharedPtr;
using Poco::StreamCopier;
using Poco::URI;
using Poco::URIStreamOpener;
using Poco::Net::AcceptCertificateHandler;
using Poco::Net::Context;
using Poco::Net::FTPStreamFactory;
using Poco::Net::HTTPSStreamFactory;
using Poco::Net::HTTPStreamFactory;
using Poco::Net::InvalidCertificateHandler;
using Poco::Net::SSLManager;
class Downloader
{
public:
Downloader();
~Downloader(void);
std::string DownloadFile(const std::string &url);
};
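// A minimal usage sketch (the URL is an illustrative assumption):
//   Downloader d;
//   std::string result = d.DownloadFile("https://example.com/image.jpg");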
|
export default class FormComponent {
constructor(component, dataContext) {
this.component = component;
this.dataContext = dataContext;
}
async accept(visitor) {
visitor.visit(this);
}
}
|
import time
import datetime
import os
import requests
from project.server.main.feed import harvest_and_insert
from project.server.main.logger import get_logger
logger = get_logger(__name__)
def create_task_harvest(arg):
collection_name = arg.get('collection_name')
if collection_name:
harvest_and_insert(collection_name)
# url_hal_update = "https://api.archives-ouvertes.fr/search/?fq=doiId_s:*%20AND%20structCountry_s:fr%20AND%20modifiedDate_tdate:[{0}T00:00:00Z%20TO%20{1}T00:00:00Z]%20AND%20producedDate_tdate:[2013-01-01T00:00:00Z%20TO%20{1}T00:00:00Z]&fl=halId_s,doiId_s,openAccess_bool&rows={2}&start={3}"
|
from __future__ import annotations
import time
from slack_sdk.web.client import WebClient
from slack_sdk.web.slack_response import SlackResponse
from . import templates
class Message:
def __init__(self, token: str, channel: str = ""):
"""Post ``message`` template to slack.
Note:
- You can set default value using self.default.set()
- You can set persistent value using self.persistent.set()
"""
self.client = WebClient(token)
self.tmpl = templates.Message()
self.channel = channel
def post(
self,
        text: str | None = None,
        severity: str | None = None,
        header: str | None = None,
        context: list | None = None,
        channel: str | None = None,
) -> None:
"""Post simple message to Slack
Args:
severity (str): Sets message color. Possible values: info | success | warning | error
Note:
- If no value is passed, the default value will be used (if available).
- If there is no default/persistent value found - that part of the template will not be rendered.
- You can set default value using self.default.set()
- You can set persistent value using self.persistent.set()
"""
self.client.chat_postMessage(
channel=channel or self.channel,
**self.tmpl.construct(text, header, context, severity),
)
def seconds_to_dhms(seconds) -> str:
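    """Render a duration as e.g. '1d 2h 3m 4s', omitting zero-valued units."""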
(days, remainder) = divmod(int(seconds), 86400)
(hours, remainder) = divmod(remainder, 3600)
(minutes, seconds) = divmod(remainder, 60)
return "".join(
[
f"{v}{k} " if v else ""
for k, v in {"d": days, "h": hours, "m": minutes, "s": seconds}.items()
]
).strip()
class Operation:
    def __init__(
        self, token: str | None = None, channel: str = "", packed_operation: dict | None = None
    ):
"""Post ``operation`` template to slack.
Note:
- You can set default value using self.default.set()
- You can set persistent value using self.persistent.set()
"""
self.tmpl = templates.Operation()
if packed_operation:
self.init_packed(packed_operation)
else:
self.token = token
self.channel = channel
self.client = WebClient(self.token)
def start(
self,
name: str,
status: str,
        text: str | None = None,
        severity: str = "info",
        header: str | None = None,
        context: list | None = None,
        channel: str | None = None,
) -> SlackResponse:
"""Send message to slack about operation you're starting.
Args:
name (str): Name of operation. Example: Application update
status (str): Status of operation. Example: Backup
Note:
- If no value is passed, the default value will be used (if available).
- If there is no default/persistent value found - that part of the template will not be rendered.
- You can set default value using self.default.set()
- You can set persistent value using self.persistent.set()
"""
self.started = time.time()
self.tmpl.default.set(
started=self.started, text=text, name=name, header=header, context=context
)
response = self.client.chat_postMessage(
channel=channel or self.channel,
**self.tmpl.construct(status=status, severity=severity),
)
self._parent_ts = response["message"]["ts"]
self._channel_id = response["channel"]
self._post_to_parent_thread(status)
return response
def update(
self,
status: str,
severity: str = "info",
):
"""Update current status of the operation"""
self.client.chat_update(
channel=self._channel_id,
ts=self._parent_ts,
**self.tmpl.construct(severity=severity, status=status),
)
self._post_to_parent_thread(status)
def finish(self, status: str, severity: str = "success"):
finished = time.time()
self.client.chat_update(
channel=self._channel_id,
ts=self._parent_ts,
**self.tmpl.construct(status=status, finished=finished, severity=severity),
)
self._post_to_parent_thread(
f"The process took `{seconds_to_dhms(finished - self.started)}`"
)
def _post_to_parent_thread(self, text) -> SlackResponse:
return self.client.chat_postMessage(
text=f"`<!date^{int(time.time())}^{{time_secs}}|:( no time>` {text}",
mrkdwn=True,
channel=self._channel_id,
thread_ts=self._parent_ts,
)
def pack(self):
return {
"default": self.tmpl.default._dict,
"persistent": self.tmpl.persistent._dict,
"channel": self.channel,
"_channel_id": self._channel_id,
"_parent_ts": self._parent_ts,
"token": self.token,
}
def init_packed(self, packed_operation):
p = packed_operation
self.token = p["token"]
self.tmpl.default._dict = p["default"]
self.tmpl.persistent._dict = p["persistent"]
self.channel = p["channel"]
self._channel_id = p["_channel_id"]
self._parent_ts = p["_parent_ts"]
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class UsersContactFoldersChildFoldersOperations:
"""UsersContactFoldersChildFoldersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~users_functions.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def delta(
self,
user_id: str,
contact_folder_id: str,
**kwargs
) -> List["models.MicrosoftGraphContactFolder"]:
"""Invoke function delta.
Invoke function delta.
:param user_id: key: id of user.
:type user_id: str
:param contact_folder_id: key: id of contactFolder.
:type contact_folder_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of MicrosoftGraphContactFolder, or the result of cls(response)
:rtype: list[~users_functions.models.MicrosoftGraphContactFolder]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["models.MicrosoftGraphContactFolder"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.delta.metadata['url'] # type: ignore
path_format_arguments = {
'user-id': self._serialize.url("user_id", user_id, 'str'),
'contactFolder-id': self._serialize.url("contact_folder_id", contact_folder_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[MicrosoftGraphContactFolder]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delta.metadata = {'url': '/users/{user-id}/contactFolders/{contactFolder-id}/childFolders/microsoft.graph.delta()'} # type: ignore
|
deepmacDetailCallback("0050c2741000/36",[{"a":"Dongmun Vision Building, Sinjeong 6-dong, Yangcheon-gu, Seoul Yangcheon-gu, KR 158-740","o":"Dain","d":"2008-07-30","t":"add","s":"ieee","c":"KR"}]);
|
from dx.share import add_props_to_ns, validate_roman_numeral, roman2int
from .symbol_formatting import RegexMatchable, SymbolGroup
from .soup_postprocessing import listpager
from traceback import print_tb
class TocChapNum(RegexMatchable):
def __init__(self, ch_num_substr):
self.substr = ch_num_substr # Store input string (ToC entry title) in a property
# Complain if the (sub)chapter numbering regex doesn't match the title string
        if not self.match(self.substr):
            raise ValueError("No chapter number in this string")
self.numeric = self.get_numbering_tuple(self.substr)
add_props_to_ns(["numeric", "substr"])
_re = r"^(Chapter |CHAPTER |§ ?)?(\d+\.)+" # set inherited read-only `RegexMatchable.re` property
@classmethod
def get_numbering_tuple(cls, target_str):
m = cls.match(target_str)
if m:
mg_ch, mg_num = m.groups()
num_group = m.group()
if mg_ch:
num_group = num_group[len(mg_ch):] # left-strip the chapter substring
num_tup = tuple(n for n in num_group.split(".") if n)
if not all(map(str.isnumeric, num_tup)):
raise ValueError(f"Non-numeric chapter numbering: {num_tup}")
return tuple(map(int, num_tup))
else:
return m
class TocChapRomNum(RegexMatchable):
def __init__(self, ch_num_substr):
self.substr = ch_num_substr # Store input string (ToC entry title) in a property
# Complain if the (sub)chapter numbering regex doesn't match the title string
if not self.match(self.substr):
raise ValueError("No chapter number in this string")
self.numeric = self.get_numbering_tuple(self.substr)
add_props_to_ns(["numeric", "substr"])
# set inherited read-only `RegexMatchable.re` property
_re = r"^(Chapter |CHAPTER |§ ?)?((M{0,4})(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})\.)+"
@classmethod
def get_numbering_tuple(cls, target_str):
m = cls.match(target_str)
if m:
mg_ch, mg_num = m.groups()[:2] # only need the broadest Roman numeral group
num_group = m.group()
if mg_ch:
num_group = num_group[len(mg_ch):] # left-strip the chapter substring
num_tup = tuple(n for n in num_group.split(".") if n)
if not all(map(validate_roman_numeral, num_tup)):
raise ValueError(f"Non-Roman numeric chapter numbering: {num_tup}")
return tuple(map(roman2int, num_tup))
else:
return m
class TocTitle(str):
def __init__(self, t):
self.title_text = t
ch_num_str = TocChapNum.from_target_str(t)
if ch_num_str is None:
# Retry as Roman numeral instead
ch_num_str = TocChapRomNum.from_target_str(t)
self.ch_num = ch_num_str
title_postnum = None if self.ch_num is None else t[len(self.ch_num.substr):]
self.ch_title_postnum = title_postnum
if self.ch_title_postnum is not None:
try:
t_str = self.ch_title_postnum
ch_symbol_substrings = SymbolGroup.from_target_str(t_str)
except Exception as e:
# Don't raise, just store exception instead of the SymbolGroup object
ch_symbol_substrings = e
else:
ch_symbol_substrings = []
self.symbol_groups = ch_symbol_substrings # `.formula.parsed.statement`
def __repr__(self):
if self.ch_num is None:
return self.title_text
else:
return f"({self.ch_num.substr}){self.ch_title_postnum}"
class TocEntry:
def __init__(self, li_item):
title = next(li_item.select_one("span.t-toc-title").stripped_strings)
self.title = TocTitle(title)
logical_pageno = li_item.select_one("span.t-toc-logical-pageno")
self.logical_pageno = None if logical_pageno is None else logical_pageno.text
pageno = li_item.select_one("span.t-toc-pageno")
self.pageno = None if pageno is None else pageno.text
self.is_free = li_item.select_one("span.t-toc-range-free") is not None
def __repr__(self):
s = f"{self.title!r} --- {self.logical_pageno}"
if self.logical_pageno != self.pageno:
s += f" ({self.pageno})"
if self.is_free:
s += " (free)"
return s
def get_TocEntry_or_TocEntries(li_item):
li_ch = li_item.findChild()
    if li_ch is None:
        listpager(li_item)  # DEBUGGING: print out the li item with no child element
        return [TocEntry(li_item)]  # no child list: treat this li as a single leaf entry
if li_ch.name == "ul":
entry_list = [TocEntry(l) for l in li_ch.findChildren("li", recursive=False)]
else:
entry_list = [TocEntry(li_item)]
return entry_list
class TocEntries(list):
def __init__(self, li_list):
for li in li_list:
try:
self.extend(get_TocEntry_or_TocEntries(li))
except Exception as e:
listpager(li)
listpager(li_list)
print_tb(e.__traceback__)
@property
def has_distinct_pagenums(self):
        return any(e.pageno != e.logical_pageno for e in self)
class TocInfo:
def __init__(self, toc_ul):
self.toc_entries = TocEntries(toc_ul.findChildren("li", recursive=False))
|
x = {
['method'](): void ->
a
b
}
|
dojo.provide("dojo.nls.dojo-ext_pl");dojo.provide("dojo.nls.colors");dojo.nls.colors._built=true;dojo.provide("dojo.nls.colors.pl");dojo.nls.colors.pl={"lightsteelblue":"jasnostalowoniebieski","orangered":"pomarańczowoczerwony","midnightblue":"granatowoczarny","cadetblue":"niebieskoszary","seashell":"matowoliliowy","slategrey":"mysi","coral":"koralowy","darkturquoise":"mlecznoturkusowy","antiquewhite":"biel antyczna","mediumspringgreen":"średnia wiosenna zieleń","salmon":"łososiowy","darkgrey":"ciemnoszary","ivory":"kość słoniowa","greenyellow":"zielonożółty","mistyrose":"bladoróżany","lightsalmon":"jasnołososiowy","silver":"srebrny","dimgrey":"przyciemniony szary","orange":"pomarańczowy","white":"biały","navajowhite":"piaskowy","royalblue":"królewski błękit","deeppink":"głęboki różowy","lime":"limonkowy","oldlace":"bladopomarańczowy","chartreuse":"żółtooliwkowy","darkcyan":"ciemnoniebieskozielony","yellow":"żółty","linen":"lniany","olive":"oliwkowy","gold":"złoty","lawngreen":"trawiasty","lightyellow":"jasnożółty","tan":"śniady","darkviolet":"ciemnofiołkowy","lightslategrey":"jasny mysi","grey":"szary","darkkhaki":"ciemny khaki","green":"zielony","deepskyblue":"intensywny błękit nieba","aqua":"morski","sienna":"siena","mintcream":"jasnomiętowy","rosybrown":"różowobrązowy","mediumslateblue":"średni gołębi","magenta":"amarantowy","lightseagreen":"jasna zieleń morska","cyan":"niebieskozielony","olivedrab":"oliwkowa zieleń","darkgoldenrod":"ciemne stare złoto","slateblue":"gołębi","mediumaquamarine":"średnia akwamaryna","lavender":"lawendowy","mediumseagreen":"średnia zieleń morska","maroon":"kasztanowy","darkslategray":"ciemny mysi","mediumturquoise":"średni turkusowy","ghostwhite":"bladobiały","darkblue":"ciemnoniebieski","mediumvioletred":"średni fiołkowowoczerwony","brown":"brązowy","lightgray":"jasnoszary","sandybrown":"piaskowy brąz","pink":"różowy","firebrick":"ceglasty","indigo":"indygo","snow":"śnieżny","darkorchid":"ciemna orchidea","turquoise":"turkusowy","chocolate":"czekoladowy","springgreen":"wiosenna zieleń","moccasin":"mokasynowy","navy":"granatowy","lemonchiffon":"cytrynowy","teal":"zielonomodry","floralwhite":"kwiatowa biel","cornflowerblue":"chabrowy","paleturquoise":"bladoturkusowy","purple":"fioletowy","gainsboro":"bladoszary","plum":"śliwkowy","red":"czerwony","blue":"niebieski","forestgreen":"leśna zieleń","darkgreen":"ciemnozielony","honeydew":"miodowy","darkseagreen":"ciemna zieleń morska","lightcoral":"jasnokoralowy","palevioletred":"blady fiołkowoczerwony","mediumpurple":"średni fioletowy","saddlebrown":"brąz skórzany","darkmagenta":"ciemnoamarantowy","thistle":"bladofioletowy","whitesmoke":"przydymiony biały","wheat":"pszeniczny","violet":"fiołkowy","lightskyblue":"jasny błękit nieba","goldenrod":"stare złoto","mediumblue":"ciemnochabrowy","skyblue":"błękit nieba","crimson":"karmazynowy","darksalmon":"ciemnołososiowy","darkred":"ciemnoczerwony","darkslategrey":"ciemny mysi","peru":"jasnobrązowy","lightgrey":"jasnoszary","lightgoldenrodyellow":"jasnożółte stare złoto","blanchedalmond":"migdałowy","aliceblue":"bladoniebieski","bisque":"biszkoptowy","slategray":"mysi","palegoldenrod":"blade stare złoto","darkorange":"ciemnopomarańczowy","aquamarine":"akwamaryna","lightgreen":"jasnozielony","burlywood":"kolor drewna","dodgerblue":"błękit Dodgers","darkgray":"ciemnoszary","lightcyan":"jasnoniebieskozielony","powderblue":"jasnobladobłękitny","blueviolet":"błękitnofiołkowy","orchid":"orchidea","dimgray":"przyciemniony 
szary","beige":"beżowy","fuchsia":"fuksjowy","lavenderblush":"lawendowocielisty","hotpink":"odblaskoworóżowy","steelblue":"stalowoniebieski","tomato":"pomidorowy","lightpink":"jasnoróżowy","limegreen":"zielony limonkowy","indianred":"kasztanowy","papayawhip":"papaja","lightslategray":"jasny mysi","gray":"szary","mediumorchid":"średnia orchidea","cornsilk":"kukurydziany","black":"czarny","seagreen":"zieleń morska","darkslateblue":"ciemny gołębi","khaki":"khaki","lightblue":"jasnoniebieski","palegreen":"bladozielony","azure":"lazurowy","peachpuff":"brzoskwiniowy","darkolivegreen":"ciemnooliwkowy","yellowgreen":"żółtozielony"};dojo.provide("dijit.nls.loading");dijit.nls.loading._built=true;dojo.provide("dijit.nls.loading.pl");dijit.nls.loading.pl={"loadingState":"Ładowanie...","errorState":"Niestety, wystąpił błąd"};dojo.provide("dijit.nls.common");dijit.nls.common._built=true;dojo.provide("dijit.nls.common.pl");dijit.nls.common.pl={"buttonOk":"OK","buttonCancel":"Anuluj","buttonSave":"Zapisz","itemClose":"Zamknij"};dojo.provide("dijit._editor.nls.commands");dijit._editor.nls.commands._built=true;dojo.provide("dijit._editor.nls.commands.pl");dijit._editor.nls.commands.pl={"removeFormat":"Usuń formatowanie","copy":"Kopiuj","paste":"Wklej","selectAll":"Wybierz wszystko","insertOrderedList":"Lista numerowana","insertTable":"Wstaw/edytuj tabelę","print":"Drukuj","underline":"Podkreślenie","foreColor":"Kolor pierwszego planu","htmlToggle":"Kod źródłowy HTML","formatBlock":"Styl akapitu","newPage":"Nowa strona","insertHorizontalRule":"Linia pozioma","delete":"Usuń","insertUnorderedList":"Lista wypunktowana","tableProp":"Właściwość tabeli","insertImage":"Wstaw obraz","superscript":"Indeks górny","subscript":"Indeks dolny","createLink":"Utwórz odsyłacz","undo":"Cofnij","fullScreen":"Przełącz pełny ekran","italic":"Kursywa","fontName":"Nazwa czcionki","justifyLeft":"Wyrównaj do lewej","unlink":"Usuń odsyłacz","toggleTableBorder":"Przełącz ramkę tabeli","viewSource":"Wyświetl kod źródłowy HTML","ctrlKey":"Ctrl+${0}","fontSize":"Wielkość czcionki","systemShortcut":"Działanie ${0} jest dostępne w tej przeglądarce wyłącznie przy użyciu skrótu klawiaturowego. 
Należy użyć klawiszy ${1}.","indent":"Wcięcie","redo":"Ponów","strikethrough":"Przekreślenie","justifyFull":"Wyrównaj do lewej i prawej","justifyCenter":"Wyrównaj do środka","hiliteColor":"Kolor tła","deleteTable":"Usuń tabelę","outdent":"Usuń wcięcie","cut":"Wytnij","plainFormatBlock":"Styl akapitu","toggleDir":"Przełącz kierunek","bold":"Pogrubienie","tabIndent":"Wcięcie o tabulator","justifyRight":"Wyrównaj do prawej","appleKey":"⌘${0}"};dojo.provide("dijit.form.nls.validate");dijit.form.nls.validate._built=true;dojo.provide("dijit.form.nls.validate.pl");dijit.form.nls.validate.pl={"rangeMessage":"Ta wartość jest spoza zakresu.","invalidMessage":"Wprowadzona wartość jest niepoprawna.","missingMessage":"Ta wartość jest wymagana."};dojo.provide("dijit.form.nls.ComboBox");dijit.form.nls.ComboBox._built=true;dojo.provide("dijit.form.nls.ComboBox.pl");dijit.form.nls.ComboBox.pl={"previousMessage":"Poprzednie wybory","nextMessage":"Więcej wyborów"};dojo.provide("dijit._editor.nls.FontChoice");dijit._editor.nls.FontChoice._built=true;dojo.provide("dijit._editor.nls.FontChoice.pl");dijit._editor.nls.FontChoice.pl={"noFormat":"Brak","1":"najmniejsza","2":"mniejsza","formatBlock":"Format","3":"mała","4":"średnia","5":"duża","6":"większa","7":"największa","fantasy":"fantazyjna","serif":"szeryfowa","p":"Akapit","pre":"Wstępnie sformatowane","sans-serif":"bezszeryfowa","fontName":"Czcionka","h1":"Nagłówek","h2":"Nagłówek 2-go poziomu","h3":"Nagłówek 3-go poziomu","monospace":"czcionka o stałej szerokości","fontSize":"Wielkość","cursive":"kursywa"};dojo.provide("dijit._editor.nls.LinkDialog");dijit._editor.nls.LinkDialog._built=true;dojo.provide("dijit._editor.nls.LinkDialog.pl");dijit._editor.nls.LinkDialog.pl={"text":"Opis:","insertImageTitle":"Właściwości obrazu","set":"Ustaw","newWindow":"Nowe okno","topWindow":"Okno najwyższego poziomu","target":"Cel:","createLinkTitle":"Właściwości odsyłacza","parentWindow":"Okno macierzyste","currentWindow":"Bieżące okno","url":"Adres URL:"};dojo.provide("dojo.cldr.nls.number");dojo.cldr.nls.number._built=true;dojo.provide("dojo.cldr.nls.number.pl");dojo.cldr.nls.number.pl={"group":" ","percentSign":"%","exponential":"E","percentFormat":"#,##0%","scientificFormat":"#E0","list":";","infinity":"∞","patternDigit":"#","minusSign":"-","decimal":",","nan":"NaN","nativeZeroDigit":"0","perMille":"‰","decimalFormat":"#,##0.###","currencyFormat":"#,##0.00 ¤","plusSign":"+","currencySpacing-afterCurrency-currencyMatch":"[:letter:]","currencySpacing-beforeCurrency-surroundingMatch":"[:digit:]","decimalFormat-short":"000T","currencySpacing-afterCurrency-insertBetween":" ","currencySpacing-afterCurrency-surroundingMatch":"[:digit:]","currencySpacing-beforeCurrency-currencyMatch":"[:letter:]","currencySpacing-beforeCurrency-insertBetween":" "};dojo.provide("dojo.cldr.nls.currency");dojo.cldr.nls.currency._built=true;dojo.provide("dojo.cldr.nls.currency.pl");dojo.cldr.nls.currency.pl={"HKD_displayName":"dolar hongkoński","CHF_displayName":"frank szwajcarski","CAD_displayName":"dolar kanadyjski","CNY_displayName":"juan renminbi","AUD_displayName":"dolar australijski","JPY_displayName":"jen japoński","USD_displayName":"dolar amerykański ","GBP_displayName":"funt 
szterling","EUR_displayName":"euro","USD_symbol":"US$","CAD_symbol":"CA$","GBP_symbol":"£","HKD_symbol":"HK$","JPY_symbol":"JP¥","AUD_symbol":"AU$","CNY_symbol":"CN¥","EUR_symbol":"€"};dojo.provide("dojo.cldr.nls.gregorian");dojo.cldr.nls.gregorian._built=true;dojo.provide("dojo.cldr.nls.gregorian.pl");dojo.cldr.nls.gregorian.pl={"months-format-narrow":["s","l","m","k","m","c","l","s","w","p","l","g"],"field-weekday":"Dzień tygodnia","dateFormatItem-yQQQ":"y QQQ","dateFormatItem-yMEd":"EEE, d.MM.yyyy","dateFormatItem-MMMEd":"E, d MMM","eraNarrow":["p.n.e.","n.e."],"dayPeriods-format-wide-earlyMorning":"nad ranem","dayPeriods-format-wide-morning":"rano","dateFormat-long":"d MMMM y","months-format-wide":["stycznia","lutego","marca","kwietnia","maja","czerwca","lipca","sierpnia","września","października","listopada","grudnia"],"dayPeriods-format-wide-evening":"wieczorem","dayPeriods-format-wide-pm":"PM","dateFormat-full":"EEEE, d MMMM y","dateFormatItem-Md":"d.MM","dayPeriods-format-wide-noon":"w południe","field-era":"Era","dateFormatItem-yM":"MM.yyyy","months-standAlone-wide":["styczeń","luty","marzec","kwiecień","maj","czerwiec","lipiec","sierpień","wrzesień","październik","listopad","grudzień"],"timeFormat-short":"HH:mm","quarters-format-wide":["I kwartał","II kwartał","III kwartał","IV kwartał"],"timeFormat-long":"HH:mm:ss z","field-year":"Rok","dateFormatItem-yQ":"yyyy Q","dateFormatItem-yyyyMMMM":"LLLL y","field-hour":"Godzina","dateFormatItem-MMdd":"d.MM","months-format-abbr":["sty","lut","mar","kwi","maj","cze","lip","sie","wrz","paź","lis","gru"],"dateFormatItem-yyQ":"Q yy","timeFormat-full":"HH:mm:ss zzzz","field-day-relative+0":"Dzisiaj","field-day-relative+1":"Jutro","field-day-relative+2":"Pojutrze","field-day-relative+3":"Za trzy dni","months-standAlone-abbr":["sty","lut","mar","kwi","maj","cze","lip","sie","wrz","paź","lis","gru"],"quarters-format-abbr":["K1","K2","K3","K4"],"quarters-standAlone-wide":["I kwartał","II kwartał","III kwartał","IV kwartał"],"dateFormatItem-M":"L","days-standAlone-wide":["niedziela","poniedziałek","wtorek","środa","czwartek","piątek","sobota"],"dateFormatItem-MMMMd":"d MMMM","dateFormatItem-yyMMM":"MMM yy","timeFormat-medium":"HH:mm:ss","dateFormatItem-Hm":"HH:mm","quarters-standAlone-abbr":["1 kw.","2 kw.","3 kw.","4 kw."],"eraAbbr":["p.n.e.","n.e."],"field-minute":"Minuta","field-dayperiod":"Dayperiod","days-standAlone-abbr":["niedz.","pon.","wt.","śr.","czw.","pt.","sob."],"dayPeriods-format-wide-night":"w nocy","dateFormatItem-d":"d","dateFormatItem-ms":"mm:ss","field-day-relative+-1":"Wczoraj","dateFormatItem-h":"hh a","field-day-relative+-2":"Przedwczoraj","field-day-relative+-3":"Trzy dni temu","dateFormatItem-MMMd":"d MMM","dateFormatItem-MEd":"E, d.MM","dayPeriods-format-wide-lateMorning":"przed południem","dateFormatItem-yMMMM":"LLLL y","field-day":"Dzień","days-format-wide":["niedziela","poniedziałek","wtorek","środa","czwartek","piątek","sobota"],"field-zone":"Strefa","dateFormatItem-yyyyMM":"MM.yyyy","dateFormatItem-y":"y","months-standAlone-narrow":["s","l","m","k","m","c","l","s","w","p","l","g"],"dateFormatItem-hm":"hh:mm a","days-format-abbr":["niedz.","pon.","wt.","śr.","czw.","pt.","sob."],"eraNames":["p.n.e.","n.e."],"days-format-narrow":["N","P","W","Ś","C","P","S"],"field-month":"Miesiąc","days-standAlone-narrow":["N","P","W","Ś","C","P","S"],"dateFormatItem-MMM":"LLL","dayPeriods-format-wide-am":"AM","dateFormat-short":"dd.MM.yyyy","dayPeriods-format-wide-afternoon":"po 
południu","field-second":"Sekunda","dateFormatItem-yMMMEd":"EEE, d MMM y","dateFormatItem-Ed":"E, d","field-week":"Tydzień","dateFormat-medium":"d MMM y","dateFormatItem-Hms":"HH:mm:ss","dateFormatItem-hms":"hh:mm:ss a","quarters-standAlone-narrow":["1","2","3","4"],"dateTimeFormats-appendItem-Day-Of-Week":"{0} {1}","dateTimeFormat-medium":"{1} {0}","dateFormatItem-EEEd":"d EEE","dayPeriods-format-abbr-am":"AM","dateTimeFormats-appendItem-Second":"{0} ({2}: {1})","dateFormatItem-yMMM":"y MMM","dateTimeFormats-appendItem-Era":"{0} {1}","dateTimeFormats-appendItem-Week":"{0} ({2}: {1})","dateFormatItem-H":"HH","quarters-format-narrow":["1","2","3","4"],"dateTimeFormat-long":"{1} {0}","dayPeriods-format-narrow-am":"AM","dateTimeFormat-full":"{1} {0}","dateTimeFormats-appendItem-Day":"{0} ({2}: {1})","dateTimeFormats-appendItem-Year":"{0} {1}","dateTimeFormats-appendItem-Hour":"{0} ({2}: {1})","dayPeriods-format-abbr-pm":"PM","dateTimeFormats-appendItem-Quarter":"{0} ({2}: {1})","dateTimeFormats-appendItem-Month":"{0} ({2}: {1})","dateTimeFormats-appendItem-Minute":"{0} ({2}: {1})","dateTimeFormats-appendItem-Timezone":"{0} {1}","dayPeriods-format-narrow-pm":"PM","dateTimeFormat-short":"{1} {0}"};dojo.provide("dojox.widget.nls.ColorPicker");dojox.widget.nls.ColorPicker._built=true;dojo.provide("dojox.widget.nls.ColorPicker.pl");dojox.widget.nls.ColorPicker.pl={"saturationPickerTitle":"Selektor nasycenia","valueLabel":"jas.","blueLabel":"n","saturationLabel":"nas.","greenLabel":"z","redLabel":"c","hueLabel":"barwa","hexLabel":"szesnastkowe","huePickerTitle":"Selektor barwy","degLabel":"°"};
|
import numpy as np
from openmdao.core.group import Group
from openmdao.core.component import Component
from openmdao.solvers.newton import Newton
# from scipy.optimize import brentq
from pycycle.constants import AIR_MIX
from pycycle.thermo_static import SetStaticMN
from pycycle import species_data
from pycycle.flowstation import FlowIn, PassThrough
class TransmogrifierCalc(Component):
    def __init__(self):
        super(TransmogrifierCalc, self).__init__()
        self.add_param('MN_out_target', 1.0, desc='Mach at exit')
        # apply_nonlinear below tests self.mode, so give it a default here;
        # 'MN' is the only mode handled in this excerpt (an assumption).
        self.mode = 'MN'

    def solve_nonlinear(self, params, unknowns, resids):
        # Nothing to compute directly; this component only carries the
        # MN_out_target parameter for the group below.
        pass

    def apply_nonlinear(self, params, unknowns, resids):
        if self.mode == 'MN':
            # No residual contribution in MN mode in this excerpt.
            pass
class Transmogrifier(Group):
"""Calculates statics for a component with changing area (e.g. a diffuser or converging nozzle); can be used alone or in sequence with a compressor, splitter, etc. to model area change across the component"""
def __init__(self, thermo_data=species_data.janaf, elements=AIR_MIX):
super(Transmogrifier, self).__init__()
self.thermo_data = thermo_data
self.elements = elements
gas_thermo = species_data.Thermo(thermo_data, init_reacts=elements)
self.gas_prods = gas_thermo.products
self.num_prod = len(self.gas_prods)
# Create inlet flowstation
flow_in = FlowIn('Fl_I', self.num_prod)
self.add('flow_in', flow_in, promotes=flow_in.flow_in_vars)
self.add('calc', TransmogrifierCalc(), promotes=['*'])
# Calculate statics based on MN
set_stat = SetStaticMN(thermo_data, elements, 'Fl_O:stat')
self.add('set_stat', set_stat, promotes=set_stat.flow_out_vars)
self.connect('Fl_I:tot:h', 'set_stat.ht')
self.connect('Fl_I:tot:S', 'set_stat.S')
self.connect('Fl_I:stat:W', 'set_stat.W')
self.connect('Fl_I:tot:n', 'set_stat.n_guess')
self.connect('MN_out_target', 'set_stat.MN_target')
for v_name in ('h', 'T', 'P', 'rho', 'gamma', 'Cp', 'Cv', 'S', 'n_moles'):
self.add('passthru_tot:%s' % v_name, PassThrough('Fl_I:tot:%s' % v_name, 'Fl_O:tot:%s' % v_name, 0.0), promotes=['*'])
self.add('passthru_tot:n', PassThrough('Fl_I:tot:n', 'Fl_O:tot:n', np.zeros(self.num_prod)), promotes=['*'])
self.add('passthru_FAR', PassThrough('Fl_I:FAR', 'Fl_O:FAR', 0.0), promotes=['*'])
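# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the module). Instantiating
# the group exercises the thermo setup above; driving a full solve would
# additionally need an enclosing Problem with an IndepVarComp feeding
# 'MN_out_target', per the OpenMDAO 1.x API used in this file.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    transmog = Transmogrifier()
    print('thermo package is janaf:', transmog.thermo_data is species_data.janaf)
    print('product species tracked:', transmog.num_prod)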
|
'use strict'
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const bcrypt = require('bcrypt-nodejs');
const CourseEmbeddedTutorSchema = new Schema({
courseId: {
type: Schema.Types.ObjectId,
required: true
},
gpa: {
type: Number
},
score:{
type: Number
},
initialDate:{
type: Date
}
});
const EventEmbeddedTutorSchema = new Schema({
title: {
type: String
},
start:{
type: String
},
end:{
type: String
},
color:{
type: String
},
textColor:{
type: String
},
overlap:{
type: String
},
selectable:{
type: String
}
});
const StudentSchema = new Schema({
name: {
type: String
},
lastName: {
type: String
},
email: {
type: String
},
password: {
type: String
},
career: {
type: String
},
gpa: {
type: Number
},
phoneNumber: {
type: String
},
courses:[CourseEmbeddedTutorSchema],
events:[EventEmbeddedTutorSchema],
profilePicture: {
data: {
type: Buffer
},
contentType: {
type: String
}
},
description: {
type: String
},
price: {
type: Number
},
isTutor : {
type: Boolean
},
availability: [{
initialDate: {
type: Date
},
finalDate: {
type: Date
}
}],
chat: [{
receiverId: {
type: Schema.Types.ObjectId
},
chatId: {
type: Schema.Types.ObjectId
}
}],
  dateCreated: {
    type: Date
  },
  dateCreatedTutor: {
    type: Date
  },
  googleUser: {
    type: Boolean
  }
});
// Hash a plaintext password with a freshly generated salt.
StudentSchema.methods.encryptPassword = function (password) {
  return bcrypt.hashSync(password, bcrypt.genSaltSync(10));
};

// Compare a plaintext candidate against the stored hash.
StudentSchema.methods.comparePassword = function (password) {
  return bcrypt.compareSync(password, this.password);
};

// Hash the password before saving, but only when it has been (re)set,
// so an already-hashed value is never hashed twice.
StudentSchema.pre('save', function (next) {
  const student = this;
  if (!student.isModified('password')) return next();
  student.password = student.encryptPassword(student.password);
  next();
});
module.exports = mongoose.model('Student', StudentSchema);
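// ---------------------------------------------------------------------------
// Minimal usage sketch (illustrative, not part of the model). The require
// path is an assumption, and mongoose is assumed to be connected elsewhere:
//
//   const Student = require('./student');
//   const s = new Student({ name: 'Ada', email: 'ada@example.com', password: 'secret' });
//   s.save(err => {
//     // the pre-save hook above has replaced s.password with a bcrypt hash
//     if (!err) console.log(s.comparePassword('secret')); // true
//   });
// ---------------------------------------------------------------------------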
|
from thinplate.hybrid import *
try:
    # Expose the PyTorch implementation as `thinplate.torch`, but only when
    # torch itself is importable; otherwise the package stays NumPy-only.
    import torch
    import thinplate.pytorch as torch
except ImportError:
    pass
__version__ = '1.0.0'
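# Usage sketch (illustrative): after `import thinplate as tps`, everything
# re-exported from thinplate.hybrid is available directly on `tps`, and the
# aliased import above additionally exposes the PyTorch implementation as
# `tps.torch` whenever torch is installed.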
|
# Use of this source code is governed by the MIT license.
__license__ = "MIT"
from collections import defaultdict
import itertools
import sys
from bs4.element import (
CharsetMetaAttributeValue,
ContentMetaAttributeValue,
Stylesheet,
Script,
TemplateString,
nonwhitespace_re
)
__all__ = [
'HTMLTreeBuilder',
'SAXTreeBuilder',
'TreeBuilder',
'TreeBuilderRegistry',
]
# Some useful features for a TreeBuilder to have.
FAST = 'fast'
PERMISSIVE = 'permissive'
STRICT = 'strict'
XML = 'xml'
HTML = 'html'
HTML_5 = 'html5'
class TreeBuilderRegistry(object):
"""A way of looking up TreeBuilder subclasses by their name or by desired
features.
"""
def __init__(self):
self.builders_for_feature = defaultdict(list)
self.builders = []
def register(self, treebuilder_class):
"""Register a treebuilder based on its advertised features.
        :param treebuilder_class: A subclass of TreeBuilder. Its .features
           attribute should list its features.
        """
for feature in treebuilder_class.features:
self.builders_for_feature[feature].insert(0, treebuilder_class)
self.builders.insert(0, treebuilder_class)
def lookup(self, *features):
"""Look up a TreeBuilder subclass with the desired features.
:param features: A list of features to look for. If none are
provided, the most recently registered TreeBuilder subclass
will be used.
:return: A TreeBuilder subclass, or None if there's no
registered subclass with all the requested features.
"""
if len(self.builders) == 0:
# There are no builders at all.
return None
if len(features) == 0:
# They didn't ask for any features. Give them the most
# recently registered builder.
return self.builders[0]
# Go down the list of features in order, and eliminate any builders
# that don't match every feature.
features = list(features)
features.reverse()
candidates = None
candidate_set = None
while len(features) > 0:
feature = features.pop()
we_have_the_feature = self.builders_for_feature.get(feature, [])
if len(we_have_the_feature) > 0:
if candidates is None:
candidates = we_have_the_feature
candidate_set = set(candidates)
else:
# Eliminate any candidates that don't have this feature.
candidate_set = candidate_set.intersection(
set(we_have_the_feature))
# The only valid candidates are the ones in candidate_set.
# Go through the original list of candidates and pick the first one
# that's in candidate_set.
if candidate_set is None:
return None
for candidate in candidates:
if candidate in candidate_set:
return candidate
return None
# The BeautifulSoup class will take feature lists from developers and use them
# to look up builders in this registry.
builder_registry = TreeBuilderRegistry()
class TreeBuilder(object):
"""Turn a textual document into a Beautiful Soup object tree."""
NAME = "[Unknown tree builder]"
ALTERNATE_NAMES = []
features = []
is_xml = False
picklable = False
empty_element_tags = None # A tag will be considered an empty-element
# tag when and only when it has no contents.
# A value for these tag/attribute combinations is a space- or
# comma-separated list of CDATA, rather than a single CDATA.
DEFAULT_CDATA_LIST_ATTRIBUTES = {}
# Whitespace should be preserved inside these tags.
DEFAULT_PRESERVE_WHITESPACE_TAGS = set()
# The textual contents of tags with these names should be
# instantiated with some class other than NavigableString.
DEFAULT_STRING_CONTAINERS = {}
USE_DEFAULT = object()
# Most parsers don't keep track of line numbers.
TRACKS_LINE_NUMBERS = False
def __init__(self, multi_valued_attributes=USE_DEFAULT,
preserve_whitespace_tags=USE_DEFAULT,
store_line_numbers=USE_DEFAULT,
string_containers=USE_DEFAULT,
):
"""Constructor.
:param multi_valued_attributes: If this is set to None, the
TreeBuilder will not turn any values for attributes like
'class' into lists. Setting this to a dictionary will
customize this behavior; look at DEFAULT_CDATA_LIST_ATTRIBUTES
for an example.
Internally, these are called "CDATA list attributes", but that
probably doesn't make sense to an end-user, so the argument name
is `multi_valued_attributes`.
:param preserve_whitespace_tags: A list of tags to treat
the way <pre> tags are treated in HTML. Tags in this list
are immune from pretty-printing; their contents will always be
output as-is.
:param string_containers: A dictionary mapping tag names to
the classes that should be instantiated to contain the textual
contents of those tags. The default is to use NavigableString
for every tag, no matter what the name. You can override the
default by changing DEFAULT_STRING_CONTAINERS.
:param store_line_numbers: If the parser keeps track of the
line numbers and positions of the original markup, that
information will, by default, be stored in each corresponding
`Tag` object. You can turn this off by passing
store_line_numbers=False. If the parser you're using doesn't
keep track of this information, then setting store_line_numbers=True
will do nothing.
"""
self.soup = None
if multi_valued_attributes is self.USE_DEFAULT:
multi_valued_attributes = self.DEFAULT_CDATA_LIST_ATTRIBUTES
self.cdata_list_attributes = multi_valued_attributes
if preserve_whitespace_tags is self.USE_DEFAULT:
preserve_whitespace_tags = self.DEFAULT_PRESERVE_WHITESPACE_TAGS
self.preserve_whitespace_tags = preserve_whitespace_tags
        if store_line_numbers is self.USE_DEFAULT:
store_line_numbers = self.TRACKS_LINE_NUMBERS
self.store_line_numbers = store_line_numbers
        if string_containers is self.USE_DEFAULT:
string_containers = self.DEFAULT_STRING_CONTAINERS
self.string_containers = string_containers
def initialize_soup(self, soup):
"""The BeautifulSoup object has been initialized and is now
being associated with the TreeBuilder.
:param soup: A BeautifulSoup object.
"""
self.soup = soup
def reset(self):
"""Do any work necessary to reset the underlying parser
for a new document.
By default, this does nothing.
"""
pass
def can_be_empty_element(self, tag_name):
"""Might a tag with this name be an empty-element tag?
The final markup may or may not actually present this tag as
self-closing.
For instance: an HTMLBuilder does not consider a <p> tag to be
an empty-element tag (it's not in
HTMLBuilder.empty_element_tags). This means an empty <p> tag
will be presented as "<p></p>", not "<p/>" or "<p>".
The default implementation has no opinion about which tags are
empty-element tags, so a tag will be presented as an
empty-element tag if and only if it has no children.
"<foo></foo>" will become "<foo/>", and "<foo>bar</foo>" will
be left alone.
:param tag_name: The name of a markup tag.
"""
if self.empty_element_tags is None:
return True
return tag_name in self.empty_element_tags
def feed(self, markup):
"""Run some incoming markup through some parsing process,
populating the `BeautifulSoup` object in self.soup.
This method is not implemented in TreeBuilder; it must be
implemented in subclasses.
:return: None.
"""
raise NotImplementedError()
def prepare_markup(self, markup, user_specified_encoding=None,
document_declared_encoding=None, exclude_encodings=None):
"""Run any preliminary steps necessary to make incoming markup
acceptable to the parser.
:param markup: Some markup -- probably a bytestring.
:param user_specified_encoding: The user asked to try this encoding.
:param document_declared_encoding: The markup itself claims to be
in this encoding. NOTE: This argument is not used by the
calling code and can probably be removed.
:param exclude_encodings: The user asked _not_ to try any of
these encodings.
:yield: A series of 4-tuples:
(markup, encoding, declared encoding,
has undergone character replacement)
Each 4-tuple represents a strategy for converting the
document to Unicode and parsing it. Each strategy will be tried
in turn.
By default, the only strategy is to parse the markup
as-is. See `LXMLTreeBuilderForXML` and
`HTMLParserTreeBuilder` for implementations that take into
account the quirks of particular parsers.
"""
yield markup, None, None, False
def test_fragment_to_document(self, fragment):
"""Wrap an HTML fragment to make it look like a document.
Different parsers do this differently. For instance, lxml
introduces an empty <head> tag, and html5lib
doesn't. Abstracting this away lets us write simple tests
which run HTML fragments through the parser and compare the
results against other HTML fragments.
This method should not be used outside of tests.
:param fragment: A string -- fragment of HTML.
:return: A string -- a full HTML document.
"""
return fragment
def set_up_substitutions(self, tag):
"""Set up any substitutions that will need to be performed on
a `Tag` when it's output as a string.
By default, this does nothing. See `HTMLTreeBuilder` for a
case where this is used.
:param tag: A `Tag`
:return: Whether or not a substitution was performed.
"""
return False
def _replace_cdata_list_attribute_values(self, tag_name, attrs):
"""When an attribute value is associated with a tag that can
have multiple values for that attribute, convert the string
value to a list of strings.
Basically, replaces class="foo bar" with class=["foo", "bar"]
NOTE: This method modifies its input in place.
:param tag_name: The name of a tag.
:param attrs: A dictionary containing the tag's attributes.
Any appropriate attribute values will be modified in place.
"""
if not attrs:
return attrs
if self.cdata_list_attributes:
universal = self.cdata_list_attributes.get('*', [])
tag_specific = self.cdata_list_attributes.get(
tag_name.lower(), None)
for attr in list(attrs.keys()):
if attr in universal or (tag_specific and attr in tag_specific):
# We have a "class"-type attribute whose string
# value is a whitespace-separated list of
# values. Split it into a list.
value = attrs[attr]
if isinstance(value, str):
values = nonwhitespace_re.findall(value)
else:
# html5lib sometimes calls setAttributes twice
# for the same tag when rearranging the parse
# tree. On the second call the attribute value
# here is already a list. If this happens,
# leave the value alone rather than trying to
# split it again.
values = value
attrs[attr] = values
return attrs
class SAXTreeBuilder(TreeBuilder):
"""A Beautiful Soup treebuilder that listens for SAX events.
This is not currently used for anything, but it demonstrates
how a simple TreeBuilder would work.
"""
def feed(self, markup):
raise NotImplementedError()
def close(self):
pass
def startElement(self, name, attrs):
attrs = dict((key[1], value) for key, value in list(attrs.items()))
#print("Start %s, %r" % (name, attrs))
self.soup.handle_starttag(name, attrs)
def endElement(self, name):
#print("End %s" % name)
self.soup.handle_endtag(name)
def startElementNS(self, nsTuple, nodeName, attrs):
# Throw away (ns, nodeName) for now.
self.startElement(nodeName, attrs)
def endElementNS(self, nsTuple, nodeName):
# Throw away (ns, nodeName) for now.
self.endElement(nodeName)
#handler.endElementNS((ns, node.nodeName), node.nodeName)
def startPrefixMapping(self, prefix, nodeValue):
# Ignore the prefix for now.
pass
def endPrefixMapping(self, prefix):
# Ignore the prefix for now.
# handler.endPrefixMapping(prefix)
pass
def characters(self, content):
self.soup.handle_data(content)
def startDocument(self):
pass
def endDocument(self):
pass
class HTMLTreeBuilder(TreeBuilder):
"""This TreeBuilder knows facts about HTML.
Such as which tags are empty-element tags.
"""
empty_element_tags = set([
# These are from HTML5.
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr',
# These are from earlier versions of HTML and are removed in HTML5.
'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer'
])
# The HTML standard defines these as block-level elements. Beautiful
# Soup does not treat these elements differently from other elements,
# but it may do so eventually, and this information is available if
# you need to use it.
block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"])
# The HTML standard defines an unusual content model for these tags.
# We represent this by using a string class other than NavigableString
# inside these tags.
#
# I made this list by going through the HTML spec
# (https://html.spec.whatwg.org/#metadata-content) and looking for
# "metadata content" elements that can contain strings.
#
# TODO: Arguably <noscript> could go here but it seems
# qualitatively different from the other tags.
DEFAULT_STRING_CONTAINERS = {
'style': Stylesheet,
'script': Script,
'template': TemplateString,
}
# The HTML standard defines these attributes as containing a
# space-separated list of values, not a single value. That is,
# class="foo bar" means that the 'class' attribute has two values,
# 'foo' and 'bar', not the single value 'foo bar'. When we
# encounter one of these attributes, we will parse its value into
# a list of values if possible. Upon output, the list will be
# converted back into a string.
DEFAULT_CDATA_LIST_ATTRIBUTES = {
"*" : ['class', 'accesskey', 'dropzone'],
"a" : ['rel', 'rev'],
"link" : ['rel', 'rev'],
"td" : ["headers"],
"th" : ["headers"],
"td" : ["headers"],
"form" : ["accept-charset"],
"object" : ["archive"],
# These are HTML5 specific, as are *.accesskey and *.dropzone above.
"area" : ["rel"],
"icon" : ["sizes"],
"iframe" : ["sandbox"],
"output" : ["for"],
}
DEFAULT_PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea'])
def set_up_substitutions(self, tag):
"""Replace the declared encoding in a <meta> tag with a placeholder,
to be substituted when the tag is output to a string.
An HTML document may come in to Beautiful Soup as one
encoding, but exit in a different encoding, and the <meta> tag
needs to be changed to reflect this.
:param tag: A `Tag`
:return: Whether or not a substitution was performed.
"""
# We are only interested in <meta> tags
if tag.name != 'meta':
return False
http_equiv = tag.get('http-equiv')
content = tag.get('content')
charset = tag.get('charset')
# We are interested in <meta> tags that say what encoding the
# document was originally in. This means HTML 5-style <meta>
# tags that provide the "charset" attribute. It also means
# HTML 4-style <meta> tags that provide the "content"
# attribute and have "http-equiv" set to "content-type".
#
# In both cases we will replace the value of the appropriate
# attribute with a standin object that can take on any
# encoding.
meta_encoding = None
if charset is not None:
# HTML 5 style:
# <meta charset="utf8">
meta_encoding = charset
tag['charset'] = CharsetMetaAttributeValue(charset)
elif (content is not None and http_equiv is not None
and http_equiv.lower() == 'content-type'):
# HTML 4 style:
# <meta http-equiv="content-type" content="text/html; charset=utf8">
tag['content'] = ContentMetaAttributeValue(content)
return (meta_encoding is not None)
def register_treebuilders_from(module):
"""Copy TreeBuilders from the given module into this module."""
this_module = sys.modules[__name__]
for name in module.__all__:
obj = getattr(module, name)
if issubclass(obj, TreeBuilder):
setattr(this_module, name, obj)
this_module.__all__.append(name)
# Register the builder while we're at it.
this_module.builder_registry.register(obj)
class ParserRejectedMarkup(Exception):
"""An Exception to be raised when the underlying parser simply
refuses to parse the given markup.
"""
def __init__(self, message_or_exception):
"""Explain why the parser rejected the given markup, either
with a textual explanation or another exception.
"""
if isinstance(message_or_exception, Exception):
e = message_or_exception
message_or_exception = "%s: %s" % (e.__class__.__name__, str(e))
super(ParserRejectedMarkup, self).__init__(message_or_exception)
# Builders are registered in reverse order of priority, so that custom
# builder registrations will take precedence. In general, we want lxml
# to take precedence over html5lib, because it's faster. And we only
# want to use HTMLParser as a last resort.
from . import _htmlparser
register_treebuilders_from(_htmlparser)
try:
from . import _html5lib
register_treebuilders_from(_html5lib)
except ImportError:
# They don't have html5lib installed.
pass
try:
from . import _lxml
register_treebuilders_from(_lxml)
except ImportError:
# They don't have lxml installed.
pass
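# Usage sketch (illustrative): once the imports above have populated the
# registry, a builder class can be looked up by feature, e.g.
#
#   builder_class = builder_registry.lookup('html', 'fast')
#
# which returns the most recently registered builder advertising both
# features, or None when no registered builder matches.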
|
/**
* \file
*
* \brief Component description for TCC
*
* Copyright (c) 2014 Atmel Corporation. All rights reserved.
*
* \asf_license_start
*
* \page License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The name of Atmel may not be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* 4. This software may only be redistributed and used in connection with an
* Atmel microcontroller product.
*
* THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
* EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* \asf_license_stop
*
*/
#ifndef _SAMD21_TCC_COMPONENT_
#define _SAMD21_TCC_COMPONENT_
/* ========================================================================== */
/** SOFTWARE API DEFINITION FOR TCC */
/* ========================================================================== */
/** \addtogroup SAMD21_TCC Timer Counter Control */
/*@{*/
#define TCC_U2213
#define REV_TCC 0x101
/* -------- TCC_CTRLA : (TCC Offset: 0x00) (R/W 32) Control A -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t SWRST:1; /*!< bit: 0 Software Reset */
uint32_t ENABLE:1; /*!< bit: 1 Enable */
uint32_t :3; /*!< bit: 2.. 4 Reserved */
uint32_t RESOLUTION:2; /*!< bit: 5.. 6 Enhanced Resolution */
uint32_t :1; /*!< bit: 7 Reserved */
uint32_t PRESCALER:3; /*!< bit: 8..10 Prescaler */
uint32_t RUNSTDBY:1; /*!< bit: 11 Run in Standby */
uint32_t PRESCSYNC:2; /*!< bit: 12..13 Prescaler and Counter Synchronization Selection */
uint32_t ALOCK:1; /*!< bit: 14 Auto Lock */
uint32_t :9; /*!< bit: 15..23 Reserved */
uint32_t CPTEN0:1; /*!< bit: 24 Capture Channel 0 Enable */
uint32_t CPTEN1:1; /*!< bit: 25 Capture Channel 1 Enable */
uint32_t CPTEN2:1; /*!< bit: 26 Capture Channel 2 Enable */
uint32_t CPTEN3:1; /*!< bit: 27 Capture Channel 3 Enable */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :24; /*!< bit: 0..23 Reserved */
uint32_t CPTEN:4; /*!< bit: 24..27 Capture Channel x Enable */
uint32_t :4; /*!< bit: 28..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_CTRLA_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_CTRLA_OFFSET 0x00 /**< \brief (TCC_CTRLA offset) Control A */
#define TCC_CTRLA_RESETVALUE 0x00000000 /**< \brief (TCC_CTRLA reset_value) Control A */
#define TCC_CTRLA_SWRST_Pos 0 /**< \brief (TCC_CTRLA) Software Reset */
#define TCC_CTRLA_SWRST (0x1u << TCC_CTRLA_SWRST_Pos)
#define TCC_CTRLA_ENABLE_Pos 1 /**< \brief (TCC_CTRLA) Enable */
#define TCC_CTRLA_ENABLE (0x1u << TCC_CTRLA_ENABLE_Pos)
#define TCC_CTRLA_RESOLUTION_Pos 5 /**< \brief (TCC_CTRLA) Enhanced Resolution */
#define TCC_CTRLA_RESOLUTION_Msk (0x3u << TCC_CTRLA_RESOLUTION_Pos)
#define TCC_CTRLA_RESOLUTION(value) ((TCC_CTRLA_RESOLUTION_Msk & ((value) << TCC_CTRLA_RESOLUTION_Pos)))
#define TCC_CTRLA_RESOLUTION_NONE_Val 0x0u /**< \brief (TCC_CTRLA) Dithering is disabled */
#define TCC_CTRLA_RESOLUTION_DITH4_Val 0x1u /**< \brief (TCC_CTRLA) Dithering is done every 16 PWM frames. PER[3:0] and CCx[3:0] contain dithering pattern selection. */
#define TCC_CTRLA_RESOLUTION_DITH5_Val 0x2u /**< \brief (TCC_CTRLA) Dithering is done every 32 PWM frames. PER[4:0] and CCx[4:0] contain dithering pattern selection. */
#define TCC_CTRLA_RESOLUTION_DITH6_Val 0x3u /**< \brief (TCC_CTRLA) Dithering is done every 64 PWM frames. PER[5:0] and CCx[5:0] contain dithering pattern selection. */
#define TCC_CTRLA_RESOLUTION_NONE (TCC_CTRLA_RESOLUTION_NONE_Val << TCC_CTRLA_RESOLUTION_Pos)
#define TCC_CTRLA_RESOLUTION_DITH4 (TCC_CTRLA_RESOLUTION_DITH4_Val << TCC_CTRLA_RESOLUTION_Pos)
#define TCC_CTRLA_RESOLUTION_DITH5 (TCC_CTRLA_RESOLUTION_DITH5_Val << TCC_CTRLA_RESOLUTION_Pos)
#define TCC_CTRLA_RESOLUTION_DITH6 (TCC_CTRLA_RESOLUTION_DITH6_Val << TCC_CTRLA_RESOLUTION_Pos)
#define TCC_CTRLA_PRESCALER_Pos 8 /**< \brief (TCC_CTRLA) Prescaler */
#define TCC_CTRLA_PRESCALER_Msk (0x7u << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER(value) ((TCC_CTRLA_PRESCALER_Msk & ((value) << TCC_CTRLA_PRESCALER_Pos)))
#define TCC_CTRLA_PRESCALER_DIV1_Val 0x0u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC */
#define TCC_CTRLA_PRESCALER_DIV2_Val 0x1u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/2 */
#define TCC_CTRLA_PRESCALER_DIV4_Val 0x2u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/4 */
#define TCC_CTRLA_PRESCALER_DIV8_Val 0x3u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/8 */
#define TCC_CTRLA_PRESCALER_DIV16_Val 0x4u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/16 */
#define TCC_CTRLA_PRESCALER_DIV64_Val 0x5u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/64 */
#define TCC_CTRLA_PRESCALER_DIV256_Val 0x6u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/256 */
#define TCC_CTRLA_PRESCALER_DIV1024_Val 0x7u /**< \brief (TCC_CTRLA) Prescaler: GCLK_TCC/1024 */
#define TCC_CTRLA_PRESCALER_DIV1 (TCC_CTRLA_PRESCALER_DIV1_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV2 (TCC_CTRLA_PRESCALER_DIV2_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV4 (TCC_CTRLA_PRESCALER_DIV4_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV8 (TCC_CTRLA_PRESCALER_DIV8_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV16 (TCC_CTRLA_PRESCALER_DIV16_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV64 (TCC_CTRLA_PRESCALER_DIV64_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV256 (TCC_CTRLA_PRESCALER_DIV256_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_PRESCALER_DIV1024 (TCC_CTRLA_PRESCALER_DIV1024_Val << TCC_CTRLA_PRESCALER_Pos)
#define TCC_CTRLA_RUNSTDBY_Pos 11 /**< \brief (TCC_CTRLA) Run in Standby */
#define TCC_CTRLA_RUNSTDBY (0x1u << TCC_CTRLA_RUNSTDBY_Pos)
#define TCC_CTRLA_PRESCSYNC_Pos 12 /**< \brief (TCC_CTRLA) Prescaler and Counter Synchronization Selection */
#define TCC_CTRLA_PRESCSYNC_Msk (0x3u << TCC_CTRLA_PRESCSYNC_Pos)
#define TCC_CTRLA_PRESCSYNC(value) ((TCC_CTRLA_PRESCSYNC_Msk & ((value) << TCC_CTRLA_PRESCSYNC_Pos)))
#define TCC_CTRLA_PRESCSYNC_GCLK_Val 0x0u /**< \brief (TCC_CTRLA) Reload or reset Counter on next GCLK */
#define TCC_CTRLA_PRESCSYNC_PRESC_Val 0x1u /**< \brief (TCC_CTRLA) Reload or reset Counter on next prescaler clock */
#define TCC_CTRLA_PRESCSYNC_RESYNC_Val 0x2u /**< \brief (TCC_CTRLA) Reload or reset Counter on next GCLK and reset the prescaler counter */
#define TCC_CTRLA_PRESCSYNC_GCLK (TCC_CTRLA_PRESCSYNC_GCLK_Val << TCC_CTRLA_PRESCSYNC_Pos)
#define TCC_CTRLA_PRESCSYNC_PRESC (TCC_CTRLA_PRESCSYNC_PRESC_Val << TCC_CTRLA_PRESCSYNC_Pos)
#define TCC_CTRLA_PRESCSYNC_RESYNC (TCC_CTRLA_PRESCSYNC_RESYNC_Val << TCC_CTRLA_PRESCSYNC_Pos)
#define TCC_CTRLA_ALOCK_Pos 14 /**< \brief (TCC_CTRLA) Auto Lock */
#define TCC_CTRLA_ALOCK (0x1u << TCC_CTRLA_ALOCK_Pos)
#define TCC_CTRLA_CPTEN0_Pos 24 /**< \brief (TCC_CTRLA) Capture Channel 0 Enable */
#define TCC_CTRLA_CPTEN0 (1 << TCC_CTRLA_CPTEN0_Pos)
#define TCC_CTRLA_CPTEN1_Pos 25 /**< \brief (TCC_CTRLA) Capture Channel 1 Enable */
#define TCC_CTRLA_CPTEN1 (1 << TCC_CTRLA_CPTEN1_Pos)
#define TCC_CTRLA_CPTEN2_Pos 26 /**< \brief (TCC_CTRLA) Capture Channel 2 Enable */
#define TCC_CTRLA_CPTEN2 (1 << TCC_CTRLA_CPTEN2_Pos)
#define TCC_CTRLA_CPTEN3_Pos 27 /**< \brief (TCC_CTRLA) Capture Channel 3 Enable */
#define TCC_CTRLA_CPTEN3 (1 << TCC_CTRLA_CPTEN3_Pos)
#define TCC_CTRLA_CPTEN_Pos 24 /**< \brief (TCC_CTRLA) Capture Channel x Enable */
#define TCC_CTRLA_CPTEN_Msk (0xFu << TCC_CTRLA_CPTEN_Pos)
#define TCC_CTRLA_CPTEN(value) ((TCC_CTRLA_CPTEN_Msk & ((value) << TCC_CTRLA_CPTEN_Pos)))
#define TCC_CTRLA_MASK 0x0F007F63u /**< \brief (TCC_CTRLA) MASK Register */
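/* Usage sketch (illustrative, not part of the vendor header): composing a
 * CTRLA value from the field macros above. `TCC0` is the instance pointer
 * assumed to be defined elsewhere in the device headers.
 *
 *   TCC0->CTRLA.reg = TCC_CTRLA_PRESCALER_DIV8 | TCC_CTRLA_PRESCSYNC_PRESC;
 *   TCC0->CTRLA.bit.ENABLE = 1;
 */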
/* -------- TCC_CTRLBCLR : (TCC Offset: 0x04) (R/W 8) Control B Clear -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint8_t DIR:1; /*!< bit: 0 Counter Direction */
uint8_t LUPD:1; /*!< bit: 1 Lock Update */
uint8_t ONESHOT:1; /*!< bit: 2 One-Shot */
uint8_t IDXCMD:2; /*!< bit: 3.. 4 Ramp Index Command */
uint8_t CMD:3; /*!< bit: 5.. 7 TCC Command */
} bit; /*!< Structure used for bit access */
uint8_t reg; /*!< Type used for register access */
} TCC_CTRLBCLR_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_CTRLBCLR_OFFSET 0x04 /**< \brief (TCC_CTRLBCLR offset) Control B Clear */
#define TCC_CTRLBCLR_RESETVALUE 0x00 /**< \brief (TCC_CTRLBCLR reset_value) Control B Clear */
#define TCC_CTRLBCLR_DIR_Pos 0 /**< \brief (TCC_CTRLBCLR) Counter Direction */
#define TCC_CTRLBCLR_DIR (0x1u << TCC_CTRLBCLR_DIR_Pos)
#define TCC_CTRLBCLR_LUPD_Pos 1 /**< \brief (TCC_CTRLBCLR) Lock Update */
#define TCC_CTRLBCLR_LUPD (0x1u << TCC_CTRLBCLR_LUPD_Pos)
#define TCC_CTRLBCLR_ONESHOT_Pos 2 /**< \brief (TCC_CTRLBCLR) One-Shot */
#define TCC_CTRLBCLR_ONESHOT (0x1u << TCC_CTRLBCLR_ONESHOT_Pos)
#define TCC_CTRLBCLR_IDXCMD_Pos 3 /**< \brief (TCC_CTRLBCLR) Ramp Index Command */
#define TCC_CTRLBCLR_IDXCMD_Msk (0x3u << TCC_CTRLBCLR_IDXCMD_Pos)
#define TCC_CTRLBCLR_IDXCMD(value) ((TCC_CTRLBCLR_IDXCMD_Msk & ((value) << TCC_CTRLBCLR_IDXCMD_Pos)))
#define TCC_CTRLBCLR_IDXCMD_DISABLE_Val 0x0u /**< \brief (TCC_CTRLBCLR) Command disabled: IDX toggles between cycles A and B */
#define TCC_CTRLBCLR_IDXCMD_SET_Val 0x1u /**< \brief (TCC_CTRLBCLR) Set IDX: cycle B will be forced in the next cycle */
#define TCC_CTRLBCLR_IDXCMD_CLEAR_Val 0x2u /**< \brief (TCC_CTRLBCLR) Clear IDX: cycle A will be forced in next cycle */
#define TCC_CTRLBCLR_IDXCMD_HOLD_Val 0x3u /**< \brief (TCC_CTRLBCLR) Hold IDX: the next cycle will be the same as the current cycle. */
#define TCC_CTRLBCLR_IDXCMD_DISABLE (TCC_CTRLBCLR_IDXCMD_DISABLE_Val << TCC_CTRLBCLR_IDXCMD_Pos)
#define TCC_CTRLBCLR_IDXCMD_SET (TCC_CTRLBCLR_IDXCMD_SET_Val << TCC_CTRLBCLR_IDXCMD_Pos)
#define TCC_CTRLBCLR_IDXCMD_CLEAR (TCC_CTRLBCLR_IDXCMD_CLEAR_Val << TCC_CTRLBCLR_IDXCMD_Pos)
#define TCC_CTRLBCLR_IDXCMD_HOLD (TCC_CTRLBCLR_IDXCMD_HOLD_Val << TCC_CTRLBCLR_IDXCMD_Pos)
#define TCC_CTRLBCLR_CMD_Pos 5 /**< \brief (TCC_CTRLBCLR) TCC Command */
#define TCC_CTRLBCLR_CMD_Msk (0x7u << TCC_CTRLBCLR_CMD_Pos)
#define TCC_CTRLBCLR_CMD(value) ((TCC_CTRLBCLR_CMD_Msk & ((value) << TCC_CTRLBCLR_CMD_Pos)))
#define TCC_CTRLBCLR_CMD_NONE_Val 0x0u /**< \brief (TCC_CTRLBCLR) No action */
#define TCC_CTRLBCLR_CMD_RETRIGGER_Val 0x1u /**< \brief (TCC_CTRLBCLR) Clear start, restart or retrigger */
#define TCC_CTRLBCLR_CMD_STOP_Val 0x2u /**< \brief (TCC_CTRLBCLR) Force stop */
#define TCC_CTRLBCLR_CMD_UPDATE_Val 0x3u /**< \brief (TCC_CTRLBCLR) Force update of double buffered registers */
#define TCC_CTRLBCLR_CMD_READSYNC_Val 0x4u /**< \brief (TCC_CTRLBCLR) Force COUNT read synchronization */
#define TCC_CTRLBCLR_CMD_NONE (TCC_CTRLBCLR_CMD_NONE_Val << TCC_CTRLBCLR_CMD_Pos)
#define TCC_CTRLBCLR_CMD_RETRIGGER (TCC_CTRLBCLR_CMD_RETRIGGER_Val << TCC_CTRLBCLR_CMD_Pos)
#define TCC_CTRLBCLR_CMD_STOP (TCC_CTRLBCLR_CMD_STOP_Val << TCC_CTRLBCLR_CMD_Pos)
#define TCC_CTRLBCLR_CMD_UPDATE (TCC_CTRLBCLR_CMD_UPDATE_Val << TCC_CTRLBCLR_CMD_Pos)
#define TCC_CTRLBCLR_CMD_READSYNC (TCC_CTRLBCLR_CMD_READSYNC_Val << TCC_CTRLBCLR_CMD_Pos)
#define TCC_CTRLBCLR_MASK 0xFFu /**< \brief (TCC_CTRLBCLR) MASK Register */
/* -------- TCC_CTRLBSET : (TCC Offset: 0x05) (R/W 8) Control B Set -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint8_t DIR:1; /*!< bit: 0 Counter Direction */
uint8_t LUPD:1; /*!< bit: 1 Lock update */
uint8_t ONESHOT:1; /*!< bit: 2 One-Shot */
uint8_t IDXCMD:2; /*!< bit: 3.. 4 Ramp Index Command */
uint8_t CMD:3; /*!< bit: 5.. 7 TCC Command */
} bit; /*!< Structure used for bit access */
uint8_t reg; /*!< Type used for register access */
} TCC_CTRLBSET_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_CTRLBSET_OFFSET 0x05 /**< \brief (TCC_CTRLBSET offset) Control B Set */
#define TCC_CTRLBSET_RESETVALUE 0x00 /**< \brief (TCC_CTRLBSET reset_value) Control B Set */
#define TCC_CTRLBSET_DIR_Pos 0 /**< \brief (TCC_CTRLBSET) Counter Direction */
#define TCC_CTRLBSET_DIR (0x1u << TCC_CTRLBSET_DIR_Pos)
#define TCC_CTRLBSET_LUPD_Pos 1 /**< \brief (TCC_CTRLBSET) Lock update */
#define TCC_CTRLBSET_LUPD (0x1u << TCC_CTRLBSET_LUPD_Pos)
#define TCC_CTRLBSET_ONESHOT_Pos 2 /**< \brief (TCC_CTRLBSET) One-Shot */
#define TCC_CTRLBSET_ONESHOT (0x1u << TCC_CTRLBSET_ONESHOT_Pos)
#define TCC_CTRLBSET_IDXCMD_Pos 3 /**< \brief (TCC_CTRLBSET) Ramp Index Command */
#define TCC_CTRLBSET_IDXCMD_Msk (0x3u << TCC_CTRLBSET_IDXCMD_Pos)
#define TCC_CTRLBSET_IDXCMD(value) ((TCC_CTRLBSET_IDXCMD_Msk & ((value) << TCC_CTRLBSET_IDXCMD_Pos)))
#define TCC_CTRLBSET_IDXCMD_DISABLE_Val 0x0u /**< \brief (TCC_CTRLBSET) Command disabled: IDX toggles between cycles A and B */
#define TCC_CTRLBSET_IDXCMD_SET_Val 0x1u /**< \brief (TCC_CTRLBSET) Set IDX: cycle B will be forced in the next cycle */
#define TCC_CTRLBSET_IDXCMD_CLEAR_Val 0x2u /**< \brief (TCC_CTRLBSET) Clear IDX: cycle A will be forced in next cycle */
#define TCC_CTRLBSET_IDXCMD_HOLD_Val 0x3u /**< \brief (TCC_CTRLBSET) Hold IDX: the next cycle will be the same as the current cycle. */
#define TCC_CTRLBSET_IDXCMD_DISABLE (TCC_CTRLBSET_IDXCMD_DISABLE_Val << TCC_CTRLBSET_IDXCMD_Pos)
#define TCC_CTRLBSET_IDXCMD_SET (TCC_CTRLBSET_IDXCMD_SET_Val << TCC_CTRLBSET_IDXCMD_Pos)
#define TCC_CTRLBSET_IDXCMD_CLEAR (TCC_CTRLBSET_IDXCMD_CLEAR_Val << TCC_CTRLBSET_IDXCMD_Pos)
#define TCC_CTRLBSET_IDXCMD_HOLD (TCC_CTRLBSET_IDXCMD_HOLD_Val << TCC_CTRLBSET_IDXCMD_Pos)
#define TCC_CTRLBSET_CMD_Pos 5 /**< \brief (TCC_CTRLBSET) TCC Command */
#define TCC_CTRLBSET_CMD_Msk (0x7u << TCC_CTRLBSET_CMD_Pos)
#define TCC_CTRLBSET_CMD(value) ((TCC_CTRLBSET_CMD_Msk & ((value) << TCC_CTRLBSET_CMD_Pos)))
#define TCC_CTRLBSET_CMD_NONE_Val 0x0u /**< \brief (TCC_CTRLBSET) No action */
#define TCC_CTRLBSET_CMD_RETRIGGER_Val 0x1u /**< \brief (TCC_CTRLBSET) Force start, restart or retrigger */
#define TCC_CTRLBSET_CMD_STOP_Val 0x2u /**< \brief (TCC_CTRLBSET) Force stop */
#define TCC_CTRLBSET_CMD_UPDATE_Val 0x3u /**< \brief (TCC_CTRLBSET) Force update of double buffered registers */
#define TCC_CTRLBSET_CMD_READSYNC_Val 0x4u /**< \brief (TCC_CTRLBSET) Force COUNT read synchronization */
#define TCC_CTRLBSET_CMD_NONE (TCC_CTRLBSET_CMD_NONE_Val << TCC_CTRLBSET_CMD_Pos)
#define TCC_CTRLBSET_CMD_RETRIGGER (TCC_CTRLBSET_CMD_RETRIGGER_Val << TCC_CTRLBSET_CMD_Pos)
#define TCC_CTRLBSET_CMD_STOP (TCC_CTRLBSET_CMD_STOP_Val << TCC_CTRLBSET_CMD_Pos)
#define TCC_CTRLBSET_CMD_UPDATE (TCC_CTRLBSET_CMD_UPDATE_Val << TCC_CTRLBSET_CMD_Pos)
#define TCC_CTRLBSET_CMD_READSYNC (TCC_CTRLBSET_CMD_READSYNC_Val << TCC_CTRLBSET_CMD_Pos)
#define TCC_CTRLBSET_MASK 0xFFu /**< \brief (TCC_CTRLBSET) MASK Register */
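/* Usage sketch (illustrative): the CTRLBSET/CTRLBCLR pair lets individual
 * Control B bits be set or cleared without a read-modify-write, e.g.
 *
 *   TCC0->CTRLBSET.reg = TCC_CTRLBSET_CMD_RETRIGGER;  // issue a retrigger
 *   TCC0->CTRLBCLR.reg = TCC_CTRLBCLR_ONESHOT;        // leave one-shot mode
 */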
/* -------- TCC_SYNCBUSY : (TCC Offset: 0x08) (R/ 32) Synchronization Busy -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t SWRST:1; /*!< bit: 0 Swrst Busy */
uint32_t ENABLE:1; /*!< bit: 1 Enable Busy */
uint32_t CTRLB:1; /*!< bit: 2 Ctrlb Busy */
uint32_t STATUS:1; /*!< bit: 3 Status Busy */
uint32_t COUNT:1; /*!< bit: 4 Count Busy */
uint32_t PATT:1; /*!< bit: 5 Pattern Busy */
uint32_t WAVE:1; /*!< bit: 6 Wave Busy */
uint32_t PER:1; /*!< bit: 7 Period busy */
uint32_t CC0:1; /*!< bit: 8 Compare Channel Buffer 0 Busy */
uint32_t CC1:1; /*!< bit: 9 Compare Channel Buffer 1 Busy */
uint32_t CC2:1; /*!< bit: 10 Compare Channel Buffer 2 Busy */
uint32_t CC3:1; /*!< bit: 11 Compare Channel Buffer 3 Busy */
uint32_t :4; /*!< bit: 12..15 Reserved */
uint32_t PATTB:1; /*!< bit: 16 Pattern Buffer Busy */
uint32_t WAVEB:1; /*!< bit: 17 Wave Buffer Busy */
uint32_t PERB:1; /*!< bit: 18 Period Buffer Busy */
uint32_t CCB0:1; /*!< bit: 19 Compare Channel Buffer 0 Busy */
uint32_t CCB1:1; /*!< bit: 20 Compare Channel Buffer 1 Busy */
uint32_t CCB2:1; /*!< bit: 21 Compare Channel Buffer 2 Busy */
uint32_t CCB3:1; /*!< bit: 22 Compare Channel Buffer 3 Busy */
uint32_t :9; /*!< bit: 23..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :8; /*!< bit: 0.. 7 Reserved */
uint32_t CC:4; /*!< bit: 8..11 Compare Channel Buffer x Busy */
uint32_t :7; /*!< bit: 12..18 Reserved */
uint32_t CCB:4; /*!< bit: 19..22 Compare Channel Buffer x Busy */
uint32_t :9; /*!< bit: 23..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_SYNCBUSY_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_SYNCBUSY_OFFSET 0x08 /**< \brief (TCC_SYNCBUSY offset) Synchronization Busy */
#define TCC_SYNCBUSY_RESETVALUE 0x00000000 /**< \brief (TCC_SYNCBUSY reset_value) Synchronization Busy */
#define TCC_SYNCBUSY_SWRST_Pos 0 /**< \brief (TCC_SYNCBUSY) Swrst Busy */
#define TCC_SYNCBUSY_SWRST (0x1u << TCC_SYNCBUSY_SWRST_Pos)
#define TCC_SYNCBUSY_ENABLE_Pos 1 /**< \brief (TCC_SYNCBUSY) Enable Busy */
#define TCC_SYNCBUSY_ENABLE (0x1u << TCC_SYNCBUSY_ENABLE_Pos)
#define TCC_SYNCBUSY_CTRLB_Pos 2 /**< \brief (TCC_SYNCBUSY) Ctrlb Busy */
#define TCC_SYNCBUSY_CTRLB (0x1u << TCC_SYNCBUSY_CTRLB_Pos)
#define TCC_SYNCBUSY_STATUS_Pos 3 /**< \brief (TCC_SYNCBUSY) Status Busy */
#define TCC_SYNCBUSY_STATUS (0x1u << TCC_SYNCBUSY_STATUS_Pos)
#define TCC_SYNCBUSY_COUNT_Pos 4 /**< \brief (TCC_SYNCBUSY) Count Busy */
#define TCC_SYNCBUSY_COUNT (0x1u << TCC_SYNCBUSY_COUNT_Pos)
#define TCC_SYNCBUSY_PATT_Pos 5 /**< \brief (TCC_SYNCBUSY) Pattern Busy */
#define TCC_SYNCBUSY_PATT (0x1u << TCC_SYNCBUSY_PATT_Pos)
#define TCC_SYNCBUSY_WAVE_Pos 6 /**< \brief (TCC_SYNCBUSY) Wave Busy */
#define TCC_SYNCBUSY_WAVE (0x1u << TCC_SYNCBUSY_WAVE_Pos)
#define TCC_SYNCBUSY_PER_Pos 7 /**< \brief (TCC_SYNCBUSY) Period busy */
#define TCC_SYNCBUSY_PER (0x1u << TCC_SYNCBUSY_PER_Pos)
#define TCC_SYNCBUSY_CC0_Pos 8 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 0 Busy */
#define TCC_SYNCBUSY_CC0 (1 << TCC_SYNCBUSY_CC0_Pos)
#define TCC_SYNCBUSY_CC1_Pos 9 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 1 Busy */
#define TCC_SYNCBUSY_CC1 (1 << TCC_SYNCBUSY_CC1_Pos)
#define TCC_SYNCBUSY_CC2_Pos 10 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 2 Busy */
#define TCC_SYNCBUSY_CC2 (1 << TCC_SYNCBUSY_CC2_Pos)
#define TCC_SYNCBUSY_CC3_Pos 11 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 3 Busy */
#define TCC_SYNCBUSY_CC3 (1 << TCC_SYNCBUSY_CC3_Pos)
#define TCC_SYNCBUSY_CC_Pos 8 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer x Busy */
#define TCC_SYNCBUSY_CC_Msk (0xFu << TCC_SYNCBUSY_CC_Pos)
#define TCC_SYNCBUSY_CC(value) ((TCC_SYNCBUSY_CC_Msk & ((value) << TCC_SYNCBUSY_CC_Pos)))
#define TCC_SYNCBUSY_PATTB_Pos 16 /**< \brief (TCC_SYNCBUSY) Pattern Buffer Busy */
#define TCC_SYNCBUSY_PATTB (0x1u << TCC_SYNCBUSY_PATTB_Pos)
#define TCC_SYNCBUSY_WAVEB_Pos 17 /**< \brief (TCC_SYNCBUSY) Wave Buffer Busy */
#define TCC_SYNCBUSY_WAVEB (0x1u << TCC_SYNCBUSY_WAVEB_Pos)
#define TCC_SYNCBUSY_PERB_Pos 18 /**< \brief (TCC_SYNCBUSY) Period Buffer Busy */
#define TCC_SYNCBUSY_PERB (0x1u << TCC_SYNCBUSY_PERB_Pos)
#define TCC_SYNCBUSY_CCB0_Pos 19 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 0 Busy */
#define TCC_SYNCBUSY_CCB0 (1 << TCC_SYNCBUSY_CCB0_Pos)
#define TCC_SYNCBUSY_CCB1_Pos 20 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 1 Busy */
#define TCC_SYNCBUSY_CCB1 (1 << TCC_SYNCBUSY_CCB1_Pos)
#define TCC_SYNCBUSY_CCB2_Pos 21 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 2 Busy */
#define TCC_SYNCBUSY_CCB2 (1 << TCC_SYNCBUSY_CCB2_Pos)
#define TCC_SYNCBUSY_CCB3_Pos 22 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer 3 Busy */
#define TCC_SYNCBUSY_CCB3 (1 << TCC_SYNCBUSY_CCB3_Pos)
#define TCC_SYNCBUSY_CCB_Pos 19 /**< \brief (TCC_SYNCBUSY) Compare Channel Buffer x Busy */
#define TCC_SYNCBUSY_CCB_Msk (0xFu << TCC_SYNCBUSY_CCB_Pos)
#define TCC_SYNCBUSY_CCB(value) ((TCC_SYNCBUSY_CCB_Msk & ((value) << TCC_SYNCBUSY_CCB_Pos)))
#define TCC_SYNCBUSY_MASK 0x007F0FFFu /**< \brief (TCC_SYNCBUSY) MASK Register */
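/* Usage sketch (illustrative): software is expected to poll SYNCBUSY before
 * touching a register that is still synchronizing across clock domains, e.g.
 *
 *   while (TCC0->SYNCBUSY.bit.ENABLE) { }  // wait for the ENABLE write to settle
 */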
/* -------- TCC_FCTRLA : (TCC Offset: 0x0C) (R/W 32) Recoverable FaultA Configuration -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t SRC:2; /*!< bit: 0.. 1 FaultA Source */
uint32_t :1; /*!< bit: 2 Reserved */
uint32_t KEEP:1; /*!< bit: 3 FaultA Keeper */
uint32_t QUAL:1; /*!< bit: 4 FaultA Qualification */
uint32_t BLANK:2; /*!< bit: 5.. 6 FaultA Blanking Mode */
uint32_t RESTART:1; /*!< bit: 7 FaultA Restart */
uint32_t HALT:2; /*!< bit: 8.. 9 FaultA Halt Mode */
uint32_t CHSEL:2; /*!< bit: 10..11 FaultA Capture Channel */
uint32_t CAPTURE:3; /*!< bit: 12..14 FaultA Capture Action */
uint32_t :1; /*!< bit: 15 Reserved */
uint32_t BLANKVAL:8; /*!< bit: 16..23 FaultA Blanking Time */
uint32_t FILTERVAL:4; /*!< bit: 24..27 FaultA Filter Value */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_FCTRLA_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_FCTRLA_OFFSET 0x0C /**< \brief (TCC_FCTRLA offset) Recoverable FaultA Configuration */
#define TCC_FCTRLA_RESETVALUE 0x00000000 /**< \brief (TCC_FCTRLA reset_value) Recoverable FaultA Configuration */
#define TCC_FCTRLA_SRC_Pos 0 /**< \brief (TCC_FCTRLA) FaultA Source */
#define TCC_FCTRLA_SRC_Msk (0x3u << TCC_FCTRLA_SRC_Pos)
#define TCC_FCTRLA_SRC(value) ((TCC_FCTRLA_SRC_Msk & ((value) << TCC_FCTRLA_SRC_Pos)))
#define TCC_FCTRLA_SRC_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_SRC_ENABLE_Val 0x1u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_SRC_INVERT_Val 0x2u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_SRC_ALTFAULT_Val 0x3u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_SRC_DISABLE (TCC_FCTRLA_SRC_DISABLE_Val << TCC_FCTRLA_SRC_Pos)
#define TCC_FCTRLA_SRC_ENABLE (TCC_FCTRLA_SRC_ENABLE_Val << TCC_FCTRLA_SRC_Pos)
#define TCC_FCTRLA_SRC_INVERT (TCC_FCTRLA_SRC_INVERT_Val << TCC_FCTRLA_SRC_Pos)
#define TCC_FCTRLA_SRC_ALTFAULT (TCC_FCTRLA_SRC_ALTFAULT_Val << TCC_FCTRLA_SRC_Pos)
#define TCC_FCTRLA_KEEP_Pos 3 /**< \brief (TCC_FCTRLA) FaultA Keeper */
#define TCC_FCTRLA_KEEP (0x1u << TCC_FCTRLA_KEEP_Pos)
#define TCC_FCTRLA_QUAL_Pos 4 /**< \brief (TCC_FCTRLA) FaultA Qualification */
#define TCC_FCTRLA_QUAL (0x1u << TCC_FCTRLA_QUAL_Pos)
#define TCC_FCTRLA_BLANK_Pos 5 /**< \brief (TCC_FCTRLA) FaultA Blanking Mode */
#define TCC_FCTRLA_BLANK_Msk (0x3u << TCC_FCTRLA_BLANK_Pos)
#define TCC_FCTRLA_BLANK(value) ((TCC_FCTRLA_BLANK_Msk & ((value) << TCC_FCTRLA_BLANK_Pos)))
#define TCC_FCTRLA_BLANK_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_BLANK_RISE_Val 0x1u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_BLANK_FALL_Val 0x2u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_BLANK_BOTH_Val 0x3u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_BLANK_DISABLE (TCC_FCTRLA_BLANK_DISABLE_Val << TCC_FCTRLA_BLANK_Pos)
#define TCC_FCTRLA_BLANK_RISE (TCC_FCTRLA_BLANK_RISE_Val << TCC_FCTRLA_BLANK_Pos)
#define TCC_FCTRLA_BLANK_FALL (TCC_FCTRLA_BLANK_FALL_Val << TCC_FCTRLA_BLANK_Pos)
#define TCC_FCTRLA_BLANK_BOTH (TCC_FCTRLA_BLANK_BOTH_Val << TCC_FCTRLA_BLANK_Pos)
#define TCC_FCTRLA_RESTART_Pos 7 /**< \brief (TCC_FCTRLA) FaultA Restart */
#define TCC_FCTRLA_RESTART (0x1u << TCC_FCTRLA_RESTART_Pos)
#define TCC_FCTRLA_HALT_Pos 8 /**< \brief (TCC_FCTRLA) FaultA Halt Mode */
#define TCC_FCTRLA_HALT_Msk (0x3u << TCC_FCTRLA_HALT_Pos)
#define TCC_FCTRLA_HALT(value) ((TCC_FCTRLA_HALT_Msk & ((value) << TCC_FCTRLA_HALT_Pos)))
#define TCC_FCTRLA_HALT_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_HALT_HW_Val 0x1u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_HALT_SW_Val 0x2u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_HALT_NR_Val 0x3u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_HALT_DISABLE (TCC_FCTRLA_HALT_DISABLE_Val << TCC_FCTRLA_HALT_Pos)
#define TCC_FCTRLA_HALT_HW (TCC_FCTRLA_HALT_HW_Val << TCC_FCTRLA_HALT_Pos)
#define TCC_FCTRLA_HALT_SW (TCC_FCTRLA_HALT_SW_Val << TCC_FCTRLA_HALT_Pos)
#define TCC_FCTRLA_HALT_NR (TCC_FCTRLA_HALT_NR_Val << TCC_FCTRLA_HALT_Pos)
#define TCC_FCTRLA_CHSEL_Pos 10 /**< \brief (TCC_FCTRLA) FaultA Capture Channel */
#define TCC_FCTRLA_CHSEL_Msk (0x3u << TCC_FCTRLA_CHSEL_Pos)
#define TCC_FCTRLA_CHSEL(value) ((TCC_FCTRLA_CHSEL_Msk & ((value) << TCC_FCTRLA_CHSEL_Pos)))
#define TCC_FCTRLA_CHSEL_CC0_Val 0x0u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CHSEL_CC1_Val 0x1u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CHSEL_CC2_Val 0x2u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CHSEL_CC3_Val 0x3u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CHSEL_CC0 (TCC_FCTRLA_CHSEL_CC0_Val << TCC_FCTRLA_CHSEL_Pos)
#define TCC_FCTRLA_CHSEL_CC1 (TCC_FCTRLA_CHSEL_CC1_Val << TCC_FCTRLA_CHSEL_Pos)
#define TCC_FCTRLA_CHSEL_CC2 (TCC_FCTRLA_CHSEL_CC2_Val << TCC_FCTRLA_CHSEL_Pos)
#define TCC_FCTRLA_CHSEL_CC3 (TCC_FCTRLA_CHSEL_CC3_Val << TCC_FCTRLA_CHSEL_Pos)
#define TCC_FCTRLA_CAPTURE_Pos 12 /**< \brief (TCC_FCTRLA) FaultA Capture Action */
#define TCC_FCTRLA_CAPTURE_Msk (0x7u << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE(value) ((TCC_FCTRLA_CAPTURE_Msk & ((value) << TCC_FCTRLA_CAPTURE_Pos)))
#define TCC_FCTRLA_CAPTURE_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_CAPT_Val 0x1u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_CAPTMIN_Val 0x2u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_CAPTMAX_Val 0x3u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_LOCMIN_Val 0x4u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_LOCMAX_Val 0x5u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_DERIV0_Val 0x6u /**< \brief (TCC_FCTRLA) */
#define TCC_FCTRLA_CAPTURE_DISABLE (TCC_FCTRLA_CAPTURE_DISABLE_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE_CAPT (TCC_FCTRLA_CAPTURE_CAPT_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE_CAPTMIN (TCC_FCTRLA_CAPTURE_CAPTMIN_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE_CAPTMAX (TCC_FCTRLA_CAPTURE_CAPTMAX_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE_LOCMIN (TCC_FCTRLA_CAPTURE_LOCMIN_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE_LOCMAX (TCC_FCTRLA_CAPTURE_LOCMAX_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_CAPTURE_DERIV0 (TCC_FCTRLA_CAPTURE_DERIV0_Val << TCC_FCTRLA_CAPTURE_Pos)
#define TCC_FCTRLA_BLANKVAL_Pos 16 /**< \brief (TCC_FCTRLA) FaultA Blanking Time */
#define TCC_FCTRLA_BLANKVAL_Msk (0xFFu << TCC_FCTRLA_BLANKVAL_Pos)
#define TCC_FCTRLA_BLANKVAL(value) ((TCC_FCTRLA_BLANKVAL_Msk & ((value) << TCC_FCTRLA_BLANKVAL_Pos)))
#define TCC_FCTRLA_FILTERVAL_Pos 24 /**< \brief (TCC_FCTRLA) FaultA Filter Value */
#define TCC_FCTRLA_FILTERVAL_Msk (0xFu << TCC_FCTRLA_FILTERVAL_Pos)
#define TCC_FCTRLA_FILTERVAL(value) ((TCC_FCTRLA_FILTERVAL_Msk & ((value) << TCC_FCTRLA_FILTERVAL_Pos)))
#define TCC_FCTRLA_MASK 0x0FFF7FFBu /**< \brief (TCC_FCTRLA) MASK Register */
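/* Usage sketch (illustrative): a recoverable fault is configured by picking a
 * source and a reaction; here the fault input is enabled and a
 * hardware-released halt is requested:
 *
 *   TCC0->FCTRLA.reg = TCC_FCTRLA_SRC_ENABLE | TCC_FCTRLA_HALT_HW;
 */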
/* -------- TCC_FCTRLB : (TCC Offset: 0x10) (R/W 32) Recoverable FaultB Configuration -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t SRC:2; /*!< bit: 0.. 1 FaultB Source */
uint32_t :1; /*!< bit: 2 Reserved */
uint32_t KEEP:1; /*!< bit: 3 FaultB Keeper */
uint32_t QUAL:1; /*!< bit: 4 FaultB Qualification */
uint32_t BLANK:2; /*!< bit: 5.. 6 FaultB Blanking Mode */
uint32_t RESTART:1; /*!< bit: 7 FaultB Restart */
uint32_t HALT:2; /*!< bit: 8.. 9 FaultB Halt Mode */
uint32_t CHSEL:2; /*!< bit: 10..11 FaultB Capture Channel */
uint32_t CAPTURE:3; /*!< bit: 12..14 FaultB Capture Action */
uint32_t :1; /*!< bit: 15 Reserved */
uint32_t BLANKVAL:8; /*!< bit: 16..23 FaultB Blanking Time */
uint32_t FILTERVAL:4; /*!< bit: 24..27 FaultB Filter Value */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_FCTRLB_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_FCTRLB_OFFSET 0x10 /**< \brief (TCC_FCTRLB offset) Recoverable FaultB Configuration */
#define TCC_FCTRLB_RESETVALUE 0x00000000 /**< \brief (TCC_FCTRLB reset_value) Recoverable FaultB Configuration */
#define TCC_FCTRLB_SRC_Pos 0 /**< \brief (TCC_FCTRLB) FaultB Source */
#define TCC_FCTRLB_SRC_Msk (0x3u << TCC_FCTRLB_SRC_Pos)
#define TCC_FCTRLB_SRC(value) ((TCC_FCTRLB_SRC_Msk & ((value) << TCC_FCTRLB_SRC_Pos)))
#define TCC_FCTRLB_SRC_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_SRC_ENABLE_Val 0x1u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_SRC_INVERT_Val 0x2u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_SRC_ALTFAULT_Val 0x3u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_SRC_DISABLE (TCC_FCTRLB_SRC_DISABLE_Val << TCC_FCTRLB_SRC_Pos)
#define TCC_FCTRLB_SRC_ENABLE (TCC_FCTRLB_SRC_ENABLE_Val << TCC_FCTRLB_SRC_Pos)
#define TCC_FCTRLB_SRC_INVERT (TCC_FCTRLB_SRC_INVERT_Val << TCC_FCTRLB_SRC_Pos)
#define TCC_FCTRLB_SRC_ALTFAULT (TCC_FCTRLB_SRC_ALTFAULT_Val << TCC_FCTRLB_SRC_Pos)
#define TCC_FCTRLB_KEEP_Pos 3 /**< \brief (TCC_FCTRLB) FaultB Keeper */
#define TCC_FCTRLB_KEEP (0x1u << TCC_FCTRLB_KEEP_Pos)
#define TCC_FCTRLB_QUAL_Pos 4 /**< \brief (TCC_FCTRLB) FaultB Qualification */
#define TCC_FCTRLB_QUAL (0x1u << TCC_FCTRLB_QUAL_Pos)
#define TCC_FCTRLB_BLANK_Pos 5 /**< \brief (TCC_FCTRLB) FaultB Blanking Mode */
#define TCC_FCTRLB_BLANK_Msk (0x3u << TCC_FCTRLB_BLANK_Pos)
#define TCC_FCTRLB_BLANK(value) ((TCC_FCTRLB_BLANK_Msk & ((value) << TCC_FCTRLB_BLANK_Pos)))
#define TCC_FCTRLB_BLANK_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_BLANK_RISE_Val 0x1u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_BLANK_FALL_Val 0x2u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_BLANK_BOTH_Val 0x3u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_BLANK_DISABLE (TCC_FCTRLB_BLANK_DISABLE_Val << TCC_FCTRLB_BLANK_Pos)
#define TCC_FCTRLB_BLANK_RISE (TCC_FCTRLB_BLANK_RISE_Val << TCC_FCTRLB_BLANK_Pos)
#define TCC_FCTRLB_BLANK_FALL (TCC_FCTRLB_BLANK_FALL_Val << TCC_FCTRLB_BLANK_Pos)
#define TCC_FCTRLB_BLANK_BOTH (TCC_FCTRLB_BLANK_BOTH_Val << TCC_FCTRLB_BLANK_Pos)
#define TCC_FCTRLB_RESTART_Pos 7 /**< \brief (TCC_FCTRLB) FaultB Restart */
#define TCC_FCTRLB_RESTART (0x1u << TCC_FCTRLB_RESTART_Pos)
#define TCC_FCTRLB_HALT_Pos 8 /**< \brief (TCC_FCTRLB) FaultB Halt Mode */
#define TCC_FCTRLB_HALT_Msk (0x3u << TCC_FCTRLB_HALT_Pos)
#define TCC_FCTRLB_HALT(value) ((TCC_FCTRLB_HALT_Msk & ((value) << TCC_FCTRLB_HALT_Pos)))
#define TCC_FCTRLB_HALT_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_HALT_HW_Val 0x1u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_HALT_SW_Val 0x2u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_HALT_NR_Val 0x3u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_HALT_DISABLE (TCC_FCTRLB_HALT_DISABLE_Val << TCC_FCTRLB_HALT_Pos)
#define TCC_FCTRLB_HALT_HW (TCC_FCTRLB_HALT_HW_Val << TCC_FCTRLB_HALT_Pos)
#define TCC_FCTRLB_HALT_SW (TCC_FCTRLB_HALT_SW_Val << TCC_FCTRLB_HALT_Pos)
#define TCC_FCTRLB_HALT_NR (TCC_FCTRLB_HALT_NR_Val << TCC_FCTRLB_HALT_Pos)
#define TCC_FCTRLB_CHSEL_Pos 10 /**< \brief (TCC_FCTRLB) FaultB Capture Channel */
#define TCC_FCTRLB_CHSEL_Msk (0x3u << TCC_FCTRLB_CHSEL_Pos)
#define TCC_FCTRLB_CHSEL(value) ((TCC_FCTRLB_CHSEL_Msk & ((value) << TCC_FCTRLB_CHSEL_Pos)))
#define TCC_FCTRLB_CHSEL_CC0_Val 0x0u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CHSEL_CC1_Val 0x1u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CHSEL_CC2_Val 0x2u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CHSEL_CC3_Val 0x3u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CHSEL_CC0 (TCC_FCTRLB_CHSEL_CC0_Val << TCC_FCTRLB_CHSEL_Pos)
#define TCC_FCTRLB_CHSEL_CC1 (TCC_FCTRLB_CHSEL_CC1_Val << TCC_FCTRLB_CHSEL_Pos)
#define TCC_FCTRLB_CHSEL_CC2 (TCC_FCTRLB_CHSEL_CC2_Val << TCC_FCTRLB_CHSEL_Pos)
#define TCC_FCTRLB_CHSEL_CC3 (TCC_FCTRLB_CHSEL_CC3_Val << TCC_FCTRLB_CHSEL_Pos)
#define TCC_FCTRLB_CAPTURE_Pos 12 /**< \brief (TCC_FCTRLB) FaultB Capture Action */
#define TCC_FCTRLB_CAPTURE_Msk (0x7u << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE(value) ((TCC_FCTRLB_CAPTURE_Msk & ((value) << TCC_FCTRLB_CAPTURE_Pos)))
#define TCC_FCTRLB_CAPTURE_DISABLE_Val 0x0u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_CAPT_Val 0x1u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_CAPTMIN_Val 0x2u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_CAPTMAX_Val 0x3u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_LOCMIN_Val 0x4u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_LOCMAX_Val 0x5u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_DERIV0_Val 0x6u /**< \brief (TCC_FCTRLB) */
#define TCC_FCTRLB_CAPTURE_DISABLE (TCC_FCTRLB_CAPTURE_DISABLE_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE_CAPT (TCC_FCTRLB_CAPTURE_CAPT_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE_CAPTMIN (TCC_FCTRLB_CAPTURE_CAPTMIN_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE_CAPTMAX (TCC_FCTRLB_CAPTURE_CAPTMAX_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE_LOCMIN (TCC_FCTRLB_CAPTURE_LOCMIN_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE_LOCMAX (TCC_FCTRLB_CAPTURE_LOCMAX_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_CAPTURE_DERIV0 (TCC_FCTRLB_CAPTURE_DERIV0_Val << TCC_FCTRLB_CAPTURE_Pos)
#define TCC_FCTRLB_BLANKVAL_Pos 16 /**< \brief (TCC_FCTRLB) FaultB Blanking Time */
#define TCC_FCTRLB_BLANKVAL_Msk (0xFFu << TCC_FCTRLB_BLANKVAL_Pos)
#define TCC_FCTRLB_BLANKVAL(value) ((TCC_FCTRLB_BLANKVAL_Msk & ((value) << TCC_FCTRLB_BLANKVAL_Pos)))
#define TCC_FCTRLB_FILTERVAL_Pos 24 /**< \brief (TCC_FCTRLB) FaultB Filter Value */
#define TCC_FCTRLB_FILTERVAL_Msk (0xFu << TCC_FCTRLB_FILTERVAL_Pos)
#define TCC_FCTRLB_FILTERVAL(value) ((TCC_FCTRLB_FILTERVAL_Msk & ((value) << TCC_FCTRLB_FILTERVAL_Pos)))
#define TCC_FCTRLB_MASK 0x0FFF7FFBu /**< \brief (TCC_FCTRLB) MASK Register */
/* -------- TCC_WEXCTRL : (TCC Offset: 0x14) (R/W 32) Waveform Extension Configuration -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t OTMX:2; /*!< bit: 0.. 1 Output Matrix */
uint32_t :6; /*!< bit: 2.. 7 Reserved */
uint32_t DTIEN0:1; /*!< bit: 8 Dead-time Insertion Generator 0 Enable */
uint32_t DTIEN1:1; /*!< bit: 9 Dead-time Insertion Generator 1 Enable */
uint32_t DTIEN2:1; /*!< bit: 10 Dead-time Insertion Generator 2 Enable */
uint32_t DTIEN3:1; /*!< bit: 11 Dead-time Insertion Generator 3 Enable */
uint32_t :4; /*!< bit: 12..15 Reserved */
uint32_t DTLS:8; /*!< bit: 16..23 Dead-time Low Side Outputs Value */
uint32_t DTHS:8; /*!< bit: 24..31 Dead-time High Side Outputs Value */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :8; /*!< bit: 0.. 7 Reserved */
uint32_t DTIEN:4; /*!< bit: 8..11 Dead-time Insertion Generator x Enable */
uint32_t :20; /*!< bit: 12..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_WEXCTRL_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_WEXCTRL_OFFSET 0x14 /**< \brief (TCC_WEXCTRL offset) Waveform Extension Configuration */
#define TCC_WEXCTRL_RESETVALUE 0x00000000 /**< \brief (TCC_WEXCTRL reset_value) Waveform Extension Configuration */
#define TCC_WEXCTRL_OTMX_Pos 0 /**< \brief (TCC_WEXCTRL) Output Matrix */
#define TCC_WEXCTRL_OTMX_Msk (0x3u << TCC_WEXCTRL_OTMX_Pos)
#define TCC_WEXCTRL_OTMX(value) ((TCC_WEXCTRL_OTMX_Msk & ((value) << TCC_WEXCTRL_OTMX_Pos)))
#define TCC_WEXCTRL_DTIEN0_Pos 8 /**< \brief (TCC_WEXCTRL) Dead-time Insertion Generator 0 Enable */
#define TCC_WEXCTRL_DTIEN0 (1 << TCC_WEXCTRL_DTIEN0_Pos)
#define TCC_WEXCTRL_DTIEN1_Pos 9 /**< \brief (TCC_WEXCTRL) Dead-time Insertion Generator 1 Enable */
#define TCC_WEXCTRL_DTIEN1 (1 << TCC_WEXCTRL_DTIEN1_Pos)
#define TCC_WEXCTRL_DTIEN2_Pos 10 /**< \brief (TCC_WEXCTRL) Dead-time Insertion Generator 2 Enable */
#define TCC_WEXCTRL_DTIEN2 (1 << TCC_WEXCTRL_DTIEN2_Pos)
#define TCC_WEXCTRL_DTIEN3_Pos 11 /**< \brief (TCC_WEXCTRL) Dead-time Insertion Generator 3 Enable */
#define TCC_WEXCTRL_DTIEN3 (1 << TCC_WEXCTRL_DTIEN3_Pos)
#define TCC_WEXCTRL_DTIEN_Pos 8 /**< \brief (TCC_WEXCTRL) Dead-time Insertion Generator x Enable */
#define TCC_WEXCTRL_DTIEN_Msk (0xFu << TCC_WEXCTRL_DTIEN_Pos)
#define TCC_WEXCTRL_DTIEN(value) ((TCC_WEXCTRL_DTIEN_Msk & ((value) << TCC_WEXCTRL_DTIEN_Pos)))
#define TCC_WEXCTRL_DTLS_Pos 16 /**< \brief (TCC_WEXCTRL) Dead-time Low Side Outputs Value */
#define TCC_WEXCTRL_DTLS_Msk (0xFFu << TCC_WEXCTRL_DTLS_Pos)
#define TCC_WEXCTRL_DTLS(value) ((TCC_WEXCTRL_DTLS_Msk & ((value) << TCC_WEXCTRL_DTLS_Pos)))
#define TCC_WEXCTRL_DTHS_Pos 24 /**< \brief (TCC_WEXCTRL) Dead-time High Side Outputs Value */
#define TCC_WEXCTRL_DTHS_Msk (0xFFu << TCC_WEXCTRL_DTHS_Pos)
#define TCC_WEXCTRL_DTHS(value) ((TCC_WEXCTRL_DTHS_Msk & ((value) << TCC_WEXCTRL_DTHS_Pos)))
#define TCC_WEXCTRL_MASK 0xFFFF0F03u /**< \brief (TCC_WEXCTRL) MASK Register */
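/* Usage sketch, assuming the same `Tcc *TCC0` instance: enable dead-time
 * insertion on output pair 0 with eight prescaled clock cycles on both
 * the low-side and high-side outputs, default output matrix.
 *
 *   TCC0->WEXCTRL.reg = TCC_WEXCTRL_OTMX(0x0)
 *                     | TCC_WEXCTRL_DTIEN0
 *                     | TCC_WEXCTRL_DTLS(8)
 *                     | TCC_WEXCTRL_DTHS(8);
 */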
/* -------- TCC_DRVCTRL : (TCC Offset: 0x18) (R/W 32) Driver Configuration -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t NRE0:1; /*!< bit: 0 Non-Recoverable State 0 Output Enable */
uint32_t NRE1:1; /*!< bit: 1 Non-Recoverable State 1 Output Enable */
uint32_t NRE2:1; /*!< bit: 2 Non-Recoverable State 2 Output Enable */
uint32_t NRE3:1; /*!< bit: 3 Non-Recoverable State 3 Output Enable */
uint32_t NRE4:1; /*!< bit: 4 Non-Recoverable State 4 Output Enable */
uint32_t NRE5:1; /*!< bit: 5 Non-Recoverable State 5 Output Enable */
uint32_t NRE6:1; /*!< bit: 6 Non-Recoverable State 6 Output Enable */
uint32_t NRE7:1; /*!< bit: 7 Non-Recoverable State 7 Output Enable */
uint32_t NRV0:1; /*!< bit: 8 Non-Recoverable State 0 Output Value */
uint32_t NRV1:1; /*!< bit: 9 Non-Recoverable State 1 Output Value */
uint32_t NRV2:1; /*!< bit: 10 Non-Recoverable State 2 Output Value */
uint32_t NRV3:1; /*!< bit: 11 Non-Recoverable State 3 Output Value */
uint32_t NRV4:1; /*!< bit: 12 Non-Recoverable State 4 Output Value */
uint32_t NRV5:1; /*!< bit: 13 Non-Recoverable State 5 Output Value */
uint32_t NRV6:1; /*!< bit: 14 Non-Recoverable State 6 Output Value */
uint32_t NRV7:1; /*!< bit: 15 Non-Recoverable State 7 Output Value */
uint32_t INVEN0:1; /*!< bit: 16 Output Waveform 0 Inversion */
uint32_t INVEN1:1; /*!< bit: 17 Output Waveform 1 Inversion */
uint32_t INVEN2:1; /*!< bit: 18 Output Waveform 2 Inversion */
uint32_t INVEN3:1; /*!< bit: 19 Output Waveform 3 Inversion */
uint32_t INVEN4:1; /*!< bit: 20 Output Waveform 4 Inversion */
uint32_t INVEN5:1; /*!< bit: 21 Output Waveform 5 Inversion */
uint32_t INVEN6:1; /*!< bit: 22 Output Waveform 6 Inversion */
uint32_t INVEN7:1; /*!< bit: 23 Output Waveform 7 Inversion */
uint32_t FILTERVAL0:4; /*!< bit: 24..27 Non-Recoverable Fault Input 0 Filter Value */
uint32_t FILTERVAL1:4; /*!< bit: 28..31 Non-Recoverable Fault Input 1 Filter Value */
} bit; /*!< Structure used for bit access */
struct {
uint32_t NRE:8; /*!< bit: 0.. 7 Non-Recoverable State x Output Enable */
uint32_t NRV:8; /*!< bit: 8..15 Non-Recoverable State x Output Value */
uint32_t INVEN:8; /*!< bit: 16..23 Output Waveform x Inversion */
uint32_t :8; /*!< bit: 24..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_DRVCTRL_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_DRVCTRL_OFFSET 0x18 /**< \brief (TCC_DRVCTRL offset) Driver Configuration */
#define TCC_DRVCTRL_RESETVALUE 0x00000000 /**< \brief (TCC_DRVCTRL reset_value) Driver Configuration */
#define TCC_DRVCTRL_NRE0_Pos 0 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 0 Output Enable */
#define TCC_DRVCTRL_NRE0 (1 << TCC_DRVCTRL_NRE0_Pos)
#define TCC_DRVCTRL_NRE1_Pos 1 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 1 Output Enable */
#define TCC_DRVCTRL_NRE1 (1 << TCC_DRVCTRL_NRE1_Pos)
#define TCC_DRVCTRL_NRE2_Pos 2 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 2 Output Enable */
#define TCC_DRVCTRL_NRE2 (1 << TCC_DRVCTRL_NRE2_Pos)
#define TCC_DRVCTRL_NRE3_Pos 3 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 3 Output Enable */
#define TCC_DRVCTRL_NRE3 (1 << TCC_DRVCTRL_NRE3_Pos)
#define TCC_DRVCTRL_NRE4_Pos 4 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 4 Output Enable */
#define TCC_DRVCTRL_NRE4 (1 << TCC_DRVCTRL_NRE4_Pos)
#define TCC_DRVCTRL_NRE5_Pos 5 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 5 Output Enable */
#define TCC_DRVCTRL_NRE5 (1 << TCC_DRVCTRL_NRE5_Pos)
#define TCC_DRVCTRL_NRE6_Pos 6 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 6 Output Enable */
#define TCC_DRVCTRL_NRE6 (1 << TCC_DRVCTRL_NRE6_Pos)
#define TCC_DRVCTRL_NRE7_Pos 7 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 7 Output Enable */
#define TCC_DRVCTRL_NRE7 (1 << TCC_DRVCTRL_NRE7_Pos)
#define TCC_DRVCTRL_NRE_Pos 0 /**< \brief (TCC_DRVCTRL) Non-Recoverable State x Output Enable */
#define TCC_DRVCTRL_NRE_Msk (0xFFu << TCC_DRVCTRL_NRE_Pos)
#define TCC_DRVCTRL_NRE(value) ((TCC_DRVCTRL_NRE_Msk & ((value) << TCC_DRVCTRL_NRE_Pos)))
#define TCC_DRVCTRL_NRV0_Pos 8 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 0 Output Value */
#define TCC_DRVCTRL_NRV0 (1 << TCC_DRVCTRL_NRV0_Pos)
#define TCC_DRVCTRL_NRV1_Pos 9 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 1 Output Value */
#define TCC_DRVCTRL_NRV1 (1 << TCC_DRVCTRL_NRV1_Pos)
#define TCC_DRVCTRL_NRV2_Pos 10 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 2 Output Value */
#define TCC_DRVCTRL_NRV2 (1 << TCC_DRVCTRL_NRV2_Pos)
#define TCC_DRVCTRL_NRV3_Pos 11 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 3 Output Value */
#define TCC_DRVCTRL_NRV3 (1 << TCC_DRVCTRL_NRV3_Pos)
#define TCC_DRVCTRL_NRV4_Pos 12 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 4 Output Value */
#define TCC_DRVCTRL_NRV4 (1 << TCC_DRVCTRL_NRV4_Pos)
#define TCC_DRVCTRL_NRV5_Pos 13 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 5 Output Value */
#define TCC_DRVCTRL_NRV5 (1 << TCC_DRVCTRL_NRV5_Pos)
#define TCC_DRVCTRL_NRV6_Pos 14 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 6 Output Value */
#define TCC_DRVCTRL_NRV6 (1 << TCC_DRVCTRL_NRV6_Pos)
#define TCC_DRVCTRL_NRV7_Pos 15 /**< \brief (TCC_DRVCTRL) Non-Recoverable State 7 Output Value */
#define TCC_DRVCTRL_NRV7 (1 << TCC_DRVCTRL_NRV7_Pos)
#define TCC_DRVCTRL_NRV_Pos 8 /**< \brief (TCC_DRVCTRL) Non-Recoverable State x Output Value */
#define TCC_DRVCTRL_NRV_Msk (0xFFu << TCC_DRVCTRL_NRV_Pos)
#define TCC_DRVCTRL_NRV(value) ((TCC_DRVCTRL_NRV_Msk & ((value) << TCC_DRVCTRL_NRV_Pos)))
#define TCC_DRVCTRL_INVEN0_Pos 16 /**< \brief (TCC_DRVCTRL) Output Waveform 0 Inversion */
#define TCC_DRVCTRL_INVEN0 (1 << TCC_DRVCTRL_INVEN0_Pos)
#define TCC_DRVCTRL_INVEN1_Pos 17 /**< \brief (TCC_DRVCTRL) Output Waveform 1 Inversion */
#define TCC_DRVCTRL_INVEN1 (1 << TCC_DRVCTRL_INVEN1_Pos)
#define TCC_DRVCTRL_INVEN2_Pos 18 /**< \brief (TCC_DRVCTRL) Output Waveform 2 Inversion */
#define TCC_DRVCTRL_INVEN2 (1 << TCC_DRVCTRL_INVEN2_Pos)
#define TCC_DRVCTRL_INVEN3_Pos 19 /**< \brief (TCC_DRVCTRL) Output Waveform 3 Inversion */
#define TCC_DRVCTRL_INVEN3 (1 << TCC_DRVCTRL_INVEN3_Pos)
#define TCC_DRVCTRL_INVEN4_Pos 20 /**< \brief (TCC_DRVCTRL) Output Waveform 4 Inversion */
#define TCC_DRVCTRL_INVEN4 (1 << TCC_DRVCTRL_INVEN4_Pos)
#define TCC_DRVCTRL_INVEN5_Pos 21 /**< \brief (TCC_DRVCTRL) Output Waveform 5 Inversion */
#define TCC_DRVCTRL_INVEN5 (1 << TCC_DRVCTRL_INVEN5_Pos)
#define TCC_DRVCTRL_INVEN6_Pos 22 /**< \brief (TCC_DRVCTRL) Output Waveform 6 Inversion */
#define TCC_DRVCTRL_INVEN6 (1 << TCC_DRVCTRL_INVEN6_Pos)
#define TCC_DRVCTRL_INVEN7_Pos 23 /**< \brief (TCC_DRVCTRL) Output Waveform 7 Inversion */
#define TCC_DRVCTRL_INVEN7 (1 << TCC_DRVCTRL_INVEN7_Pos)
#define TCC_DRVCTRL_INVEN_Pos 16 /**< \brief (TCC_DRVCTRL) Output Waveform x Inversion */
#define TCC_DRVCTRL_INVEN_Msk (0xFFu << TCC_DRVCTRL_INVEN_Pos)
#define TCC_DRVCTRL_INVEN(value) ((TCC_DRVCTRL_INVEN_Msk & ((value) << TCC_DRVCTRL_INVEN_Pos)))
#define TCC_DRVCTRL_FILTERVAL0_Pos 24 /**< \brief (TCC_DRVCTRL) Non-Recoverable Fault Input 0 Filter Value */
#define TCC_DRVCTRL_FILTERVAL0_Msk (0xFu << TCC_DRVCTRL_FILTERVAL0_Pos)
#define TCC_DRVCTRL_FILTERVAL0(value) ((TCC_DRVCTRL_FILTERVAL0_Msk & ((value) << TCC_DRVCTRL_FILTERVAL0_Pos)))
#define TCC_DRVCTRL_FILTERVAL1_Pos 28 /**< \brief (TCC_DRVCTRL) Non-Recoverable Fault Input 1 Filter Value */
#define TCC_DRVCTRL_FILTERVAL1_Msk (0xFu << TCC_DRVCTRL_FILTERVAL1_Pos)
#define TCC_DRVCTRL_FILTERVAL1(value) ((TCC_DRVCTRL_FILTERVAL1_Msk & ((value) << TCC_DRVCTRL_FILTERVAL1_Pos)))
#define TCC_DRVCTRL_MASK 0xFFFFFFFFu /**< \brief (TCC_DRVCTRL) MASK Register */
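/* Usage sketch, assuming `Tcc *TCC0`: invert waveform output 1, and make
 * outputs 0 and 1 drive low (NRE set, NRV left clear) while a
 * non-recoverable fault is active.
 *
 *   TCC0->DRVCTRL.reg = TCC_DRVCTRL_INVEN1
 *                     | TCC_DRVCTRL_NRE(0x3);  // NRV bits stay 0: outputs low
 */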
/* -------- TCC_DBGCTRL : (TCC Offset: 0x1E) (R/W 8) Debug Control -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint8_t DBGRUN:1; /*!< bit: 0 Debug Running Mode */
uint8_t :1; /*!< bit: 1 Reserved */
uint8_t FDDBD:1; /*!< bit: 2 Fault Detection on Debug Break Detection */
uint8_t :5; /*!< bit: 3.. 7 Reserved */
} bit; /*!< Structure used for bit access */
uint8_t reg; /*!< Type used for register access */
} TCC_DBGCTRL_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_DBGCTRL_OFFSET 0x1E /**< \brief (TCC_DBGCTRL offset) Debug Control */
#define TCC_DBGCTRL_RESETVALUE 0x00 /**< \brief (TCC_DBGCTRL reset_value) Debug Control */
#define TCC_DBGCTRL_DBGRUN_Pos 0 /**< \brief (TCC_DBGCTRL) Debug Running Mode */
#define TCC_DBGCTRL_DBGRUN (0x1u << TCC_DBGCTRL_DBGRUN_Pos)
#define TCC_DBGCTRL_FDDBD_Pos 2 /**< \brief (TCC_DBGCTRL) Fault Detection on Debug Break Detection */
#define TCC_DBGCTRL_FDDBD (0x1u << TCC_DBGCTRL_FDDBD_Pos)
#define TCC_DBGCTRL_MASK 0x05u /**< \brief (TCC_DBGCTRL) MASK Register */
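/* Usage sketch, assuming `Tcc *TCC0`: DBGCTRL is an 8-bit register;
 * setting DBGRUN lets the counter keep running while the CPU is halted
 * by a debugger.
 *
 *   TCC0->DBGCTRL.reg = TCC_DBGCTRL_DBGRUN;
 */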
/* -------- TCC_EVCTRL : (TCC Offset: 0x20) (R/W 32) Event Control -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t EVACT0:3; /*!< bit: 0.. 2 Timer/counter Input Event0 Action */
uint32_t EVACT1:3; /*!< bit: 3.. 5 Timer/counter Input Event1 Action */
uint32_t CNTSEL:2; /*!< bit: 6.. 7 Timer/counter Output Event Mode */
uint32_t OVFEO:1; /*!< bit: 8 Overflow/Underflow Output Event Enable */
uint32_t TRGEO:1; /*!< bit: 9 Retrigger Output Event Enable */
uint32_t CNTEO:1; /*!< bit: 10 Timer/counter Output Event Enable */
uint32_t :1; /*!< bit: 11 Reserved */
uint32_t TCINV0:1; /*!< bit: 12 Inverted Event 0 Input Enable */
uint32_t TCINV1:1; /*!< bit: 13 Inverted Event 1 Input Enable */
uint32_t TCEI0:1; /*!< bit: 14 Timer/counter Event 0 Input Enable */
uint32_t TCEI1:1; /*!< bit: 15 Timer/counter Event 1 Input Enable */
uint32_t MCEI0:1; /*!< bit: 16 Match or Capture Channel 0 Event Input Enable */
uint32_t MCEI1:1; /*!< bit: 17 Match or Capture Channel 1 Event Input Enable */
uint32_t MCEI2:1; /*!< bit: 18 Match or Capture Channel 2 Event Input Enable */
uint32_t MCEI3:1; /*!< bit: 19 Match or Capture Channel 3 Event Input Enable */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t MCEO0:1; /*!< bit: 24 Match or Capture Channel 0 Event Output Enable */
uint32_t MCEO1:1; /*!< bit: 25 Match or Capture Channel 1 Event Output Enable */
uint32_t MCEO2:1; /*!< bit: 26 Match or Capture Channel 2 Event Output Enable */
uint32_t MCEO3:1; /*!< bit: 27 Match or Capture Channel 3 Event Output Enable */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :12; /*!< bit: 0..11 Reserved */
uint32_t TCINV:2; /*!< bit: 12..13 Inverted Event x Input Enable */
uint32_t TCEI:2; /*!< bit: 14..15 Timer/counter Event x Input Enable */
uint32_t MCEI:4; /*!< bit: 16..19 Match or Capture Channel x Event Input Enable */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t MCEO:4; /*!< bit: 24..27 Match or Capture Channel x Event Output Enable */
uint32_t :4; /*!< bit: 28..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_EVCTRL_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_EVCTRL_OFFSET 0x20 /**< \brief (TCC_EVCTRL offset) Event Control */
#define TCC_EVCTRL_RESETVALUE 0x00000000 /**< \brief (TCC_EVCTRL reset_value) Event Control */
#define TCC_EVCTRL_EVACT0_Pos 0 /**< \brief (TCC_EVCTRL) Timer/counter Input Event0 Action */
#define TCC_EVCTRL_EVACT0_Msk (0x7u << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0(value) ((TCC_EVCTRL_EVACT0_Msk & ((value) << TCC_EVCTRL_EVACT0_Pos)))
#define TCC_EVCTRL_EVACT0_OFF_Val 0x0u /**< \brief (TCC_EVCTRL) Event action disabled. */
#define TCC_EVCTRL_EVACT0_RETRIGGER_Val 0x1u /**< \brief (TCC_EVCTRL) Start, restart or re-trigger TC on event */
#define TCC_EVCTRL_EVACT0_COUNTEV_Val 0x2u /**< \brief (TCC_EVCTRL) Count on event. Increment or decrement depending on count direction. */
#define TCC_EVCTRL_EVACT0_START_Val 0x3u /**< \brief (TCC_EVCTRL) Start TC on event. Counting starts on the event rising edge; further events will not restart the counter, which keeps counting on the prescaled GCLK_TCCx until it reaches TOP or zero, depending on the direction. */
#define TCC_EVCTRL_EVACT0_INC_Val 0x4u /**< \brief (TCC_EVCTRL) Increment TC on event, irrespective of count direction */
#define TCC_EVCTRL_EVACT0_COUNT_Val 0x5u /**< \brief (TCC_EVCTRL) Count on active state of asynchronous event */
#define TCC_EVCTRL_EVACT0_FAULT_Val 0x7u /**< \brief (TCC_EVCTRL) Non-recoverable Fault */
#define TCC_EVCTRL_EVACT0_OFF (TCC_EVCTRL_EVACT0_OFF_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0_RETRIGGER (TCC_EVCTRL_EVACT0_RETRIGGER_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0_COUNTEV (TCC_EVCTRL_EVACT0_COUNTEV_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0_START (TCC_EVCTRL_EVACT0_START_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0_INC (TCC_EVCTRL_EVACT0_INC_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0_COUNT (TCC_EVCTRL_EVACT0_COUNT_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT0_FAULT (TCC_EVCTRL_EVACT0_FAULT_Val << TCC_EVCTRL_EVACT0_Pos)
#define TCC_EVCTRL_EVACT1_Pos 3 /**< \brief (TCC_EVCTRL) Timer/counter Input Event1 Action */
#define TCC_EVCTRL_EVACT1_Msk (0x7u << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1(value) ((TCC_EVCTRL_EVACT1_Msk & ((value) << TCC_EVCTRL_EVACT1_Pos)))
#define TCC_EVCTRL_EVACT1_OFF_Val 0x0u /**< \brief (TCC_EVCTRL) Event action disabled. */
#define TCC_EVCTRL_EVACT1_RETRIGGER_Val 0x1u /**< \brief (TCC_EVCTRL) Re-trigger TC on event */
#define TCC_EVCTRL_EVACT1_DIR_Val 0x2u /**< \brief (TCC_EVCTRL) Direction control */
#define TCC_EVCTRL_EVACT1_STOP_Val 0x3u /**< \brief (TCC_EVCTRL) Stop TC on event */
#define TCC_EVCTRL_EVACT1_DEC_Val 0x4u /**< \brief (TCC_EVCTRL) Decrement TC on event */
#define TCC_EVCTRL_EVACT1_PPW_Val 0x5u /**< \brief (TCC_EVCTRL) Period captured into CC0, pulse width into CC1 */
#define TCC_EVCTRL_EVACT1_PWP_Val 0x6u /**< \brief (TCC_EVCTRL) Period captured into CC1, pulse width into CC0 */
#define TCC_EVCTRL_EVACT1_FAULT_Val 0x7u /**< \brief (TCC_EVCTRL) Non-recoverable Fault */
#define TCC_EVCTRL_EVACT1_OFF (TCC_EVCTRL_EVACT1_OFF_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_RETRIGGER (TCC_EVCTRL_EVACT1_RETRIGGER_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_DIR (TCC_EVCTRL_EVACT1_DIR_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_STOP (TCC_EVCTRL_EVACT1_STOP_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_DEC (TCC_EVCTRL_EVACT1_DEC_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_PPW (TCC_EVCTRL_EVACT1_PPW_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_PWP (TCC_EVCTRL_EVACT1_PWP_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_EVACT1_FAULT (TCC_EVCTRL_EVACT1_FAULT_Val << TCC_EVCTRL_EVACT1_Pos)
#define TCC_EVCTRL_CNTSEL_Pos 6 /**< \brief (TCC_EVCTRL) Timer/counter Output Event Mode */
#define TCC_EVCTRL_CNTSEL_Msk (0x3u << TCC_EVCTRL_CNTSEL_Pos)
#define TCC_EVCTRL_CNTSEL(value) ((TCC_EVCTRL_CNTSEL_Msk & ((value) << TCC_EVCTRL_CNTSEL_Pos)))
#define TCC_EVCTRL_CNTSEL_START_Val 0x0u /**< \brief (TCC_EVCTRL) An interrupt/event is generated when a new counter cycle starts */
#define TCC_EVCTRL_CNTSEL_END_Val 0x1u /**< \brief (TCC_EVCTRL) An interrupt/event is generated when a counter cycle ends */
#define TCC_EVCTRL_CNTSEL_BETWEEN_Val 0x2u /**< \brief (TCC_EVCTRL) An interrupt/event is generated when a counter cycle ends, except for the first and last cycles. */
#define TCC_EVCTRL_CNTSEL_BOUNDARY_Val 0x3u /**< \brief (TCC_EVCTRL) An interrupt/event is generated when a new counter cycle starts or a counter cycle ends */
#define TCC_EVCTRL_CNTSEL_START (TCC_EVCTRL_CNTSEL_START_Val << TCC_EVCTRL_CNTSEL_Pos)
#define TCC_EVCTRL_CNTSEL_END (TCC_EVCTRL_CNTSEL_END_Val << TCC_EVCTRL_CNTSEL_Pos)
#define TCC_EVCTRL_CNTSEL_BETWEEN (TCC_EVCTRL_CNTSEL_BETWEEN_Val << TCC_EVCTRL_CNTSEL_Pos)
#define TCC_EVCTRL_CNTSEL_BOUNDARY (TCC_EVCTRL_CNTSEL_BOUNDARY_Val << TCC_EVCTRL_CNTSEL_Pos)
#define TCC_EVCTRL_OVFEO_Pos 8 /**< \brief (TCC_EVCTRL) Overflow/Underflow Output Event Enable */
#define TCC_EVCTRL_OVFEO (0x1u << TCC_EVCTRL_OVFEO_Pos)
#define TCC_EVCTRL_TRGEO_Pos 9 /**< \brief (TCC_EVCTRL) Retrigger Output Event Enable */
#define TCC_EVCTRL_TRGEO (0x1u << TCC_EVCTRL_TRGEO_Pos)
#define TCC_EVCTRL_CNTEO_Pos 10 /**< \brief (TCC_EVCTRL) Timer/counter Output Event Enable */
#define TCC_EVCTRL_CNTEO (0x1u << TCC_EVCTRL_CNTEO_Pos)
#define TCC_EVCTRL_TCINV0_Pos 12 /**< \brief (TCC_EVCTRL) Inverted Event 0 Input Enable */
#define TCC_EVCTRL_TCINV0 (1 << TCC_EVCTRL_TCINV0_Pos)
#define TCC_EVCTRL_TCINV1_Pos 13 /**< \brief (TCC_EVCTRL) Inverted Event 1 Input Enable */
#define TCC_EVCTRL_TCINV1 (1 << TCC_EVCTRL_TCINV1_Pos)
#define TCC_EVCTRL_TCINV_Pos 12 /**< \brief (TCC_EVCTRL) Inverted Event x Input Enable */
#define TCC_EVCTRL_TCINV_Msk (0x3u << TCC_EVCTRL_TCINV_Pos)
#define TCC_EVCTRL_TCINV(value) ((TCC_EVCTRL_TCINV_Msk & ((value) << TCC_EVCTRL_TCINV_Pos)))
#define TCC_EVCTRL_TCEI0_Pos 14 /**< \brief (TCC_EVCTRL) Timer/counter Event 0 Input Enable */
#define TCC_EVCTRL_TCEI0 (1 << TCC_EVCTRL_TCEI0_Pos)
#define TCC_EVCTRL_TCEI1_Pos 15 /**< \brief (TCC_EVCTRL) Timer/counter Event 1 Input Enable */
#define TCC_EVCTRL_TCEI1 (1 << TCC_EVCTRL_TCEI1_Pos)
#define TCC_EVCTRL_TCEI_Pos 14 /**< \brief (TCC_EVCTRL) Timer/counter Event x Input Enable */
#define TCC_EVCTRL_TCEI_Msk (0x3u << TCC_EVCTRL_TCEI_Pos)
#define TCC_EVCTRL_TCEI(value) ((TCC_EVCTRL_TCEI_Msk & ((value) << TCC_EVCTRL_TCEI_Pos)))
#define TCC_EVCTRL_MCEI0_Pos 16 /**< \brief (TCC_EVCTRL) Match or Capture Channel 0 Event Input Enable */
#define TCC_EVCTRL_MCEI0 (1 << TCC_EVCTRL_MCEI0_Pos)
#define TCC_EVCTRL_MCEI1_Pos 17 /**< \brief (TCC_EVCTRL) Match or Capture Channel 1 Event Input Enable */
#define TCC_EVCTRL_MCEI1 (1 << TCC_EVCTRL_MCEI1_Pos)
#define TCC_EVCTRL_MCEI2_Pos 18 /**< \brief (TCC_EVCTRL) Match or Capture Channel 2 Event Input Enable */
#define TCC_EVCTRL_MCEI2 (1 << TCC_EVCTRL_MCEI2_Pos)
#define TCC_EVCTRL_MCEI3_Pos 19 /**< \brief (TCC_EVCTRL) Match or Capture Channel 3 Event Input Enable */
#define TCC_EVCTRL_MCEI3 (1 << TCC_EVCTRL_MCEI3_Pos)
#define TCC_EVCTRL_MCEI_Pos 16 /**< \brief (TCC_EVCTRL) Match or Capture Channel x Event Input Enable */
#define TCC_EVCTRL_MCEI_Msk (0xFu << TCC_EVCTRL_MCEI_Pos)
#define TCC_EVCTRL_MCEI(value) ((TCC_EVCTRL_MCEI_Msk & ((value) << TCC_EVCTRL_MCEI_Pos)))
#define TCC_EVCTRL_MCEO0_Pos 24 /**< \brief (TCC_EVCTRL) Match or Capture Channel 0 Event Output Enable */
#define TCC_EVCTRL_MCEO0 (1 << TCC_EVCTRL_MCEO0_Pos)
#define TCC_EVCTRL_MCEO1_Pos 25 /**< \brief (TCC_EVCTRL) Match or Capture Channel 1 Event Output Enable */
#define TCC_EVCTRL_MCEO1 (1 << TCC_EVCTRL_MCEO1_Pos)
#define TCC_EVCTRL_MCEO2_Pos 26 /**< \brief (TCC_EVCTRL) Match or Capture Channel 2 Event Output Enable */
#define TCC_EVCTRL_MCEO2 (1 << TCC_EVCTRL_MCEO2_Pos)
#define TCC_EVCTRL_MCEO3_Pos 27 /**< \brief (TCC_EVCTRL) Match or Capture Channel 3 Event Output Enable */
#define TCC_EVCTRL_MCEO3 (1 << TCC_EVCTRL_MCEO3_Pos)
#define TCC_EVCTRL_MCEO_Pos 24 /**< \brief (TCC_EVCTRL) Match or Capture Channel x Event Output Enable */
#define TCC_EVCTRL_MCEO_Msk (0xFu << TCC_EVCTRL_MCEO_Pos)
#define TCC_EVCTRL_MCEO(value) ((TCC_EVCTRL_MCEO_Msk & ((value) << TCC_EVCTRL_MCEO_Pos)))
#define TCC_EVCTRL_MASK 0x0F0FF7FFu /**< \brief (TCC_EVCTRL) MASK Register */
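/* Usage sketch, assuming `Tcc *TCC0`: period/pulse-width measurement via
 * event 1 in PWP mode (period into CC1, pulse width into CC0), with the
 * event input and both capture channels enabled.
 *
 *   TCC0->EVCTRL.reg = TCC_EVCTRL_EVACT1_PWP
 *                    | TCC_EVCTRL_TCEI1
 *                    | TCC_EVCTRL_MCEI0
 *                    | TCC_EVCTRL_MCEI1;
 */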
/* -------- TCC_INTENCLR : (TCC Offset: 0x24) (R/W 32) Interrupt Enable Clear -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t OVF:1; /*!< bit: 0 Overflow Interrupt Enable */
uint32_t TRG:1; /*!< bit: 1 Retrigger Interrupt Enable */
uint32_t CNT:1; /*!< bit: 2 Counter Interrupt Enable */
uint32_t ERR:1; /*!< bit: 3 Error Interrupt Enable */
uint32_t :7; /*!< bit: 4..10 Reserved */
uint32_t DFS:1; /*!< bit: 11 Non-recoverable Debug Fault Interrupt Enable */
uint32_t FAULTA:1; /*!< bit: 12 Recoverable FaultA Interrupt Enable */
uint32_t FAULTB:1; /*!< bit: 13 Recoverable FaultB Interrupt Enable */
uint32_t FAULT0:1; /*!< bit: 14 Non-Recoverable Fault 0 Interrupt Enable */
uint32_t FAULT1:1; /*!< bit: 15 Non-Recoverable Fault 1 Interrupt Enable */
uint32_t MC0:1; /*!< bit: 16 Match or Capture Channel 0 Interrupt Enable */
uint32_t MC1:1; /*!< bit: 17 Match or Capture Channel 1 Interrupt Enable */
uint32_t MC2:1; /*!< bit: 18 Match or Capture Channel 2 Interrupt Enable */
uint32_t MC3:1; /*!< bit: 19 Match or Capture Channel 3 Interrupt Enable */
uint32_t :12; /*!< bit: 20..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :16; /*!< bit: 0..15 Reserved */
uint32_t MC:4; /*!< bit: 16..19 Match or Capture Channel x Interrupt Enable */
uint32_t :12; /*!< bit: 20..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_INTENCLR_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_INTENCLR_OFFSET 0x24 /**< \brief (TCC_INTENCLR offset) Interrupt Enable Clear */
#define TCC_INTENCLR_RESETVALUE 0x00000000 /**< \brief (TCC_INTENCLR reset_value) Interrupt Enable Clear */
#define TCC_INTENCLR_OVF_Pos 0 /**< \brief (TCC_INTENCLR) Overflow Interrupt Enable */
#define TCC_INTENCLR_OVF (0x1u << TCC_INTENCLR_OVF_Pos)
#define TCC_INTENCLR_TRG_Pos 1 /**< \brief (TCC_INTENCLR) Retrigger Interrupt Enable */
#define TCC_INTENCLR_TRG (0x1u << TCC_INTENCLR_TRG_Pos)
#define TCC_INTENCLR_CNT_Pos 2 /**< \brief (TCC_INTENCLR) Counter Interrupt Enable */
#define TCC_INTENCLR_CNT (0x1u << TCC_INTENCLR_CNT_Pos)
#define TCC_INTENCLR_ERR_Pos 3 /**< \brief (TCC_INTENCLR) Error Interrupt Enable */
#define TCC_INTENCLR_ERR (0x1u << TCC_INTENCLR_ERR_Pos)
#define TCC_INTENCLR_DFS_Pos 11 /**< \brief (TCC_INTENCLR) Non-recoverable Debug Fault Interrupt Enable */
#define TCC_INTENCLR_DFS (0x1u << TCC_INTENCLR_DFS_Pos)
#define TCC_INTENCLR_FAULTA_Pos 12 /**< \brief (TCC_INTENCLR) Recoverable FaultA Interrupt Enable */
#define TCC_INTENCLR_FAULTA (0x1u << TCC_INTENCLR_FAULTA_Pos)
#define TCC_INTENCLR_FAULTB_Pos 13 /**< \brief (TCC_INTENCLR) Recoverable FaultB Interrupt Enable */
#define TCC_INTENCLR_FAULTB (0x1u << TCC_INTENCLR_FAULTB_Pos)
#define TCC_INTENCLR_FAULT0_Pos 14 /**< \brief (TCC_INTENCLR) Non-Recoverable Fault 0 Interrupt Enable */
#define TCC_INTENCLR_FAULT0 (0x1u << TCC_INTENCLR_FAULT0_Pos)
#define TCC_INTENCLR_FAULT1_Pos 15 /**< \brief (TCC_INTENCLR) Non-Recoverable Fault 1 Interrupt Enable */
#define TCC_INTENCLR_FAULT1 (0x1u << TCC_INTENCLR_FAULT1_Pos)
#define TCC_INTENCLR_MC0_Pos 16 /**< \brief (TCC_INTENCLR) Match or Capture Channel 0 Interrupt Enable */
#define TCC_INTENCLR_MC0 (1 << TCC_INTENCLR_MC0_Pos)
#define TCC_INTENCLR_MC1_Pos 17 /**< \brief (TCC_INTENCLR) Match or Capture Channel 1 Interrupt Enable */
#define TCC_INTENCLR_MC1 (1 << TCC_INTENCLR_MC1_Pos)
#define TCC_INTENCLR_MC2_Pos 18 /**< \brief (TCC_INTENCLR) Match or Capture Channel 2 Interrupt Enable */
#define TCC_INTENCLR_MC2 (1 << TCC_INTENCLR_MC2_Pos)
#define TCC_INTENCLR_MC3_Pos 19 /**< \brief (TCC_INTENCLR) Match or Capture Channel 3 Interrupt Enable */
#define TCC_INTENCLR_MC3 (1 << TCC_INTENCLR_MC3_Pos)
#define TCC_INTENCLR_MC_Pos 16 /**< \brief (TCC_INTENCLR) Match or Capture Channel x Interrupt Enable */
#define TCC_INTENCLR_MC_Msk (0xFu << TCC_INTENCLR_MC_Pos)
#define TCC_INTENCLR_MC(value) ((TCC_INTENCLR_MC_Msk & ((value) << TCC_INTENCLR_MC_Pos)))
#define TCC_INTENCLR_MASK 0x000FF80Fu /**< \brief (TCC_INTENCLR) MASK Register */
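/* Usage sketch, assuming `Tcc *TCC0`: writing 1s to INTENCLR disables the
 * corresponding interrupt sources; the vector-style macro clears all four
 * match/capture channel enables in one write.
 *
 *   TCC0->INTENCLR.reg = TCC_INTENCLR_MC(0xF);
 */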
/* -------- TCC_INTENSET : (TCC Offset: 0x28) (R/W 32) Interrupt Enable Set -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t OVF:1; /*!< bit: 0 Overflow Interrupt Enable */
uint32_t TRG:1; /*!< bit: 1 Retrigger Interrupt Enable */
uint32_t CNT:1; /*!< bit: 2 Counter Interrupt Enable */
uint32_t ERR:1; /*!< bit: 3 Error Interrupt Enable */
uint32_t :7; /*!< bit: 4..10 Reserved */
uint32_t DFS:1; /*!< bit: 11 Non-Recoverable Debug Fault Interrupt Enable */
uint32_t FAULTA:1; /*!< bit: 12 Recoverable FaultA Interrupt Enable */
uint32_t FAULTB:1; /*!< bit: 13 Recoverable FaultB Interrupt Enable */
uint32_t FAULT0:1; /*!< bit: 14 Non-Recoverable Fault 0 Interrupt Enable */
uint32_t FAULT1:1; /*!< bit: 15 Non-Recoverable Fault 1 Interrupt Enable */
uint32_t MC0:1; /*!< bit: 16 Match or Capture Channel 0 Interrupt Enable */
uint32_t MC1:1; /*!< bit: 17 Match or Capture Channel 1 Interrupt Enable */
uint32_t MC2:1; /*!< bit: 18 Match or Capture Channel 2 Interrupt Enable */
uint32_t MC3:1; /*!< bit: 19 Match or Capture Channel 3 Interrupt Enable */
uint32_t :12; /*!< bit: 20..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :16; /*!< bit: 0..15 Reserved */
uint32_t MC:4; /*!< bit: 16..19 Match or Capture Channel x Interrupt Enable */
uint32_t :12; /*!< bit: 20..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_INTENSET_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_INTENSET_OFFSET 0x28 /**< \brief (TCC_INTENSET offset) Interrupt Enable Set */
#define TCC_INTENSET_RESETVALUE 0x00000000 /**< \brief (TCC_INTENSET reset_value) Interrupt Enable Set */
#define TCC_INTENSET_OVF_Pos 0 /**< \brief (TCC_INTENSET) Overflow Interrupt Enable */
#define TCC_INTENSET_OVF (0x1u << TCC_INTENSET_OVF_Pos)
#define TCC_INTENSET_TRG_Pos 1 /**< \brief (TCC_INTENSET) Retrigger Interrupt Enable */
#define TCC_INTENSET_TRG (0x1u << TCC_INTENSET_TRG_Pos)
#define TCC_INTENSET_CNT_Pos 2 /**< \brief (TCC_INTENSET) Counter Interrupt Enable */
#define TCC_INTENSET_CNT (0x1u << TCC_INTENSET_CNT_Pos)
#define TCC_INTENSET_ERR_Pos 3 /**< \brief (TCC_INTENSET) Error Interrupt Enable */
#define TCC_INTENSET_ERR (0x1u << TCC_INTENSET_ERR_Pos)
#define TCC_INTENSET_DFS_Pos 11 /**< \brief (TCC_INTENSET) Non-Recoverable Debug Fault Interrupt Enable */
#define TCC_INTENSET_DFS (0x1u << TCC_INTENSET_DFS_Pos)
#define TCC_INTENSET_FAULTA_Pos 12 /**< \brief (TCC_INTENSET) Recoverable FaultA Interrupt Enable */
#define TCC_INTENSET_FAULTA (0x1u << TCC_INTENSET_FAULTA_Pos)
#define TCC_INTENSET_FAULTB_Pos 13 /**< \brief (TCC_INTENSET) Recoverable FaultB Interrupt Enable */
#define TCC_INTENSET_FAULTB (0x1u << TCC_INTENSET_FAULTB_Pos)
#define TCC_INTENSET_FAULT0_Pos 14 /**< \brief (TCC_INTENSET) Non-Recoverable Fault 0 Interrupt Enable */
#define TCC_INTENSET_FAULT0 (0x1u << TCC_INTENSET_FAULT0_Pos)
#define TCC_INTENSET_FAULT1_Pos 15 /**< \brief (TCC_INTENSET) Non-Recoverable Fault 1 Interrupt Enable */
#define TCC_INTENSET_FAULT1 (0x1u << TCC_INTENSET_FAULT1_Pos)
#define TCC_INTENSET_MC0_Pos 16 /**< \brief (TCC_INTENSET) Match or Capture Channel 0 Interrupt Enable */
#define TCC_INTENSET_MC0 (1 << TCC_INTENSET_MC0_Pos)
#define TCC_INTENSET_MC1_Pos 17 /**< \brief (TCC_INTENSET) Match or Capture Channel 1 Interrupt Enable */
#define TCC_INTENSET_MC1 (1 << TCC_INTENSET_MC1_Pos)
#define TCC_INTENSET_MC2_Pos 18 /**< \brief (TCC_INTENSET) Match or Capture Channel 2 Interrupt Enable */
#define TCC_INTENSET_MC2 (1 << TCC_INTENSET_MC2_Pos)
#define TCC_INTENSET_MC3_Pos 19 /**< \brief (TCC_INTENSET) Match or Capture Channel 3 Interrupt Enable */
#define TCC_INTENSET_MC3 (1 << TCC_INTENSET_MC3_Pos)
#define TCC_INTENSET_MC_Pos 16 /**< \brief (TCC_INTENSET) Match or Capture Channel x Interrupt Enable */
#define TCC_INTENSET_MC_Msk (0xFu << TCC_INTENSET_MC_Pos)
#define TCC_INTENSET_MC(value) ((TCC_INTENSET_MC_Msk & ((value) << TCC_INTENSET_MC_Pos)))
#define TCC_INTENSET_MASK 0x000FF80Fu /**< \brief (TCC_INTENSET) MASK Register */
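/* Usage sketch, assuming `Tcc *TCC0`: writing 1s to INTENSET enables the
 * corresponding sources; the matching NVIC line for this TCC instance
 * still has to be enabled separately.
 *
 *   TCC0->INTENSET.reg = TCC_INTENSET_OVF | TCC_INTENSET_MC0;
 */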
/* -------- TCC_INTFLAG : (TCC Offset: 0x2C) (R/W 32) Interrupt Flag Status and Clear -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t OVF:1; /*!< bit: 0 Overflow */
uint32_t TRG:1; /*!< bit: 1 Retrigger */
uint32_t CNT:1; /*!< bit: 2 Counter */
uint32_t ERR:1; /*!< bit: 3 Error */
uint32_t :7; /*!< bit: 4..10 Reserved */
uint32_t DFS:1; /*!< bit: 11 Non-Recoverable Debug Fault */
uint32_t FAULTA:1; /*!< bit: 12 Recoverable FaultA */
uint32_t FAULTB:1; /*!< bit: 13 Recoverable FaultB */
uint32_t FAULT0:1; /*!< bit: 14 Non-Recoverable Fault 0 */
uint32_t FAULT1:1; /*!< bit: 15 Non-Recoverable Fault 1 */
uint32_t MC0:1; /*!< bit: 16 Match or Capture 0 */
uint32_t MC1:1; /*!< bit: 17 Match or Capture 1 */
uint32_t MC2:1; /*!< bit: 18 Match or Capture 2 */
uint32_t MC3:1; /*!< bit: 19 Match or Capture 3 */
uint32_t :12; /*!< bit: 20..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :16; /*!< bit: 0..15 Reserved */
uint32_t MC:4; /*!< bit: 16..19 Match or Capture x */
uint32_t :12; /*!< bit: 20..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_INTFLAG_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_INTFLAG_OFFSET 0x2C /**< \brief (TCC_INTFLAG offset) Interrupt Flag Status and Clear */
#define TCC_INTFLAG_RESETVALUE 0x00000000 /**< \brief (TCC_INTFLAG reset_value) Interrupt Flag Status and Clear */
#define TCC_INTFLAG_OVF_Pos 0 /**< \brief (TCC_INTFLAG) Overflow */
#define TCC_INTFLAG_OVF (0x1u << TCC_INTFLAG_OVF_Pos)
#define TCC_INTFLAG_TRG_Pos 1 /**< \brief (TCC_INTFLAG) Retrigger */
#define TCC_INTFLAG_TRG (0x1u << TCC_INTFLAG_TRG_Pos)
#define TCC_INTFLAG_CNT_Pos 2 /**< \brief (TCC_INTFLAG) Counter */
#define TCC_INTFLAG_CNT (0x1u << TCC_INTFLAG_CNT_Pos)
#define TCC_INTFLAG_ERR_Pos 3 /**< \brief (TCC_INTFLAG) Error */
#define TCC_INTFLAG_ERR (0x1u << TCC_INTFLAG_ERR_Pos)
#define TCC_INTFLAG_DFS_Pos 11 /**< \brief (TCC_INTFLAG) Non-Recoverable Debug Fault */
#define TCC_INTFLAG_DFS (0x1u << TCC_INTFLAG_DFS_Pos)
#define TCC_INTFLAG_FAULTA_Pos 12 /**< \brief (TCC_INTFLAG) Recoverable FaultA */
#define TCC_INTFLAG_FAULTA (0x1u << TCC_INTFLAG_FAULTA_Pos)
#define TCC_INTFLAG_FAULTB_Pos 13 /**< \brief (TCC_INTFLAG) Recoverable FaultB */
#define TCC_INTFLAG_FAULTB (0x1u << TCC_INTFLAG_FAULTB_Pos)
#define TCC_INTFLAG_FAULT0_Pos 14 /**< \brief (TCC_INTFLAG) Non-Recoverable Fault 0 */
#define TCC_INTFLAG_FAULT0 (0x1u << TCC_INTFLAG_FAULT0_Pos)
#define TCC_INTFLAG_FAULT1_Pos 15 /**< \brief (TCC_INTFLAG) Non-Recoverable Fault 1 */
#define TCC_INTFLAG_FAULT1 (0x1u << TCC_INTFLAG_FAULT1_Pos)
#define TCC_INTFLAG_MC0_Pos 16 /**< \brief (TCC_INTFLAG) Match or Capture 0 */
#define TCC_INTFLAG_MC0 (1 << TCC_INTFLAG_MC0_Pos)
#define TCC_INTFLAG_MC1_Pos 17 /**< \brief (TCC_INTFLAG) Match or Capture 1 */
#define TCC_INTFLAG_MC1 (1 << TCC_INTFLAG_MC1_Pos)
#define TCC_INTFLAG_MC2_Pos 18 /**< \brief (TCC_INTFLAG) Match or Capture 2 */
#define TCC_INTFLAG_MC2 (1 << TCC_INTFLAG_MC2_Pos)
#define TCC_INTFLAG_MC3_Pos 19 /**< \brief (TCC_INTFLAG) Match or Capture 3 */
#define TCC_INTFLAG_MC3 (1 << TCC_INTFLAG_MC3_Pos)
#define TCC_INTFLAG_MC_Pos 16 /**< \brief (TCC_INTFLAG) Match or Capture x */
#define TCC_INTFLAG_MC_Msk (0xFu << TCC_INTFLAG_MC_Pos)
#define TCC_INTFLAG_MC(value) ((TCC_INTFLAG_MC_Msk & ((value) << TCC_INTFLAG_MC_Pos)))
#define TCC_INTFLAG_MASK 0x000FF80Fu /**< \brief (TCC_INTFLAG) MASK Register */
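/* Usage sketch, assuming `Tcc *TCC0` and a vector named `TCC0_Handler`:
 * a handler typically reads INTFLAG, services the active sources, then
 * clears them by writing the same 1s back (write-1-to-clear).
 *
 *   void TCC0_Handler(void)
 *   {
 *       uint32_t flags = TCC0->INTFLAG.reg;
 *       if (flags & TCC_INTFLAG_OVF) {
 *           // period elapsed
 *       }
 *       TCC0->INTFLAG.reg = flags;
 *   }
 */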
/* -------- TCC_STATUS : (TCC Offset: 0x30) (R/W 32) Status -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t STOP:1; /*!< bit: 0 Stop */
uint32_t IDX:1; /*!< bit: 1 Ramp Index */
uint32_t :1; /*!< bit: 2 Reserved */
uint32_t DFS:1; /*!< bit: 3 Non-Recoverable Debug Fault State */
uint32_t :1; /*!< bit: 4 Reserved */
uint32_t PATTBV:1; /*!< bit: 5 Pattern Buffer Valid */
uint32_t WAVEBV:1; /*!< bit: 6 Wave Buffer Valid */
uint32_t PERBV:1; /*!< bit: 7 Period Buffer Valid */
uint32_t FAULTAIN:1; /*!< bit: 8 Recoverable FaultA Input */
uint32_t FAULTBIN:1; /*!< bit: 9 Recoverable FaultB Input */
uint32_t FAULT0IN:1; /*!< bit: 10 Non-Recoverable Fault0 Input */
uint32_t FAULT1IN:1; /*!< bit: 11 Non-Recoverable Fault1 Input */
uint32_t FAULTA:1; /*!< bit: 12 Recoverable FaultA State */
uint32_t FAULTB:1; /*!< bit: 13 Recoverable FaultB State */
uint32_t FAULT0:1; /*!< bit: 14 Non-Recoverable Fault 0 State */
uint32_t FAULT1:1; /*!< bit: 15 Non-Recoverable Fault 1 State */
uint32_t CCBV0:1; /*!< bit: 16 Compare Channel 0 Buffer Valid */
uint32_t CCBV1:1; /*!< bit: 17 Compare Channel 1 Buffer Valid */
uint32_t CCBV2:1; /*!< bit: 18 Compare Channel 2 Buffer Valid */
uint32_t CCBV3:1; /*!< bit: 19 Compare Channel 3 Buffer Valid */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t CMP0:1; /*!< bit: 24 Compare Channel 0 Value */
uint32_t CMP1:1; /*!< bit: 25 Compare Channel 1 Value */
uint32_t CMP2:1; /*!< bit: 26 Compare Channel 2 Value */
uint32_t CMP3:1; /*!< bit: 27 Compare Channel 3 Value */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :16; /*!< bit: 0..15 Reserved */
uint32_t CCBV:4; /*!< bit: 16..19 Compare Channel x Buffer Valid */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t CMP:4; /*!< bit: 24..27 Compare Channel x Value */
uint32_t :4; /*!< bit: 28..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_STATUS_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_STATUS_OFFSET 0x30 /**< \brief (TCC_STATUS offset) Status */
#define TCC_STATUS_RESETVALUE 0x00000001 /**< \brief (TCC_STATUS reset_value) Status */
#define TCC_STATUS_STOP_Pos 0 /**< \brief (TCC_STATUS) Stop */
#define TCC_STATUS_STOP (0x1u << TCC_STATUS_STOP_Pos)
#define TCC_STATUS_IDX_Pos 1 /**< \brief (TCC_STATUS) Ramp Index */
#define TCC_STATUS_IDX (0x1u << TCC_STATUS_IDX_Pos)
#define TCC_STATUS_DFS_Pos 3 /**< \brief (TCC_STATUS) Non-Recoverable Debug Fault State */
#define TCC_STATUS_DFS (0x1u << TCC_STATUS_DFS_Pos)
#define TCC_STATUS_PATTBV_Pos 5 /**< \brief (TCC_STATUS) Pattern Buffer Valid */
#define TCC_STATUS_PATTBV (0x1u << TCC_STATUS_PATTBV_Pos)
#define TCC_STATUS_WAVEBV_Pos 6 /**< \brief (TCC_STATUS) Wave Buffer Valid */
#define TCC_STATUS_WAVEBV (0x1u << TCC_STATUS_WAVEBV_Pos)
#define TCC_STATUS_PERBV_Pos 7 /**< \brief (TCC_STATUS) Period Buffer Valid */
#define TCC_STATUS_PERBV (0x1u << TCC_STATUS_PERBV_Pos)
#define TCC_STATUS_FAULTAIN_Pos 8 /**< \brief (TCC_STATUS) Recoverable FaultA Input */
#define TCC_STATUS_FAULTAIN (0x1u << TCC_STATUS_FAULTAIN_Pos)
#define TCC_STATUS_FAULTBIN_Pos 9 /**< \brief (TCC_STATUS) Recoverable FaultB Input */
#define TCC_STATUS_FAULTBIN (0x1u << TCC_STATUS_FAULTBIN_Pos)
#define TCC_STATUS_FAULT0IN_Pos 10 /**< \brief (TCC_STATUS) Non-Recoverable Fault0 Input */
#define TCC_STATUS_FAULT0IN (0x1u << TCC_STATUS_FAULT0IN_Pos)
#define TCC_STATUS_FAULT1IN_Pos 11 /**< \brief (TCC_STATUS) Non-Recoverable Fault1 Input */
#define TCC_STATUS_FAULT1IN (0x1u << TCC_STATUS_FAULT1IN_Pos)
#define TCC_STATUS_FAULTA_Pos 12 /**< \brief (TCC_STATUS) Recoverable FaultA State */
#define TCC_STATUS_FAULTA (0x1u << TCC_STATUS_FAULTA_Pos)
#define TCC_STATUS_FAULTB_Pos 13 /**< \brief (TCC_STATUS) Recoverable FaultB State */
#define TCC_STATUS_FAULTB (0x1u << TCC_STATUS_FAULTB_Pos)
#define TCC_STATUS_FAULT0_Pos 14 /**< \brief (TCC_STATUS) Non-Recoverable Fault 0 State */
#define TCC_STATUS_FAULT0 (0x1u << TCC_STATUS_FAULT0_Pos)
#define TCC_STATUS_FAULT1_Pos 15 /**< \brief (TCC_STATUS) Non-Recoverable Fault 1 State */
#define TCC_STATUS_FAULT1 (0x1u << TCC_STATUS_FAULT1_Pos)
#define TCC_STATUS_CCBV0_Pos 16 /**< \brief (TCC_STATUS) Compare Channel 0 Buffer Valid */
#define TCC_STATUS_CCBV0 (1 << TCC_STATUS_CCBV0_Pos)
#define TCC_STATUS_CCBV1_Pos 17 /**< \brief (TCC_STATUS) Compare Channel 1 Buffer Valid */
#define TCC_STATUS_CCBV1 (1 << TCC_STATUS_CCBV1_Pos)
#define TCC_STATUS_CCBV2_Pos 18 /**< \brief (TCC_STATUS) Compare Channel 2 Buffer Valid */
#define TCC_STATUS_CCBV2 (1 << TCC_STATUS_CCBV2_Pos)
#define TCC_STATUS_CCBV3_Pos 19 /**< \brief (TCC_STATUS) Compare Channel 3 Buffer Valid */
#define TCC_STATUS_CCBV3 (1 << TCC_STATUS_CCBV3_Pos)
#define TCC_STATUS_CCBV_Pos 16 /**< \brief (TCC_STATUS) Compare Channel x Buffer Valid */
#define TCC_STATUS_CCBV_Msk (0xFu << TCC_STATUS_CCBV_Pos)
#define TCC_STATUS_CCBV(value) ((TCC_STATUS_CCBV_Msk & ((value) << TCC_STATUS_CCBV_Pos)))
#define TCC_STATUS_CMP0_Pos 24 /**< \brief (TCC_STATUS) Compare Channel 0 Value */
#define TCC_STATUS_CMP0 (1 << TCC_STATUS_CMP0_Pos)
#define TCC_STATUS_CMP1_Pos 25 /**< \brief (TCC_STATUS) Compare Channel 1 Value */
#define TCC_STATUS_CMP1 (1 << TCC_STATUS_CMP1_Pos)
#define TCC_STATUS_CMP2_Pos 26 /**< \brief (TCC_STATUS) Compare Channel 2 Value */
#define TCC_STATUS_CMP2 (1 << TCC_STATUS_CMP2_Pos)
#define TCC_STATUS_CMP3_Pos 27 /**< \brief (TCC_STATUS) Compare Channel 3 Value */
#define TCC_STATUS_CMP3 (1 << TCC_STATUS_CMP3_Pos)
#define TCC_STATUS_CMP_Pos 24 /**< \brief (TCC_STATUS) Compare Channel x Value */
#define TCC_STATUS_CMP_Msk (0xFu << TCC_STATUS_CMP_Pos)
#define TCC_STATUS_CMP(value) ((TCC_STATUS_CMP_Msk & ((value) << TCC_STATUS_CMP_Pos)))
#define TCC_STATUS_MASK 0x0F0FFFEBu /**< \brief (TCC_STATUS) MASK Register */
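/* Usage sketch, assuming `Tcc *TCC0`: STATUS is mostly read-only state;
 * a common pattern is polling STOP after commanding a stop, or checking
 * a CCBVx flag before queueing the next buffered compare value.
 *
 *   while (!(TCC0->STATUS.reg & TCC_STATUS_STOP)) {
 *       // wait for the counter to stop
 *   }
 */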
/* -------- TCC_COUNT : (TCC Offset: 0x34) (R/W 32) Count -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t COUNT:24; /*!< bit: 0..23 Count Value */
uint32_t :8; /*!< bit: 24..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_COUNT_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_COUNT_OFFSET 0x34 /**< \brief (TCC_COUNT offset) Count */
#define TCC_COUNT_RESETVALUE 0x00000000 /**< \brief (TCC_COUNT reset_value) Count */
#define TCC_COUNT_COUNT_Pos 0 /**< \brief (TCC_COUNT) Count Value */
#define TCC_COUNT_COUNT_Msk (0xFFFFFFu << TCC_COUNT_COUNT_Pos)
#define TCC_COUNT_COUNT(value) ((TCC_COUNT_COUNT_Msk & ((value) << TCC_COUNT_COUNT_Pos)))
#define TCC_COUNT_MASK 0x00FFFFFFu /**< \brief (TCC_COUNT) MASK Register */
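/* Usage sketch, assuming `Tcc *TCC0`: COUNT holds the 24-bit counter
 * value. On parts where reads require synchronization, issue the
 * read-synchronization command first (see the device datasheet).
 *
 *   TCC0->COUNT.reg = TCC_COUNT_COUNT(0);              // restart from zero
 *   uint32_t now = TCC0->COUNT.reg & TCC_COUNT_MASK;   // current value
 */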
/* -------- TCC_PATT : (TCC Offset: 0x38) (R/W 16) Pattern -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint16_t PGE0:1; /*!< bit: 0 Pattern Generator 0 Output Enable */
uint16_t PGE1:1; /*!< bit: 1 Pattern Generator 1 Output Enable */
uint16_t PGE2:1; /*!< bit: 2 Pattern Generator 2 Output Enable */
uint16_t PGE3:1; /*!< bit: 3 Pattern Generator 3 Output Enable */
uint16_t PGE4:1; /*!< bit: 4 Pattern Generator 4 Output Enable */
uint16_t PGE5:1; /*!< bit: 5 Pattern Generator 5 Output Enable */
uint16_t PGE6:1; /*!< bit: 6 Pattern Generator 6 Output Enable */
uint16_t PGE7:1; /*!< bit: 7 Pattern Generator 7 Output Enable */
uint16_t PGV0:1; /*!< bit: 8 Pattern Generator 0 Output Value */
uint16_t PGV1:1; /*!< bit: 9 Pattern Generator 1 Output Value */
uint16_t PGV2:1; /*!< bit: 10 Pattern Generator 2 Output Value */
uint16_t PGV3:1; /*!< bit: 11 Pattern Generator 3 Output Value */
uint16_t PGV4:1; /*!< bit: 12 Pattern Generator 4 Output Value */
uint16_t PGV5:1; /*!< bit: 13 Pattern Generator 5 Output Value */
uint16_t PGV6:1; /*!< bit: 14 Pattern Generator 6 Output Value */
uint16_t PGV7:1; /*!< bit: 15 Pattern Generator 7 Output Value */
} bit; /*!< Structure used for bit access */
struct {
uint16_t PGE:8; /*!< bit: 0.. 7 Pattern Generator x Output Enable */
uint16_t PGV:8; /*!< bit: 8..15 Pattern Generator x Output Value */
} vec; /*!< Structure used for vec access */
uint16_t reg; /*!< Type used for register access */
} TCC_PATT_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_PATT_OFFSET 0x38 /**< \brief (TCC_PATT offset) Pattern */
#define TCC_PATT_RESETVALUE 0x0000 /**< \brief (TCC_PATT reset_value) Pattern */
#define TCC_PATT_PGE0_Pos 0 /**< \brief (TCC_PATT) Pattern Generator 0 Output Enable */
#define TCC_PATT_PGE0 (1 << TCC_PATT_PGE0_Pos)
#define TCC_PATT_PGE1_Pos 1 /**< \brief (TCC_PATT) Pattern Generator 1 Output Enable */
#define TCC_PATT_PGE1 (1 << TCC_PATT_PGE1_Pos)
#define TCC_PATT_PGE2_Pos 2 /**< \brief (TCC_PATT) Pattern Generator 2 Output Enable */
#define TCC_PATT_PGE2 (1 << TCC_PATT_PGE2_Pos)
#define TCC_PATT_PGE3_Pos 3 /**< \brief (TCC_PATT) Pattern Generator 3 Output Enable */
#define TCC_PATT_PGE3 (1 << TCC_PATT_PGE3_Pos)
#define TCC_PATT_PGE4_Pos 4 /**< \brief (TCC_PATT) Pattern Generator 4 Output Enable */
#define TCC_PATT_PGE4 (1 << TCC_PATT_PGE4_Pos)
#define TCC_PATT_PGE5_Pos 5 /**< \brief (TCC_PATT) Pattern Generator 5 Output Enable */
#define TCC_PATT_PGE5 (1 << TCC_PATT_PGE5_Pos)
#define TCC_PATT_PGE6_Pos 6 /**< \brief (TCC_PATT) Pattern Generator 6 Output Enable */
#define TCC_PATT_PGE6 (1 << TCC_PATT_PGE6_Pos)
#define TCC_PATT_PGE7_Pos 7 /**< \brief (TCC_PATT) Pattern Generator 7 Output Enable */
#define TCC_PATT_PGE7 (1 << TCC_PATT_PGE7_Pos)
#define TCC_PATT_PGE_Pos 0 /**< \brief (TCC_PATT) Pattern Generator x Output Enable */
#define TCC_PATT_PGE_Msk (0xFFu << TCC_PATT_PGE_Pos)
#define TCC_PATT_PGE(value) ((TCC_PATT_PGE_Msk & ((value) << TCC_PATT_PGE_Pos)))
#define TCC_PATT_PGV0_Pos 8 /**< \brief (TCC_PATT) Pattern Generator 0 Output Value */
#define TCC_PATT_PGV0 (1 << TCC_PATT_PGV0_Pos)
#define TCC_PATT_PGV1_Pos 9 /**< \brief (TCC_PATT) Pattern Generator 1 Output Value */
#define TCC_PATT_PGV1 (1 << TCC_PATT_PGV1_Pos)
#define TCC_PATT_PGV2_Pos 10 /**< \brief (TCC_PATT) Pattern Generator 2 Output Value */
#define TCC_PATT_PGV2 (1 << TCC_PATT_PGV2_Pos)
#define TCC_PATT_PGV3_Pos 11 /**< \brief (TCC_PATT) Pattern Generator 3 Output Value */
#define TCC_PATT_PGV3 (1 << TCC_PATT_PGV3_Pos)
#define TCC_PATT_PGV4_Pos 12 /**< \brief (TCC_PATT) Pattern Generator 4 Output Value */
#define TCC_PATT_PGV4 (1 << TCC_PATT_PGV4_Pos)
#define TCC_PATT_PGV5_Pos 13 /**< \brief (TCC_PATT) Pattern Generator 5 Output Value */
#define TCC_PATT_PGV5 (1 << TCC_PATT_PGV5_Pos)
#define TCC_PATT_PGV6_Pos 14 /**< \brief (TCC_PATT) Pattern Generator 6 Output Value */
#define TCC_PATT_PGV6 (1 << TCC_PATT_PGV6_Pos)
#define TCC_PATT_PGV7_Pos 15 /**< \brief (TCC_PATT) Pattern Generator 7 Output Value */
#define TCC_PATT_PGV7 (1 << TCC_PATT_PGV7_Pos)
#define TCC_PATT_PGV_Pos 8 /**< \brief (TCC_PATT) Pattern Generator x Output Value */
#define TCC_PATT_PGV_Msk (0xFFu << TCC_PATT_PGV_Pos)
#define TCC_PATT_PGV(value) ((TCC_PATT_PGV_Msk & ((value) << TCC_PATT_PGV_Pos)))
#define TCC_PATT_MASK 0xFFFFu /**< \brief (TCC_PATT) MASK Register */
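/* Usage sketch, assuming `Tcc *TCC0`: override outputs 0 and 1 with
 * fixed levels through the pattern generator, WO[0] high and WO[1] low.
 *
 *   TCC0->PATT.reg = TCC_PATT_PGE0 | TCC_PATT_PGE1   // enable overrides
 *                  | TCC_PATT_PGV0;                  // WO[0]=1, WO[1]=0
 */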
/* -------- TCC_WAVE : (TCC Offset: 0x3C) (R/W 32) Waveform Control -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t WAVEGEN:3; /*!< bit: 0.. 2 Waveform Generation */
uint32_t :1; /*!< bit: 3 Reserved */
uint32_t RAMP:2; /*!< bit: 4.. 5 Ramp Mode */
uint32_t :1; /*!< bit: 6 Reserved */
uint32_t CIPEREN:1; /*!< bit: 7 Circular Period Enable */
uint32_t CICCEN0:1; /*!< bit: 8 Circular Channel 0 Enable */
uint32_t CICCEN1:1; /*!< bit: 9 Circular Channel 1 Enable */
uint32_t CICCEN2:1; /*!< bit: 10 Circular Channel 2 Enable */
uint32_t CICCEN3:1; /*!< bit: 11 Circular Channel 3 Enable */
uint32_t :4; /*!< bit: 12..15 Reserved */
uint32_t POL0:1; /*!< bit: 16 Channel 0 Polarity */
uint32_t POL1:1; /*!< bit: 17 Channel 1 Polarity */
uint32_t POL2:1; /*!< bit: 18 Channel 2 Polarity */
uint32_t POL3:1; /*!< bit: 19 Channel 3 Polarity */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t SWAP0:1; /*!< bit: 24 Swap DTI Output Pair 0 */
uint32_t SWAP1:1; /*!< bit: 25 Swap DTI Output Pair 1 */
uint32_t SWAP2:1; /*!< bit: 26 Swap DTI Output Pair 2 */
uint32_t SWAP3:1; /*!< bit: 27 Swap DTI Output Pair 3 */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :8; /*!< bit: 0.. 7 Reserved */
uint32_t CICCEN:4; /*!< bit: 8..11 Circular Channel x Enable */
uint32_t :4; /*!< bit: 12..15 Reserved */
uint32_t POL:4; /*!< bit: 16..19 Channel x Polarity */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t SWAP:4; /*!< bit: 24..27 Swap DTI Output Pair x */
uint32_t :4; /*!< bit: 28..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_WAVE_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_WAVE_OFFSET 0x3C /**< \brief (TCC_WAVE offset) Waveform Control */
#define TCC_WAVE_RESETVALUE 0x00000000 /**< \brief (TCC_WAVE reset_value) Waveform Control */
#define TCC_WAVE_WAVEGEN_Pos 0 /**< \brief (TCC_WAVE) Waveform Generation */
#define TCC_WAVE_WAVEGEN_Msk (0x7u << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN(value) ((TCC_WAVE_WAVEGEN_Msk & ((value) << TCC_WAVE_WAVEGEN_Pos)))
#define TCC_WAVE_WAVEGEN_NFRQ_Val 0x0u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_MFRQ_Val 0x1u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_NPWM_Val 0x2u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_DSCRITICAL_Val 0x4u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_DSBOTTOM_Val 0x5u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_DSBOTH_Val 0x6u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_DSTOP_Val 0x7u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_WAVEGEN_NFRQ (TCC_WAVE_WAVEGEN_NFRQ_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN_MFRQ (TCC_WAVE_WAVEGEN_MFRQ_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN_NPWM (TCC_WAVE_WAVEGEN_NPWM_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN_DSCRITICAL (TCC_WAVE_WAVEGEN_DSCRITICAL_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN_DSBOTTOM (TCC_WAVE_WAVEGEN_DSBOTTOM_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN_DSBOTH (TCC_WAVE_WAVEGEN_DSBOTH_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_WAVEGEN_DSTOP (TCC_WAVE_WAVEGEN_DSTOP_Val << TCC_WAVE_WAVEGEN_Pos)
#define TCC_WAVE_RAMP_Pos 4 /**< \brief (TCC_WAVE) Ramp Mode */
#define TCC_WAVE_RAMP_Msk (0x3u << TCC_WAVE_RAMP_Pos)
#define TCC_WAVE_RAMP(value) ((TCC_WAVE_RAMP_Msk & ((value) << TCC_WAVE_RAMP_Pos)))
#define TCC_WAVE_RAMP_RAMP1_Val 0x0u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_RAMP_RAMP2A_Val 0x1u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_RAMP_RAMP2_Val 0x2u /**< \brief (TCC_WAVE) */
#define TCC_WAVE_RAMP_RAMP1 (TCC_WAVE_RAMP_RAMP1_Val << TCC_WAVE_RAMP_Pos)
#define TCC_WAVE_RAMP_RAMP2A (TCC_WAVE_RAMP_RAMP2A_Val << TCC_WAVE_RAMP_Pos)
#define TCC_WAVE_RAMP_RAMP2 (TCC_WAVE_RAMP_RAMP2_Val << TCC_WAVE_RAMP_Pos)
#define TCC_WAVE_CIPEREN_Pos 7 /**< \brief (TCC_WAVE) Circular Period Enable */
#define TCC_WAVE_CIPEREN (0x1u << TCC_WAVE_CIPEREN_Pos)
#define TCC_WAVE_CICCEN0_Pos 8 /**< \brief (TCC_WAVE) Circular Channel 0 Enable */
#define TCC_WAVE_CICCEN0 (1 << TCC_WAVE_CICCEN0_Pos)
#define TCC_WAVE_CICCEN1_Pos 9 /**< \brief (TCC_WAVE) Circular Channel 1 Enable */
#define TCC_WAVE_CICCEN1 (1 << TCC_WAVE_CICCEN1_Pos)
#define TCC_WAVE_CICCEN2_Pos 10 /**< \brief (TCC_WAVE) Circular Channel 2 Enable */
#define TCC_WAVE_CICCEN2 (1 << TCC_WAVE_CICCEN2_Pos)
#define TCC_WAVE_CICCEN3_Pos 11 /**< \brief (TCC_WAVE) Circular Channel 3 Enable */
#define TCC_WAVE_CICCEN3 (1 << TCC_WAVE_CICCEN3_Pos)
#define TCC_WAVE_CICCEN_Pos 8 /**< \brief (TCC_WAVE) Circular Channel x Enable */
#define TCC_WAVE_CICCEN_Msk (0xFu << TCC_WAVE_CICCEN_Pos)
#define TCC_WAVE_CICCEN(value) ((TCC_WAVE_CICCEN_Msk & ((value) << TCC_WAVE_CICCEN_Pos)))
#define TCC_WAVE_POL0_Pos 16 /**< \brief (TCC_WAVE) Channel 0 Polarity */
#define TCC_WAVE_POL0 (1 << TCC_WAVE_POL0_Pos)
#define TCC_WAVE_POL1_Pos 17 /**< \brief (TCC_WAVE) Channel 1 Polarity */
#define TCC_WAVE_POL1 (1 << TCC_WAVE_POL1_Pos)
#define TCC_WAVE_POL2_Pos 18 /**< \brief (TCC_WAVE) Channel 2 Polarity */
#define TCC_WAVE_POL2 (1 << TCC_WAVE_POL2_Pos)
#define TCC_WAVE_POL3_Pos 19 /**< \brief (TCC_WAVE) Channel 3 Polarity */
#define TCC_WAVE_POL3 (1 << TCC_WAVE_POL3_Pos)
#define TCC_WAVE_POL_Pos 16 /**< \brief (TCC_WAVE) Channel x Polarity */
#define TCC_WAVE_POL_Msk (0xFu << TCC_WAVE_POL_Pos)
#define TCC_WAVE_POL(value) ((TCC_WAVE_POL_Msk & ((value) << TCC_WAVE_POL_Pos)))
#define TCC_WAVE_SWAP0_Pos 24 /**< \brief (TCC_WAVE) Swap DTI Output Pair 0 */
#define TCC_WAVE_SWAP0 (1 << TCC_WAVE_SWAP0_Pos)
#define TCC_WAVE_SWAP1_Pos 25 /**< \brief (TCC_WAVE) Swap DTI Output Pair 1 */
#define TCC_WAVE_SWAP1 (1 << TCC_WAVE_SWAP1_Pos)
#define TCC_WAVE_SWAP2_Pos 26 /**< \brief (TCC_WAVE) Swap DTI Output Pair 2 */
#define TCC_WAVE_SWAP2 (1 << TCC_WAVE_SWAP2_Pos)
#define TCC_WAVE_SWAP3_Pos 27 /**< \brief (TCC_WAVE) Swap DTI Output Pair 3 */
#define TCC_WAVE_SWAP3 (1 << TCC_WAVE_SWAP3_Pos)
#define TCC_WAVE_SWAP_Pos 24 /**< \brief (TCC_WAVE) Swap DTI Output Pair x */
#define TCC_WAVE_SWAP_Msk (0xFu << TCC_WAVE_SWAP_Pos)
#define TCC_WAVE_SWAP(value) ((TCC_WAVE_SWAP_Msk & ((value) << TCC_WAVE_SWAP_Pos)))
#define TCC_WAVE_MASK 0x0F0F0FB7u /**< \brief (TCC_WAVE) MASK Register */
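/* Usage sketch, assuming `Tcc *TCC0`: single-slope PWM with channel 0
 * polarity inverted. WAVE is typically enable-protected, so write it
 * while the peripheral is disabled.
 *
 *   TCC0->WAVE.reg = TCC_WAVE_WAVEGEN_NPWM | TCC_WAVE_POL0;
 */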
/* -------- TCC_PER : (TCC Offset: 0x40) (R/W 32) Period -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t PER:24; /*!< bit: 0..23 Period Value */
uint32_t :8; /*!< bit: 24..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_PER_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_PER_OFFSET 0x40 /**< \brief (TCC_PER offset) Period */
#define TCC_PER_RESETVALUE 0xFFFFFFFF /**< \brief (TCC_PER reset_value) Period */
#define TCC_PER_PER_Pos 0 /**< \brief (TCC_PER) Period Value */
#define TCC_PER_PER_Msk (0xFFFFFFu << TCC_PER_PER_Pos)
#define TCC_PER_PER(value) ((TCC_PER_PER_Msk & ((value) << TCC_PER_PER_Pos)))
#define TCC_PER_MASK 0x00FFFFFFu /**< \brief (TCC_PER) MASK Register */
/* -------- TCC_CC : (TCC Offset: 0x44) (R/W 32) Compare and Capture -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t CC:24; /*!< bit: 0..23 Compare and Capture value */
uint32_t :8; /*!< bit: 24..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_CC_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_CC_OFFSET 0x44 /**< \brief (TCC_CC offset) Compare and Capture */
#define TCC_CC_RESETVALUE 0x00000000 /**< \brief (TCC_CC reset_value) Compare and Capture */
#define TCC_CC_CC_Pos 0 /**< \brief (TCC_CC) Compare and Capture value */
#define TCC_CC_CC_Msk (0xFFFFFFu << TCC_CC_CC_Pos)
#define TCC_CC_CC(value) ((TCC_CC_CC_Msk & ((value) << TCC_CC_CC_Pos)))
#define TCC_CC_MASK 0x00FFFFFFu /**< \brief (TCC_CC) MASK Register */
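/* Usage sketch, assuming `Tcc *TCC0` with CC exposed as an array as in
 * the vendor instance structs: PER and CC together set PWM period and
 * duty; in NPWM mode the duty cycle is roughly CC/(PER+1).
 *
 *   TCC0->PER.reg   = TCC_PER_PER(999);   // 1000-cycle period
 *   TCC0->CC[0].reg = TCC_CC_CC(500);     // ~50 % duty on channel 0
 */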
/* -------- TCC_PATTB : (TCC Offset: 0x64) (R/W 16) Pattern Buffer -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint16_t PGEB0:1; /*!< bit: 0 Pattern Generator 0 Output Enable Buffer */
uint16_t PGEB1:1; /*!< bit: 1 Pattern Generator 1 Output Enable Buffer */
uint16_t PGEB2:1; /*!< bit: 2 Pattern Generator 2 Output Enable Buffer */
uint16_t PGEB3:1; /*!< bit: 3 Pattern Generator 3 Output Enable Buffer */
uint16_t PGEB4:1; /*!< bit: 4 Pattern Generator 4 Output Enable Buffer */
uint16_t PGEB5:1; /*!< bit: 5 Pattern Generator 5 Output Enable Buffer */
uint16_t PGEB6:1; /*!< bit: 6 Pattern Generator 6 Output Enable Buffer */
uint16_t PGEB7:1; /*!< bit: 7 Pattern Generator 7 Output Enable Buffer */
uint16_t PGVB0:1; /*!< bit: 8 Pattern Generator 0 Output Value Buffer */
uint16_t PGVB1:1; /*!< bit: 9 Pattern Generator 1 Output Value Buffer */
uint16_t PGVB2:1; /*!< bit: 10 Pattern Generator 2 Output Value Buffer */
uint16_t PGVB3:1; /*!< bit: 11 Pattern Generator 3 Output Value Buffer */
uint16_t PGVB4:1; /*!< bit: 12 Pattern Generator 4 Output Value Buffer */
uint16_t PGVB5:1; /*!< bit: 13 Pattern Generator 5 Output Value Buffer */
uint16_t PGVB6:1; /*!< bit: 14 Pattern Generator 6 Output Value Buffer */
uint16_t PGVB7:1; /*!< bit: 15 Pattern Generator 7 Output Value Buffer */
} bit; /*!< Structure used for bit access */
struct {
uint16_t PGEB:8; /*!< bit: 0.. 7 Pattern Generator x Output Enable Buffer */
uint16_t PGVB:8; /*!< bit: 8..15 Pattern Generator x Output Value Buffer */
} vec; /*!< Structure used for vec access */
uint16_t reg; /*!< Type used for register access */
} TCC_PATTB_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_PATTB_OFFSET 0x64 /**< \brief (TCC_PATTB offset) Pattern Buffer */
#define TCC_PATTB_RESETVALUE 0x0000 /**< \brief (TCC_PATTB reset_value) Pattern Buffer */
#define TCC_PATTB_PGEB0_Pos 0 /**< \brief (TCC_PATTB) Pattern Generator 0 Output Enable Buffer */
#define TCC_PATTB_PGEB0 (1 << TCC_PATTB_PGEB0_Pos)
#define TCC_PATTB_PGEB1_Pos 1 /**< \brief (TCC_PATTB) Pattern Generator 1 Output Enable Buffer */
#define TCC_PATTB_PGEB1 (1 << TCC_PATTB_PGEB1_Pos)
#define TCC_PATTB_PGEB2_Pos 2 /**< \brief (TCC_PATTB) Pattern Generator 2 Output Enable Buffer */
#define TCC_PATTB_PGEB2 (1 << TCC_PATTB_PGEB2_Pos)
#define TCC_PATTB_PGEB3_Pos 3 /**< \brief (TCC_PATTB) Pattern Generator 3 Output Enable Buffer */
#define TCC_PATTB_PGEB3 (1 << TCC_PATTB_PGEB3_Pos)
#define TCC_PATTB_PGEB4_Pos 4 /**< \brief (TCC_PATTB) Pattern Generator 4 Output Enable Buffer */
#define TCC_PATTB_PGEB4 (1 << TCC_PATTB_PGEB4_Pos)
#define TCC_PATTB_PGEB5_Pos 5 /**< \brief (TCC_PATTB) Pattern Generator 5 Output Enable Buffer */
#define TCC_PATTB_PGEB5 (1 << TCC_PATTB_PGEB5_Pos)
#define TCC_PATTB_PGEB6_Pos 6 /**< \brief (TCC_PATTB) Pattern Generator 6 Output Enable Buffer */
#define TCC_PATTB_PGEB6 (1 << TCC_PATTB_PGEB6_Pos)
#define TCC_PATTB_PGEB7_Pos 7 /**< \brief (TCC_PATTB) Pattern Generator 7 Output Enable Buffer */
#define TCC_PATTB_PGEB7 (1 << TCC_PATTB_PGEB7_Pos)
#define TCC_PATTB_PGEB_Pos 0 /**< \brief (TCC_PATTB) Pattern Generator x Output Enable Buffer */
#define TCC_PATTB_PGEB_Msk (0xFFu << TCC_PATTB_PGEB_Pos)
#define TCC_PATTB_PGEB(value) ((TCC_PATTB_PGEB_Msk & ((value) << TCC_PATTB_PGEB_Pos)))
#define TCC_PATTB_PGVB0_Pos 8 /**< \brief (TCC_PATTB) Pattern Generator 0 Output Value Buffer */
#define TCC_PATTB_PGVB0 (1 << TCC_PATTB_PGVB0_Pos)
#define TCC_PATTB_PGVB1_Pos 9 /**< \brief (TCC_PATTB) Pattern Generator 1 Output Value Buffer */
#define TCC_PATTB_PGVB1 (1 << TCC_PATTB_PGVB1_Pos)
#define TCC_PATTB_PGVB2_Pos 10 /**< \brief (TCC_PATTB) Pattern Generator 2 Output Value Buffer */
#define TCC_PATTB_PGVB2 (1 << TCC_PATTB_PGVB2_Pos)
#define TCC_PATTB_PGVB3_Pos 11 /**< \brief (TCC_PATTB) Pattern Generator 3 Output Value Buffer */
#define TCC_PATTB_PGVB3 (1 << TCC_PATTB_PGVB3_Pos)
#define TCC_PATTB_PGVB4_Pos 12 /**< \brief (TCC_PATTB) Pattern Generator 4 Output Value Buffer */
#define TCC_PATTB_PGVB4 (1 << TCC_PATTB_PGVB4_Pos)
#define TCC_PATTB_PGVB5_Pos 13 /**< \brief (TCC_PATTB) Pattern Generator 5 Output Value Buffer */
#define TCC_PATTB_PGVB5 (1 << TCC_PATTB_PGVB5_Pos)
#define TCC_PATTB_PGVB6_Pos 14 /**< \brief (TCC_PATTB) Pattern Generator 6 Output Value Buffer */
#define TCC_PATTB_PGVB6 (1 << TCC_PATTB_PGVB6_Pos)
#define TCC_PATTB_PGVB7_Pos 15 /**< \brief (TCC_PATTB) Pattern Generator 7 Output Value Buffer */
#define TCC_PATTB_PGVB7 (1 << TCC_PATTB_PGVB7_Pos)
#define TCC_PATTB_PGVB_Pos 8 /**< \brief (TCC_PATTB) Pattern Generator x Output Value Buffer */
#define TCC_PATTB_PGVB_Msk (0xFFu << TCC_PATTB_PGVB_Pos)
#define TCC_PATTB_PGVB(value) ((TCC_PATTB_PGVB_Msk & ((value) << TCC_PATTB_PGVB_Pos)))
#define TCC_PATTB_MASK 0xFFFFu /**< \brief (TCC_PATTB) MASK Register */
/* -------- TCC_WAVEB : (TCC Offset: 0x68) (R/W 32) Waveform Control Buffer -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t WAVEGENB:3; /*!< bit: 0.. 2 Waveform Generation Buffer */
uint32_t :1; /*!< bit: 3 Reserved */
uint32_t RAMPB:2; /*!< bit: 4.. 5 Ramp Mode Buffer */
uint32_t :1; /*!< bit: 6 Reserved */
uint32_t CIPERENB:1; /*!< bit: 7 Circular Period Enable Buffer */
uint32_t CICCENB0:1; /*!< bit: 8 Circular Channel 0 Enable Buffer */
uint32_t CICCENB1:1; /*!< bit: 9 Circular Channel 1 Enable Buffer */
uint32_t CICCENB2:1; /*!< bit: 10 Circular Channel 2 Enable Buffer */
uint32_t CICCENB3:1; /*!< bit: 11 Circular Channel 3 Enable Buffer */
uint32_t :4; /*!< bit: 12..15 Reserved */
uint32_t POLB0:1; /*!< bit: 16 Channel 0 Polarity Buffer */
uint32_t POLB1:1; /*!< bit: 17 Channel 1 Polarity Buffer */
uint32_t POLB2:1; /*!< bit: 18 Channel 2 Polarity Buffer */
uint32_t POLB3:1; /*!< bit: 19 Channel 3 Polarity Buffer */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t SWAPB0:1; /*!< bit: 24 Swap DTI Output Pair 0 Buffer */
uint32_t SWAPB1:1; /*!< bit: 25 Swap DTI Output Pair 1 Buffer */
uint32_t SWAPB2:1; /*!< bit: 26 Swap DTI Output Pair 2 Buffer */
uint32_t SWAPB3:1; /*!< bit: 27 Swap DTI Output Pair 3 Buffer */
uint32_t :4; /*!< bit: 28..31 Reserved */
} bit; /*!< Structure used for bit access */
struct {
uint32_t :8; /*!< bit: 0.. 7 Reserved */
uint32_t CICCENB:4; /*!< bit: 8..11 Circular Channel x Enable Buffer */
uint32_t :4; /*!< bit: 12..15 Reserved */
uint32_t POLB:4; /*!< bit: 16..19 Channel x Polarity Buffer */
uint32_t :4; /*!< bit: 20..23 Reserved */
uint32_t SWAPB:4; /*!< bit: 24..27 Swap DTI Output Pair x Buffer */
uint32_t :4; /*!< bit: 28..31 Reserved */
} vec; /*!< Structure used for vec access */
uint32_t reg; /*!< Type used for register access */
} TCC_WAVEB_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_WAVEB_OFFSET 0x68 /**< \brief (TCC_WAVEB offset) Waveform Control Buffer */
#define TCC_WAVEB_RESETVALUE 0x00000000 /**< \brief (TCC_WAVEB reset_value) Waveform Control Buffer */
#define TCC_WAVEB_WAVEGENB_Pos 0 /**< \brief (TCC_WAVEB) Waveform Generation Buffer */
#define TCC_WAVEB_WAVEGENB_Msk (0x7u << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB(value) ((TCC_WAVEB_WAVEGENB_Msk & ((value) << TCC_WAVEB_WAVEGENB_Pos)))
#define TCC_WAVEB_WAVEGENB_NFRQ_Val 0x0u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_MFRQ_Val 0x1u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_NPWM_Val 0x2u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_DSCRITICAL_Val 0x4u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_DSBOTTOM_Val 0x5u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_DSBOTH_Val 0x6u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_DSTOP_Val 0x7u /**< \brief (TCC_WAVEB) */
#define TCC_WAVEB_WAVEGENB_NFRQ (TCC_WAVEB_WAVEGENB_NFRQ_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB_MFRQ (TCC_WAVEB_WAVEGENB_MFRQ_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB_NPWM (TCC_WAVEB_WAVEGENB_NPWM_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB_DSCRITICAL (TCC_WAVEB_WAVEGENB_DSCRITICAL_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB_DSBOTTOM (TCC_WAVEB_WAVEGENB_DSBOTTOM_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB_DSBOTH (TCC_WAVEB_WAVEGENB_DSBOTH_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_WAVEGENB_DSTOP (TCC_WAVEB_WAVEGENB_DSTOP_Val << TCC_WAVEB_WAVEGENB_Pos)
#define TCC_WAVEB_RAMPB_Pos 4 /**< \brief (TCC_WAVEB) Ramp Mode Buffer */
#define TCC_WAVEB_RAMPB_Msk (0x3u << TCC_WAVEB_RAMPB_Pos)
#define TCC_WAVEB_RAMPB(value) ((TCC_WAVEB_RAMPB_Msk & ((value) << TCC_WAVEB_RAMPB_Pos)))
#define TCC_WAVEB_CIPERENB_Pos 7 /**< \brief (TCC_WAVEB) Circular Period Enable Buffer */
#define TCC_WAVEB_CIPERENB (0x1u << TCC_WAVEB_CIPERENB_Pos)
#define TCC_WAVEB_CICCENB0_Pos 8 /**< \brief (TCC_WAVEB) Circular Channel 0 Enable Buffer */
#define TCC_WAVEB_CICCENB0 (1 << TCC_WAVEB_CICCENB0_Pos)
#define TCC_WAVEB_CICCENB1_Pos 9 /**< \brief (TCC_WAVEB) Circular Channel 1 Enable Buffer */
#define TCC_WAVEB_CICCENB1 (1 << TCC_WAVEB_CICCENB1_Pos)
#define TCC_WAVEB_CICCENB2_Pos 10 /**< \brief (TCC_WAVEB) Circular Channel 2 Enable Buffer */
#define TCC_WAVEB_CICCENB2 (1 << TCC_WAVEB_CICCENB2_Pos)
#define TCC_WAVEB_CICCENB3_Pos 11 /**< \brief (TCC_WAVEB) Circular Channel 3 Enable Buffer */
#define TCC_WAVEB_CICCENB3 (1 << TCC_WAVEB_CICCENB3_Pos)
#define TCC_WAVEB_CICCENB_Pos 8 /**< \brief (TCC_WAVEB) Circular Channel x Enable Buffer */
#define TCC_WAVEB_CICCENB_Msk (0xFu << TCC_WAVEB_CICCENB_Pos)
#define TCC_WAVEB_CICCENB(value) ((TCC_WAVEB_CICCENB_Msk & ((value) << TCC_WAVEB_CICCENB_Pos)))
#define TCC_WAVEB_POLB0_Pos 16 /**< \brief (TCC_WAVEB) Channel 0 Polarity Buffer */
#define TCC_WAVEB_POLB0 (1 << TCC_WAVEB_POLB0_Pos)
#define TCC_WAVEB_POLB1_Pos 17 /**< \brief (TCC_WAVEB) Channel 1 Polarity Buffer */
#define TCC_WAVEB_POLB1 (1 << TCC_WAVEB_POLB1_Pos)
#define TCC_WAVEB_POLB2_Pos 18 /**< \brief (TCC_WAVEB) Channel 2 Polarity Buffer */
#define TCC_WAVEB_POLB2 (1 << TCC_WAVEB_POLB2_Pos)
#define TCC_WAVEB_POLB3_Pos 19 /**< \brief (TCC_WAVEB) Channel 3 Polarity Buffer */
#define TCC_WAVEB_POLB3 (1 << TCC_WAVEB_POLB3_Pos)
#define TCC_WAVEB_POLB_Pos 16 /**< \brief (TCC_WAVEB) Channel x Polarity Buffer */
#define TCC_WAVEB_POLB_Msk (0xFu << TCC_WAVEB_POLB_Pos)
#define TCC_WAVEB_POLB(value) ((TCC_WAVEB_POLB_Msk & ((value) << TCC_WAVEB_POLB_Pos)))
#define TCC_WAVEB_SWAPB0_Pos 24 /**< \brief (TCC_WAVEB) Swap DTI Output Pair 0 Buffer */
#define TCC_WAVEB_SWAPB0 (1 << TCC_WAVEB_SWAPB0_Pos)
#define TCC_WAVEB_SWAPB1_Pos 25 /**< \brief (TCC_WAVEB) Swap DTI Output Pair 1 Buffer */
#define TCC_WAVEB_SWAPB1 (1 << TCC_WAVEB_SWAPB1_Pos)
#define TCC_WAVEB_SWAPB2_Pos 26 /**< \brief (TCC_WAVEB) Swap DTI Output Pair 2 Buffer */
#define TCC_WAVEB_SWAPB2 (1 << TCC_WAVEB_SWAPB2_Pos)
#define TCC_WAVEB_SWAPB3_Pos 27 /**< \brief (TCC_WAVEB) Swap DTI Output Pair 3 Buffer */
#define TCC_WAVEB_SWAPB3 (1 << TCC_WAVEB_SWAPB3_Pos)
#define TCC_WAVEB_SWAPB_Pos 24 /**< \brief (TCC_WAVEB) Swap DTI Output Pair x Buffer */
#define TCC_WAVEB_SWAPB_Msk (0xFu << TCC_WAVEB_SWAPB_Pos)
#define TCC_WAVEB_SWAPB(value) ((TCC_WAVEB_SWAPB_Msk & ((value) << TCC_WAVEB_SWAPB_Pos)))
#define TCC_WAVEB_MASK 0x0F0F0FB7u /**< \brief (TCC_WAVEB) MASK Register */
/* -------- TCC_PERB : (TCC Offset: 0x6C) (R/W 32) Period Buffer -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t PERB:24; /*!< bit: 0..23 Period Value */
uint32_t :8; /*!< bit: 24..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_PERB_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_PERB_OFFSET 0x6C /**< \brief (TCC_PERB offset) Period Buffer */
#define TCC_PERB_RESETVALUE 0xFFFFFFFF /**< \brief (TCC_PERB reset_value) Period Buffer */
#define TCC_PERB_PERB_Pos 0 /**< \brief (TCC_PERB) Period Value */
#define TCC_PERB_PERB_Msk (0xFFFFFFu << TCC_PERB_PERB_Pos)
#define TCC_PERB_PERB(value) ((TCC_PERB_PERB_Msk & ((value) << TCC_PERB_PERB_Pos)))
#define TCC_PERB_MASK 0x00FFFFFFu /**< \brief (TCC_PERB) MASK Register */
/* -------- TCC_CCB : (TCC Offset: 0x70) (R/W 32) Compare and Capture Buffer -------- */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef union {
struct {
uint32_t CCB:24; /*!< bit: 0..23 Compare and Capture buffer value */
uint32_t :8; /*!< bit: 24..31 Reserved */
} bit; /*!< Structure used for bit access */
uint32_t reg; /*!< Type used for register access */
} TCC_CCB_Type;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
#define TCC_CCB_OFFSET 0x70 /**< \brief (TCC_CCB offset) Compare and Capture Buffer */
#define TCC_CCB_RESETVALUE 0x00000000 /**< \brief (TCC_CCB reset_value) Compare and Capture Buffer */
#define TCC_CCB_CCB_Pos 0 /**< \brief (TCC_CCB) Compare and Capture buffer value */
#define TCC_CCB_CCB_Msk (0xFFFFFFu << TCC_CCB_CCB_Pos)
#define TCC_CCB_CCB(value) ((TCC_CCB_CCB_Msk & ((value) << TCC_CCB_CCB_Pos)))
#define TCC_CCB_MASK 0x00FFFFFFu /**< \brief (TCC_CCB) MASK Register */
/** \brief TCC hardware registers */
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
typedef struct {
__IO TCC_CTRLA_Type CTRLA; /**< \brief Offset: 0x00 (R/W 32) Control A */
__IO TCC_CTRLBCLR_Type CTRLBCLR; /**< \brief Offset: 0x04 (R/W 8) Control B Clear */
__IO TCC_CTRLBSET_Type CTRLBSET; /**< \brief Offset: 0x05 (R/W 8) Control B Set */
RoReg8 Reserved1[0x2];
__I TCC_SYNCBUSY_Type SYNCBUSY; /**< \brief Offset: 0x08 (R/ 32) Synchronization Busy */
__IO TCC_FCTRLA_Type FCTRLA; /**< \brief Offset: 0x0C (R/W 32) Recoverable FaultA Configuration */
__IO TCC_FCTRLB_Type FCTRLB; /**< \brief Offset: 0x10 (R/W 32) Recoverable FaultB Configuration */
__IO TCC_WEXCTRL_Type WEXCTRL; /**< \brief Offset: 0x14 (R/W 32) Waveform Extension Configuration */
__IO TCC_DRVCTRL_Type DRVCTRL; /**< \brief Offset: 0x18 (R/W 32) Driver Configuration */
RoReg8 Reserved2[0x2];
__IO TCC_DBGCTRL_Type DBGCTRL; /**< \brief Offset: 0x1E (R/W 8) Debug Control */
RoReg8 Reserved3[0x1];
__IO TCC_EVCTRL_Type EVCTRL; /**< \brief Offset: 0x20 (R/W 32) Event Control */
__IO TCC_INTENCLR_Type INTENCLR; /**< \brief Offset: 0x24 (R/W 32) Interrupt Enable Clear */
__IO TCC_INTENSET_Type INTENSET; /**< \brief Offset: 0x28 (R/W 32) Interrupt Enable Set */
__IO TCC_INTFLAG_Type INTFLAG; /**< \brief Offset: 0x2C (R/W 32) Interrupt Flag Status and Clear */
__IO TCC_STATUS_Type STATUS; /**< \brief Offset: 0x30 (R/W 32) Status */
__IO TCC_COUNT_Type COUNT; /**< \brief Offset: 0x34 (R/W 32) Count */
__IO TCC_PATT_Type PATT; /**< \brief Offset: 0x38 (R/W 16) Pattern */
RoReg8 Reserved4[0x2];
__IO TCC_WAVE_Type WAVE; /**< \brief Offset: 0x3C (R/W 32) Waveform Control */
__IO TCC_PER_Type PER; /**< \brief Offset: 0x40 (R/W 32) Period */
__IO TCC_CC_Type CC[4]; /**< \brief Offset: 0x44 (R/W 32) Compare and Capture */
RoReg8 Reserved5[0x10];
__IO TCC_PATTB_Type PATTB; /**< \brief Offset: 0x64 (R/W 16) Pattern Buffer */
RoReg8 Reserved6[0x2];
__IO TCC_WAVEB_Type WAVEB; /**< \brief Offset: 0x68 (R/W 32) Waveform Control Buffer */
__IO TCC_PERB_Type PERB; /**< \brief Offset: 0x6C (R/W 32) Period Buffer */
__IO TCC_CCB_Type CCB[4]; /**< \brief Offset: 0x70 (R/W 32) Compare and Capture Buffer */
} Tcc;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
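/* Editor's sketch (not part of the vendor header): how the double-buffered
 * registers defined above are typically staged. Assumes a device header that
 * provides a TCC0 instance pointer of type Tcc*, as on the SAMD21; the values
 * and the tcc0_stage_pwm_update name are illustrative. Guarded out with #if 0
 * so this header still compiles standalone. */
#if 0
static void tcc0_stage_pwm_update(uint32_t period, uint32_t duty)
{
    TCC0->PERB.reg   = TCC_PERB_PERB(period);   /* staged period value            */
    TCC0->CCB[0].reg = TCC_CCB_CCB(duty);       /* staged compare value, channel 0 */
    TCC0->WAVEB.reg  = TCC_WAVEB_WAVEGENB_NPWM; /* staged waveform mode: normal PWM */
    TCC0->PATTB.reg  = TCC_PATTB_PGEB(0x0F)     /* staged pattern enable, outputs 0..3 */
                     | TCC_PATTB_PGVB(0x05);    /* staged pattern values for those outputs */
}
#endif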
/*@}*/
#endif /* _SAMD21_TCC_COMPONENT_ */
|
export default {
elem: 'svg',
attrs: {
xmlns: 'http://www.w3.org/2000/svg',
viewBox: '0 0 32 32',
width: 32,
height: 32,
},
content: [
{
elem: 'path',
attrs: { d: 'M4 21v2h3.586L2 28.586 3.414 30 9 24.414V28h2v-7H4z' },
},
{
elem: 'path',
attrs: {
d:
'M23.5 4h-15l-6 8-.784 1.045L6.877 19l1.324-1.526L5.19 14h5.112L16 29.527l14.284-16.482zm3.5 8h-5.446l-3.75-6H22.5zm-7.804 0h-6.392L16 6.887zM5 12l4.5-6h4.696l-3.75 6zm10.999 11.721L12.432 14h7.136zm1.944.51L21.698 14h5.112z',
},
},
],
name: 'model--reference',
size: 32,
};
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _CreateSvgIcon = require('./utils/CreateSvgIcon');
var _CreateSvgIcon2 = _interopRequireDefault(_CreateSvgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/* eslint react/prop-types: 0 */
var NavigationColored = function NavigationColored(props) {
return _react2.default.createElement(
_CreateSvgIcon2.default,
{
name: 'NavigationColored',
className: props.className,
color: props.color,
size: props.size,
viewBox: '0 0 512 512',
style: props.style
},
_react2.default.createElement('path', { fill: props.color || "#68544f", d: 'm256 0c-140.699219 0-256 115.300781-256 256s115.300781 256 256 256 256-115.300781 256-256-115.300781-256-256-256zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#53433f", d: 'm512 256c0 140.699219-115.300781 256-256 256v-512c140.699219 0 256 115.300781 256 256zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#ede9e8", d: 'm256 61c-107.402344 0-195 87.597656-195 195 0 107.398438 87.597656 195 195 195s195-87.601562 195-195c0-107.402344-87.597656-195-195-195zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#dfd7d5", d: 'm451 256c0 107.398438-87.597656 195-195 195v-390c107.402344 0 195 87.597656 195 195zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#68544f", d: 'm271 436v61c0 8.398438-6.597656 15-15 15s-15-6.601562-15-15v-61c0-8.402344 6.597656-15 15-15s15 6.597656 15 15zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#68544f", d: 'm271 15v61c0 8.398438-6.597656 15-15 15s-15-6.601562-15-15v-61c0-8.402344 6.597656-15 15-15s15 6.597656 15 15zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#53433f", d: 'm497 271h-61c-8.289062 0-15-6.710938-15-15s6.710938-15 15-15h61c8.289062 0 15 6.710938 15 15s-6.710938 15-15 15zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#68544f", d: 'm76 271h-61c-8.289062 0-15-6.710938-15-15s6.710938-15 15-15h61c8.289062 0 15 6.710938 15 15s-6.710938 15-15 15zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#53433f", d: 'm271 15v61c0 8.398438-6.597656 15-15 15v-91c8.402344 0 15 6.597656 15 15zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#53433f", d: 'm271 436v61c0 8.398438-6.597656 15-15 15v-91c8.402344 0 15 6.597656 15 15zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#70da40", d: 'm358.898438 339.398438-89.097657-207.898438c-2.402343-5.402344-8.101562-8.101562-13.800781-8.101562s-11.398438 2.699218-13.800781 8.101562l-89.097657 207.898438c-2.703124 5.699218-1.199218 12.300781 3.296876 16.800781 4.503906 4.199219 11.402343 5.398437 17.101562 2.402343l82.5-40.800781 82.5 40.800781c5.699219 2.996094 12.597656 1.796876 17.101562-2.402343 4.496094-4.5 6-11.101563 3.296876-16.800781zm0 0' }),
_react2.default.createElement('path', { fill: props.color || "#49a520", d: 'm355.601562 356.199219c-4.503906 4.199219-11.402343 5.398437-17.101562 2.402343l-82.5-40.800781v-194.402343c5.699219 0 11.398438 2.703124 13.800781 8.101562l89.097657 207.898438c2.703124 5.699218 1.199218 12.300781-3.296876 16.800781zm0 0' })
);
};
exports.default = NavigationColored;
|
//
// Created by Federico Pagnozzi on 28/11/14.
// Copyright (c) 2014 Federico Pagnozzi. All rights reserved.
// This file is distributed under the BSD 2-Clause License. See LICENSE.TXT
// for details.
#ifndef GENERALPARSER_H
#define GENERALPARSER_H
#include "emilibase.h"
/**
 * All the classes that are involved in the parsing of the command line belong to this namespace
*/
namespace prs
{
/**
* @brief emili_header
 * This method prints the big EMILI banner at the beginning of the execution.
*/
void emili_header();
/**
* @brief info
 * info prints the basic information needed to use EMILI.
 * It does not print specific information about the AlgoBuilder;
 * that has to be provided by the AlgoBuilder class.
*/
void info();
/**
* @brief check
 * Checks that t is not nullptr; otherwise
* it prints message and halts the execution.
* @param t
* @param message
*/
void check(char* t,const char* message);
/**
* @brief printTab
 * printTab is used to print the messages produced during the parsing
 * process in a more readable fashion.
 * When incrementTabLevel is called, an additional \t is added at the beginning
 * of the line before string. decrementTabLevel reduces the number of \t
 * displayed.
* @param string
*/
void printTab(const char* string);
/**
 * @brief printTabPlusOne
 * printTabPlusOne is used to print messages one tab level deeper than printTab.
 * When incrementTabLevel is called, an additional \t is added at the beginning
 * of the line before string. decrementTabLevel reduces the number of \t
 * displayed.
* @param string
*/
void printTabPlusOne(const char* string);
int getTabLevel();
template <typename T >
void printTabPlusOne(const char* string,T value)
{
int tab_level = getTabLevel();
for(int i=0;i<=tab_level; i++)
{
std::cout << " ";
}
std::cout << string << " : " << value << std::endl;
}
/**
* @brief incrementTabLevel
* Increments the number of tabs added at the beginning of a line by printTab
*/
void incrementTabLevel();
/**
* @brief decrementTabLevel
* Decrements the number of tabs added at the beginning of a line by printTab
*/
void decrementTabLevel();
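// Editor's sketch (illustrative, not part of the original header): how the
// tab helpers above nest the parser output. Strings and values are made up.
//
//   prs::printTab("parsing local search");   // printed at the current level
//   prs::incrementTabLevel();
//   prs::printTabPlusOne("max steps", 100);  // printed one level deeper still
//   prs::decrementTabLevel();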
/**
* @brief The TokenManager class
* This class implements the Token Manager used
* to parse the arguments given to EMILI
*/
class TokenManager
{
protected:
/**
* @brief tokens
* The pointer to the command line arguments
*/
char** tokens;
/**
* @brief numberOfTokens
* Total number of tokens.
*/
int numberOfTokens;
/**
* @brief currentToken
* index to the current token.
*/
int currentToken;
/**
* @brief previousCurrentToken
 * index to the previous value assumed by currentToken
*/
int previousCurrentToken;
/**
* @brief empty
* returned in case there is no token
*/
char empty[3] = {'"',' ','"'};
public:
/**
* @brief TokenManager
* The constructor needs a pointer to the commandline arguments and the number of
* elements in the array.
* @param tokens
* Pointer to commandline arguments array.
* @param numberOfTokens
* Length of the array.
*/
TokenManager(char** tokens,int numberOfTokens):tokens(tokens),numberOfTokens(numberOfTokens),currentToken(0),previousCurrentToken(0) { }
/**
* @brief nextToken
 * consumes a token and returns it
* @return
* char string representing the token
*/
char* nextToken();
/**
* @brief peek
* returns a token without consuming it
* @return
* char string representing the token
*/
char* peek();
/**
* @brief operator *
* Works like peek.
* returns a token without consuming it
* @return
* char string representing the token
*/
char* operator *();
/**
* @brief next
 * consumes a token without returning it.
*/
void next();
/**
* @brief operator ++
 * consumes a token without returning it.
*/
void operator ++(int);
/**
* @brief getInteger
 * Parses the current token as an integer and consumes it;
 * otherwise it will show an error and end the execution.
 * @return
 * the integer value of the current token
*/
int getInteger();
/**
* @brief getDecimal
 * Parses the current token as a float and consumes it;
 * otherwise it will show an error and end the execution.
 * @return
 * the float value of the current token
*/
float getDecimal();
/**
* @brief checkToken
 * checks if token matches the current token;
 * if it does, the token is consumed and true is returned.
* @param token
* @return
* true if token matches currentToken, false otherwise
*/
bool checkToken(std::string& token);
/**
* @brief checkToken
 * checks if token matches the current token;
 * if it does, the token is consumed and true is returned.
* @param token
* @return
* true if token matches currentToken, false otherwise
*/
bool checkToken(const char* );
/**
* @brief tokenAt
* @param i
* @return
* returns the token at position i
*/
char* tokenAt(int i);
/**
* @brief seek
* @return
 * returns the position of the string in the token list if present, -1 otherwise
*/
int seek(const char*);
/**
* @brief move
 * moves the current token index to position i and returns true.
 * @param i
 * @return
 * returns false if the position is less than zero or greater than the number of tokens
*/
bool move(int i);
/**
* @brief restore
* restores the current token index to before the last move operation
*/
void restore();
/**
* @brief hasMoreTokens
* @return
* returns true if there are more tokens to parse
*/
bool hasMoreTokens();
};
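// Editor's sketch (illustrative, not part of the original header): typical
// TokenManager usage inside a builder. The "ils" token is hypothetical.
//
//   prs::TokenManager tm(argv, argc);
//   if(tm.checkToken("ils"))              // consumed only if it matches
//   {
//       int maxSteps = tm.getInteger();   // next token parsed as int, consumed
//       float alpha  = tm.getDecimal();   // next token parsed as float, consumed
//   }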
/**
* @brief The AlgoBuilder class
 * AlgoBuilder instantiates an algorithm starting from the parameters given at run time.
 * This class (and also GeneralParser) is part of the old system for parsing the command line
 * arguments. If you are implementing components for a new problem, or planning to add more to
 * an already implemented one, you should check the new system.
 * !these classes will be removed soon from the main branch!
*/
class AlgoBuilder
{
protected:
/**
* @brief availableProblems
 * This method should return a string listing all the supported problems
* @return
*/
virtual std::string availableProblems() const{ return std::string("Iamabstract!");}
public:
/**
* @brief isParsable
* This method should return true if the object is capable of building an algorithm
* to solve problem
* @param problem
* @return
*/
virtual bool isParsable(std::string& problem)=0 ;
/**
* @brief buildAlgo
* This method should return an algorithm ready to be run.
 * The algorithm is built by parsing the configuration provided at run time
* and contained in tm.
* @param tm
* @return
*/
virtual emili::LocalSearch* buildAlgo(prs::TokenManager& tm) {return nullptr;}
/**
* @brief info
 * this method should return a description of all the different components
* that are supported by the object.
*/
virtual std::string info() {return std::string("Iamabstract!");}
/**
* @brief operator ==
 * Overloading of the == operator for AlgoBuilder.
 * By default two AlgoBuilders are considered equal if they support the same problem(s).
*/
virtual bool operator ==(const AlgoBuilder& b);
};
/**
* @brief The GeneralParser class
 * This class, as well as AlgoBuilder, is part of the old system for parsing the command line
 * arguments. If you are implementing components for a new problem or planning to add more to
 * an already implemented one you should check the new system.
* !these classes will be removed soon from the main branch!
*/
class GeneralParser
{
protected:
std::vector< AlgoBuilder* > builders;
TokenManager tm;
public:
GeneralParser(char** tokens,int numberOfTokens):tm(tokens,numberOfTokens) { }
GeneralParser(TokenManager tokenmanager):tm(tokenmanager) { }
virtual emili::LocalSearch* parseParams();
virtual void registerBuilder(AlgoBuilder* builder);
virtual void removeBuilder(AlgoBuilder* builder);
};
/**
* TYPES TABLE
 * This "table" defines the type values for the
 * component types defined in emilibase.h
*/
/** ---------------------------------------------------
* | Component | Value
---------------------------------------------------*/
#define COMPONENT_EMPTY 0xEE //This type models the empty symbol
#define COMPONENT_NULL 0xFF //Something was expected but nothing was found
#define COMPONENT_ALGORITHM 0xA0
#define COMPONENT_INITIAL_SOLUTION_GENERATOR 0xB1
#define COMPONENT_TERMINATION_CRITERION 0xB2
#define COMPONENT_NEIGHBORHOOD 0xB3
#define COMPONENT_TABU_TENURE 0xB4
#define COMPONENT_NEIGHBORHOOD_OR_EMPTY 0xBE
#define COMPONENT_PERTURBATION 0xC1
#define COMPONENT_ACCEPTANCE 0xC2
#define COMPONENT_SHAKE 0xC3
#define COMPONENT_NEIGHBORHOOD_CHANGE 0xC4
#define COMPONENT_PROBLEM 0x99 //Request to load a problem
/**
* @brief Component class
 * A Component represents one of the parts into which the algorithm is divided.
 * It can be of many types (see the types table), from an Algorithm
 * to a Termination criterion.
*/
class Component
{
protected:
/**
* @brief type
* The type of the component (see types table)
---------------------------------------------------
| Component | Value
---------------------------------------------------
 COMPONENT_EMPTY 0xEE This type models the empty symbol
COMPONENT_NULL 0xFF Something was expected but nothing was found
COMPONENT_ALGORITHM 0xA0
COMPONENT_INITIAL_SOLUTION_GENERATOR 0xB1
COMPONENT_TERMINATION_CRITERION 0xB2
COMPONENT_NEIGHBORHOOD 0xB3
COMPONENT_TABU_TENURE 0xB4
COMPONENT_NEIGHBORHOOD_OR_EMPTY 0xBE
COMPONENT_PERTURBATION 0xC1
COMPONENT_ACCEPTANCE 0xC2
COMPONENT_SHAKE 0xC3
COMPONENT_NEIGHBORHOOD_CHANGE 0xC4
*/
int type;
/** @brief rawComponent
* The pointer to the actual component
*/
void* rawComponent;
/** @brief token
* The string token that represents this component (not used)
*/
char* token;
public:
/**
* @brief Component
* Component constructor
* @param type
* The type of the Component
* @param rawData
* A void pointer that leads to an object of the type type
*/
Component(int type,void* rawData):type(type),rawComponent(rawData) { }
/**
* @brief Component
* The default component has type COMPONENT_NULL
*/
Component():type(COMPONENT_NULL),rawComponent(nullptr) { }
/**
* @brief operator =
* Component copy operator
* @param a
* The Component to copy
* @return
* The copy.
*/
virtual Component& operator=(const Component& a);
/**
* @brief is
* Check if the component is of type "type".
* @param type
* @return
* Return true if the component is of type "type".
*/
bool is(int type) {return this->type == type;}
/**
* @brief getType
* returns Component type
* @return
*/
int getType() {return type;}
/**
* @brief setType
* sets Component type
* @param type
*/
void setType(int type) {this->type = type;}
/**
* @brief setRawComponent
* set the pointer to rawComponent
* @param rawc
*/
void setRawComponent(void* rawc) {this->rawComponent = rawc;}
template<typename T>
/**
* @brief get
 * Casts rawComponent to type T
* @return
*/
T* get(){ return (T*) rawComponent;}
};
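// Editor's sketch (illustrative, not part of the original header): how a
// Component is usually unwrapped after a retrieveComponent call inside a
// Builder subclass (Builder is declared below).
//
//   Component c = retrieveComponent(COMPONENT_TERMINATION_CRITERION);
//   if(!c.is(COMPONENT_NULL))
//       emili::Termination* term = c.get<emili::Termination>();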
class GeneralParserE;
/**
* @brief The Builder class
* A Builder is a class that is capable of building components for
* one or more problems.
 * The Builder gets enrolled in the parsing process if it is compatible with a given problem,
 * and the GeneralParser will ask the builder to load the instance if it is able to do so.
 * All that is required when extending this class is the implementation of the method isCompatibleWith
 * and the overriding of the buildX methods for which it is able to build Components.
*/
class Builder
{
protected:
/**
* @brief gp
* The GeneralParserE instance
*/
GeneralParserE& gp;
/**
* @brief tm
* The TokenManager provided by GeneralParserE
*/
TokenManager& tm;
/**
* @brief retrieveComponent
* The method to call to retrieve a component when needed.
* @param type
* The type of the component to retrieve.
* See component documentation for the base types.
* @param allowEmpty
 * If set to true it could return an empty component, otherwise it will
 * raise an error.
* @return
* A component of the type required ( if allowEmpty is true it could also
* be of type empty).
*/
virtual Component retrieveComponent(int type,bool allowEmpty=false);
/**
* @brief buildNeighborhoodVector
* This utility method builds a vector of 1 to n Neighborhood
* reading from the token manager.
* @return
* A vector of pointers to Neighborhood objects.
*/
virtual std::vector<emili::Neighborhood*> buildNeighborhoodVector();
/**
* @brief buildComponentVector
* This utility method builds a vector of 1 to n Components
* reading from the token manager.
* @return
* A vector of pointers to Components objects.
*/
template <class T>
std::vector<T*> buildComponentVector(int type);
public:
/**
* @brief Builder
* @param generalParser
* The general Parser object that will use this builder.
* @param tokenManager
* The tokenManager
*/
Builder(GeneralParserE& generalParser,TokenManager& tokenManager):gp(generalParser),tm(tokenManager) { }
/**
* @brief openInstance
 * If the Builder returns true to canOpenInstance, GeneralParserE can call this
* method to load the instance from a file.
* If this method is implemented in a new Builder,
* canOpenInstance(char *problem_definition) also has to be redefined otherwise
* GeneralParserE will never call the method.
* @return
* A pointer to the problem instance object.
*/
virtual emili::Problem* openInstance() {return nullptr;}
/**
* @brief isCompatibleWith
* This method tells GeneralParserE if this builder is compatible with a problem.
*
* @param problem_definition
* A string or char pointer that represents the problem definition.
* @return
* true if the builder is compatible with the problem, false otherwise.
*/
virtual bool isCompatibleWith(char* problem_definition)=0;
/**
* @brief isCompatibleWith
* This method tells GeneralParserE if this builder is compatible with a problem.
 * The implementation of this method is mandatory if you are extending this class.
* @param problem_definition
* A string or char pointer that represents the problem definition.
* @return
* true if the builder is compatible with the problem, false otherwise.
*/
virtual bool isCompatibleWith(std::string& problem_definition) {return isCompatibleWith((char*)problem_definition.c_str());}
/**
* @brief canOpenInstance
 * Tells GeneralParserE if the builder can load an instance of problem_definition
* @param problem_definition
* A string or char pointer that represents the problem definition.
* @return
* true if the builder can load the problem, false otherwise.
*/
virtual bool canOpenInstance(char* problem_definition) {return false;}
/**
* @brief canOpenInstance
 * Tells GeneralParserE if the builder can load an instance of problem_definition.
 * This method has to be overridden when extending this class, with the code that properly
 * checks whether the newly defined Builder can load the problem.
*
* @param problem_definition
* A string or char pointer that represents the problem definition.
* @return
* true if the builder can load the problem, false otherwise.
*/
virtual bool canOpenInstance(std::string& problem_definition) {return canOpenInstance((char*)problem_definition.c_str());}
/**
* @brief typeName
 * Returns a string that describes type.
* This method should be modified in
* a class that extends Builder and uses components different from the base ones
* @param type
* @return
* A string representing type.
*/
virtual std::string typeName(int type){return std::string();}
/**
* @brief buildComponent
* Tries to build a component of type "type" using the token manager to
* retrieve the information about the specific class and parameters of the
* component.
* This method should be modified in a class that extends Builder and defines
* new component types.
* @param type
* @return
* A component of type "type" otherwise it returns a component of type COMPONENT_NULL
* if nothing is found or a component of type COMPONENT_EMPTY if the end of a rule
* that allows empty is reached.
*/
virtual Component buildComponent(int type);
/**
* @brief buildAlgo
* This method is called by buildComponent(type) if type is COMPONENT_ALGORITHM.
* @return
* a pointer to an object of type COMPONENT_ALGORITHM or, if nothing found, nullptr.
*/
virtual emili::LocalSearch* buildAlgo() {return nullptr;}
/**
* @brief buildInitialSolution
* This method is called by buildComponent(type) if type is COMPONENT_INITIAL_SOLUTION_GENERATOR.
* @return
* a pointer to an object of type COMPONENT_INITIAL_SOLUTION_GENERATOR or, if nothing found, nullptr.
*/
virtual emili::InitialSolution* buildInitialSolution(){return nullptr;}
/**
 * @brief buildNeighborhood
* This method is called by buildComponent(type) if type is COMPONENT_NEIGHBORHOOD.
* @return
* a pointer to an object of type COMPONENT_NEIGHBORHOOD or, if nothing found, nullptr.
* At the end of a series of neighborhoods ( e.g. the specification of the neighborhoods
* used by a VNS) COMPONENT_EMPTY should be returned.
*/
virtual emili::Neighborhood* buildNeighborhood(){return nullptr;}
/**
 * @brief buildTermination
* This method is called by buildComponent(type) if type is COMPONENT_TERMINATION_CRITERION.
* @return
* a pointer to an object of type COMPONENT_TERMINATION_CRITERION or, if nothing found, nullptr.
*/
virtual emili::Termination* buildTermination(){return nullptr;}
/**
 * @brief buildPerturbation
* This method is called by buildComponent(type) if type is COMPONENT_PERTURBATION.
* @return
* a pointer to an object of type COMPONENT_PERTURBATION or, if nothing found, nullptr.
*/
virtual emili::Perturbation* buildPerturbation(){return nullptr;}
/**
* @brief buildAcceptance
* This method is called by buildComponent(type) if type is COMPONENT_ACCEPTANCE.
* @return
* a pointer to an object of type COMPONENT_ACCEPTANCE or, if nothing found, nullptr.
*/
virtual emili::Acceptance* buildAcceptance(){return nullptr;}
/**
* @brief buildShake
* This method is called by buildComponent(type) if type is COMPONENT_SHAKE.
* @return
* a pointer to an object of type COMPONENT_SHAKE or, if nothing found, nullptr.
*/
virtual emili::Shake* buildShake(){return nullptr;}
/**
* @brief buildNeighborhoodChange()
* This method is called by buildComponent(type) if type is COMPONENT_NEIGHBORHOOD_CHANGE.
* @return
* a pointer to an object of type COMPONENT_NEIGHBORHOOD_CHANGE or, if nothing found, nullptr.
*/
virtual emili::NeighborhoodChange* buildNeighborhoodChange(){return nullptr;}
/**
 * @brief buildTabuTenure
* This method is called by buildComponent(type) if type is COMPONENT_TABU_TENURE.
* @return
* a pointer to an object of type COMPONENT_TABU_TENURE or, if nothing found, nullptr.
*/
virtual emili::TabuMemory* buildTabuTenure(){return nullptr;}
/**
* @brief buildProblem
* This method is called by buildComponent(type) if type is COMPONENT_PROBLEM.
* @return
* a pointer to an object of type COMPONENT_PROBLEM or, if nothing found, nullptr.
*/
virtual emili::Problem* buildProblem(){return nullptr;}
/**
* @brief cmd_info
* This method should return a string representing all the supported components
* as well as the syntax to call them (e.g. additional literal parameters)
*
* @return
* info string.
*/
virtual std::string cmd_info(){return std::string("");}
};
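// Editor's sketch (illustrative, not part of the original header): the
// minimum a problem-specific Builder has to provide. The class name and the
// "MYPROBLEM" token are hypothetical; strcmp needs <cstring>.
//
//   class MyProblemBuilder : public prs::Builder
//   {
//   public:
//       MyProblemBuilder(prs::GeneralParserE& gp, prs::TokenManager& tm)
//           : Builder(gp, tm) { }
//       virtual bool isCompatibleWith(char* problem_definition)
//       { return strcmp(problem_definition, "MYPROBLEM") == 0; }
//       // then override the buildX methods this builder actually supports,
//       // e.g. virtual emili::Neighborhood* buildNeighborhood() { ... }
//   };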
/**
* @brief The GeneralParserE class
* This class defines the object responsible for the parsing of the command line arguments.
* It reads the instance file path, the problem definition and selects the compatible builders
* and guides the parsing process by calling the builders when needed. At the end it reads and
* set up the running time limit, the random seed and the print flag.
*/
class GeneralParserE: public GeneralParser
{
protected:
/**
* @brief allbuilders
* All the builder available at the beginning of the execution
*/
std::vector< Builder* > allbuilders;
/**
* @brief activeBuilders
* This vector contains only the builders that are compatible with the problem definition.
*/
std::vector< Builder* > activeBuilders;
/**
* @brief instance
* The problem instance
*/
emili::Problem* instance;
/**
* @brief typeName
 * Returns a string that describes type.
* @param type
* @return
*/
virtual std::string typeName(int type);
public:
/**
* @brief GeneralParserE
* This constructor uses the parameters to initialize a TokenManager
* @param tokens
* @param numberOfTokens
*/
GeneralParserE(char** tokens,int numberOfTokens):GeneralParser(tokens,numberOfTokens) { }
GeneralParserE(TokenManager tokenmanager):GeneralParser(tokenmanager) { }
/**
* @brief parseParams
* This method parse, builds and returns the algorithm represented by the
* command line arguments used to call emili.
* @return
* The algorithm ( if there is an error in the parsing the execution is terminated
* before the return)
*/
virtual emili::LocalSearch* parseParams();
/**
* @brief addBuilder
* Adds b to the vector of available builders
* @param b
*/
virtual void addBuilder(Builder* b);
/**
* @brief getInstance
* returns a pointer to the problem instance object.
* @return
*/
virtual emili::Problem* getInstance() {return instance;}
/**
* @brief setInstance
* Sets instance to ins.
* @param ins
*/
virtual void setInstance(emili::Problem* ins) {instance=ins;}
/**
* @brief getTokenManager
* @return
* returns a reference to the TokenManager.
*/
virtual TokenManager& getTokenManager() {return tm;}
/**
* @brief buildComponent
 * This method cycles through activeBuilders until it finds
 * one Builder that loads a component of type "type" using the
* TokenManager.
* @param type
* @return
* The component requested otherwise it generates an error and halts the execution.
*
*/
virtual Component buildComponent(int type);
/**
* @brief fatalError
 * Prints on stdout an error that occurred during the parsing.
* @param received_type
* @param expected_type
*/
virtual void fatalError(int received_type,int expected_type);
};
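// Editor's sketch (illustrative, not part of the original header): wiring the
// parser together in a main(). EmBaseBuilder is declared just below.
//
//   prs::GeneralParserE parser(argv, argc);
//   prs::EmBaseBuilder base(parser, parser.getTokenManager());
//   parser.addBuilder(&base);                 // plus any problem-specific builders
//   emili::LocalSearch* algorithm = parser.parseParams();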
/**
* @brief The EmBaseBuilder class
* This class implements the Builder for the fundamental,
 * problem independent components defined in emilibase.h.
*/
class EmBaseBuilder: public Builder
{
public:
/**
* @brief EmBaseBuilder
* EmBaseBuilder loads the base components described in emilibase.h
*/
EmBaseBuilder(GeneralParserE& generalParser,TokenManager& tokenManager):Builder(generalParser,tokenManager) { }
/**
* @brief isCompatibleWith
* This Builder is compatible with any problem,
 * since all the components loaded are problem independent
* @return
* true
*/
virtual bool isCompatibleWith(char *problem_definition) { return true;}
virtual emili::InitialSolution* buildInitialSolution();
/**
* @brief buildAlgo
* This method will load emili::IteratedLocalSearch, emili::LocalSearch,
* emili::VNDSearch, emili::BestTabuSearch, emili::FirstTabuSearch and trigger the load of all the other
* components.
* @return
* an algorithm of type LocalSearch
*/
virtual emili::LocalSearch* buildAlgo();
/**
* @brief buildTermination
* This method will load emili::LocalMinimaTermination, emili::MaxStepsTermination,
* emili::WhileTrueTermination, emili::MaxStepsOrLocmin, emili::TimedTermination.
* @return
* a termination
*/
virtual emili::Termination* buildTermination();
/**
* @brief buildPerturbation
* This method will load emili::RandomMovePerturbation, emili::VNRandomMovePerturbation and
* emili::NoPerturbation.
* @return
* a Perturbation
*/
virtual emili::Perturbation* buildPerturbation();
/**
* @brief buildAcceptance
* This method will load the acceptance criteria: emili::ImproveAccept, emili::AlwaysAccept,
* emili::Metropolis, emili::AcceptPlateau.
* @return
*/
virtual emili::Acceptance* buildAcceptance();
/**
* @brief buildNeighborhood
 * This method will load the general neighborhood emili::RandomConstructiveHeuristicNeighborhood.
* @return
* a neighborhood
*/
virtual emili::Neighborhood* buildNeighborhood();
/**
* @brief buildShake
* @return
* a Shake operator to be used in a VNS algorithm
*/
virtual emili::Shake* buildShake();
/**
* @brief buildNeighborhoodChange
* @return
* a NeighborhoodChange operator to be used in a VNS algorithm
*/
virtual emili::NeighborhoodChange* buildNeighborhoodChange();
};
}
#endif
|
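// Editor's note (descriptive comment, not in the original source): sortable
// bar chart of per-week Steelers passing stats, built on the d3 v3 API
// (d3.scale.ordinal, d3.svg.axis). It reads /data/steelers/passing.txt,
// filters on the player/category chosen in the #category8 and #category9
// dropdowns, and updateData2() animates the bars when the selection changes.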
var margin = {top: 53, right: 20, bottom: 30, left: 40},
width = 400 - margin.left - margin.right,
height = 285 - margin.top - margin.bottom;
var x = d3.scale.ordinal()
.rangeRoundBands([0, width], .1, 1);
var y = d3.scale.linear()
.range([height, 0]);
var xAxis = d3.svg.axis()
.scale(x)
.orient("bottom");
var yAxis = d3.svg.axis()
.scale(y)
.orient("left");
function run() {
var svg = d3.select("#sortablebarchart2").append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
.append("g")
.attr("transform", "translate(" + margin.left + "," + margin.top + ")");
var e = document.getElementById("category8");
var e1 = document.getElementById("category9");
var category = e.options[e.selectedIndex].value;
var player = e1.options[e1.selectedIndex].value;
d3.tsv("/data/steelers/passing.txt", function(error, data) {
data.forEach(function(d) {
if (d.name==player) {
if (category == "cmp") d.frequency = +d.cmp;
else if (category == "att") d.frequency = +d.att;
else if (category == "yds") d.frequency = +d.yds;
else if (category == "td") d.frequency = +d.td;
else if (category == "int") d.frequency = +d.int;
}
else
d.frequency = 0;
});
x.domain(data.map(function(d) { return d.week; }));
y.domain([0, d3.max(data, function(d) { return d.frequency; })]);
svg.append("g")
.attr("class", "x axis")
.attr("fill", "white")
.attr("transform", "translate(0," + height + ")")
.call(xAxis)
.append("text")
.attr("x", 20)
.attr("y", 15)
.attr("dx", ".71em")
.attr("fill", "white")
.style("text-anchor", "end")
.text("week");
svg.append("g")
.attr("class", "y axis")
.attr("fill", "white")
.call(yAxis)
.append("text")
.attr("transform", "rotate(-90)")
.attr("y", 6)
.attr("dy", ".71em")
.style("text-anchor", "end")
svg.selectAll(".bar")
.data(data)
.enter().append("rect")
.attr("class", "bar")
.attr("x", function(d) { return x(d.week); })
.attr("width", x.rangeBand())
.attr("y", function(d) { return y(d.frequency); })
.attr("height", function(d) { return height - y(d.frequency); });
});
}
run();
function updateData2() {
var savedData = [];
var count = 0;
var e = document.getElementById("category8");
var e1 = document.getElementById("category9");
var category = e.options[e.selectedIndex].value;
var player = e1.options[e1.selectedIndex].value;
d3.tsv("/data/steelers/passing.txt", function(error, data) {
data.forEach(function(d) {
if (d.name==player) {
if (category == "cmp") {
d.frequency = +d.cmp;
savedData[count++] = d.frequency;
}
else if (category == "att") {
d.frequency = +d.att;
savedData[count++] = d.frequency;
}
else if (category == "yds") {
d.frequency = +d.yds;
savedData[count++] = d.frequency;
}
else if (category == "td") {
d.frequency = +d.td;
savedData[count++] = d.frequency;
}
else if (category == "int") {
d.frequency = +d.int;
savedData[count++] = d.frequency;
}
}
else
d.frequency = 0;
});
x.domain(data.map(function(d) { return d.week; }));
y.domain([0, d3.max(data, function(d) { return d.frequency; })]);
var svg = d3.select("#sortablebarchart2").transition();
count = 0;
svg.selectAll(".bar")
.attr("x", function(d) { return x(d.week); })
.attr("width", x.rangeBand())
.attr("y", function(d) {
if(d.name==player){
d.frequency = +savedData[count++];
return y(d.frequency);
}
})
.attr("height", function(d) {
if(d.name==player){
return height - y(d.frequency);
}
});
svg.select(".x.axis")
.duration(750)
.call(xAxis);
svg.select(".y.axis")
.duration(750)
.call(yAxis);
});
}
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google import auth
from google.api_core import client_options
from google.api_core import exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.auth import credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.asset_v1p4beta1.services.asset_service import AssetServiceAsyncClient
from google.cloud.asset_v1p4beta1.services.asset_service import AssetServiceClient
from google.cloud.asset_v1p4beta1.services.asset_service import transports
from google.cloud.asset_v1p4beta1.types import asset_service
from google.cloud.asset_v1p4beta1.types import assets
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import duration_pb2 as duration # type: ignore
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert AssetServiceClient._get_default_mtls_endpoint(None) is None
assert (
AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
)
assert (
AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [AssetServiceClient, AssetServiceAsyncClient])
def test_asset_service_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client._transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
assert client._transport._credentials == creds
assert client._transport._host == "cloudasset.googleapis.com:443"
def test_asset_service_client_get_transport_class():
transport = AssetServiceClient.get_transport_class()
assert transport == transports.AssetServiceGrpcTransport
transport = AssetServiceClient.get_transport_class("grpc")
assert transport == transports.AssetServiceGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"),
(
AssetServiceAsyncClient,
transports.AssetServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)
)
@mock.patch.object(
AssetServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(AssetServiceAsyncClient),
)
def test_asset_service_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(AssetServiceClient, "get_transport_class") as gtc:
transport = transport_class(credentials=credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(AssetServiceClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
ssl_channel_credentials=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"),
(
AssetServiceAsyncClient,
transports.AssetServiceGrpcAsyncIOTransport,
"grpc_asyncio",
"true",
),
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"),
(
AssetServiceAsyncClient,
transports.AssetServiceGrpcAsyncIOTransport,
"grpc_asyncio",
"false",
),
],
)
@mock.patch.object(
AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)
)
@mock.patch.object(
AssetServiceAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(AssetServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_asset_service_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
ssl_channel_creds = mock.Mock()
with mock.patch(
"grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_ssl_channel_creds = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_ssl_channel_creds = ssl_channel_creds
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
with mock.patch(
"google.auth.transport.grpc.SslCredentials.is_mtls",
new_callable=mock.PropertyMock,
) as is_mtls_mock:
with mock.patch(
"google.auth.transport.grpc.SslCredentials.ssl_credentials",
new_callable=mock.PropertyMock,
) as ssl_credentials_mock:
if use_client_cert_env == "false":
is_mtls_mock.return_value = False
ssl_credentials_mock.return_value = None
expected_host = client.DEFAULT_ENDPOINT
expected_ssl_channel_creds = None
else:
is_mtls_mock.return_value = True
ssl_credentials_mock.return_value = mock.Mock()
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_ssl_channel_creds = (
ssl_credentials_mock.return_value
)
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
with mock.patch(
"google.auth.transport.grpc.SslCredentials.is_mtls",
new_callable=mock.PropertyMock,
) as is_mtls_mock:
is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"),
(
AssetServiceAsyncClient,
transports.AssetServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_asset_service_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"),
(
AssetServiceAsyncClient,
transports.AssetServiceGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_asset_service_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_asset_service_client_client_options_from_dict():
with mock.patch(
"google.cloud.asset_v1p4beta1.services.asset_service.transports.AssetServiceGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = AssetServiceClient(client_options={"api_endpoint": "squid.clam.whelk"})
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_analyze_iam_policy(
transport: str = "grpc", request_type=asset_service.AnalyzeIamPolicyRequest
):
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.analyze_iam_policy), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = asset_service.AnalyzeIamPolicyResponse(fully_explored=True,)
response = client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.AnalyzeIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.AnalyzeIamPolicyResponse)
assert response.fully_explored is True
def test_analyze_iam_policy_from_dict():
test_analyze_iam_policy(request_type=dict)
@pytest.mark.asyncio
async def test_analyze_iam_policy_async(transport: str = "grpc_asyncio"):
client = AssetServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = asset_service.AnalyzeIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.analyze_iam_policy), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
asset_service.AnalyzeIamPolicyResponse(fully_explored=True,)
)
response = await client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, asset_service.AnalyzeIamPolicyResponse)
assert response.fully_explored is True
def test_analyze_iam_policy_field_headers():
client = AssetServiceClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.AnalyzeIamPolicyRequest()
request.analysis_query.parent = "analysis_query.parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.analyze_iam_policy), "__call__"
) as call:
call.return_value = asset_service.AnalyzeIamPolicyResponse()
client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"analysis_query.parent=analysis_query.parent/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_analyze_iam_policy_field_headers_async():
client = AssetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.AnalyzeIamPolicyRequest()
request.analysis_query.parent = "analysis_query.parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.analyze_iam_policy), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
asset_service.AnalyzeIamPolicyResponse()
)
await client.analyze_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"analysis_query.parent=analysis_query.parent/value",
) in kw["metadata"]
def test_export_iam_policy_analysis(
transport: str = "grpc", request_type=asset_service.ExportIamPolicyAnalysisRequest
):
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.export_iam_policy_analysis), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.export_iam_policy_analysis(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == asset_service.ExportIamPolicyAnalysisRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_export_iam_policy_analysis_from_dict():
test_export_iam_policy_analysis(request_type=dict)
@pytest.mark.asyncio
async def test_export_iam_policy_analysis_async(transport: str = "grpc_asyncio"):
client = AssetServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = asset_service.ExportIamPolicyAnalysisRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.export_iam_policy_analysis), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.export_iam_policy_analysis(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_export_iam_policy_analysis_field_headers():
client = AssetServiceClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ExportIamPolicyAnalysisRequest()
request.analysis_query.parent = "analysis_query.parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.export_iam_policy_analysis), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.export_iam_policy_analysis(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"analysis_query.parent=analysis_query.parent/value",
) in kw["metadata"]
@pytest.mark.asyncio
async def test_export_iam_policy_analysis_field_headers_async():
client = AssetServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = asset_service.ExportIamPolicyAnalysisRequest()
request.analysis_query.parent = "analysis_query.parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.export_iam_policy_analysis), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.export_iam_policy_analysis(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
"x-goog-request-params",
"analysis_query.parent=analysis_query.parent/value",
) in kw["metadata"]
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AssetServiceClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = AssetServiceClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
client = AssetServiceClient(transport=transport)
assert client._transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.AssetServiceGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.AssetServiceGrpcAsyncIOTransport(
credentials=credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize(
"transport_class",
[transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = AssetServiceClient(credentials=credentials.AnonymousCredentials(),)
assert isinstance(client._transport, transports.AssetServiceGrpcTransport,)
def test_asset_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(exceptions.DuplicateCredentialArgs):
transport = transports.AssetServiceTransport(
credentials=credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_asset_service_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.asset_v1p4beta1.services.asset_service.transports.AssetServiceTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.AssetServiceTransport(
credentials=credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"analyze_iam_policy",
"export_iam_policy_analysis",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
# Additionally, the LRO client (a property) should
# also raise NotImplementedError
with pytest.raises(NotImplementedError):
transport.operations_client
def test_asset_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
auth, "load_credentials_from_file"
) as load_creds, mock.patch(
"google.cloud.asset_v1p4beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.AssetServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_asset_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(auth, "default") as adc, mock.patch(
"google.cloud.asset_v1p4beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.AssetServiceTransport()
adc.assert_called_once()
def test_asset_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
AssetServiceClient()
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id=None,
)
def test_asset_service_transport_auth_adc():
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
transports.AssetServiceGrpcTransport(
host="squid.clam.whelk", quota_project_id="octopus"
)
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_asset_service_host_no_port():
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="cloudasset.googleapis.com"
),
)
assert client._transport._host == "cloudasset.googleapis.com:443"
def test_asset_service_host_with_port():
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="cloudasset.googleapis.com:8000"
),
)
assert client._transport._host == "cloudasset.googleapis.com:8000"
def test_asset_service_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
# Check that channel is used if provided.
transport = transports.AssetServiceGrpcTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
def test_asset_service_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
# Check that channel is used if provided.
transport = transports.AssetServiceGrpcAsyncIOTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
@pytest.mark.parametrize(
"transport_class",
[transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport],
)
def test_asset_service_transport_channel_mtls_with_client_cert_source(transport_class):
with mock.patch(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
transport_class, "create_channel", autospec=True
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=("https://www.googleapis.com/auth/cloud-platform",),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@pytest.mark.parametrize(
"transport_class",
[transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport],
)
def test_asset_service_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
transport_class, "create_channel", autospec=True
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=("https://www.googleapis.com/auth/cloud-platform",),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
def test_asset_service_grpc_lro_client():
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(), transport="grpc",
)
transport = client._transport
# Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_asset_service_grpc_lro_async_client():
client = AssetServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
transport = client._client._transport
# Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.AssetServiceTransport, "_prep_wrapped_messages"
) as prep:
client = AssetServiceClient(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.AssetServiceTransport, "_prep_wrapped_messages"
) as prep:
transport_class = AssetServiceClient.get_transport_class()
transport = transport_class(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
|
module.exports = {
testURL: 'http://localhost/',
testEnvironment: 'jsdom',
reporters: ['default', ['jest-junit', { output: './coverage/tests-report.xml' }]],
preset: "ts-jest",
testEnvironment: "jsdom",
testPathIgnorePatterns: [
"dist/*"
],
moduleDirectories: [
'node_modules',
'./'
],
moduleNameMapper: {
'\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$': 'jest-transform-stub',
'\\.(css|less|scss|sass)$': 'identity-obj-proxy',
'\\@/(.*)$': "<rootDir>/src/$1",
},
collectCoverageFrom: [
'<rootDir>/src/**/*.{ts,tsx}',
'!<rootDir>/src/index.ts',
'!<rootDir>/src/ts/**/*.ts',
'!<rootDir>/src/constants/**/*.{ts,tsx}',
'!<rootDir>/src/__tests__/**/*.{ts,tsx}',
'!<rootDir>/dist/__tests__/**/*.{ts,tsx}',
'!<rootDir>/src/stories/**/*.{ts,tsx}',
],
};
|
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const configParser_1 = require("./configParser");
const logger_1 = require("./logger");
let logger = new logger_1.Logger('plugins');
/**
* The plugin API for Protractor. Note that this API is unstable. See
* plugins/README.md for more information.
*
* @constructor
* @param {Object} config parsed from the config file
*/
class Plugins {
constructor(config) {
/**
* @see docs/plugins.md#writing-plugins for information on these functions
*/
this.setup = this.pluginFunFactory('setup');
this.onPrepare = this.pluginFunFactory('onPrepare');
this.teardown = this.pluginFunFactory('teardown');
this.postResults = this.pluginFunFactory('postResults');
this.postTest = this.pluginFunFactory('postTest');
this.onPageLoad = this.pluginFunFactory('onPageLoad');
this.onPageStable = this.pluginFunFactory('onPageStable');
this.waitForPromise = this.pluginFunFactory('waitForPromise');
this.waitForCondition = this.pluginFunFactory('waitForCondition', true);
this.pluginObjs = [];
this.assertions = {};
this.resultsReported = false;
if (config.plugins) {
config.plugins.forEach((pluginConf, i) => {
let path;
if (pluginConf.path) {
path = configParser_1.ConfigParser.resolveFilePatterns(pluginConf.path, true, config.configDir)[0];
if (!path) {
throw new Error('Invalid path to plugin: ' + pluginConf.path);
}
}
else {
path = pluginConf.package;
}
let pluginObj;
if (path) {
pluginObj = require(path);
}
else if (pluginConf.inline) {
pluginObj = pluginConf.inline;
}
else {
throw new Error('Plugin configuration did not contain a valid path or ' +
'inline definition.');
}
this.annotatePluginObj(pluginObj, pluginConf, i);
logger.debug('Plugin "' + pluginObj.name + '" loaded.');
this.pluginObjs.push(pluginObj);
});
}
}
/**
* Adds properties to a plugin's object
*
* @see docs/plugins.md#provided-properties-and-functions
*/
annotatePluginObj(obj, conf, i) {
let addAssertion = (info, passed, message) => {
if (this.resultsReported) {
throw new Error('Cannot add new test results, since they were already ' +
'reported.');
}
info = info || {};
const specName = info.specName || (obj.name + ' Plugin Tests');
const assertion = { passed: passed };
if (!passed) {
assertion.errorMsg = message;
if (info.stackTrace) {
assertion.stackTrace = info.stackTrace;
}
}
this.assertions[specName] = this.assertions[specName] || [];
this.assertions[specName].push(assertion);
};
obj.name = obj.name || conf.name || conf.path || conf.package || ('Plugin #' + i);
obj.config = conf;
obj.addFailure = (message, info) => {
addAssertion(info, false, message);
};
obj.addSuccess = (options) => {
addAssertion(options, true);
};
obj.addWarning = (message, options) => {
options = options || {};
logger.warn('Warning ' +
(options.specName ? 'in ' + options.specName : 'from "' + obj.name + '" plugin') + ': ' +
message);
};
}
printPluginResults(specResults) {
const green = '\x1b[32m';
const red = '\x1b[31m';
const normalColor = '\x1b[39m';
const printResult = (message, pass) => {
logger.info(pass ? green : red, '\t', pass ? 'Pass: ' : 'Fail: ', message, normalColor);
};
for (const specResult of specResults) {
const passed = specResult.assertions.map(x => x.passed).reduce((x, y) => (x && y), true);
printResult(specResult.description, passed);
if (!passed) {
for (const assertion of specResult.assertions) {
if (!assertion.passed) {
logger.error('\t\t' + assertion.errorMsg);
if (assertion.stackTrace) {
logger.error('\t\t' + assertion.stackTrace.replace(/\n/g, '\n\t\t'));
}
}
}
}
}
}
/**
* Gets the tests results generated by any plugins
*
* @see lib/frameworks/README.md#requirements for a complete description of what
* the results object must look like
*
* @return {Object} The results object
*/
getResults() {
const results = { failedCount: 0, specResults: [] };
for (const specName in this.assertions) {
results.specResults.push({ description: specName, assertions: this.assertions[specName] });
results.failedCount +=
this.assertions[specName].filter(assertion => !assertion.passed).length;
}
this.printPluginResults(results.specResults);
this.resultsReported = true;
return results;
}
/**
* Returns true if any loaded plugin has skipAngularStability enabled.
*
* @return {boolean}
*/
skipAngularStability() {
const result = this.pluginObjs.some(pluginObj => pluginObj.skipAngularStability);
return result;
}
/**
* Calls a function from a plugin safely. If the plugin's function throws an
* exception or returns a rejected promise, that failure will be logged as a
* failed test result instead of crashing Protractor. If the test results have
* already been reported, the failure will be logged to the console.
*
* @param {Object} pluginObj The plugin object containing the function to be run
* @param {string} funName The name of the function we want to run
* @param {*[]} args The arguments we want to invoke the function with
* @param {*} failReturnVal The value to return if the function fails
*
* @return {Promise} A promise which resolves to the
* function's return value
*/
safeCallPluginFun(pluginObj, funName, args, failReturnVal) {
const resolver = (done) => __awaiter(this, void 0, void 0, function* () {
const logError = (e) => {
if (this.resultsReported) {
this.printPluginResults([{
description: pluginObj.name + ' Runtime',
assertions: [{
passed: false,
errorMsg: 'Failure during ' + funName + ': ' + (e.message || e),
stackTrace: e.stack
}]
}]);
}
else {
pluginObj.addFailure('Failure during ' + funName + ': ' + (e.message || e), { stackTrace: e.stack });
}
done(failReturnVal);
};
try {
const result = yield pluginObj[funName].apply(pluginObj, args);
done(result);
}
catch (e) {
logError(e);
}
});
return new Promise(resolver);
}
pluginFunFactory(funName, failReturnVal) {
return (...args) => {
const promises = this.pluginObjs.filter(pluginObj => typeof pluginObj[funName] === 'function')
.map(pluginObj => this.safeCallPluginFun(pluginObj, funName, args, failReturnVal));
return Promise.all(promises);
};
}
}
exports.Plugins = Plugins;
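// Usage sketch (illustrative, not part of the Protractor source): a minimal
// inline plugin as it could appear in a Protractor config. The plugin name
// and hook body below are hypothetical.
//
//   exports.config = {
//     plugins: [{
//       inline: {
//         name: 'timestamp-logger',
//         onPageLoad: function () { console.log('page loaded', Date.now()); },
//       },
//     }],
//   };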
//# sourceMappingURL=plugins.js.map
|
import logging
import traceback
from dexbot.ui import translate_error
from .ui.error_dialog_ui import Ui_Dialog
from dexbot.qt_queue.idle_queue import idle_add
from PyQt5 import QtWidgets, QtCore
class PyQtHandler(logging.Handler):
"""
Logging handler for PyQt events.
Based on Vinay Sajip's DBHandler class (http://www.red-dove.com/python_logging.html)
"""
def __init__(self):
logging.Handler.__init__(self)
self.info_handler = None
def emit(self, record):
# Use default formatting:
self.format(record)
message = record.msg
if record.levelno > logging.WARNING:
extra = translate_error(message)
if record.exc_info:
if not extra:
extra = translate_error(repr(record.exc_info[1]))
detail = logging._defaultFormatter.formatException(record.exc_info)
else:
detail = None
if hasattr(record, "worker_name"):
title = "Error on {}".format(record.worker_name)
else:
title = "DEXBot Error"
idle_add(show_dialog, title, message, extra, detail)
else:
if self.info_handler and hasattr(record, "worker_name"):
idle_add(self.info_handler, record.worker_name, record.levelno, message)
def set_info_handler(self, info_handler):
self.info_handler = info_handler
class ErrorDialog(QtWidgets.QDialog, Ui_Dialog):
def __init__(self, title, message, extra=None, detail=None):
super().__init__()
self.setupUi(self)
self.resize(400, 1)
self.setWindowFlags(self.windowFlags() | QtCore.Qt.WindowCloseButtonHint | QtCore.Qt.WindowMinimizeButtonHint)
self.setWindowTitle('DEXBot - {}'.format(title))
self.message_label.setText(message)
self.hide_details.hide()
self.detail_box.hide()
if extra:
self.extra_label.setText(extra)
if detail:
self.detail_box.setText(detail)
else:
self.show_details.hide()
# Button actions
self.hide_details.clicked.connect(lambda: self.hide_details_func())
self.show_details.clicked.connect(lambda: self.show_details_func())
self.ok_button.clicked.connect(lambda: self.accept())
def show_details_func(self):
self.detail_box.show()
self.show_details.hide()
self.hide_details.show()
self.vertical_spacer.hide()
self.resize(self.geometry().width(), 300)
def hide_details_func(self):
self.detail_box.hide()
self.hide_details.hide()
self.show_details.show()
self.vertical_spacer.show()
self.resize(self.geometry().width(), 1)
def gui_error(func):
""" A decorator for GUI handler functions - traps all exceptions and displays the dialog
"""
def func_wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except BaseException as exc:
show_dialog("DEXBot Error", "An error occurred with DEXBot: \n"+repr(exc), None, traceback.format_exc())
return func_wrapper
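# Usage sketch (illustrative): wrapping a Qt slot handler so an uncaught
# exception is shown in an ErrorDialog instead of killing the event loop.
# The handler name below is hypothetical.
#
#     @gui_error
#     def on_start_worker_clicked(self):
#         raise RuntimeError("boom")  # -> dialog with traceback; GUI survives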
def show_dialog(title, message, extra=None, detail=None):
error_dialog = ErrorDialog(title, message, extra, detail)
error_dialog.exec_()
|
import ast
import io
import textwrap
import token
import tokenize
import traceback
from types import CodeType
from typing import Any, Dict, Iterable, List, Sequence, Tuple
from .util import debug, NoMoreRecords
def _split_last_statement(tokens: Iterable[tokenize.TokenInfo], prog: str) -> Tuple[int, int]:
def _line_pos_to_pos(linepos):
line, pos = linepos
offset = 0
for _ in range(1, line):
offset = prog.index('\n', offset) + 1
return pos + offset
started = False
for tok in reversed(list(tokens)):
if tok.exact_type in (token.SEMI, token.NEWLINE):
if started:
return _line_pos_to_pos(tok.start), _line_pos_to_pos(tok.end)
elif tok.type not in (token.ENDMARKER, token.COMMENT, token.NL):
started = True
return 0, 0
def _replace_double_semicolons(tokens: Iterable[tokenize.TokenInfo]) -> Iterable[tokenize.TokenInfo]:
double_semis = []
last_semi = False
tokens = list(tokens)
for i, tok in enumerate(tokens):
if tok.string == ';':
if last_semi:
double_semis.append(i-1)
else:
last_semi = True
else:
last_semi = False
for i in reversed(double_semis):
del tokens[i + 1]
_replace_with_newline(tokens, i)
debug('tokens', list(tokens), double_semis)
return tokens
def _replace_with_newline(tokens: List[tokenize.TokenInfo], pos: int) -> None:
tok = tokens[pos]
tokens[pos] = tokenize.TokenInfo(
token.NEWLINE,
'\n',
tok.start,
tok.end,
tok.line)
line_offset = 0
for i, tok2 in enumerate(tokens[pos + 1:]):
if i == 0:
line_offset = tok2.start[1]
if tok2.start[0] != tok.start[0]:
line_offset = 0
tokens[i + pos + 1] = tokenize.TokenInfo(
tok2.type, tok2.string,
start=(tok2.start[0] + 1, tok2.start[1] - line_offset),
end=(tok2.end[0] + 1, tok2.end[1] - line_offset),
line=tok2.line)
def _parse(prog: str) -> Tuple[ast.AST, ast.AST]:
'''
Parse the given pol program into exec statements and a final eval expression that can be
evaluated directly, applying syntax transformations where needed.
'''
prog_io = io.StringIO(prog)
tokens = tokenize.generate_tokens(prog_io.readline)
tokens = _replace_double_semicolons(tokens)
prog = tokenize.untokenize(tokens)
debug(f'newprog=\n{prog}')
split_start, split_end = _split_last_statement(tokens, prog)
prog_stmts = prog[:split_start]
prog_expr = prog[split_end:]
debug(f'stmt={prog_stmts} expr={prog_expr}')
try:
exec_statements = ast.parse(prog_stmts, mode='exec')
except SyntaxError as e:
raise RuntimeError(textwrap.dedent(
f'''\
Invalid syntax:
{prog_stmts}
{" "*(e.offset-1)}^'''))
try:
# Try to parse as generator expression (the common case)
eval_expr = ast.parse(f'({prog_expr})', mode='eval')
except SyntaxError as e:
# Try to parse as <expr> if <condition>
try:
eval_expr = ast.parse(f'({prog_expr} else _UNDEFINED_)', mode='eval')
except SyntaxError:
try:
ast.parse(f'{prog_expr}', mode='exec')
except SyntaxError:
raise RuntimeError(textwrap.dedent(
f'''\
Invalid syntax:
{prog_expr}
{" "*(e.offset-2)}^'''))
else:
raise RuntimeError(textwrap.dedent(f'''\
Cannot evaluate value from statement:
{prog_expr}'''))
debug(ast.dump(eval_expr))
return exec_statements, eval_expr
class UserError(RuntimeError):
def formatted_tb(self):
cause = self.__cause__
assert isinstance(cause, BaseException)
return traceback.format_exception(
cause.__class__,
cause,
cause.__traceback__.tb_next) # pylint: disable=no-member
def __str__(self):
return ''.join(self.formatted_tb()).rstrip('\n')
class Prog:
_exec: CodeType
_eval: CodeType
def __init__(self, prog: Any):
if hasattr(prog, '__code__'):
self._exec = compile('', filename='pol_user_prog.py', mode='exec')
self._eval = prog.__code__
else:
exec_code, eval_code = _parse(prog)
debug('Resulting AST', ast.dump(exec_code), ast.dump(eval_code))
self._exec = compile(exec_code, filename='pol_user_prog.py', mode='exec')
self._eval = compile(eval_code, filename='pol_user_prog.py', mode='eval')
def exec(self, globals: Dict[str, Any]) -> Any:
try:
exec(self._exec, globals)
return eval(self._eval, globals)
except NoMoreRecords:
raise
except Exception as e:
raise UserError() from e
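# Usage sketch (illustrative): statements before the final expression are
# exec'd and the trailing expression is eval'd against the same globals.
# The program and values below are made up.
#
#     prog = Prog('y = x * 2; y + 1')
#     prog.exec({'x': 20})  # -> 41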
|
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020, 2021 igo95862
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from os import environ
from setuptools import Extension, setup
from sys import platform
if environ.get('CFLAGS') is None:
if platform == "darwin":
environ['CFLAGS'] = (
"-Wextra -Wconversion -Wall")
else:
environ['CFLAGS'] = (
"-Werror "
"-Wextra -Wconversion -Wall")
if environ.get('LDFLAGS') is None:
if platform == "darwin":
environ['LDFLAGS'] = "-Wl,-all_load"
else:
environ['LDFLAGS'] = "-Wl,--no-as-needed"
setup(
name="notcurses",
version="2.4.4",
packages=['notcurses'],
ext_modules=[
Extension(
name='notcurses.notcurses',
sources=[
'notcurses/channels.c',
'notcurses/context.c',
'notcurses/main.c',
'notcurses/misc.c',
'notcurses/plane.c',
],
libraries=['notcurses'],
language='c',
),
],
author="Nick Black",
author_email="nickblack@linux.com",
description="Blingful TUI construction library (python bindings)",
keywords="ncurses curses tui console graphics",
license='Apache License, Version 2.0',
url='https://github.com/dankamongmen/notcurses',
zip_safe=True,
# see https://pypi.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python',
],
package_data={
'notcurses': [
'py.typed',
],
}
)
|
from dataclasses import dataclass
from typing import List, Set
from kgdata.wikidata.models.wdentity import WDEntity
from kgdata.wikidata.models.multilingual import (
MultiLingualString,
MultiLingualStringList,
)
@dataclass
class WDClass:
__slots__ = (
"id",
"label",
"description",
"aliases",
"parents",
"properties",
"different_froms",
"equivalent_classes",
"ancestors",
)
id: str
label: MultiLingualString
description: MultiLingualString
aliases: MultiLingualStringList
parents: List[str]
# properties for this type (P1963)
properties: List[str]
different_froms: List[str]
equivalent_classes: List[str]
ancestors: Set[str]
@staticmethod
def from_dict(o):
o["label"] = MultiLingualString(**o["label"])
o["description"] = MultiLingualString(**o["description"])
o["aliases"] = MultiLingualStringList(**o["aliases"])
o["ancestors"] = set(o["ancestors"])
return WDClass(**o)
def to_dict(self):
return {
"id": self.id,
"label": self.label.to_dict(),
"description": self.description.to_dict(),
"aliases": self.aliases.to_dict(),
"parents": self.parents,
"properties": self.properties,
"different_froms": self.different_froms,
"equivalent_classes": self.equivalent_classes,
"ancestors": list(self.ancestors),
}
@staticmethod
def from_entity(ent: WDEntity):
assert ent.datatype is None
parents = []
for stmt in ent.props.get("P279", []):
assert stmt.value.is_entity_id(stmt.value)
parents.append(stmt.value.as_entity_id())
properties = []
for stmt in ent.props.get("P1963", []):
assert stmt.value.is_entity_id(stmt.value)
properties.append(stmt.value.as_entity_id())
different_froms = []
for stmt in ent.props.get("P1889", []):
assert stmt.value.is_entity_id(stmt.value)
different_froms.append(stmt.value.as_entity_id())
equivalent_classes = []
for stmt in ent.props.get("P1709", []):
assert stmt.value.is_string(stmt.value)
equivalent_classes.append(stmt.value.as_string())
return WDClass(
id=ent.id,
label=ent.label,
description=ent.description,
aliases=ent.aliases,
parents=sorted(parents),
properties=sorted(properties),
different_froms=sorted(different_froms),
equivalent_classes=sorted(equivalent_classes),
ancestors=set(),
)
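# Round-trip sketch (illustrative): to_dict()/from_dict() are intended to be
# inverses, so a WDClass survives JSON-style serialization. `c` below is a
# hypothetical WDClass instance.
#
#     c2 = WDClass.from_dict(c.to_dict())
#     assert c2.id == c.id and c2.ancestors == c.ancestors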
|
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "NSObject.h"
#import "PLPhotoLibraryPathManager.h"
@class NSString, NSURL, PLFileSystemCapabilities;
@interface PLPhotoLibraryPathManagerCore : NSObject <PLPhotoLibraryPathManager>
{
struct os_unfair_lock_s _folderCreationAndCapabilitiesLock;
unsigned int _photoDirectoriesExists;
unsigned int _privateSubDirectoriesExists;
unsigned int _privateCacheSubDirectoriesExists;
unsigned int _externalDirectoriesExists;
unsigned int _persistedAlbumDataDirectoryExists;
PLFileSystemCapabilities *_capabilities;
NSURL *_libraryURL;
NSString *_baseDirectory;
NSString *_assetUUIDRecoveryMappingPath;
}
+ (id)basenameForSpatialOverCaptureFromOriginalBasename:(id)arg1;
+ (void)recordPrevSystemLibraryPath:(id)arg1;
+ (BOOL)isSystemLibraryURLDefined;
+ (BOOL)setSystemLibraryURL:(id)arg1 options:(unsigned short)arg2 error:(id *)arg3;
+ (id)systemLibraryURLIfResolvable;
+ (id)systemLibraryURL;
+ (id)_constructLegacySystemPhotoLibraryURLFromUnresolvableBookmark:(id)arg1;
+ (id)_legacySystemLibraryPath;
+ (id)_legacySystemLibraryBookmarkData;
+ (void)_updateSystemLibraryURLWithOldValue:(id)arg1;
+ (id)systemLibraryBaseDirectory;
+ (BOOL)isSupportedFileSystemAtURL:(id)arg1;
+ (BOOL)isSystemPhotoLibraryURL:(id)arg1;
+ (id)systemLibraryPathManager;
+ (void)listenForSystemPhotoLibraryURLChanges;
+ (BOOL)setTimeMachineExclusionAttribute:(BOOL)arg1 url:(id)arg2 error:(id *)arg3;
+ (void)initialize;
@property(copy) NSString *assetUUIDRecoveryMappingPath; // @synthesize assetUUIDRecoveryMappingPath=_assetUUIDRecoveryMappingPath;
@property(copy) NSString *baseDirectory; // @synthesize baseDirectory=_baseDirectory;
@property(copy) NSURL *libraryURL; // @synthesize libraryURL=_libraryURL;
- (void).cxx_destruct;
- (id)pathToAssetAlbumOrderStructure;
- (id)assetAbbreviatedMetadataDirectoryForDirectory:(id)arg1 type:(unsigned char)arg2;
- (void)setDataProtectionComplete:(BOOL)arg1;
- (BOOL)isDataProtectionComplete;
- (id)_dataProtectionIndicatorFilePath;
- (void)obtainAccessAndWaitWithFileWithIdentifier:(id)arg1 mode:(unsigned char)arg2 toURLWithHandler:(CDUnknownBlockType)arg3;
- (void)setExtendedAttributesWithIdentifier:(id)arg1 andURL:(id)arg2;
- (void)setExtendedAttributesWithIdentifier:(id)arg1;
- (id)readOnlyUrlWithIdentifier:(id)arg1;
- (id)temporaryDragAndDropDirectoryCreateIfNeeded:(BOOL)arg1 error:(id *)arg2;
- (id)temporaryFileBackedDebugDirectoryCreateIfNeeded:(BOOL)arg1 error:(id *)arg2;
- (id)knownDBPaths;
- (id)syncInfoPath;
- (id)pathToAssetsToAlbumsMapping;
- (id)externalDirectoryWithSubType:(unsigned char)arg1 leafType:(unsigned char)arg2 additionalPathComponents:(id)arg3 createIfNeeded:(BOOL)arg4 error:(id *)arg5;
- (id)externalDirectoryWithSubType:(unsigned char)arg1 createIfNeeded:(BOOL)arg2 error:(id *)arg3;
- (id)externalDirectoryWithSubType:(unsigned char)arg1 leafType:(unsigned char)arg2 additionalPathComponents:(id)arg3;
- (id)externalDirectoryWithSubType:(unsigned char)arg1;
- (id)privateCacheDirectoryWithSubType:(unsigned char)arg1 createIfNeeded:(BOOL)arg2 error:(id *)arg3;
- (id)privateCacheDirectoryWithSubType:(unsigned char)arg1;
- (id)privateDirectoryWithSubType:(unsigned char)arg1 leafType:(unsigned char)arg2 additionalPathComponents:(id)arg3 createIfNeeded:(BOOL)arg4 error:(id *)arg5;
- (id)privateDirectoryWithSubType:(unsigned char)arg1 leafType:(unsigned char)arg2 createIfNeeded:(BOOL)arg3 error:(id *)arg4;
- (id)privateDirectoryWithSubType:(unsigned char)arg1 createIfNeeded:(BOOL)arg2 error:(id *)arg3;
- (id)privateDirectoryWithSubType:(unsigned char)arg1 leafType:(unsigned char)arg2 additionalPathComponents:(id)arg3;
- (id)privateDirectoryWithSubType:(unsigned char)arg1;
- (id)pathsForExternalWriters;
- (BOOL)createPathsForNewLibrariesWithError:(id *)arg1;
- (id)pathsForPermissionCheck;
- (id)pathsForClientAccess:(id)arg1;
- (id)photoDirectoryWithType:(unsigned char)arg1 leafType:(unsigned char)arg2 additionalPathComponents:(id)arg3 createIfNeeded:(BOOL)arg4 error:(id *)arg5;
- (id)photoDirectoryWithType:(unsigned char)arg1 leafType:(unsigned char)arg2 createIfNeeded:(BOOL)arg3 error:(id *)arg4;
- (id)photoDirectoryWithType:(unsigned char)arg1 createIfNeeded:(BOOL)arg2 error:(id *)arg3;
- (id)photoDirectoryWithType:(unsigned char)arg1 leafType:(unsigned char)arg2 additionalPathComponents:(id)arg3;
- (id)photoDirectoryWithType:(unsigned char)arg1 additionalPathComponents:(id)arg2;
- (id)photoDirectoryWithType:(unsigned char)arg1;
- (id)photosAsideDatabasePath;
- (id)photosCPLDatabasePath;
- (id)photosDatabasePath;
- (id)baseDirectoryForBundleScope:(unsigned char)arg1;
- (id)convertPhotoLibraryPathType:(unsigned char)arg1;
- (BOOL)createDirectoryOnceWithPath:(id)arg1 mask:(unsigned char)arg2 type:(unsigned char)arg3 error:(id *)arg4;
- (BOOL)updateTimeMachineExclusionAttributeForExcludePath:(id)arg1 error:(id *)arg2;
- (void)externalDirectoryCreationMaskResetWithSubType:(unsigned char)arg1;
- (void)privateCacheDirectoryCreationMaskResetWithSubType:(unsigned char)arg1;
- (void)privateDirectoryCreationMaskResetWithSubType:(unsigned char)arg1;
- (void)photoDirectoryCreationMaskResetWithType:(unsigned char)arg1;
- (void)postInit;
@property(readonly, nonatomic) PLFileSystemCapabilities *capabilities; // @synthesize capabilities=_capabilities;
- (id)initWithLibraryURL:(id)arg1;
- (id)initWithBaseDirectory:(id)arg1;
- (id)init;
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests.sessions import Session
from google import auth
from google.api_core import client_options
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.ssl_certificates import SslCertificatesClient
from google.cloud.compute_v1.services.ssl_certificates import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
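# Rationale sketch: _get_default_mtls_endpoint("example.googleapis.com") yields
# "example.mtls.googleapis.com", while a "localhost" endpoint derives an
# identical mtls endpoint, so the override above gives the tests a host that
# actually switches (values mirror the assertions below).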
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert SslCertificatesClient._get_default_mtls_endpoint(None) is None
assert (
SslCertificatesClient._get_default_mtls_endpoint(api_endpoint)
== api_mtls_endpoint
)
assert (
SslCertificatesClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
SslCertificatesClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
SslCertificatesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert (
SslCertificatesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
)
def test_ssl_certificates_client_from_service_account_info():
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = SslCertificatesClient.from_service_account_info(info)
assert client.transport._credentials == creds
assert client.transport._host == "compute.googleapis.com:443"
@pytest.mark.parametrize("client_class", [SslCertificatesClient,])
def test_ssl_certificates_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert client.transport._host == "compute.googleapis.com:443"
def test_ssl_certificates_client_get_transport_class():
transport = SslCertificatesClient.get_transport_class()
available_transports = [
transports.SslCertificatesRestTransport,
]
assert transport in available_transports
transport = SslCertificatesClient.get_transport_class("rest")
assert transport == transports.SslCertificatesRestTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(SslCertificatesClient, transports.SslCertificatesRestTransport, "rest"),],
)
@mock.patch.object(
SslCertificatesClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(SslCertificatesClient),
)
def test_ssl_certificates_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(SslCertificatesClient, "get_transport_class") as gtc:
transport = transport_class(credentials=credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(SslCertificatesClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(
SslCertificatesClient,
transports.SslCertificatesRestTransport,
"rest",
"true",
),
(
SslCertificatesClient,
transports.SslCertificatesRestTransport,
"rest",
"false",
),
],
)
@mock.patch.object(
SslCertificatesClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(SslCertificatesClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_ssl_certificates_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(SslCertificatesClient, transports.SslCertificatesRestTransport, "rest"),],
)
def test_ssl_certificates_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(SslCertificatesClient, transports.SslCertificatesRestTransport, "rest"),],
)
def test_ssl_certificates_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_aggregated_list_rest(
transport: str = "rest", request_type=compute.AggregatedListSslCertificatesRequest
):
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.SslCertificateAggregatedList(
id="id_value",
items={
"key_value": compute.SslCertificatesScopedList(
ssl_certificates=[
compute.SslCertificate(certificate="certificate_value")
]
)
},
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
unreachables=["unreachables_value"],
warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED),
)
# Wrap the value into a proper Response obj
json_return_value = compute.SslCertificateAggregatedList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.aggregated_list(request)
assert response.raw_page is response
# Establish that the response is the type that we expect.
assert isinstance(response, compute.SslCertificateAggregatedList)
assert response.id == "id_value"
assert response.items == {
"key_value": compute.SslCertificatesScopedList(
ssl_certificates=[compute.SslCertificate(certificate="certificate_value")]
)
}
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
assert response.unreachables == ["unreachables_value"]
assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED)
def test_aggregated_list_rest_from_dict():
test_aggregated_list_rest(request_type=dict)
def test_aggregated_list_rest_flattened():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.SslCertificateAggregatedList()
# Wrap the value into a proper Response obj
json_return_value = compute.SslCertificateAggregatedList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.aggregated_list(project="project_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("json")
assert "project_value" in http_call[1] + str(body)
def test_aggregated_list_rest_flattened_error():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.aggregated_list(
compute.AggregatedListSslCertificatesRequest(), project="project_value",
)
def test_delete_rest(
transport: str = "rest", request_type=compute.DeleteSslCertificateRequest
):
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
error=compute.Error(errors=[compute.Errors(code="code_value")]),
http_error_message="http_error_message_value",
http_error_status_code=2374,
id="id_value",
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id="target_id_value",
target_link="target_link_value",
user="user_value",
warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)],
zone="zone_value",
)
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.delete(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.error == compute.Error(errors=[compute.Errors(code="code_value")])
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == "id_value"
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == "target_id_value"
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.warnings == [
compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)
]
assert response.zone == "zone_value"
def test_delete_rest_from_dict():
test_delete_rest(request_type=dict)
def test_delete_rest_flattened():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete(
project="project_value", ssl_certificate="ssl_certificate_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("json")
assert "project_value" in http_call[1] + str(body)
assert "ssl_certificate_value" in http_call[1] + str(body)
def test_delete_rest_flattened_error():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete(
compute.DeleteSslCertificateRequest(),
project="project_value",
ssl_certificate="ssl_certificate_value",
)
def test_get_rest(
transport: str = "rest", request_type=compute.GetSslCertificateRequest
):
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.SslCertificate(
certificate="certificate_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
expire_time="expire_time_value",
id="id_value",
kind="kind_value",
managed=compute.SslCertificateManagedSslCertificate(
domain_status={"key_value": "value_value"}
),
name="name_value",
private_key="private_key_value",
region="region_value",
self_link="self_link_value",
self_managed=compute.SslCertificateSelfManagedSslCertificate(
certificate="certificate_value"
),
subject_alternative_names=["subject_alternative_names_value"],
type_=compute.SslCertificate.Type.MANAGED,
)
# Wrap the value into a proper Response obj
json_return_value = compute.SslCertificate.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.SslCertificate)
assert response.certificate == "certificate_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.expire_time == "expire_time_value"
assert response.id == "id_value"
assert response.kind == "kind_value"
assert response.managed == compute.SslCertificateManagedSslCertificate(
domain_status={"key_value": "value_value"}
)
assert response.name == "name_value"
assert response.private_key == "private_key_value"
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.self_managed == compute.SslCertificateSelfManagedSslCertificate(
certificate="certificate_value"
)
assert response.subject_alternative_names == ["subject_alternative_names_value"]
assert response.type_ == compute.SslCertificate.Type.MANAGED
def test_get_rest_from_dict():
test_get_rest(request_type=dict)
def test_get_rest_flattened():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.SslCertificate()
# Wrap the value into a proper Response obj
json_return_value = compute.SslCertificate.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get(
project="project_value", ssl_certificate="ssl_certificate_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("json")
assert "project_value" in http_call[1] + str(body)
assert "ssl_certificate_value" in http_call[1] + str(body)
def test_get_rest_flattened_error():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get(
compute.GetSslCertificateRequest(),
project="project_value",
ssl_certificate="ssl_certificate_value",
)
def test_insert_rest(
transport: str = "rest", request_type=compute.InsertSslCertificateRequest
):
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation(
client_operation_id="client_operation_id_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
end_time="end_time_value",
error=compute.Error(errors=[compute.Errors(code="code_value")]),
http_error_message="http_error_message_value",
http_error_status_code=2374,
id="id_value",
insert_time="insert_time_value",
kind="kind_value",
name="name_value",
operation_type="operation_type_value",
progress=885,
region="region_value",
self_link="self_link_value",
start_time="start_time_value",
status=compute.Operation.Status.DONE,
status_message="status_message_value",
target_id="target_id_value",
target_link="target_link_value",
user="user_value",
warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)],
zone="zone_value",
)
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.insert(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.Operation)
assert response.client_operation_id == "client_operation_id_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.end_time == "end_time_value"
assert response.error == compute.Error(errors=[compute.Errors(code="code_value")])
assert response.http_error_message == "http_error_message_value"
assert response.http_error_status_code == 2374
assert response.id == "id_value"
assert response.insert_time == "insert_time_value"
assert response.kind == "kind_value"
assert response.name == "name_value"
assert response.operation_type == "operation_type_value"
assert response.progress == 885
assert response.region == "region_value"
assert response.self_link == "self_link_value"
assert response.start_time == "start_time_value"
assert response.status == compute.Operation.Status.DONE
assert response.status_message == "status_message_value"
assert response.target_id == "target_id_value"
assert response.target_link == "target_link_value"
assert response.user == "user_value"
assert response.warnings == [
compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)
]
assert response.zone == "zone_value"
def test_insert_rest_from_dict():
test_insert_rest(request_type=dict)
def test_insert_rest_flattened():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.Operation()
# Wrap the value into a proper Response obj
json_return_value = compute.Operation.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
ssl_certificate_resource = compute.SslCertificate(
certificate="certificate_value"
)
client.insert(
project="project_value", ssl_certificate_resource=ssl_certificate_resource,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("json")
assert "project_value" in http_call[1] + str(body)
assert compute.SslCertificate.to_json(
ssl_certificate_resource, including_default_value_fields=False
) in http_call[1] + str(body)
def test_insert_rest_flattened_error():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.insert(
compute.InsertSslCertificateRequest(),
project="project_value",
ssl_certificate_resource=compute.SslCertificate(
certificate="certificate_value"
),
)
def test_list_rest(
transport: str = "rest", request_type=compute.ListSslCertificatesRequest
):
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.SslCertificateList(
id="id_value",
items=[compute.SslCertificate(certificate="certificate_value")],
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED),
)
# Wrap the value into a proper Response obj
json_return_value = compute.SslCertificateList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
assert response.raw_page is response
# Establish that the response is the type that we expect.
assert isinstance(response, compute.SslCertificateList)
assert response.id == "id_value"
assert response.items == [compute.SslCertificate(certificate="certificate_value")]
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED)
def test_list_rest_from_dict():
test_list_rest(request_type=dict)
def test_list_rest_flattened():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.SslCertificateList()
# Wrap the value into a proper Response obj
json_return_value = compute.SslCertificateList.to_json(return_value)
response_value = Response()
response_value.status_code = 200
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list(project="project_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, http_call, http_params = req.mock_calls[0]
body = http_params.get("json")
assert "project_value" in http_call[1] + str(body)
def test_list_rest_flattened_error():
client = SslCertificatesClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list(
compute.ListSslCertificatesRequest(), project="project_value",
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.SslCertificatesRestTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.SslCertificatesRestTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = SslCertificatesClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.SslCertificatesRestTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = SslCertificatesClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.SslCertificatesRestTransport(
credentials=credentials.AnonymousCredentials(),
)
client = SslCertificatesClient(transport=transport)
assert client.transport is transport
@pytest.mark.parametrize("transport_class", [transports.SslCertificatesRestTransport,])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_ssl_certificates_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(exceptions.DuplicateCredentialArgs):
transport = transports.SslCertificatesTransport(
credentials=credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_ssl_certificates_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.SslCertificatesTransport(
credentials=credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"aggregated_list",
"delete",
"get",
"insert",
"list",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
def test_ssl_certificates_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
auth, "load_credentials_from_file"
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.SslCertificatesTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_ssl_certificates_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(auth, "default") as adc, mock.patch(
"google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.SslCertificatesTransport()
adc.assert_called_once()
def test_ssl_certificates_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
SslCertificatesClient()
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
def test_ssl_certificates_http_transport_client_cert_source_for_mtls():
cred = credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
transports.SslCertificatesRestTransport(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
def test_ssl_certificates_host_no_port():
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com"
),
)
assert client.transport._host == "compute.googleapis.com:443"
def test_ssl_certificates_host_with_port():
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com:8000"
),
)
assert client.transport._host == "compute.googleapis.com:8000"
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = SslCertificatesClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = SslCertificatesClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = SslCertificatesClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = SslCertificatesClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = SslCertificatesClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = SslCertificatesClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = SslCertificatesClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = SslCertificatesClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = SslCertificatesClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = SslCertificatesClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = SslCertificatesClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = SslCertificatesClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = SslCertificatesClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = SslCertificatesClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = SslCertificatesClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.SslCertificatesTransport, "_prep_wrapped_messages"
) as prep:
client = SslCertificatesClient(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.SslCertificatesTransport, "_prep_wrapped_messages"
) as prep:
transport_class = SslCertificatesClient.get_transport_class()
transport = transport_class(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
import frappe
def get_data():
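    # Frappe module-page definition: each section dict below renders as a card of doctype links.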
return [
{
"label":_("Transaction"),
"items":[
{
"type":"doctype",
"label":_("Lead"),
"name":"Lead"
},
{
"type":"doctype",
"label":_("Quotation"),
"name":"Quotation"
},
{
"type":"doctype",
"label":_("Sales Order"),
"name":"Sales Order"
},
{
"type":"doctype",
"label":_("Sales Invoice"),
"name":"Sales Invoice"
},
{
"type":"doctype",
"label":_("Delivery Note"),
"name":"Delivery Note"
}
]
},
{
"label": _("Car Setup"),
"icon": "fa fa-list",
"items": [
{
"type":"doctype",
"label":_("Car Size"),
"name":"Car Size CT"
},
{
"type":"doctype",
"label":_("Car Production Year"),
"name":"Car Production Year CT"
},
{
"type":"doctype",
"label":_("Car Model"),
"name":"Car Model CT"
},
{
"type":"doctype",
"label":_("Car Group"),
"name":"Car Group CT"
}
]
},
{
"label": _("Package Setup"),
"icon": "fa fa-list",
"items": [
{
"type":"doctype",
"label":_("Package"),
"name":"Package CT"
},
]
},
{
"label":_("Quotation Setup"),
"items":[
{
"type":"doctype",
"label":_("Sales Partner"),
"name":"Sales Partner"
},
{
"type":"doctype",
"label":_("Customer Group"),
"name":"Customer Group"
}
]
}
]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Bertrand256
# Created on: 2018-03
import sys
import re
from typing import Optional
import simplejson
import logging
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QDialog, QMessageBox
from decimal import Decimal
from bitcoinrpc.authproxy import JSONRPCException
import app_cache
import app_utils
from app_config import AppConfig
from gobyted_intf import GobytedInterface
from ui.ui_transaction_dlg import Ui_TransactionDlg
from wnd_utils import WndUtils
CACHE_ITEM_DETAILS_WORD_WRAP = 'TransactionDlg_DetailsWordWrap'
class TransactionDlg(QDialog, Ui_TransactionDlg, WndUtils):
def __init__(self, parent: QDialog,
config: AppConfig,
gobyted_intf: GobytedInterface,
raw_transaction: str,
use_instant_send: bool,
decoded_transaction: Optional[dict] = None,
dependent_transactions: Optional[dict] = None
):
QDialog.__init__(self, parent=parent)
Ui_TransactionDlg.__init__(self)
WndUtils.__init__(self, config)
self.config = config
self.parent = parent
self.gobyted_intf = gobyted_intf
self.transaction_sent = False
self.raw_transaction = raw_transaction
self.use_instant_send = use_instant_send
        self.tx_id = None  # will be decoded from the raw transaction
self.tx_size = None # as above
self.decoded_transaction: Optional[dict] = decoded_transaction
self.dependent_transactions = dependent_transactions # key: txid, value: transaction dict
self.setupUi()
def setupUi(self):
Ui_TransactionDlg.setupUi(self, self)
self.setWindowTitle('Transaction')
self.chb_word_wrap.setChecked(app_cache.get_value(CACHE_ITEM_DETAILS_WORD_WRAP, False, bool))
self.apply_word_wrap(self.chb_word_wrap.isChecked())
self.edt_recipients.setOpenExternalLinks(True)
self.edt_recipients.viewport().setAutoFillBackground(False)
self.prepare_tx_view()
def on_chb_word_wrap_toggled(self, checked):
app_cache.set_value(CACHE_ITEM_DETAILS_WORD_WRAP, checked)
self.apply_word_wrap(checked)
def apply_word_wrap(self, checked):
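        # QTextOption wrap modes: 0 = NoWrap, 1 = WordWrap.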
self.edt_raw_transaction.setWordWrapMode(0 if not checked else 1)
def prepare_tx_view(self):
def get_vout_value(vout: dict):
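            # Return the output value in coin units, preferring 'value' and falling back to 'valueSat' (satoshis).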
val = vout.get('value')
if not isinstance(val, (float, Decimal)):
val = vout.get('valueSat')
if val is not None:
val = round(val / 1e8, 8)
return float(val)
try:
self.edt_recipients.clear()
if not self.decoded_transaction:
try:
self.decoded_transaction = self.gobyted_intf.decoderawtransaction(self.raw_transaction)
except JSONRPCException as e:
if re.match('.*400 Bad Request', str(e)) and len(self.raw_transaction):
raise Exception('Error while decoding raw transaction: ' + str(e) + '.' +
                                '\n\nProbable cause: size of the transaction exceeded the RPC node limit.'
'\n\nDecrease the number of inputs.')
else:
raise Exception('Error while decoding raw transaction: ' + str(e) + '.')
except Exception as e:
raise Exception('Error while decoding raw transaction: ' + str(e) + '.')
if isinstance(self.decoded_transaction, dict):
self.edt_raw_transaction.setPlainText(simplejson.dumps(self.decoded_transaction, indent=2))
vout_list = self.decoded_transaction.get('vout')
self.tx_size = self.decoded_transaction.get('size')
self.tx_id = self.decoded_transaction.get('txid')
if vout_list and isinstance(vout_list, list):
vin_list = self.decoded_transaction.get('vin')
if vin_list and isinstance(vin_list, list):
inputs_total = 0.0
for vin in vin_list:
txid = vin.get('txid')
txindex = vin.get('vout')
rawtx = None
if isinstance(self.dependent_transactions, dict):
rawtx = self.dependent_transactions.get(txid)
if not rawtx:
rawtx = self.gobyted_intf.getrawtransaction(txid, 1)
if rawtx:
vlist = rawtx.get('vout')
val = None
for v in vlist:
if v.get('n') == txindex:
val = get_vout_value(v)
break
if val is None:
logging.error(f'Couldn\'t find output {txindex} in source transaction {txid}')
else:
inputs_total += val
                        if self.tx_size is not None:
                            if self.tx_size > 1024:
                                tx_size_str = f'{round(self.tx_size/1024, 2)} kB'
                            else:
                                tx_size_str = f'{self.tx_size} bytes'
                        else:
                            tx_size_str = 'unknown'
# prepare list of recipients
outputs_total = 0.0
recipients = ''
for row_idx, vout in enumerate(vout_list):
val = get_vout_value(vout)
outputs_total += val
spk = vout.get('scriptPubKey')
address = ''
if spk:
ads = spk.get('addresses')
if isinstance(ads, list) and len(ads) == 1:
address = ads[0]
else:
address = str(ads)
if row_idx == 0:
recipients = f'<tr><td class="lbl"><p class="lbl">Recipients:</p></td><td>{address}</td><td><p class="val">{app_utils.to_string(val)} GoByte</p></td></tr>'
else:
recipients += f'<tr><td></td><td>{address}</td><td><p class="val">{app_utils.to_string(val)} GoByte</p></td></tr>'
fee = round(inputs_total - outputs_total, 8)
send_tx_row = ''
if self.transaction_sent:
url = self.config.get_block_explorer_tx()
if url:
url = url.replace('%TXID%', self.tx_id)
send_tx_row = f'<tr><td class="lbl"><p class="lbl">Transaction ID:</p></td><td><a href="{url}">{self.tx_id}</a></td></tr>'
if sys.platform in ('win32', 'linux'):
base_font_size = '11'
title_font_size = '17'
else:
base_font_size = '13'
title_font_size = '20'
if self.transaction_sent:
title = 'Transaction summary - sent'
subtitle = '<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; ' \
'margin-right:0px; -qt-block-indent:0; text-indent:0px; ' \
'background-color:#2eb82e;color:white; padding: 1px 3px 1px 3px; ' \
f'border-radius: 3px;"><span style=" font-size:{base_font_size}pt;">' \
'Transaction successfully sent...</span></p>'
else:
title = 'Transaction summary - ready to send'
subtitle = '<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; ' \
'margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=' \
f'"font-size:{base_font_size}pt;">Click the <b><Send transaction></b> button to ' \
'broadcast the transaction.</span></p>'
summary = f"""<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd">
<html><head><meta name="qrichtext" content="1" /><style type="text/css">
td.lbl{{text-align: right;vertical-align: top}} p.lbl{{margin: 0 5px 0 0; font-weight: bold}} p.val{{margin: 0 0 0 8px; color: navy}}
</style></head><body style="font-size:{base_font_size}pt; font-weight:400; font-style:normal; margin-left:10px;margin-right:10px;">
<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:{title_font_size}pt; font-weight:600;">{title}</span></p>
<p style="-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:{base_font_size}pt;"><br /></p>
{subtitle}
<p style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;">
<table>
{send_tx_row}
<tr><td class="lbl"><p class="lbl">Total amount:</p></td><td>{app_utils.to_string(inputs_total)} GoByte</td><td></td></tr>
<tr><td class="lbl"><p class="lbl">Fee:</p></td><td>{app_utils.to_string(fee)} GoByte</td><td></td></tr>
<tr><td class="lbl"><p class="lbl">Transaction size:</p></td><td>{tx_size_str}</td><td></td></tr>
<tr><td class="lbl"><p class="lbl">InstantSend:</p></td><td>{'YES' if self.use_instant_send else 'NO'}</td><td></td></tr>
{recipients}
</table></p></body></html>"""
self.edt_recipients.setText(summary)
else:
raise Exception('Empty \'vin\' list in the decoded transaction.')
else:
raise Exception('Empty \'vout\' list in the decoded transaction.')
else:
                raise Exception('Error: couldn\'t parse the raw transaction.')
except Exception as e:
logging.exception("Unhandled exception occurred.")
raise
@pyqtSlot(bool)
def on_btn_details_clicked(self, enabled):
idx = (self.stacket_widget.currentIndex() + 1) % 2
self.stacket_widget.setCurrentIndex(idx)
self.btn_details.setText({0: 'Show Details', 1: 'Hide Details'}.get(idx))
@pyqtSlot(bool)
    def on_btn_broadcast_clicked(self, enabled):
try:
txid = self.gobyted_intf.sendrawtransaction(self.raw_transaction, self.use_instant_send)
if txid != self.tx_id:
logging.warning('TXID returned by sendrawtransaction differs from the original txid')
self.tx_id = txid
logging.info('Transaction sent, txid: ' + txid)
self.transaction_sent = True
self.btn_broadcast.setEnabled(False)
self.prepare_tx_view()
except Exception as e:
logging.exception(f'Exception occurred while broadcasting transaction. '
f'Transaction size: {self.tx_size} bytes.')
            self.errorMsg('An error occurred while sending transaction: ' + str(e))
@pyqtSlot(bool)
def on_btn_close_clicked(self, enabled):
if self.transaction_sent:
self.accept()
else:
self.reject()
|
/* @flow */
import config from '../config'
import { noop } from 'shared/util'
export let warn = noop
export let tip = noop
export let generateComponentTrace = (noop: any) // work around flow check; builds the component stack trace
export let formatComponentName = (noop: any) // formats a component name for display
if (process.env.NODE_ENV !== 'production') {
const hasConsole = typeof console !== 'undefined'
const classifyRE = /(?:^|[-_])(\w)/g
const classify = str => str
.replace(classifyRE, c => c.toUpperCase())
.replace(/[-_]/g, '')
  // print an error message together with the component stack trace
warn = (msg, vm) => {
const trace = vm ? generateComponentTrace(vm) : ''
if (config.warnHandler) {
config.warnHandler.call(null, msg, vm, trace)
} else if (hasConsole && (!config.silent)) {
console.error(`[Vue warn]: ${msg}${trace}`)
}
}
  // print a tip (warning) together with the component stack trace
tip = (msg, vm) => {
if (hasConsole && (!config.silent)) {
console.warn(`[Vue tip]: ${msg}` + (
vm ? generateComponentTrace(vm) : ''
))
}
}
  // format the component name for display
formatComponentName = (vm, includeFile) => {
if (vm.$root === vm) {
return '<Root>'
}
const options = typeof vm === 'function' && vm.cid != null
? vm.options
: vm._isVue
? vm.$options || vm.constructor.options
: vm
let name = options.name || options._componentTag
const file = options.__file
if (!name && file) {
const match = file.match(/([^/\\]+)\.vue$/)
name = match && match[1]
}
return (
(name ? `<${classify(name)}>` : `<Anonymous>`) +
(file && includeFile !== false ? ` at ${file}` : '')
)
}
  // repeat str n times, via exponentiation by squaring on strings
const repeat = (str, n) => {
let res = ''
while (n) {
if (n % 2 === 1) res += str
if (n > 1) str += str
      n >>= 1 // bitwise right shift by one bit, e.g. 35 >> 1 === 17
}
return res
}
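  // e.g. repeat('ab', 3) === 'ababab'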
  // build the component stack trace: walk up vm.$parent in a while loop, formatting each component name
generateComponentTrace = vm => {
if (vm._isVue && vm.$parent) {
const tree = []
let currentRecursiveSequence = 0
while (vm) {
if (tree.length > 0) {
const last = tree[tree.length - 1]
          if (last.constructor === vm.constructor) { // collapse recursive components
currentRecursiveSequence++
vm = vm.$parent
continue
} else if (currentRecursiveSequence > 0) {
tree[tree.length - 1] = [last, currentRecursiveSequence]
currentRecursiveSequence = 0
}
}
tree.push(vm)
vm = vm.$parent
}
return '\n\nfound in\n\n' + tree
.map((vm, i) => `${
i === 0 ? '---> ' : repeat(' ', 5 + i * 2)
}${
Array.isArray(vm)
? `${formatComponentName(vm[0])}... (${vm[1]} recursive calls)`
: formatComponentName(vm)
}`)
.join('\n')
} else {
return `\n\n(found in ${formatComponentName(vm)})`
}
}
}
|
from django.urls import path
from . import views
app_name = "tasks"
urlpatterns = [
path("", views.index, name="index"),
path("add/", views.add, name="add")
]
|
/**
*
 * @Description Email sender
 * Usage: sendMail({ recipient: '994718917@qq.com', subject: 'Test mail', html: 'Hi miqilin, this is a test mail' });
 * @Author miqilin
 * @Created 2020/04/05 15:10
 * Technology is merely a choice for solving problems, not the root of the solution...
*
*/
const nodemailer = require('nodemailer')
const smtpTransport = require('nodemailer-smtp-transport')
const config = {
  email: {
    service: 'QQ', // mail service type, fixed to QQ here
    user: 'xxxxxxxxxx@qq.com', // your QQ email address, used to send the reply mail
    pass: 'xxxxxxxxxxxxx', // your QQ mailbox authorization code; see README.md for how to obtain it
  },
}
const sendSmtpTransport = nodemailer.createTransport(
smtpTransport({
service: config.email.service,
auth: {
user: config.email.user,
pass: config.email.pass,
},
})
)
/**
 * @param {Object} replyObj
 * @param {String} replyObj.recipient recipient address
 * @param {String} replyObj.subject subject line of the mail
 * @param {String} replyObj.html HTML body to send
*/
var sendMail = function (replyObj) {
sendSmtpTransport.sendMail(
{
from: config.email.user,
to: replyObj.recipient,
      subject: `${replyObj.subject} Hello! You have new mail, please check it`,
html: replyObj.html,
},
    function (error, response) {
      if (error) {
        console.log(error)
        return
      }
      console.log('Mail sent successfully')
    }
)
}
module.exports = sendMail
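// Hypothetical usage sketch (placeholder values; the require path below is an assumption, not the repo's actual filename):
// const sendMail = require('./sendMail')
// sendMail({ recipient: 'someone@example.com', subject: 'Test', html: '<p>Hello</p>' })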
|
# coding:utf-8
"""
@author : linkin
@email : yooleak@outlook.com
@date : 2018-10-04
"""
import logging
import time
from inspect import isfunction
from components.crawlers import builtin_crawlers
from config.config import COLLECT_TIME_GAP
from custom.custom import my_crawlers
from tools.threads import CrawlThread
logger = logging.getLogger('Collector')
class Collector(object):
"""
    Collects proxy IP data effectively and feeds it to the validator for verification and storage.
"""
def __init__(self):
self.__proxyList = None
self.__crawlers = my_crawlers
def find_crawlers(self):
"""
        Find the proxy-collecting crawler functions available to this collector, both built-in and custom.
        :return: list of the crawler functions found
"""
_crawlers = [i for i in builtin_crawlers if isfunction(i)]
custom_crawlers = [i for i in self.__crawlers if isfunction(i)]
        _crawlers.extend(custom_crawlers)  # the upstream author inexplicably left this as a bare pass statement
# _crawlers.append(custom_crawlers)
logger.info('Find %d data collectors.' % len(_crawlers))
return _crawlers
def run(self, proxyList):
"""
        Run the collector with one thread per crawler spider; collected results are stored in proxyList.
        :param proxyList: list shared with the validator, holding the collected proxy IP data
"""
        while True:
results = []
t_res = set()
self.__proxyList = proxyList
funcs = self.find_crawlers()
threads = [CrawlThread(i) for i in funcs]
for i in threads:
i.start()
for i in threads:
i.join()
results.append(i.get_result())
for res in results:
logger.info('Received %d proxy data from a spider.' % len(res))
for x in res:
t_res.add(x)
self.__proxyList.extend(t_res)
time.sleep(COLLECT_TIME_GAP)
|
from episod_lib import get_all_episod, printe
# --------------------------------------------------------
def epirule_equals(source, new):
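    # True when both episode sequences have equal length and identical symbols at every position.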
    if len(source) != len(new):
return False
for index in range(len(source)):
src_sym = source[index]
new_sym = new[index]
        if src_sym != new_sym:
return False
return True
def epirule_reversed(source, new):
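    # True when new is the exact reverse of source but not identical to it (excludes palindromes).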
equal = epirule_equals(source, new)
reverse = epirule_equals(list(reversed(source)), new)
return reverse and not equal
# ----------------------------------------------
rules = {
'equals':epirule_equals,
'reversed':epirule_reversed
}
# --------------------------------------------------------
sequence = 'abLLOabOLLab'
episods = get_all_episod(sequence)
for from_i in range(len(episods)):
for to_i in range(from_i+1, len(episods)):
source = episods[from_i]
new = episods[to_i]
for rule, meth in rules.items():
if meth(source,new):
print(rule,source,new)
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
@interface CDVInAppBrowserOptions : NSObject {}
@property (nonatomic, assign) BOOL location;
@property (nonatomic, assign) BOOL toolbar;
@property (nonatomic, copy) NSString* closebuttoncaption;
@property (nonatomic, copy) NSString* closebuttoncolor;
@property (nonatomic, assign) BOOL lefttoright;
@property (nonatomic, copy) NSString* toolbarposition;
@property (nonatomic, copy) NSString* toolbarcolor;
@property (nonatomic, assign) BOOL toolbartranslucent;
@property (nonatomic, assign) BOOL hidenavigationbuttons;
@property (nonatomic, copy) NSString* navigationbuttoncolor;
@property (nonatomic, assign) BOOL cleardata;
@property (nonatomic, assign) BOOL clearcache;
@property (nonatomic, assign) BOOL clearsessioncache;
@property (nonatomic, assign) BOOL hidespinner;
@property (nonatomic, copy) NSString* presentationstyle;
@property (nonatomic, copy) NSString* transitionstyle;
@property (nonatomic, assign) BOOL enableviewportscale;
@property (nonatomic, assign) BOOL mediaplaybackrequiresuseraction;
@property (nonatomic, assign) BOOL allowinlinemediaplayback;
@property (nonatomic, assign) BOOL hidden;
@property (nonatomic, assign) BOOL disallowoverscroll;
@property (nonatomic, copy) NSString* beforeload;
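// Parses the comma-separated "key=value" option string passed to the InAppBrowser's open() call.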
+ (CDVInAppBrowserOptions*)parseOptions:(NSString*)options;
+ (NSMutableURLRequest*)createRequest:(NSURL*)url headers:(NSString*)headers;
@end
|
# -*- coding: utf-8 -*-
import numpy as np
from .signal_formatpeaks import _signal_formatpeaks_sanitize
from .signal_interpolate import signal_interpolate
def signal_period(peaks, sampling_rate=1000, desired_length=None, interpolation_order="cubic"):
"""Calculate signal period from a series of peaks.
Parameters
----------
peaks : list, array, DataFrame, Series or dict
The samples at which the peaks occur. If an array is passed in, it is assumed that it was obtained
with `signal_findpeaks()`. If a DataFrame is passed in, it is assumed it is of the same length as
the input signal in which occurrences of R-peaks are marked as "1", with such containers obtained
with e.g., ecg_findpeaks() or rsp_findpeaks().
sampling_rate : int
The sampling frequency of the signal that contains peaks (in Hz, i.e., samples/second).
Defaults to 1000.
desired_length : int
By default, the returned signal rate has the same number of elements as the raw signal. If set
to an integer, the returned signal rate will be interpolated between peaks over `desired_length`
samples. Has no effect if a DataFrame is passed in as the `signal` argument. Defaults to None.
interpolation_order : str
Order used to interpolate the rate between peaks. See `signal_interpolate()`.
Returns
-------
array
A vector containing the period.
See Also
--------
signal_findpeaks, signal_fixpeaks, signal_plot
Examples
--------
>>> import neurokit2 as nk
>>>
>>> signal = nk.signal_simulate(duration=10, sampling_rate=1000, frequency=1)
>>> info = nk.signal_findpeaks(signal)
>>>
>>> period = nk.signal_period(peaks=info["Peaks"])
>>> nk.signal_plot(period)
"""
peaks, desired_length = _signal_formatpeaks_sanitize(peaks, desired_length)
# Sanity checks.
if len(peaks) <= 3:
print(
"NeuroKit warning: _signal_formatpeaks(): too few peaks detected"
" to compute the rate. Returning empty vector."
)
return np.full(desired_length, np.nan)
# Calculate period in sec, based on peak to peak difference and make sure
# that rate has the same number of elements as peaks (important for
# interpolation later) by prepending the mean of all periods.
period = np.ediff1d(peaks, to_begin=0) / sampling_rate
period[0] = np.mean(period[1:])
# Interpolate all statistics to desired length.
if desired_length != np.size(peaks):
period = signal_interpolate(peaks, period, desired_length=desired_length, method=interpolation_order)
# TODO: extending beyond range, interpolation might cause period = 0
return period
|
"""
Tests for DataFrame.mask; tests DataFrame.where as a side-effect.
"""
import numpy as np
from pandas import DataFrame, isna
import pandas._testing as tm
class TestDataFrameMask:
def test_mask(self):
df = DataFrame(np.random.randn(5, 3))
cond = df > 0
rs = df.where(cond, np.nan)
tm.assert_frame_equal(rs, df.mask(df <= 0))
tm.assert_frame_equal(rs, df.mask(~cond))
other = DataFrame(np.random.randn(5, 3))
rs = df.where(cond, other)
tm.assert_frame_equal(rs, df.mask(df <= 0, other))
tm.assert_frame_equal(rs, df.mask(~cond, other))
# see GH#21891
df = DataFrame([1, 2])
res = df.mask([[True], [False]])
exp = DataFrame([np.nan, 2])
tm.assert_frame_equal(res, exp)
def test_mask_inplace(self):
# GH#8801
df = DataFrame(np.random.randn(5, 3))
cond = df > 0
rdf = df.copy()
return_value = rdf.where(cond, inplace=True)
assert return_value is None
tm.assert_frame_equal(rdf, df.where(cond))
tm.assert_frame_equal(rdf, df.mask(~cond))
rdf = df.copy()
return_value = rdf.where(cond, -df, inplace=True)
assert return_value is None
tm.assert_frame_equal(rdf, df.where(cond, -df))
tm.assert_frame_equal(rdf, df.mask(~cond, -df))
def test_mask_edge_case_1xN_frame(self):
# GH#4071
df = DataFrame([[1, 2]])
res = df.mask(DataFrame([[True, False]]))
expec = DataFrame([[np.nan, 2]])
tm.assert_frame_equal(res, expec)
def test_mask_callable(self):
# GH#12533
df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
result = df.mask(lambda x: x > 4, lambda x: x + 1)
exp = DataFrame([[1, 2, 3], [4, 6, 7], [8, 9, 10]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, df.mask(df > 4, df + 1))
# return ndarray and scalar
result = df.mask(lambda x: (x % 2 == 0).values, lambda x: 99)
exp = DataFrame([[1, 99, 3], [99, 5, 99], [7, 99, 9]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, df.mask(df % 2 == 0, 99))
# chain
result = (df + 2).mask(lambda x: x > 8, lambda x: x + 10)
exp = DataFrame([[3, 4, 5], [6, 7, 8], [19, 20, 21]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, (df + 2).mask((df + 2) > 8, (df + 2) + 10))
def test_mask_dtype_bool_conversion(self):
# GH#3733
df = DataFrame(data=np.random.randn(100, 50))
df = df.where(df > 0) # create nans
bools = df > 0
mask = isna(df)
expected = bools.astype(object).mask(mask)
result = bools.mask(mask)
tm.assert_frame_equal(result, expected)
def test_mask_try_cast_deprecated(frame_or_series):
obj = DataFrame(np.random.randn(4, 3))
if frame_or_series is not DataFrame:
obj = obj[0]
mask = obj > 0
with tm.assert_produces_warning(FutureWarning):
# try_cast keyword deprecated
obj.mask(mask, -1, try_cast=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2015,2016,2017,2018 Daniel Fernandez (daniel@dfmd.mx), Saul Pilatowsky (saul@dfmd.mx)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Find us at: http://www.dfmd.mx
#
# Wifi module based on:
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
import sys
if sys.version_info[0] < 3:
print('Error: The client needs to run on python3')
quit()
if len(sys.argv) > 1 and sys.argv[1] == '-setup':
from app import setup
setup.install_dependencies()
quit()
from threading import Thread
import asyncio
import logging
import os
import json
files = {}
current_path = os.path.dirname(os.path.abspath(__file__))
config_folder = os.path.join(current_path,'config')
if not os.path.isdir(config_folder): os.makedirs(config_folder)
files['data_file'] = os.path.join(config_folder,"data.json")
files['version_file'] = os.path.join(config_folder,"client_version")
files['cut_queue_folder'] = os.path.join(config_folder,"cutqueue")
files['chilipeppr_folder'] = os.path.join(current_path,"chilipeppr_serial")
files['current_path'] = current_path
with open(os.path.join(config_folder,'file_paths.json'),'w') as f: f.write(json.dumps(files))
from app import websocket_server
from app import websocket_client
from app import utils
from app import wifi_module as wifi
from app import usb_module
logger = logging.getLogger()
log_path = os.path.join(os.path.dirname( __file__ ), "log.log")
fh = logging.FileHandler(log_path)
fh.setLevel(logging.ERROR)
formatter = logging.Formatter("%(asctime)s:%(name)s:"
"%(levelname)s:%(message)s")
fh.setFormatter(formatter)
console = logging.StreamHandler(sys.stdout)
console.setLevel(logging.INFO)
console.setFormatter(formatter)
logger.addHandler(fh)
logger.addHandler(console)
utils.load_data()
wifi.check_interfaces_file()
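# Schedule the websocket server, websocket client and USB watcher concurrently on one asyncio event loop.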
ev = asyncio.get_event_loop()
asyncio.ensure_future(websocket_server.run())
asyncio.ensure_future(websocket_client.run())
asyncio.ensure_future(usb_module.run())
ev.set_debug(False)
ev.run_forever()
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'OpinsysAuthProvider.school'
db.add_column('opinsys_opinsysauthprovider', 'school', self.gf('django.db.models.fields.CharField')(default='', max_length=200), keep_default=False)
def backwards(self, orm):
# Deleting field 'OpinsysAuthProvider.school'
db.delete_column('opinsys_opinsysauthprovider', 'school')
models = {
'dreamuserdb.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organisation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_providers'", 'to': "orm['dreamuserdb.Organisation']"})
},
'dreamuserdb.organisation': {
'Meta': {'object_name': 'Organisation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'opinsys.opinsysauthprovider': {
'Meta': {'object_name': 'OpinsysAuthProvider', '_ormbases': ['dreamuserdb.AuthProvider']},
'authprovider_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['dreamuserdb.AuthProvider']", 'unique': 'True', 'primary_key': 'True'}),
'dc': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'school': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['opinsys']
|
export default{
label:'margin-vertical',
isResponsive:true,
prefix:'my',
isStepAsLabel:true,
steps:['0','1','2','3','4','5']
}
|
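// Unicode whitespace code points: space, NBSP, Ogham space mark, Mongolian vowel separator, en/em and thin spaces, line/paragraph separators, narrow NBSP, medium mathematical space, and ideographic space.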
module.exports=[' ','\xA0','\u1680','\u180E','\u2000','\u2001','\u2002','\u2003','\u2004','\u2005','\u2006','\u2007','\u2008','\u2009','\u200A','\u2028','\u2029','\u202F','\u205F','\u3000']
|
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var core_1 = require('@angular/core');
var range_length_1 = require('./directives/range-length');
var min_1 = require('./directives/min');
var max_1 = require('./directives/max');
var range_1 = require('./directives/range');
var digits_1 = require('./directives/digits');
var number_1 = require('./directives/number');
var url_1 = require('./directives/url');
var email_1 = require('./directives/email');
var date_1 = require('./directives/date');
var min_date_1 = require('./directives/min-date');
var max_date_1 = require('./directives/max-date');
var date_iso_1 = require('./directives/date-iso');
var credit_card_1 = require('./directives/credit-card');
var json_1 = require('./directives/json');
var base64_1 = require('./directives/base64');
var phone_1 = require('./directives/phone');
var uuid_1 = require('./directives/uuid');
var equal_1 = require('./directives/equal');
var equal_to_1 = require('./directives/equal-to');
exports.CUSTOM_FORM_DIRECTIVES = [
range_length_1.RangeLengthValidator,
min_1.MinValidator,
max_1.MaxValidator,
range_1.RangeValidator,
digits_1.DigitsValidator,
number_1.NumberValidator,
url_1.UrlValidator,
email_1.EmailValidator,
date_1.DateValidator,
min_date_1.MinDateValidator,
max_date_1.MaxDateValidator,
date_iso_1.DateISOValidator,
credit_card_1.CreditCardValidator,
json_1.JSONValidator,
base64_1.Base64Validator,
phone_1.PhoneValidator,
uuid_1.UUIDValidator,
equal_1.EqualValidator,
equal_to_1.EqualToValidator
];
var CustomFormsModule = (function () {
function CustomFormsModule() {
}
CustomFormsModule = __decorate([
core_1.NgModule({
declarations: [exports.CUSTOM_FORM_DIRECTIVES],
exports: [exports.CUSTOM_FORM_DIRECTIVES]
}),
__metadata('design:paramtypes', [])
], CustomFormsModule);
return CustomFormsModule;
}());
exports.CustomFormsModule = CustomFormsModule;
//# sourceMappingURL=directives.js.map
|
import subprocess
import argparse
def get_args():
params = argparse.ArgumentParser()
    params.add_argument('--rom', help="path to the modified BIOS ROM", required=True)
    params.add_argument('--extractor', help='path to the modified UEFIExtract binary')
return params.parse_args()
def get_digests(cmd):
    # Parse the digest strings out of the UEFIExtract report output
dict_digests = {}
output = subprocess.check_output(cmd, shell=True)
output = output.decode('utf-8').split('\n')
str_ibb_digest_new = list(filter(lambda a: 'BG-Protect Hash Calculated: ' in a, output))[0].split(": ")[1].lower()
bytes_ibb_digest_new = bytearray.fromhex(str_ibb_digest_new)
dict_digests['str_ibb_new'] = str_ibb_digest_new
dict_digests['bytes_ibb_new'] = bytes_ibb_digest_new
str_ibb_digest_old = list(filter(lambda a: 'BG-Protect Hash Existing: ' in a, output))[0].split(": ")[1].lower()
bytes_ibb_digest_old = bytearray.fromhex(str_ibb_digest_old)
dict_digests['str_ibb_old'] = str_ibb_digest_old
dict_digests['bytes_ibb_old'] = bytes_ibb_digest_old
str_ami_digest_new = list(filter(lambda a: 'AMI Hash Calculated: ' in a, output))[0].split(": ")[1].lower()
bytes_ami_digest_new = bytearray.fromhex(str_ami_digest_new)
dict_digests['str_ami_new'] = str_ami_digest_new
dict_digests['bytes_ami_new'] = bytes_ami_digest_new
str_ami_digest_old = list(filter(lambda a: 'AMI Hash Existing: ' in a, output))[0].split(": ")[1].lower()
bytes_ami_digest_old = bytearray.fromhex(str_ami_digest_old)
dict_digests['str_ami_old'] = str_ami_digest_old
dict_digests['bytes_ami_old'] = bytes_ami_digest_old
return dict_digests
def replace_digest(b_old_digest, b_new_digest, rom_path):
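    # Rewrite the ROM file in place, replacing every occurrence of the old digest bytes with the new digest.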
with open(rom_path, "rb") as bf:
rom_bytes = bf.read()
rom_bytes_new_crc = rom_bytes.replace(b_old_digest, b_new_digest)
with open(rom_path, "wb") as bf:
bf.write(rom_bytes_new_crc)
def main():
args = get_args()
# Build digest extract command
UEFIExtract = args.extractor
ROM_Path = args.rom
UEFIExtract_CMD = "{uefiextract_cmd} {rom} report".format(uefiextract_cmd=UEFIExtract, rom=ROM_Path)
# get digests
digest = get_digests(UEFIExtract_CMD)
while digest['str_ami_new'] != digest['str_ami_old'] or digest['str_ibb_new'] != digest['str_ibb_old']:
if digest['str_ami_new'] != digest['str_ami_old']:
print("AMI Hash should be {}, but detected as {} in rom".format(digest['str_ami_new'], digest['str_ami_old']))
print("Replacing ...")
replace_digest(digest['bytes_ami_old'], digest['bytes_ami_new'], ROM_Path)
if digest['str_ibb_new'] != digest['str_ibb_old']:
print("BG-Protected Hash should be {}, but detected as {} in rom".format(digest['str_ibb_new'], digest['str_ibb_old']))
print("Replacing ...")
replace_digest(digest['bytes_ibb_old'], digest['bytes_ibb_new'], ROM_Path)
digest = get_digests(UEFIExtract_CMD)
else:
print("AMI and BG-Protected Hash are correct!")
print("The updated BIOS file could be found at {}".format(ROM_Path))
if __name__ == '__main__':
main()
|
var structsys__fstatat__args =
[
[ "buf", "d9/d5d/structsys__fstatat__args.html#a6783f510ae899c37a1c99d55edc6461d", null ],
[ "buf_l_", "d9/d5d/structsys__fstatat__args.html#a2a2e0cc51edb4a3c726e64af9635e18b", null ],
[ "buf_r_", "d9/d5d/structsys__fstatat__args.html#a23029210abe9ad0de139937db7d40161", null ],
[ "fd", "d9/d5d/structsys__fstatat__args.html#ac79c76decad5ad13a3f3750b3822fdb5", null ],
[ "fd_l_", "d9/d5d/structsys__fstatat__args.html#ae54500cef5cdc07d3a5720d5ea9a8fa6", null ],
[ "fd_r_", "d9/d5d/structsys__fstatat__args.html#a318616b75e88c2ec33d68568e99041fc", null ],
[ "flag", "d9/d5d/structsys__fstatat__args.html#a55db89a275f8dcbbae725bd42b554bf9", null ],
[ "flag_l_", "d9/d5d/structsys__fstatat__args.html#aa327565820410bfb6aff92025aab6559", null ],
[ "flag_r_", "d9/d5d/structsys__fstatat__args.html#a7ec5b82018f89443c2ddb04e0fce2dca", null ],
[ "path", "d9/d5d/structsys__fstatat__args.html#a724d992a283dd8b6400787095031f365", null ],
[ "path_l_", "d9/d5d/structsys__fstatat__args.html#ab07df550e759339207d4f2780f87978a", null ],
[ "path_r_", "d9/d5d/structsys__fstatat__args.html#aa13a9609a08a163082084a835221f008", null ]
];
|
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os.path
import gzip
import pytest
from basic_modules.metadata import Metadata
from tool.gem_indexer import gemIndexerTool
from tool.tb_full_mapping import tbFullMappingTool
def generate_gem():
"""
Create the GEM file
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
genome_fa = resource_path + "tb.Human.GCA_000001405.22.fasta"
genome_gem_fa = resource_path + "tb.Human.GCA_000001405.22_gem.fasta"
with gzip.open(genome_fa + '.gz', 'rb') as fgz_in:
with open(genome_fa, 'wb') as f_out:
f_out.write(fgz_in.read())
genome_gem_idx = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem.gz"
input_files = {
"genome": genome_fa
}
output_files = {
"index": genome_gem_idx,
"genome_gem": genome_gem_fa
}
metadata = {
"genome": Metadata(
"Assembly", "fasta", genome_fa, None,
{'assembly': 'test'}),
}
print(input_files, output_files)
gem_it = gemIndexerTool({"execution": resource_path})
gem_it.run(input_files, metadata, output_files)
@pytest.mark.hic
def test_tb_extract_fastq():
"""
Extract the compressed FASTQ files
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
fastq_file_1 = resource_path + "tb.Human.SRR1658573_1.fastq"
fastq_file_2 = resource_path + "tb.Human.SRR1658573_2.fastq"
gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
if not os.path.isfile(gem_file):
generate_gem()
    with gzip.open(gem_file + '.gz', 'rb') as fgz_in:
        with open(gem_file, 'wb') as f_out:
            f_out.write(fgz_in.read())
    with gzip.open(fastq_file_1 + '.gz', 'rb') as fgz_in:
        with open(fastq_file_1, 'wb') as f_out:
            f_out.write(fgz_in.read())
    with gzip.open(fastq_file_2 + '.gz', 'rb') as fgz_in:
        with open(fastq_file_2, 'wb') as f_out:
            f_out.write(fgz_in.read())
assert os.path.isfile(fastq_file_1) is True
assert os.path.getsize(fastq_file_1) > 0
assert os.path.isfile(fastq_file_2) is True
assert os.path.getsize(fastq_file_2) > 0
@pytest.mark.hic
def test_tb_full_mapping_frag_01():
"""
Test case to ensure that the fragment based full mapping works as expected
for the first paired end
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
fastq_file_1 = resource_path + "tb.Human.SRR1658573_1.fastq"
files = [
gem_file,
fastq_file_1
]
metadata = {
'assembly': 'test',
'enzyme_name': 'MboI',
'windows': None
}
    print(files[1])
tfm1 = tbFullMappingTool()
tfm1_files, tfm1_meta = tfm1.run(files, [], metadata) # pylint: disable=unused-variable
map_frag = resource_path + "tb.Human.SRR1658573_1_frag.map"
map_full = resource_path + "tb.Human.SRR1658573_1_full.map"
assert os.path.isfile(map_frag) is True
assert os.path.getsize(map_frag) > 0
assert os.path.isfile(map_full) is True
assert os.path.getsize(map_full) > 0
@pytest.mark.hic
def test_tb_full_mapping_frag_02():
"""
Test case to ensure that the fragment based full mapping works as expected
for the second paired end
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
fastq_file_2 = resource_path + "tb.Human.SRR1658573_2.fastq"
files = [
gem_file,
fastq_file_2
]
metadata = {
'assembly': 'test',
'enzyme_name': 'MboI',
'windows': None
}
    print(files[1])
tfm2 = tbFullMappingTool()
tfm2_files, tfm2_meta = tfm2.run(files, [], metadata) # pylint: disable=unused-variable
map_frag = resource_path + "tb.Human.SRR1658573_2_frag.map"
map_full = resource_path + "tb.Human.SRR1658573_2_full.map"
assert os.path.isfile(map_frag) is True
assert os.path.getsize(map_frag) > 0
assert os.path.isfile(map_full) is True
assert os.path.getsize(map_full) > 0
@pytest.mark.hic
def test_tb_full_mapping_iter_01():
"""
Test case to ensure that the iterative based full mapping works as expected
for the first paired end
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
fastq_file_1 = resource_path + "tb.Human.SRR1658573_1.fastq"
files = [
gem_file,
fastq_file_1
]
metadata = {
'assembly': 'test',
# 'enzyme_name': 'MboI',
'windows': ((1, 25), (1, 50), (1, 75), (1, 100))
}
    print(files[1])
tfm1 = tbFullMappingTool()
tfm1_files, tfm1_meta = tfm1.run(files, [], metadata) # pylint: disable=unused-variable
map25 = resource_path + "tb.Human.SRR1658573_1_full_1-25.map"
map50 = resource_path + "tb.Human.SRR1658573_1_full_1-50.map"
map75 = resource_path + "tb.Human.SRR1658573_1_full_1-75.map"
map100 = resource_path + "tb.Human.SRR1658573_1_full_1-100.map"
assert os.path.isfile(map25) is True
assert os.path.getsize(map25) > 0
assert os.path.isfile(map50) is True
assert os.path.getsize(map50) > 0
assert os.path.isfile(map75) is True
assert os.path.getsize(map75) > 0
assert os.path.isfile(map100) is True
assert os.path.getsize(map100) > 0
@pytest.mark.hic
def test_tb_full_mapping_iter_02():
"""
Test case to ensure that the iterative based full mapping works as expected
for the second paired end
"""
resource_path = os.path.join(os.path.dirname(__file__), "data/")
gem_file = resource_path + "tb.Human.GCA_000001405.22_gem.fasta.gem"
fastq_file_2 = resource_path + "tb.Human.SRR1658573_2.fastq"
files = [
gem_file,
fastq_file_2
]
metadata = {
'assembly': 'test',
# 'enzyme_name': 'MboI',
'windows': ((1, 25), (1, 50), (1, 75), (1, 100))
}
    print(files[1])
tfm2 = tbFullMappingTool()
tfm2_files, tfm2_meta = tfm2.run(files, [], metadata) # pylint: disable=unused-variable
map25 = resource_path + "tb.Human.SRR1658573_2_full_1-25.map"
map50 = resource_path + "tb.Human.SRR1658573_2_full_1-50.map"
map75 = resource_path + "tb.Human.SRR1658573_2_full_1-75.map"
map100 = resource_path + "tb.Human.SRR1658573_2_full_1-100.map"
assert os.path.isfile(map25) is True
assert os.path.getsize(map25) > 0
assert os.path.isfile(map50) is True
assert os.path.getsize(map50) > 0
assert os.path.isfile(map75) is True
assert os.path.getsize(map75) > 0
assert os.path.isfile(map100) is True
assert os.path.getsize(map100) > 0
|
(self.webpackChunkdocusaurus_template=self.webpackChunkdocusaurus_template||[]).push([[28775],{3905:function(e,t,r){"use strict";r.d(t,{Zo:function(){return u},kt:function(){return m}});var n=r(67294);function o(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function i(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?a(Object(r),!0).forEach((function(t){o(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):a(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function s(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},a=Object.keys(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(n=0;n<a.length;n++)r=a[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var p=n.createContext({}),l=function(e){var t=n.useContext(p),r=t;return e&&(r="function"==typeof e?e(t):i(i({},t),e)),r},u=function(e){var t=l(e.components);return n.createElement(p.Provider,{value:t},e.children)},c={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},d=n.forwardRef((function(e,t){var r=e.components,o=e.mdxType,a=e.originalType,p=e.parentName,u=s(e,["components","mdxType","originalType","parentName"]),d=l(r),m=o,h=d["".concat(p,".").concat(m)]||d[m]||c[m]||a;return r?n.createElement(h,i(i({ref:t},u),{},{components:r})):n.createElement(h,i({ref:t},u))}));function m(e,t){var r=arguments,o=t&&t.mdxType;if("string"==typeof e||o){var a=r.length,i=new Array(a);i[0]=d;var s={};for(var p in t)hasOwnProperty.call(t,p)&&(s[p]=t[p]);s.originalType=e,s.mdxType="string"==typeof e?e:o,i[1]=s;for(var l=2;l<a;l++)i[l]=r[l];return n.createElement.apply(null,i)}return n.createElement.apply(null,r)}d.displayName="MDXCreateElement"},33814:function(e,t,r){"use strict";r.r(t),r.d(t,{frontMatter:function(){return s},contentTitle:function(){return p},metadata:function(){return l},toc:function(){return u},default:function(){return d}});var n=r(22122),o=r(19756),a=(r(67294),r(3905)),i=["components"],s={id:"sdk",title:"Software Development Kits (SDKs)",sidebar_label:"Overview"},p=void 0,l={unversionedId:"sdk",id:"version-v1.8/sdk",isDocsHomePage:!1,title:"Software Development Kits (SDKs)",description:"All SDKs use automated code generation provided by",source:"@site/versioned_docs/version-v1.8/sdk.md",sourceDirName:".",slug:"/sdk",permalink:"/hydra/docs/v1.8/sdk",editUrl:"https://github.com/ory/hydra/edit/master/docs/versioned_docs/version-v1.8/sdk.md",tags:[],version:"v1.8",lastUpdatedBy:"aeneasr",lastUpdatedAt:1601676726,formattedLastUpdatedAt:"10/2/2020",frontMatter:{id:"sdk",title:"Software Development Kits (SDKs)",sidebar_label:"Overview"},sidebar:"version-v1.8/docs",previous:{title:"Logout not Working as Expected",permalink:"/hydra/docs/v1.8/debug/logout"},next:{title:"Go",permalink:"/hydra/docs/v1.8/sdk/go"}},u=[],c={toc:u};function d(e){var t=e.components,r=(0,o.Z)(e,i);return(0,a.kt)("wrapper",(0,n.Z)({},c,r,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"All SDKs use automated code generation 
provided by\n",(0,a.kt)("a",{parentName:"p",href:"https://github.com/OpenAPITools/openapi-generator"},(0,a.kt)("inlineCode",{parentName:"a"},"openapi-generator")),".\nUnfortunately, ",(0,a.kt)("inlineCode",{parentName:"p"},"openapi-generator")," has serious breaking changes in the generated\ncode when upgrading versions. Therefore, we do not make backwards compatibility\npromises with regards to the generated SDKs. We hope to improve this process in\nthe future."),(0,a.kt)("p",null,"Before you check out the SDKs, head over to the ",(0,a.kt)("a",{parentName:"p",href:"/hydra/docs/v1.8/reference/api"},"REST API"),"\ndocumentation which includes code samples for common programming languages for\neach REST endpoint."),(0,a.kt)("blockquote",null,(0,a.kt)("p",{parentName:"blockquote"},"The SDKs do not provide a good API for dealing with OAuth 2.0 Flows (e.g.\nAuthorize Code Flow, Refresh Flow, ...). But there are tons of\n",(0,a.kt)("a",{parentName:"p",href:"https://oauth.net/code/"},"libraries available for consuming OAuth 2.0"),". Do not\nwrite your own OAuth 2.0 Library!")),(0,a.kt)("p",null,"We publish our SDKs for popular languages in their respective package\nrepositories:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://pypi.org/project/ory-hydra-client/"},"Python")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://packagist.org/packages/ory/hydra-client"},"PHP")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://github.com/ory/hydra-client-go"},"Go")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://www.npmjs.com/package/@oryd/hydra-client"},"NodeJS")," (with TypeScript)"),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://search.maven.org/artifact/sh.ory.hydra/hydra-client"},"Java")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"https://rubygems.org/gems/ory-hydra-client"},"Ruby"))),(0,a.kt)("p",null,"We also provide more info for these SDKs:"),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"sdk/go"},"Golang")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("a",{parentName:"li",href:"sdk/js"},"JavaScript"))),(0,a.kt)("p",null,"Missing your programming language?\n",(0,a.kt)("a",{parentName:"p",href:"https://github.com/ory/hydra/issues"},"Create an issue")," and help us build, test\nand publish the SDK for your programming language!"))}d.isMDXComponent=!0}}]);
|
#!/usr/bin/env python3
# Print out all the codons for the sequence below in reading frame 1
# Use a 'for' loop
dna = 'ATAGCGAATATCTCTCATGAGAGGGAA'
"""
print(dna[0:3])
print(dna[3:6])
print(dna[6:9])
print(dna[9:12])
"""
# For loop prints out all codons in increments of 3
#for i in range(0, 27, 3): # 27 total characters, in increments of 3
# print(dna[i:i+3])
# Nested loop: prints the codons for all three reading frames
for f in range(3):
    print('frame', f)
    for i in range(f, len(dna) - 2, 3):  # start at the frame's offset
        codon = dna[i:i+3]
        print(codon)
"""
ATA
GCG
AAT
ATC
TCT
CAT
GAG
AGG
GAA
"""
|
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
var React = __importStar(require("react"));
var react_1 = require("react");
var react_final_form_1 = require("react-final-form");
var final_form_arrays_1 = __importDefault(require("final-form-arrays"));
var useInitializeFormWithRecord_1 = __importDefault(require("./useInitializeFormWithRecord"));
var useWarnWhenUnsavedChanges_1 = __importDefault(require("./useWarnWhenUnsavedChanges"));
var sanitizeEmptyValues_1 = __importDefault(require("./sanitizeEmptyValues"));
var getFormInitialValues_1 = __importDefault(require("./getFormInitialValues"));
var FormContext_1 = __importDefault(require("./FormContext"));
var react_redux_1 = require("react-redux");
var uiActions_1 = require("../actions/uiActions");
/**
* Wrapper around react-final-form's Form to handle redirection on submit,
* legacy defaultValue prop, and array inputs.
*
* Requires a render function, just like react-final-form
*
* @example
*
* const SimpleForm = props => (
* <FormWithRedirect
* {...props}
* render={formProps => <SimpleFormView {...formProps} />}
* />
* );
*
* @typedef {Object} Props the props you can use (other props are injected by Create or Edit)
* @prop {Object} initialValues
* @prop {Function} validate
* @prop {Function} save
* @prop {boolean} submitOnEnter
* @prop {string} redirect
* @prop {boolean} sanitizeEmptyValues
*
* @param {Prop} props
*/
var FormWithRedirect = function (_a) {
var debug = _a.debug, decorators = _a.decorators, defaultValue = _a.defaultValue, destroyOnUnregister = _a.destroyOnUnregister, form = _a.form, initialValues = _a.initialValues, initialValuesEqual = _a.initialValuesEqual, _b = _a.keepDirtyOnReinitialize, keepDirtyOnReinitialize = _b === void 0 ? true : _b, _c = _a.mutators, mutators = _c === void 0 ? final_form_arrays_1.default : _c, // FIXME see https://github.com/final-form/react-final-form/issues/704 and https://github.com/microsoft/TypeScript/issues/35771
record = _a.record, render = _a.render, save = _a.save, saving = _a.saving, _d = _a.subscription, subscription = _d === void 0 ? defaultSubscription : _d, validate = _a.validate, validateOnBlur = _a.validateOnBlur, version = _a.version, warnWhenUnsavedChanges = _a.warnWhenUnsavedChanges, _e = _a.sanitizeEmptyValues, shouldSanitizeEmptyValues = _e === void 0 ? true : _e, props = __rest(_a, ["debug", "decorators", "defaultValue", "destroyOnUnregister", "form", "initialValues", "initialValuesEqual", "keepDirtyOnReinitialize", "mutators", "record", "render", "save", "saving", "subscription", "validate", "validateOnBlur", "version", "warnWhenUnsavedChanges", "sanitizeEmptyValues"]);
var redirect = react_1.useRef(props.redirect);
var onSave = react_1.useRef(save);
// We don't use state here for two reasons:
    // 1. There is no way to execute code only after the state has been updated
// 2. We don't want the form to rerender when redirect is changed
var setRedirect = function (newRedirect) {
redirect.current = newRedirect;
};
/**
* A form can have several Save buttons. In case the user clicks on
* a Save button with a custom onSave handler, then on a second Save button
* without custom onSave handler, the user expects the default save
* handler (the one of the Form) to be called.
* That's why the SaveButton onClick calls setOnSave() with no parameters
* if it has no custom onSave, and why this function forces a default to
* save.
*/
var setOnSave = react_1.useCallback(function (newOnSave) {
typeof newOnSave === 'function'
? (onSave.current = newOnSave)
: (onSave.current = save);
}, [save]);
var formContextValue = react_1.useMemo(function () { return ({ setOnSave: setOnSave }); }, [setOnSave]);
var finalInitialValues = getFormInitialValues_1.default(initialValues, defaultValue, record);
var submit = function (values) {
        var finalRedirect = typeof redirect.current === 'undefined'
? props.redirect
: redirect.current;
if (shouldSanitizeEmptyValues) {
var sanitizedValues = sanitizeEmptyValues_1.default(finalInitialValues, values);
onSave.current(sanitizedValues, finalRedirect);
}
else {
onSave.current(values, finalRedirect);
}
};
return (React.createElement(FormContext_1.default.Provider, { value: formContextValue },
React.createElement(react_final_form_1.Form, { key: version, debug: debug, decorators: decorators, destroyOnUnregister: destroyOnUnregister, form: form, initialValues: finalInitialValues, initialValuesEqual: initialValuesEqual, keepDirtyOnReinitialize: keepDirtyOnReinitialize, mutators: mutators, onSubmit: submit, subscription: subscription, validate: validate, validateOnBlur: validateOnBlur, render: function (formProps) { return (React.createElement(FormView, __assign({}, props, formProps, { record: record, setRedirect: setRedirect, saving: formProps.submitting || saving, render: render, save: save, warnWhenUnsavedChanges: warnWhenUnsavedChanges }))); } })));
};
var defaultSubscription = {
submitting: true,
pristine: true,
valid: true,
invalid: true,
};
var FormView = function (_a) {
var render = _a.render, warnWhenUnsavedChanges = _a.warnWhenUnsavedChanges, setRedirect = _a.setRedirect, props = __rest(_a, ["render", "warnWhenUnsavedChanges", "setRedirect"]);
// if record changes (after a getOne success or a refresh), the form must be updated
useInitializeFormWithRecord_1.default(props.record);
useWarnWhenUnsavedChanges_1.default(warnWhenUnsavedChanges);
var dispatch = react_redux_1.useDispatch();
react_1.useEffect(function () {
dispatch(uiActions_1.setAutomaticRefresh(props.pristine));
}, [dispatch, props.pristine]);
var redirect = props.redirect, handleSubmit = props.handleSubmit;
/**
* We want to let developers define the redirection target from inside the form,
* e.g. in a <SaveButton redirect="list" />.
* This callback does two things: handle submit, and change the redirection target.
* The actual redirection is done in save(), passed by the main controller.
*
* If the redirection target doesn't depend on the button clicked, it's a
* better option to define it directly on the Form component. In that case,
* using handleSubmit() instead of handleSubmitWithRedirect is fine.
*
* @example
*
* <Button onClick={() => handleSubmitWithRedirect('edit')}>
* Save and edit
* </Button>
*/
var handleSubmitWithRedirect = react_1.useCallback(function (redirectTo) {
if (redirectTo === void 0) { redirectTo = redirect; }
setRedirect(redirectTo);
handleSubmit();
}, [setRedirect, redirect, handleSubmit]);
return render(__assign(__assign({}, props), { handleSubmitWithRedirect: handleSubmitWithRedirect }));
};
exports.default = FormWithRedirect;
|
import { useSelector } from "react-redux";
import { ProgressBar as Bar } from "react-step-progress-bar";
import styled from "styled-components";
import { selectCurrentStepPercentage } from "../../app/module/selectors";
import { Constants } from "../../data/constants";
const ProgressBar = (props) => {
const percent = useSelector(selectCurrentStepPercentage);
return (
<Container>
<StyledProgressBar
{...props}
percent={percent}
filledBackground={`linear-gradient(to right, ${Constants.COLOR.TEAL}, ${Constants.COLOR.GREEN})`}
width={"90%"}
/>
</Container>
);
};
export default ProgressBar;
const StyledProgressBar = styled(Bar)`
	box-shadow: 0 4px 4px rgba(0, 0, 0, 0.25);
color: white;
background-color: rgba(211, 211, 211, 0.8);
border-radius: 50%;
display: flex;
justify-content: center;
align-items: center;
`;
const Container = styled.div`
display: flex;
align-items: center;
justify-content: center;
margin-top: 10px;
`;
|
import versioneer
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.rst"), encoding="utf-8") as f:
long_description = f.read()
requires = [
"matplotlib>=2.0.0",
"pandas>=0.24",
"Cython",
"scipy<1.3",
"seaborn",
"scikit-learn>=0.21.3",
"statsmodels",
"natsort",
"anndata",
"numba",
"numpy",
"tables",
"xlsxwriter",
"loompy",
"docopt",
"setuptools",
"plotly",
"pybind11",
"joblib",
"scikit-misc",
"pyarrow",
"umap-learn>=0.3.9",
"lightgbm==2.2.1",
"python-igraph",
"MulticoreTSNE-modified",
"hnswlib",
"fisher-modified",
"louvain-github",
"leidenalg",
"forceatlas2-python",
"scplot"
]
setup(
name="pegasuspy",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description="scRNA-Seq analysis tools that scale to millions of cells",
long_description=long_description,
url="https://github.com/klarman-cell-observatory/pegasus",
author="Bo Li, Joshua Gould, Yiming Yang, Siranush Sarkizova",
author_email="sccloud@googlegroups.com, sccloud@broadinstitute.org",
classifiers=[ # https://pypi.python.org/pypi?%3Aaction=list_classifiers
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Framework :: Jupyter",
"Natural Language :: English",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Topic :: Scientific/Engineering :: Bio-Informatics",
],
keywords="single cell/nucleus genomics analysis",
packages=find_packages(),
install_requires=requires,
extras_require=dict(fitsne=["fitsne"], mkl=["mkl"]),
python_requires="~=3.5",
package_data={
"pegasus.annotate_cluster": [
"human_immune_cell_markers.json",
"mouse_immune_cell_markers.json",
"mouse_brain_cell_markers.json",
"human_brain_cell_markers.json",
],
"pegasus.check_sample_indexes": ["chromium-dna-sample-indexes-plate.json"],
},
entry_points={"console_scripts": ["pegasus=pegasus.__main__:main"]},
)
|
const types =
{
FIREBASE_INITIALIZED: "FIREBASE_INITIALIZED",
ATTACHED_ON_AUTH_STATE_CHANGE: "ATTACHED_ON_AUTH_STATE_CHANGE",
ON_AUTH_STATE_CHANGE: "ON_AUTH_STATE_CHANGE",
SIGN_IN: "SIGN_IN",
SIGN_UP: "SIGN_UP",
SIGN_OUT: "SIGN_OUT"
}
export default types;
|
/* Version 2.0, May 2020 */
/*jslint browser: true */
(function ($) {
"use strict";
function highlight(selector) {
$(selector).effect("highlight", {}, 2000);
}
function initToggleAnswers() {
var answerTitles = $(".admonition-answer .admonition-title");
$(".admonition-title ~ *", ".admonition-answer").toggle();
answerTitles.toggleClass("expanded");
answerTitles.on("click", function (event) {
var title = $(event.target),
content = $(".admonition-title ~ *", title.parent());
title.toggleClass("expanded");
content.slideToggle();
});
}
function initHighlightAnchoredSection() {
if (document.location.hash !== "") {
highlight(document.location.hash);
}
$('.rst-content a[href*="#"]').click(function () {
highlight($(this).attr("href"));
});
}
$(document).ready(function () {
initToggleAnswers();
initHighlightAnchoredSection();
});
}(window.jQuery));
|
import logging
import operator
import warnings
from haystack.backends import SQ
from haystack.constants import REPR_OUTPUT_SIZE, ITERATOR_LOAD_PER_QUERY, DEFAULT_OPERATOR
from haystack.exceptions import NotRegistered
class SearchQuerySet(object):
"""
Provides a way to specify search parameters and lazily load results.
Supports chaining (a la QuerySet) to narrow the search.
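
    Example (hypothetical ``Note`` model registered with the search site)::

        sqs = SearchQuerySet().filter(content='hello').models(Note)
        results = sqs.order_by('-pub_date')[:5]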
"""
def __init__(self, site=None, query=None):
if query is not None:
self.query = query
else:
from haystack import backend
self.query = backend.SearchQuery(site=site)
self._result_cache = []
self._result_count = None
self._cache_full = False
self._load_all = False
self._ignored_result_count = 0
self.log = logging.getLogger('haystack')
if site is not None:
self.site = site
else:
from haystack import site as main_site
self.site = main_site
def __getstate__(self):
"""
For pickling.
"""
        len(self)  # Force the result count to be evaluated before pickling.
obj_dict = self.__dict__.copy()
obj_dict['_iter'] = None
del obj_dict['site']
obj_dict['log'] = None
return obj_dict
def __setstate__(self, dict):
"""
For unpickling.
"""
self.__dict__ = dict
from haystack import site as main_site
self.site = main_site
self.log = logging.getLogger('haystack')
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE])
if len(self) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return repr(data)
def __len__(self):
if not self._result_count:
self._result_count = self.query.get_count()
# Some backends give weird, false-y values here. Convert to zero.
if not self._result_count:
self._result_count = 0
# This needs to return the actual number of hits, not what's in the cache.
return self._result_count - self._ignored_result_count
def __iter__(self):
if self._cache_is_full():
# We've got a fully populated cache. Let Python do the hard work.
return iter(self._result_cache)
return self._manual_iter()
def __and__(self, other):
if isinstance(other, EmptySearchQuerySet):
return other._clone()
combined = self._clone()
combined.query.combine(other.query, SQ.AND)
return combined
def __or__(self, other):
combined = self._clone()
if isinstance(other, EmptySearchQuerySet):
return combined
combined.query.combine(other.query, SQ.OR)
return combined
def _cache_is_full(self):
if not self.query.has_run():
return False
if len(self) <= 0:
return True
try:
self._result_cache.index(None)
return False
except ValueError:
# No ``None``s found in the results. Check the length of the cache.
return len(self._result_cache) > 0
def _manual_iter(self):
# If we're here, our cache isn't fully populated.
# For efficiency, fill the cache as we go if we run out of results.
# Also, this can't be part of the __iter__ method due to Python's rules
# about generator functions.
current_position = 0
current_cache_max = 0
while True:
if len(self._result_cache) > 0:
try:
current_cache_max = self._result_cache.index(None)
except ValueError:
current_cache_max = len(self._result_cache)
while current_position < current_cache_max:
yield self._result_cache[current_position]
current_position += 1
if self._cache_is_full():
raise StopIteration
# We've run out of results and haven't hit our limit.
# Fill more of the cache.
if not self._fill_cache(current_position, current_position + ITERATOR_LOAD_PER_QUERY):
raise StopIteration
def _fill_cache(self, start, end, **kwargs):
# Tell the query where to start from and how many we'd like.
self.query._reset()
self.query.set_limits(start, end)
results = self.query.get_results(**kwargs)
        if results is None or len(results) == 0:
return False
# Setup the full cache now that we know how many results there are.
# We need the ``None``s as placeholders to know what parts of the
# cache we have/haven't filled.
# Using ``None`` like this takes up very little memory. In testing,
# an array of 100,000 ``None``s consumed less than .5 Mb, which ought
# to be an acceptable loss for consistent and more efficient caching.
if len(self._result_cache) == 0:
self._result_cache = [None for i in xrange(self.query.get_count())]
if start is None:
start = 0
if end is None:
end = self.query.get_count()
to_cache = self.post_process_results(results)
# Assign by slice.
self._result_cache[start:start + len(to_cache)] = to_cache
return True
def post_process_results(self, results):
to_cache = []
# Check if we wish to load all objects.
if self._load_all:
original_results = []
models_pks = {}
loaded_objects = {}
# Remember the search position for each result so we don't have to resort later.
for result in results:
original_results.append(result)
models_pks.setdefault(result.model, []).append(result.pk)
# Load the objects for each model in turn.
for model in models_pks:
try:
loaded_objects[model] = self.site.get_index(model).read_queryset().in_bulk(models_pks[model])
except NotRegistered:
self.log.warning("Model not registered with search site '%s.%s'." % (self.app_label, self.model_name))
# Revert to old behaviour
loaded_objects[model] = model._default_manager.in_bulk(models_pks[model])
for result in results:
if self._load_all:
# We have to deal with integer keys being cast from strings
model_objects = loaded_objects.get(result.model, {})
if not result.pk in model_objects:
try:
result.pk = int(result.pk)
except ValueError:
pass
try:
result._object = model_objects[result.pk]
except KeyError:
# The object was either deleted since we indexed or should
# be ignored; fail silently.
self._ignored_result_count += 1
continue
to_cache.append(result)
return to_cache
def __getitem__(self, k):
"""
Retrieves an item or slice from the set of results.
"""
if not isinstance(k, (slice, int, long)):
raise TypeError
assert ((not isinstance(k, slice) and (k >= 0))
or (isinstance(k, slice) and (k.start is None or k.start >= 0)
and (k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
# Remember if it's a slice or not. We're going to treat everything as
        # a slice to simplify the logic and will `.pop()` at the end as needed.
if isinstance(k, slice):
is_slice = True
start = k.start
if k.stop is not None:
bound = int(k.stop)
else:
bound = None
else:
is_slice = False
start = k
bound = k + 1
        # We need to check whether we need to populate more of the cache.
if len(self._result_cache) <= 0 or (None in self._result_cache[start:bound] and not self._cache_is_full()):
try:
self._fill_cache(start, bound)
except StopIteration:
# There's nothing left, even though the bound is higher.
pass
# Cache should be full enough for our needs.
if is_slice:
return self._result_cache[start:bound]
else:
return self._result_cache[start]
# Methods that return a SearchQuerySet.
def all(self):
"""Returns all results for the query."""
return self._clone()
def none(self):
"""Returns all results for the query."""
return self._clone(klass=EmptySearchQuerySet)
def filter(self, *args, **kwargs):
"""Narrows the search based on certain attributes and the default operator."""
if DEFAULT_OPERATOR == 'OR':
return self.filter_or(*args, **kwargs)
else:
return self.filter_and(*args, **kwargs)
def exclude(self, *args, **kwargs):
"""Narrows the search by ensuring certain attributes are not included."""
clone = self._clone()
clone.query.add_filter(~SQ(*args, **kwargs))
return clone
def filter_and(self, *args, **kwargs):
"""Narrows the search by looking for (and including) certain attributes."""
clone = self._clone()
clone.query.add_filter(SQ(*args, **kwargs))
return clone
def filter_or(self, *args, **kwargs):
"""Narrows the search by ensuring certain attributes are not included."""
clone = self._clone()
clone.query.add_filter(SQ(*args, **kwargs), use_or=True)
return clone
def order_by(self, *args):
"""Alters the order in which the results should appear."""
clone = self._clone()
for field in args:
clone.query.add_order_by(field)
return clone
def highlight(self):
"""Adds highlighting to the results."""
clone = self._clone()
clone.query.add_highlight()
return clone
def models(self, *models):
"""Accepts an arbitrary number of Model classes to include in the search."""
clone = self._clone()
for model in models:
if not model in self.site.get_indexed_models():
warnings.warn('The model %r is not registered for search.' % model)
clone.query.add_model(model)
return clone
def result_class(self, klass):
"""
Allows specifying a different class to use for results.
Overrides any previous usages. If ``None`` is provided, Haystack will
revert back to the default ``SearchResult`` object.
"""
clone = self._clone()
clone.query.set_result_class(klass)
return clone
def boost(self, term, boost):
"""Boosts a certain aspect of the query."""
clone = self._clone()
clone.query.add_boost(term, boost)
return clone
def facet(self, field):
"""Adds faceting to a query for the provided field."""
clone = self._clone()
clone.query.add_field_facet(field)
return clone
def date_facet(self, field, start_date, end_date, gap_by, gap_amount=1):
"""Adds faceting to a query for the provided field by date."""
clone = self._clone()
clone.query.add_date_facet(field, start_date, end_date, gap_by, gap_amount=gap_amount)
return clone
def query_facet(self, field, query):
"""Adds faceting to a query for the provided field with a custom query."""
clone = self._clone()
clone.query.add_query_facet(field, query)
return clone
def narrow(self, query):
"""Pushes existing facet choices into the search."""
clone = self._clone()
clone.query.add_narrow_query(query)
return clone
def raw_search(self, query_string, **kwargs):
"""Passes a raw query directly to the backend."""
clone = self._clone()
clone.query.raw_search(query_string, **kwargs)
return clone
def load_all(self):
"""Efficiently populates the objects in the search results."""
clone = self._clone()
clone._load_all = True
return clone
def auto_query(self, query_string):
"""
Performs a best guess constructing the search query.
This method is somewhat naive but works well enough for the simple,
common cases.
"""
clone = self._clone()
# Pull out anything wrapped in quotes and do an exact match on it.
open_quote_position = None
non_exact_query = query_string
for offset, char in enumerate(query_string):
if char == '"':
                if open_quote_position is not None:
current_match = non_exact_query[open_quote_position + 1:offset]
if current_match:
clone = clone.filter(content=clone.query.clean(current_match))
non_exact_query = non_exact_query.replace('"%s"' % current_match, '', 1)
open_quote_position = None
else:
open_quote_position = offset
# Pseudo-tokenize the rest of the query.
keywords = non_exact_query.split()
# Loop through keywords and add filters to the query.
for keyword in keywords:
exclude = False
if keyword.startswith('-') and len(keyword) > 1:
keyword = keyword[1:]
exclude = True
cleaned_keyword = clone.query.clean(keyword)
if exclude:
clone = clone.exclude(content=cleaned_keyword)
else:
clone = clone.filter(content=cleaned_keyword)
return clone
def autocomplete(self, **kwargs):
"""
A shortcut method to perform an autocomplete search.
Must be run against fields that are either ``NgramField`` or
``EdgeNgramField``.
"""
clone = self._clone()
query_bits = []
for field_name, query in kwargs.items():
for word in query.split(' '):
bit = clone.query.clean(word.strip())
kwargs = {
field_name: bit,
}
query_bits.append(SQ(**kwargs))
return clone.filter(reduce(operator.__and__, query_bits))
# Methods that do not return a SearchQuerySet.
def count(self):
"""Returns the total number of matching results."""
return len(self)
def best_match(self):
"""Returns the best/top search result that matches the query."""
return self[0]
def latest(self, date_field):
"""Returns the most recent search result that matches the query."""
clone = self._clone()
clone.query.clear_order_by()
clone.query.add_order_by("-%s" % date_field)
return clone.best_match()
def more_like_this(self, model_instance):
"""Finds similar results to the object passed in."""
clone = self._clone()
clone.query.more_like_this(model_instance)
return clone
def facet_counts(self):
"""
Returns the facet counts found by the query.
This will cause the query to execute and should generally be used when
presenting the data.
"""
if self.query.has_run():
return self.query.get_facet_counts()
else:
clone = self._clone()
return clone.query.get_facet_counts()
def spelling_suggestion(self, preferred_query=None):
"""
Returns the spelling suggestion found by the query.
To work, you must set ``settings.HAYSTACK_INCLUDE_SPELLING`` to True.
Otherwise, ``None`` will be returned.
This will cause the query to execute and should generally be used when
presenting the data.
"""
if self.query.has_run():
return self.query.get_spelling_suggestion(preferred_query)
else:
clone = self._clone()
return clone.query.get_spelling_suggestion(preferred_query)
def values(self, *fields):
"""
Returns a list of dictionaries, each containing the key/value pairs for
the result, exactly like Django's ``ValuesQuerySet``.
"""
qs = self._clone(klass=ValuesSearchQuerySet)
qs._fields.extend(fields)
return qs
def values_list(self, *fields, **kwargs):
"""
Returns a list of field values as tuples, exactly like Django's
        ``QuerySet.values_list``.
Optionally accepts a ``flat=True`` kwarg, which in the case of a
single field being provided, will return a flat list of that field
rather than a list of tuples.
"""
flat = kwargs.pop("flat", False)
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
qs = self._clone(klass=ValuesListSearchQuerySet)
qs._fields.extend(fields)
qs._flat = flat
return qs
# Utility methods.
def _clone(self, klass=None):
if klass is None:
klass = self.__class__
query = self.query._clone()
clone = klass(site=self.site, query=query)
clone._load_all = self._load_all
return clone
class EmptySearchQuerySet(SearchQuerySet):
"""
A stubbed SearchQuerySet that behaves as normal but always returns no
results.
"""
def __len__(self):
return 0
def _cache_is_full(self):
# Pretend the cache is always full with no results.
return True
def _clone(self, klass=None):
clone = super(EmptySearchQuerySet, self)._clone(klass=klass)
clone._result_cache = []
return clone
def _fill_cache(self, start, end):
return False
def facet_counts(self):
return {}
class ValuesListSearchQuerySet(SearchQuerySet):
"""
A ``SearchQuerySet`` which returns a list of field values as tuples, exactly
like Django's ``ValuesListQuerySet``.
"""
def __init__(self, *args, **kwargs):
super(ValuesListSearchQuerySet, self).__init__(*args, **kwargs)
self._flat = False
self._fields = []
# Removing this dependency would require refactoring much of the backend
# code (_process_results, etc.) and these aren't large enough to make it
# an immediate priority:
self._internal_fields = ['id', 'django_ct', 'django_id', 'score']
def _clone(self, klass=None):
clone = super(ValuesListSearchQuerySet, self)._clone(klass=klass)
clone._fields = self._fields
clone._flat = self._flat
return clone
def _fill_cache(self, start, end):
query_fields = set(self._internal_fields)
query_fields.update(self._fields)
kwargs = {
'fields': query_fields
}
return super(ValuesListSearchQuerySet, self)._fill_cache(start, end, **kwargs)
def post_process_results(self, results):
to_cache = []
if self._flat:
accum = to_cache.extend
else:
accum = to_cache.append
for result in results:
accum([getattr(result, i, None) for i in self._fields])
return to_cache
class ValuesSearchQuerySet(ValuesListSearchQuerySet):
"""
A ``SearchQuerySet`` which returns a list of dictionaries, each containing
the key/value pairs for the result, exactly like Django's
``ValuesQuerySet``.
"""
def _fill_cache(self, start, end):
query_fields = set(self._internal_fields)
query_fields.update(self._fields)
kwargs = {
'fields': query_fields
}
return super(ValuesListSearchQuerySet, self)._fill_cache(start, end, **kwargs)
def post_process_results(self, results):
to_cache = []
for result in results:
to_cache.append(dict((i, getattr(result, i, None)) for i in self._fields))
return to_cache
class RelatedSearchQuerySet(SearchQuerySet):
"""
A variant of the SearchQuerySet that can handle `load_all_queryset`s.
This is predominantly different in the `_fill_cache` method, as it is
far less efficient but needs to fill the cache before it to maintain
consistency.
"""
_load_all_querysets = {}
_result_cache = []
def _cache_is_full(self):
return len(self._result_cache) >= len(self)
def _manual_iter(self):
# If we're here, our cache isn't fully populated.
# For efficiency, fill the cache as we go if we run out of results.
# Also, this can't be part of the __iter__ method due to Python's rules
# about generator functions.
current_position = 0
current_cache_max = 0
while True:
current_cache_max = len(self._result_cache)
while current_position < current_cache_max:
yield self._result_cache[current_position]
current_position += 1
if self._cache_is_full():
raise StopIteration
# We've run out of results and haven't hit our limit.
# Fill more of the cache.
start = current_position + self._ignored_result_count
if not self._fill_cache(start, start + ITERATOR_LOAD_PER_QUERY):
raise StopIteration
def _fill_cache(self, start, end):
# Tell the query where to start from and how many we'd like.
self.query._reset()
self.query.set_limits(start, end)
results = self.query.get_results()
if len(results) == 0:
return False
if start is None:
start = 0
if end is None:
end = self.query.get_count()
# Check if we wish to load all objects.
if self._load_all:
original_results = []
models_pks = {}
loaded_objects = {}
# Remember the search position for each result so we don't have to resort later.
for result in results:
original_results.append(result)
models_pks.setdefault(result.model, []).append(result.pk)
# Load the objects for each model in turn.
for model in models_pks:
if model in self._load_all_querysets:
# Use the overriding queryset.
loaded_objects[model] = self._load_all_querysets[model].in_bulk(models_pks[model])
else:
# Check the SearchIndex for the model for an override.
try:
index = self.site.get_index(model)
qs = index.load_all_queryset()
loaded_objects[model] = qs.in_bulk(models_pks[model])
except NotRegistered:
# The model returned doesn't seem to be registered with
# the current site. We should silently fail and populate
# nothing for those objects.
loaded_objects[model] = []
if len(results) + len(self._result_cache) < len(self) and len(results) < ITERATOR_LOAD_PER_QUERY:
self._ignored_result_count += ITERATOR_LOAD_PER_QUERY - len(results)
for result in results:
if self._load_all:
# We have to deal with integer keys being cast from strings; if this
# fails we've got a character pk.
try:
result.pk = int(result.pk)
except ValueError:
pass
try:
result._object = loaded_objects[result.model][result.pk]
except (KeyError, IndexError):
# The object was either deleted since we indexed or should
# be ignored; fail silently.
self._ignored_result_count += 1
continue
self._result_cache.append(result)
return True
def __getitem__(self, k):
"""
Retrieves an item or slice from the set of results.
"""
if not isinstance(k, (slice, int, long)):
raise TypeError
assert ((not isinstance(k, slice) and (k >= 0))
or (isinstance(k, slice) and (k.start is None or k.start >= 0)
and (k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
# Remember if it's a slice or not. We're going to treat everything as
        # a slice to simplify the logic and will `.pop()` at the end as needed.
if isinstance(k, slice):
is_slice = True
start = k.start
if k.stop is not None:
bound = int(k.stop)
else:
bound = None
else:
is_slice = False
start = k
bound = k + 1
        # We need to check whether we need to populate more of the cache.
if len(self._result_cache) <= 0 or not self._cache_is_full():
try:
while len(self._result_cache) < bound and not self._cache_is_full():
current_max = len(self._result_cache) + self._ignored_result_count
self._fill_cache(current_max, current_max + ITERATOR_LOAD_PER_QUERY)
except StopIteration:
# There's nothing left, even though the bound is higher.
pass
# Cache should be full enough for our needs.
if is_slice:
return self._result_cache[start:bound]
else:
return self._result_cache[start]
def load_all_queryset(self, model, queryset):
"""
Allows for specifying a custom ``QuerySet`` that changes how ``load_all``
will fetch records for the provided model.
This is useful for post-processing the results from the query, enabling
things like adding ``select_related`` or filtering certain data.
"""
clone = self._clone()
clone._load_all_querysets[model] = queryset
return clone
def _clone(self, klass=None):
if klass is None:
klass = self.__class__
query = self.query._clone()
clone = klass(site=self.site, query=query)
clone._load_all = self._load_all
clone._load_all_querysets = self._load_all_querysets
return clone
|
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.pci import utils as pci_utils
from nova import test
from nova.tests.unit import matchers
from nova.tests.unit.virt.libvirt import fake_libvirt_data
from nova.virt.libvirt import config
from nova.virt.libvirt import designer
class DesignerTestCase(test.NoDBTestCase):
def test_set_vif_bandwidth_config_no_extra_specs(self):
        # Test that set_vif_bandwidth_config does not fail when its second
        # parameter has no 'extra_specs' field.
        try:
            # The conf will never be used, so we can use 'None'.
            # An empty dictionary is fine: all that matters is that there is
            # no 'extra_specs' field.
designer.set_vif_bandwidth_config(None, {})
except KeyError as e:
self.fail('KeyError: %s' % e)
def test_set_vif_guest_frontend_config(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_guest_frontend_config(conf, 'fake-mac',
'fake-model', 'fake-driver',
'fake-queues', None)
self.assertEqual('fake-mac', conf.mac_addr)
self.assertEqual('fake-model', conf.model)
self.assertEqual('fake-driver', conf.driver_name)
self.assertEqual('fake-queues', conf.vhost_queues)
self.assertIsNone(conf.vhost_rx_queue_size)
def test_set_vif_guest_frontend_config_rx_queue_size(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_guest_frontend_config(conf, 'fake-mac',
'fake-model', 'fake-driver',
'fake-queues', 1024)
self.assertEqual('fake-mac', conf.mac_addr)
self.assertEqual('fake-model', conf.model)
self.assertEqual('fake-driver', conf.driver_name)
self.assertEqual('fake-queues', conf.vhost_queues)
self.assertEqual(1024, conf.vhost_rx_queue_size)
def test_set_vif_host_backend_ethernet_config_libvirt_1_3_3(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_ethernet_config(conf, 'fake-tap')
self.assertEqual('ethernet', conf.net_type)
self.assertEqual('fake-tap', conf.target_dev)
self.assertIsNone(conf.script)
def test_set_vif_host_backend_802qbg_config(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_802qbg_config(conf, 'fake-devname',
'fake-managerid',
'fake-typeid',
'fake-typeidversion',
'fake-instanceid',
'fake-tap')
self.assertEqual('direct', conf.net_type)
self.assertEqual('fake-devname', conf.source_dev)
self.assertEqual('vepa', conf.source_mode)
self.assertEqual('802.1Qbg', conf.vporttype)
expected = [{'key': 'managerid', 'value': 'fake-managerid'},
{'key': 'typeid', 'value': 'fake-typeid'},
{'key': 'typeidversion', 'value': 'fake-typeidversion'},
{'key': 'instanceid', 'value': 'fake-instanceid'}]
self.assertThat(expected, matchers.DictListMatches(conf.vportparams))
self.assertEqual('fake-tap', conf.target_dev)
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address',
return_value='fake-devname')
def test_set_vif_host_backend_802qbh_config_direct(self,
mock_pci):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_802qbh_config(conf, 'direct',
'fake-pci-dev',
'fake-profileid',
'fake-tap')
self.assertEqual('direct', conf.net_type)
self.assertEqual('fake-devname', conf.source_dev)
self.assertEqual('passthrough', conf.source_mode)
self.assertEqual('vhost', conf.driver_name)
mock_pci.assert_called_with('fake-pci-dev')
self.assertEqual('802.1Qbh', conf.vporttype)
self.assertEqual('fake-tap', conf.target_dev)
def test_set_vif_host_backend_802qbh_config_hostdev(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_802qbh_config(conf, 'hostdev',
'fake-devname',
'fake-profileid',
'fake-tap')
self.assertEqual('hostdev', conf.net_type)
self.assertEqual('fake-devname', conf.source_dev)
self.assertIsNone(conf.model)
self.assertEqual('802.1Qbh', conf.vporttype)
self.assertEqual('fake-tap', conf.target_dev)
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address',
return_value='fake-devname')
def test_set_vif_host_backend_hw_veb_direct(self,
mock_pci):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_hw_veb(conf, 'direct',
'fake-pci-dev',
'fake-vlan',
'fake-tap')
self.assertEqual('direct', conf.net_type)
self.assertEqual('fake-devname', conf.source_dev)
self.assertEqual('passthrough', conf.source_mode)
self.assertEqual('vhost', conf.driver_name)
self.assertEqual('fake-tap', conf.target_dev)
mock_pci.assert_called_with('fake-pci-dev')
def test_set_vif_host_backend_hw_veb_hostdev(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_hw_veb(conf, 'hostdev',
'fake-devname',
'fake-vlan',
'fake-tap')
self.assertEqual('hostdev', conf.net_type)
self.assertEqual('fake-devname', conf.source_dev)
self.assertIsNone(conf.model)
self.assertEqual('fake-vlan', conf.vlan)
self.assertEqual('fake-tap', conf.target_dev)
@mock.patch.object(pci_utils, 'get_pci_address_fields',
return_value=('fake-domain', 'fake-bus',
'fake-slot', 'fake-function'))
def test_set_vif_host_backend_hostdev_pci_config(self, mock_pci_fields):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_hostdev_pci_config(conf,
'fake-pci-slot')
self.assertEqual('fake-domain', conf.domain)
self.assertEqual('fake-bus', conf.bus)
self.assertEqual('fake-slot', conf.slot)
self.assertEqual('fake-function', conf.function)
mock_pci_fields.assert_called_with('fake-pci-slot')
def test_set_vif_host_backend_direct_config(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_direct_config(conf, 'fake-devname',
mode="passthrough")
self.assertEqual('direct', conf.net_type)
self.assertEqual('fake-devname', conf.source_dev)
self.assertEqual('passthrough', conf.source_mode)
self.assertEqual('virtio', conf.model)
def test_set_vif_host_backend_vhostuser_config(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_vhostuser_config(conf, 'fake-mode',
'fake-path', None, None)
self.assertEqual('vhostuser', conf.net_type)
self.assertEqual('unix', conf.vhostuser_type)
self.assertEqual('fake-mode', conf.vhostuser_mode)
self.assertEqual('fake-path', conf.vhostuser_path)
self.assertIsNone(conf.vhost_rx_queue_size)
self.assertIsNone(conf.vhost_tx_queue_size)
def test_set_vif_host_backend_vhostuser_config_queue_size(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_vhostuser_config(conf, 'fake-mode',
'fake-path', 512, 1024)
self.assertEqual('vhostuser', conf.net_type)
self.assertEqual('unix', conf.vhostuser_type)
self.assertEqual('fake-mode', conf.vhostuser_mode)
self.assertEqual('fake-path', conf.vhostuser_path)
self.assertEqual(512, conf.vhost_rx_queue_size)
self.assertEqual(1024, conf.vhost_tx_queue_size)
def test_set_vif_host_backend_vhostuser_config_tx_queue_size(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_vhostuser_config(conf, 'fake-mode',
'fake-path', None, 1024)
self.assertEqual('vhostuser', conf.net_type)
self.assertEqual('unix', conf.vhostuser_type)
self.assertEqual('fake-mode', conf.vhostuser_mode)
self.assertEqual('fake-path', conf.vhostuser_path)
self.assertIsNone(conf.vhost_rx_queue_size)
self.assertEqual(1024, conf.vhost_tx_queue_size)
def test_set_vif_host_backend_vhostuser_config_rx_queue_size(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_host_backend_vhostuser_config(conf, 'fake-mode',
'fake-path', 512, None)
self.assertEqual('vhostuser', conf.net_type)
self.assertEqual('unix', conf.vhostuser_type)
self.assertEqual('fake-mode', conf.vhostuser_mode)
self.assertEqual('fake-path', conf.vhostuser_path)
self.assertEqual(512, conf.vhost_rx_queue_size)
self.assertIsNone(conf.vhost_tx_queue_size)
def test_set_vif_mtu_config(self):
conf = config.LibvirtConfigGuestInterface()
designer.set_vif_mtu_config(conf, 9000)
self.assertEqual(9000, conf.mtu)
def test_set_driver_iommu_for_sev(self):
conf = fake_libvirt_data.fake_kvm_guest()
# obj.devices[11]
controller = config.LibvirtConfigGuestController()
controller.type = 'virtio-serial'
controller.index = 0
conf.add_device(controller)
designer.set_driver_iommu_for_sev(conf)
# All disks/interfaces/memballoon are expected to be virtio,
# thus driver_iommu should be on
self.assertEqual(11, len(conf.devices))
for i in (0, 2, 3, 6, 8, 9, 10):
dev = conf.devices[i]
self.assertTrue(
dev.driver_iommu,
"expected device %d to have driver_iommu enabled\n%s" %
(i, dev.to_xml()))
for i in (1, 4):
dev = conf.devices[i]
self.assertFalse(
dev.driver_iommu,
"didn't expect device %i to have driver_iommu enabled\n%s" %
(i, dev.to_xml()))
|
import React, { Component } from 'react';
import { StyleSheet, View, TextInput } from 'react-native';
import { Button, Container, Text, Content } from 'native-base';
import GlobalHeader from './GlobalHeader';
import ip from '../server/keys/ipstore';
import Icon from 'react-native-vector-icons/MaterialIcons';
import colors from '../constants/Colors';
import { postRequestAuthorized } from '../API';
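// Address entry screen: collects city, street, house number and postcode and
// posts them to the `/addAddress` endpoint of the API server.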
export default class ChangePassword extends Component {
static navigationOptions = {
header: null
};
state = {
city: '',
street: '',
house_number: '',
postcode: ''
};
navigateTo = () => {
this.props.navigation.navigate('Account');
};
addAddress = () => {
const { city, street, house_number, postcode } = this.state;
postRequestAuthorized(`http://${ip}:3000/addAddress`, { city, street, house_number, postcode })
.then((responseJSON) => {
console.log(responseJSON);
switch (responseJSON.status) {
// Success
case 10:
alert('Added');
break;
// Failure
case 0:
alert('Could not add address');
break;
}
})
.catch((err) => {
console.log(err);
});
};
render() {
return (
<Container>
<GlobalHeader type={1} navigateTo={this.navigateTo} isBackButtonActive={1} />
<Content>
<View style={styles.contentContainer}>
<View style={styles.titleContainer}>
<Text style={styles.title}>Address</Text>
</View>
</View>
<View style={styles.inputContainer}>
<Icon name="location-city" size={32} style={styles.updateIcon} />
<TextInput
onChangeText={(text) => this.setState({ city: text })}
placeholder="City"
style={styles.input}
value={this.state.city}
/>
</View>
<View style={styles.inputContainer}>
<Icon name="location-city" size={32} style={styles.updateIcon} />
<TextInput
onChangeText={(text) => this.setState({ street: text })}
placeholder="Street"
style={styles.input}
value={this.state.street}
/>
</View>
<View style={styles.inputContainer}>
<Icon name="location-city" color={colors.emphasisTextColor} size={32} style={styles.updateIcon} />
<TextInput
onChangeText={(text) => this.setState({ house_number: text })}
placeholder="House Number"
style={styles.input}
value={this.state.house_number}
/>
</View>
<View style={styles.inputContainer}>
<Icon name="location-city" size={32} style={styles.updateIcon} />
<TextInput
onChangeText={(text) => this.setState({ postcode: text })}
placeholder="Postcode"
style={styles.input}
value={this.state.postcode}
/>
</View>
<View style={styles.buttonContainer}>
<Button danger style={styles.button} onPress={this.addAddress}>
<Text style={styles.buttonText}>ADD ADDRESS</Text>
</Button>
</View>
</Content>
</Container>
);
}
}
const width = '80%';
const buttonWidth = '40%';
const styles = StyleSheet.create({
input: {
flex: 1,
padding: 10,
color: colors.emphasisTextColor
},
inputIcons: {
width: 50,
padding: 10,
textAlign: 'center',
color: colors.emphasisTextColor
},
inputContainer: {
flexDirection: 'row',
borderBottomWidth: 2,
borderBottomColor: colors.lightBorder,
alignItems: 'center',
width,
alignSelf: 'center',
justifyContent: 'center'
},
contentContainer: {
flex: 1,
flexDirection: 'column',
alignItems: 'center',
justifyContent: 'flex-end'
},
titleContainer: {
paddingTop: 30,
paddingBottom: 5,
width
},
title: {
textAlign: 'left',
fontSize: 30,
fontWeight: 'bold',
color: colors.emphasisTextColor,
marginBottom: 20
},
buttonContainer: {
flexDirection: 'row',
alignItems: 'center',
marginTop: 30,
justifyContent: 'center'
},
button: {
width: buttonWidth,
justifyContent: 'center',
backgroundColor: colors.brandColor
},
buttonText: {
color: '#000000',
fontSize: 20
},
updateIcon: {
padding: 6,
color: colors.emphasisTextColor,
}
});
|
/*
This file is part of web3.js.
web3.js is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
web3.js is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with web3.js. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @file utils.js
* @author Marek Kotewicz <marek@ethdev.com>
* @date 2015
*/
/**
* Utils
*
* @module utils
*/
/**
* Utility functions
*
* @class [utils] utils
* @constructor
*/
var BigNumber = require('bignumber.js');
var sha3 = require('./sha3.js');
var utf8 = require('utf8');
var unitMap = {
'noether': '0',
'wei': '1',
'kwei': '1000',
'Kwei': '1000',
'babbage': '1000',
'femtoether': '1000',
'mwei': '1000000',
'Mwei': '1000000',
'lovelace': '1000000',
'picoether': '1000000',
'gwei': '1000000000',
'Gwei': '1000000000',
'shannon': '1000000000',
'nanoether': '1000000000',
'nano': '1000000000',
'szabo': '1000000000000',
'microether': '1000000000000',
'micro': '1000000000000',
'finney': '1000000000000000',
'milliether': '1000000000000000',
'milli': '1000000000000000',
'ether': '1000000000000000000',
'kether': '1000000000000000000000',
'grand': '1000000000000000000000',
'mether': '1000000000000000000000000',
'gether': '1000000000000000000000000000',
'tether': '1000000000000000000000000000000'
};
/**
* Should be called to pad string to expected length
*
* @method padLeft
* @param {String} string to be padded
* @param {Number} characters that result string should have
* @param {String} sign, by default 0
* @returns {String} right aligned string
*/
var padLeft = function (string, chars, sign) {
return new Array(chars - string.length + 1).join(sign ? sign : "0") + string;
};
/**
* Should be called to pad string to expected length
*
* @method padRight
* @param {String} string to be padded
* @param {Number} characters that result string should have
* @param {String} sign, by default 0
* @returns {String} left aligned string
*/
var padRight = function (string, chars, sign) {
return string + (new Array(chars - string.length + 1).join(sign ? sign : "0"));
};
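// Illustrative usage (a sketch, not part of the original API docs):
//   padLeft('3', 4)       // => "0003"
//   padRight('3', 4)      // => "3000"
//   padLeft('3', 4, 'x')  // => "xxx3"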
/**
* Should be called to get utf8 from its hex representation
*
* @method toUtf8
* @param {String} string in hex
* @returns {String} utf8 string representation of hex value
*/
var toUtf8 = function(hex) {
// Find termination
var str = "";
var i = 0, l = hex.length;
if (hex.substring(0, 2) === '0x') {
i = 2;
}
for (; i < l; i+=2) {
var code = parseInt(hex.substr(i, 2), 16);
if (code === 0)
break;
str += String.fromCharCode(code);
}
return utf8.decode(str);
};
/**
* Should be called to get ascii from its hex representation
*
* @method toAscii
* @param {String} string in hex
* @returns {String} ascii string representation of hex value
*/
var toAscii = function(hex) {
// Find termination
var str = "";
var i = 0, l = hex.length;
if (hex.substring(0, 2) === '0x') {
i = 2;
}
for (; i < l; i+=2) {
var code = parseInt(hex.substr(i, 2), 16);
str += String.fromCharCode(code);
}
return str;
};
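// Illustrative usage (hex values written out by hand from the ASCII table):
//   toAscii('0x657468657265756d') // => "ethereum"
//   toUtf8('0x657468657265756d')  // => "ethereum" (decoding stops at the first 0x00 byte)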
/**
* Should be called to get hex representation (prefixed by 0x) of utf8 string
*
* @method fromUtf8
* @param {String} string
* @param {Boolean} allowZero to convert code point zero to 00 instead of end of string
* @returns {String} hex representation of input string
*/
var fromUtf8 = function(str, allowZero) {
str = utf8.encode(str);
var hex = "";
for(var i = 0; i < str.length; i++) {
var code = str.charCodeAt(i);
if (code === 0) {
if (allowZero) {
hex += '00';
} else {
break;
}
} else {
var n = code.toString(16);
hex += n.length < 2 ? '0' + n : n;
}
}
return "0x" + hex;
};
/**
* Should be called to get hex representation (prefixed by 0x) of ascii string
*
* @method fromAscii
* @param {String} string
* @param {Number} optional padding
* @returns {String} hex representation of input string
*/
var fromAscii = function(str, num) {
var hex = "";
for(var i = 0; i < str.length; i++) {
var code = str.charCodeAt(i);
var n = code.toString(16);
hex += n.length < 2 ? '0' + n : n;
}
return "0x" + hex.padEnd(num,'0');
};
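// Illustrative usage (the inverse of toUtf8/toAscii above):
//   fromUtf8('ethereum')      // => "0x657468657265756d"
//   fromAscii('ethereum', 20) // => "0x657468657265756d0000" (hex padded to 20 chars)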
/**
* Should be used to create full function/event name from json abi
*
* @method transformToFullName
* @param {Object} json-abi
* @return {String} full function/event name
*/
var transformToFullName = function (json) {
if (json.name.indexOf('(') !== -1) {
return json.name;
}
var typeName = json.inputs.map(function(i){return i.type; }).join();
return json.name + '(' + typeName + ')';
};
/**
* Should be called to get display name of contract function
*
* @method extractDisplayName
* @param {String} name of function/event
* @returns {String} display name for function/event eg. multiply(uint256) -> multiply
*/
var extractDisplayName = function (name) {
var stBracket = name.indexOf('(');
var endBracket = name.indexOf(')');
return (stBracket !== -1 && endBracket !== -1) ? name.substr(0, stBracket) : name;
};
/**
* Should be called to get type name of contract function
*
* @method extractTypeName
* @param {String} name of function/event
* @returns {String} type name for function/event eg. multiply(uint256) -> uint256
*/
var extractTypeName = function (name) {
var stBracket = name.indexOf('(');
var endBracket = name.indexOf(')');
return (stBracket !== -1 && endBracket !== -1) ? name.substr(stBracket + 1, endBracket - stBracket - 1).replace(' ', '') : "";
};
/**
* Converts value to its decimal representation
*
* @method toDecimal
* @param {String|Number|BigNumber}
* @return {Number}
*/
var toDecimal = function (value) {
return toBigNumber(value).toNumber();
};
/**
* Converts value to its hex representation
*
* @method fromDecimal
* @param {String|Number|BigNumber}
* @return {String}
*/
var fromDecimal = function (value) {
var number = toBigNumber(value);
var result = number.toString(16);
return number.lessThan(0) ? '-0x' + result.substr(1) : '0x' + result;
};
/**
* Auto converts any given value into its hex representation.
*
* And even stringifies objects first.
*
* @method toHex
* @param {String|Number|BigNumber|Object}
* @return {String}
*/
var toHex = function (val) {
/*jshint maxcomplexity: 8 */
if (isBoolean(val))
return fromDecimal(+val);
if (isBigNumber(val))
return fromDecimal(val);
if (typeof val === 'object')
return fromUtf8(JSON.stringify(val));
// if it's a negative number, pass it through fromDecimal
if (isString(val)) {
if (val.indexOf('-0x') === 0)
return fromDecimal(val);
else if(val.indexOf('0x') === 0)
return val;
else if (!isFinite(val))
return fromUtf8(val,1);
}
return fromDecimal(val);
};
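// Illustrative behaviour of the branches above (a sketch):
//   toHex(true)    // => "0x1"    (boolean -> fromDecimal)
//   toHex(21)      // => "0x15"   (number -> fromDecimal)
//   toHex('-0x1')  // => "-0x1"   (negative hex string passes through fromDecimal)
//   toHex('hello') // => "0x68656c6c6f" (non-numeric string -> fromUtf8)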
/**
* Returns value of unit in Wei
*
* @method getValueOfUnit
* @param {String} unit the unit to convert to, default ether
* @returns {BigNumber} value of the unit (in Wei)
* @throws error if the unit is not correct
*/
var getValueOfUnit = function (unit) {
unit = unit ? unit.toLowerCase() : 'ether';
var unitValue = unitMap[unit];
if (unitValue === undefined) {
throw new Error('This unit doesn\'t exist, please use one of the following units: ' + JSON.stringify(unitMap, null, 2));
}
return new BigNumber(unitValue, 10);
};
/**
* Takes a number of wei and converts it to any other ether unit.
*
* Possible units are:
* SI Short SI Full Effigy Other
* - kwei femtoether babbage
* - mwei picoether lovelace
* - gwei nanoether shannon nano
* - -- microether szabo micro
* - -- milliether finney milli
* - ether -- --
* - kether -- grand
* - mether
* - gether
* - tether
*
* @method fromWei
* @param {Number|String} number can be a number, number string or a HEX of a decimal
* @param {String} unit the unit to convert to, default ether
* @return {String|Object} When given a BigNumber object it returns one as well, otherwise a number string
*/
var fromWei = function(number, unit) {
var returnValue = toBigNumber(number).dividedBy(getValueOfUnit(unit));
return isBigNumber(number) ? returnValue : returnValue.toString(10);
};
/**
* Takes a number of a unit and converts it to wei.
*
* Possible units are:
* SI Short SI Full Effigy Other
* - kwei femtoether babbage
* - mwei picoether lovelace
* - gwei nanoether shannon nano
* - -- microether szabo micro
* - -- milliether finney milli
* - ether -- --
* - kether -- grand
* - mether
* - gether
* - tether
*
* @method toWei
* @param {Number|String|BigNumber} number can be a number, number string or a HEX of a decimal
* @param {String} unit the unit to convert from, default ether
* @return {String|Object} When given a BigNumber object it returns one as well, otherwise a number string
*/
var toWei = function(number, unit) {
var returnValue = toBigNumber(number).times(getValueOfUnit(unit));
return isBigNumber(number) ? returnValue : returnValue.toString(10);
};
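// Illustrative usage (plain inputs yield number strings, BigNumbers yield BigNumbers):
//   toWei('1', 'ether')            // => "1000000000000000000"
//   toWei(1, 'gwei')               // => "1000000000"
//   fromWei('1000000000000000000') // => "1" (unit defaults to ether)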
/**
* Takes an input and transforms it into a bignumber
*
* @method toBigNumber
* @param {Number|String|BigNumber} a number, string, HEX string or BigNumber
* @return {BigNumber} BigNumber
*/
var toBigNumber = function(number) {
/*jshint maxcomplexity:5 */
number = number || 0;
if (isBigNumber(number))
return number;
if (isString(number) && (number.indexOf('0x') === 0 || number.indexOf('-0x') === 0)) {
return new BigNumber(number.replace('0x',''), 16);
}
return new BigNumber(number.toString(10), 10);
};
/**
* Takes an input, transforms it into a bignumber and, if it is a negative value, into its two's complement
*
* @method toTwosComplement
* @param {Number|String|BigNumber}
* @return {BigNumber}
*/
var toTwosComplement = function (number) {
var bigNumber = toBigNumber(number).round();
if (bigNumber.lessThan(0)) {
return new BigNumber("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", 16).plus(bigNumber).plus(1);
}
return bigNumber;
};
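// Illustrative usage (the mask above covers a 256 bit word, i.e. 64 hex chars):
//   toTwosComplement(-1).toString(16)
//   // => "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"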
/**
* Checks if the given string is strictly an address
*
* @method isStrictAddress
* @param {String} address the given HEX address
* @return {Boolean}
*/
var isStrictAddress = function (address) {
return /^0x[0-9a-f]{40}$/i.test(address);
};
/**
* Checks if the given string is an address
*
* @method isAddress
* @param {String} address the given HEX address
* @return {Boolean}
*/
var isAddress = function (address) {
if (!/^(0x)?[0-9a-f]{40}$/i.test(address)) {
// check if it has the basic requirements of an address
return false;
} else if (/^(0x)?[0-9a-f]{40}$/.test(address) || /^(0x)?[0-9A-F]{40}$/.test(address)) {
// If it's all lowercase or all uppercase, return true
return true;
} else {
// Otherwise check each case
return isChecksumAddress(address);
}
};
/**
* Checks if the given string is a checksummed address
*
* @method isChecksumAddress
* @param {String} address the given HEX address
* @return {Boolean}
*/
var isChecksumAddress = function (address) {
// Check each case
address = address.replace('0x','');
var addressHash = sha3(address.toLowerCase());
for (var i = 0; i < 40; i++ ) {
// the nth letter should be uppercase if the nth digit of casemap is 1
if ((parseInt(addressHash[i], 16) > 7 && address[i].toUpperCase() !== address[i]) || (parseInt(addressHash[i], 16) <= 7 && address[i].toLowerCase() !== address[i])) {
return false;
}
}
return true;
};
/**
* Makes a checksum address
*
* @method toChecksumAddress
* @param {String} address the given HEX address
* @return {String}
*/
var toChecksumAddress = function (address) {
if (typeof address === 'undefined') return '';
address = address.toLowerCase().replace('0x','');
var addressHash = sha3(address);
var checksumAddress = '0x';
for (var i = 0; i < address.length; i++ ) {
// If the ith hash digit is 8 to f, make the ith address character uppercase
if (parseInt(addressHash[i], 16) > 7) {
checksumAddress += address[i].toUpperCase();
} else {
checksumAddress += address[i];
}
}
return checksumAddress;
};
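// Illustrative round trip (a sketch; `someAddress` is a hypothetical valid
// 40-character hex address):
//   var checksummed = toChecksumAddress(someAddress);
//   isChecksumAddress(checksummed); // => true
//   isAddress(checksummed);         // => true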
/**
* Transforms given string to a valid 20-byte-length address with 0x prefix
*
* @method toAddress
* @param {String} address
* @return {String} formatted address
*/
var toAddress = function (address) {
if (isStrictAddress(address)) {
return address;
}
if (/^[0-9a-f]{40}$/.test(address)) {
return '0x' + address;
}
return '0x' + padLeft(toHex(address).substr(2), 40);
};
/**
* Returns true if object is BigNumber, otherwise false
*
* @method isBigNumber
* @param {Object}
* @return {Boolean}
*/
var isBigNumber = function (object) {
return object instanceof BigNumber ||
(object && object.constructor && object.constructor.name === 'BigNumber');
};
/**
* Returns true if object is string, otherwise false
*
* @method isString
* @param {Object}
* @return {Boolean}
*/
var isString = function (object) {
return typeof object === 'string' ||
(object && object.constructor && object.constructor.name === 'String');
};
/**
* Returns true if object is function, otherwise false
*
* @method isFunction
* @param {Object}
* @return {Boolean}
*/
var isFunction = function (object) {
return typeof object === 'function';
};
/**
* Returns true if object is Object, otherwise false
*
* @method isObject
* @param {Object}
* @return {Boolean}
*/
var isObject = function (object) {
return object !== null && !(Array.isArray(object)) && typeof object === 'object';
};
/**
* Returns true if object is boolean, otherwise false
*
* @method isBoolean
* @param {Object}
* @return {Boolean}
*/
var isBoolean = function (object) {
return typeof object === 'boolean';
};
/**
* Returns true if object is array, otherwise false
*
* @method isArray
* @param {Object}
* @return {Boolean}
*/
var isArray = function (object) {
return Array.isArray(object);
};
/**
* Returns true if given string is valid json object
*
* @method isJson
* @param {String}
* @return {Boolean}
*/
var isJson = function (str) {
try {
return !!JSON.parse(str);
} catch (e) {
return false;
}
};
/**
* Returns true if given string is a valid Ethereum block header bloom.
*
* @method isBloom
* @param {String} hex encoded bloom filter
* @return {Boolean}
*/
var isBloom = function (bloom) {
if (!/^(0x)?[0-9a-f]{512}$/i.test(bloom)) {
return false;
} else if (/^(0x)?[0-9a-f]{512}$/.test(bloom) || /^(0x)?[0-9A-F]{512}$/.test(bloom)) {
return true;
}
return false;
};
/**
* Returns true if given string is a valid log topic.
*
* @method isTopic
* @param {String} hex encoded topic
* @return {Boolean}
*/
var isTopic = function (topic) {
if (!/^(0x)?[0-9a-f]{64}$/i.test(topic)) {
return false;
} else if (/^(0x)?[0-9a-f]{64}$/.test(topic) || /^(0x)?[0-9A-F]{64}$/.test(topic)) {
return true;
}
return false;
};
module.exports = {
padLeft: padLeft,
padRight: padRight,
toHex: toHex,
toDecimal: toDecimal,
fromDecimal: fromDecimal,
toUtf8: toUtf8,
toAscii: toAscii,
fromUtf8: fromUtf8,
fromAscii: fromAscii,
transformToFullName: transformToFullName,
extractDisplayName: extractDisplayName,
extractTypeName: extractTypeName,
toWei: toWei,
fromWei: fromWei,
toBigNumber: toBigNumber,
toTwosComplement: toTwosComplement,
toAddress: toAddress,
isBigNumber: isBigNumber,
isStrictAddress: isStrictAddress,
isAddress: isAddress,
isChecksumAddress: isChecksumAddress,
toChecksumAddress: toChecksumAddress,
isFunction: isFunction,
isString: isString,
isObject: isObject,
isBoolean: isBoolean,
isArray: isArray,
isJson: isJson,
isBloom: isBloom,
isTopic: isTopic,
};
|
/*
* ATTENTION: The "eval" devtool has been used (maybe by default in mode: "development").
* This devtool is neither made for production nor for readable output files.
* It uses "eval()" calls to create a separate source file in the browser devtools.
* If you are trying to read the output file, select a different devtool (https://webpack.js.org/configuration/devtool/)
* or disable the default devtool with "devtool: false".
* If you are looking for production-ready output files, see mode: "production" (https://webpack.js.org/configuration/mode/).
*/
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
/***/ "./src/App.tsx":
/*!*********************!*\
!*** ./src/App.tsx ***!
\*********************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
eval("\n\nvar _interopRequireDefault = __webpack_require__(/*! @babel/runtime/helpers/interopRequireDefault */ \"../../node_modules/@babel/runtime/helpers/interopRequireDefault.js\");\n\nvar _interopRequireWildcard = __webpack_require__(/*! @babel/runtime/helpers/interopRequireWildcard */ \"../../node_modules/@babel/runtime/helpers/interopRequireWildcard.js\");\n\nObject.defineProperty(exports, \"__esModule\", ({\n value: true\n}));\nexports.default = void 0;\n\nvar React = _interopRequireWildcard(__webpack_require__(/*! react */ \"../../node_modules/react/index.js\"));\n\nvar _stylex = _interopRequireDefault(__webpack_require__(/*! @ladifire-opensource/stylex */ \"../stylex/index.js\"));\n/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\n\nvar styles = {\n root: {\n color: \"jab0vpmp\",\n paddingTop: \"aaazhgvu\",\n paddingRight: \"jaav1msn\",\n paddingBottom: \"aabi532w\",\n paddingLeft: \"y54c4hc0\",\n backgroundColor: \"a55dalm2\"\n }\n};\n\nvar _default = function _default() {\n return /*#__PURE__*/React.createElement(\"div\", {\n className: (0, _stylex[\"default\"])(styles.root)\n }, \"Wellcome to Stylex\");\n};\n\nexports.default = _default;\n\n_stylex[\"default\"].inject(\".jab0vpmp{color:blue}\");\n\n_stylex[\"default\"].inject(\".aaazhgvu{padding-top:4px}\");\n\n_stylex[\"default\"].inject(\".jaav1msn{padding-right:4px}\");\n\n_stylex[\"default\"].inject(\".aabi532w{padding-bottom:4px}\");\n\n_stylex[\"default\"].inject(\".y54c4hc0{padding-left:4px}\");\n\n_stylex[\"default\"].inject(\".a55dalm2{background-color:red}\");\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/./src/App.tsx?");
/***/ }),
/***/ "./src/index.tsx":
/*!***********************!*\
!*** ./src/index.tsx ***!
\***********************/
/***/ ((__unused_webpack_module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\n\nvar _interopRequireDefault = __webpack_require__(/*! @babel/runtime/helpers/interopRequireDefault */ \"../../node_modules/@babel/runtime/helpers/interopRequireDefault.js\");\n\nvar _interopRequireWildcard = __webpack_require__(/*! @babel/runtime/helpers/interopRequireWildcard */ \"../../node_modules/@babel/runtime/helpers/interopRequireWildcard.js\");\n\nvar React = _interopRequireWildcard(__webpack_require__(/*! react */ \"../../node_modules/react/index.js\"));\n\nvar ReactDOM = _interopRequireWildcard(__webpack_require__(/*! react-dom */ \"../../node_modules/react-dom/index.js\"));\n\nvar _App = _interopRequireDefault(__webpack_require__(/*! ./App */ \"./src/App.tsx\"));\n\n/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\nfunction renderRootComponent() {\n ReactDOM.render( /*#__PURE__*/React.createElement(_App[\"default\"], null), document.getElementById('root'));\n} // This is for anything that needs to be done for ALL react components.\n// This runs before we start to render anything.\n\n\nfunction preRenderSetup(callwhendone) {\n window.onerror = function (msg, url, line, column, stack) {\n if (msg === 'ResizeObserver loop limit exceeded') {\n return;\n }\n\n var l = {};\n l.level = 'ERROR';\n l.message = 'msg: ' + msg + ' row: ' + line + ' col: ' + column + ' stack: ' + stack + ' url: ' + url;\n var req = new XMLHttpRequest();\n req.open('POST', '/api/v1/logs');\n req.setRequestHeader('Content-Type', 'application/json');\n req.send(JSON.stringify(l));\n };\n\n callwhendone();\n}\n/**\n * Adds a function to be invoked onload appended to any existing onload\n * event handlers.\n *\n * @param {function} fn onload event handler\n *\n */\n\n\nfunction appendOnLoadEvent(fn) {\n // @ts-ignore\n if (window.attachEvent) {\n // @ts-ignore\n window.attachEvent('onload', fn);\n } else if (window.onload) {\n var curronload = window.onload;\n\n window.onload = function (evt) {\n // @ts-ignore\n curronload(evt);\n fn(evt);\n };\n } else {\n window.onload = fn;\n }\n}\n\nappendOnLoadEvent(function () {\n // Do the pre-render setup and call renderRootComponent when done\n preRenderSetup(renderRootComponent);\n});\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/./src/index.tsx?");
/***/ }),
/***/ "../stylex-theme/index.js":
/*!********************************!*\
!*** ../stylex-theme/index.js ***!
\********************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nmodule.exports = __webpack_require__(/*! ./src */ \"../stylex-theme/src/index.js\");\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex-theme/index.js?");
/***/ }),
/***/ "../stylex-theme/src/CometStyleXSheet.js":
/*!***********************************************!*\
!*** ../stylex-theme/src/CometStyleXSheet.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nconst StylexSheet = __webpack_require__(/*! ./StyleXSheet */ \"../stylex-theme/src/StyleXSheet.js\");\n\nclass _CometStyleXSheet extends StylexSheet {\n constructor(props = {}) {\n super(props);\n\n this.rootTheme = props.rootTheme || {};\n this.rootDarkTheme = props.rootDarkTheme || {};\n\n this.injectThemeVariables = function(data, themeKey = \"root\") {\n console.log(\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\", data);\n if (themeKey === \"root\") {\n this.rootTheme = Object.assign(this.rootTheme, data);\n } else {\n\n }\n }\n }\n}\n\nmodule.exports = {\n CometStyleXSheet: _CometStyleXSheet,\n rootStyleSheet: new _CometStyleXSheet(),\n};\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex-theme/src/CometStyleXSheet.js?");
/***/ }),
/***/ "../stylex-theme/src/StyleXSheet.js":
/*!******************************************!*\
!*** ../stylex-theme/src/StyleXSheet.js ***!
\******************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nconst ExecutionEnvironment = __webpack_require__(/*! ./utils */ \"../stylex-theme/src/utils.js\");\n\nconst _LIGHT_MODE_CLASS_NAME = \"light\";\nconst _DARK_MODE_CLASS_NAME = \"dark\";\n\nfunction j(a, b) {\n const c = [];\n c.push(a + \" {\");\n for (const d in b) {\n a = b[d];\n c.push(\" --\" + d + \": \" + a + \";\")\n }\n c.push(\"}\");\n return c.join(\"\\n\")\n}\n\nfunction k() {\n const a = document.createElement(\"style\");\n a.setAttribute(\"type\", \"text/css\");\n a.setAttribute(\"data-styled\", \"true\");\n var b = document.head || document.getElementsByTagName(\"head\")[0];\n b || console.warn(\"Error here!\");\n b.appendChild(a);\n return a\n}\n\nfunction l() {\n return window.CSS != null && window.CSS.supports != null && window.CSS.supports(\"--fake-var:0\")\n}\nconst m = /var\\(--(.*?)\\)/g;\n\nclass StyleXSheet {\n constructor(props = {}) {\n let c;\n this.tag = null;\n this.injected = false;\n this.ruleForPriority = new Map();\n this.rules = [];\n this.rootTheme = props.rootTheme;\n this.rootDarkTheme = props.rootDarkTheme;\n this.isSlow = (c = props.isSlow) != null ? c : typeof location === \"object\" && typeof location.search === \"string\" ? location.search.includes(\"stylex-slow\") : !1;\n this.supportsVariables = (c = props.supportsVariables) != null ? c : l();\n this._isRTL = false; // b(\"Locale\").isRTL();\n this.externalRules = new Set()\n }\n\n getVariableMatch() {\n return m\n }\n\n isHeadless() {\n return this.tag == null || !ExecutionEnvironment.canUseDOM\n }\n\n getTag() {\n let a = this.tag;\n a != null || /*g(0, 11103)*/console.log(\"xxxxxxxxxxxxxxxxxxxxxx\");\n return a\n }\n\n getCSS() {\n return this.rules.join(\"\\n\")\n }\n\n getRulePosition(a) {\n return this.rules.indexOf(a)\n }\n\n getRuleCount() {\n return this.rules.length\n }\n\n inject() {\n if (this.injected) return;\n this.injected = !0;\n if (!ExecutionEnvironment.canUseDOM) {\n this.injectTheme();\n return\n }\n this.tag = k();\n this.injectTheme()\n }\n\n injectTheme() {\n this.rootTheme != null && this.insert(j(\":root, .\" + _LIGHT_MODE_CLASS_NAME, this.rootTheme), 0), this.rootDarkTheme != null && this.insert(j(\".\" + _DARK_MODE_CLASS_NAME + \":root, .\" + _DARK_MODE_CLASS_NAME, this.rootDarkTheme), 0)\n }\n\n __injectCustomThemeForTesting(a, b) {\n b != null && this.insert(j(a, b), 0)\n }\n\n delete(a) {\n var b = this.rules.indexOf(a);\n b >= 0 || /*g(0, 2656, a)*/console.log(\"xxxxxxxxxxxxxx\");\n this.rules.splice(b, 1);\n if (this.isHeadless()) return;\n a = this.getTag();\n if (this.isSlow) a.removeChild(a.childNodes[b + 1]);\n else {\n a = a.sheet;\n a || /*g(0, 2657)*/console.log(\"xxxxxxxxxxxxxx\");\n a.deleteRule(b)\n }\n }\n\n normalizeRule(a) {\n var b = this.rootTheme;\n return this.supportsVariables || b == null ? a : a.replace(m, function (a, c) {\n return b[c]\n })\n }\n\n getInsertPositionForPriority(a) {\n var b = this.ruleForPriority.get(a);\n if (b != null) return this.rules.indexOf(b) + 1;\n b = Array.from(this.ruleForPriority.keys()).sort(function (a, b) {\n return b - a\n }).filter(function (b) {\n return b > a ? 1 : 0\n });\n if (b.length === 0) return this.getRuleCount();\n b = b.pop();\n return this.rules.indexOf(this.ruleForPriority.get(b))\n }\n\n insert(a, b, c) {\n this.injected === !1 && this.inject();\n c = this._isRTL && c != null ? 
c : a;\n if (this.externalRules.has(c.slice(0, c.indexOf(\"{\")).trim())) return;\n if (this.rules.includes(c)) return;\n a = this.normalizeRule(c);\n if (this.externalRules.has(a.slice(0, a.indexOf(\"{\")).trim())) return;\n c = this.getInsertPositionForPriority(b);\n this.rules.splice(c, 0, a);\n this.ruleForPriority.set(b, a);\n if (this.isHeadless()) return;\n b = this.getTag();\n if (this.isSlow) {\n var d = document.createTextNode(a);\n b.insertBefore(d, b.childNodes[c])\n } else {\n d = b.sheet;\n if (d != null) try {\n d.insertRule(a, c)\n } catch (a) {}\n }\n }\n}\n\nmodule.exports = StyleXSheet;\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex-theme/src/StyleXSheet.js?");
/***/ }),
/***/ "../stylex-theme/src/index.js":
/*!************************************!*\
!*** ../stylex-theme/src/index.js ***!
\************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nconst CometStylexSheet = __webpack_require__(/*! ./CometStyleXSheet */ \"../stylex-theme/src/CometStyleXSheet.js\");\nconst StyleXSheet = __webpack_require__(/*! ./StyleXSheet */ \"../stylex-theme/src/StyleXSheet.js\");\n\nmodule.exports = CometStylexSheet;\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex-theme/src/index.js?");
/***/ }),
/***/ "../stylex-theme/src/utils.js":
/*!************************************!*\
!*** ../stylex-theme/src/utils.js ***!
\************************************/
/***/ ((module) => {
eval("/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nconst canUseDOM = !!(\n typeof window !== 'undefined' &&\n window.document &&\n window.document.createElement\n);\n\nconst ExecutionEnvironment = {\n canUseDOM: canUseDOM,\n canUseWorkers: typeof Worker !== 'undefined',\n canUseEventListeners: canUseDOM && !!(window.addEventListener || window.attachEvent),\n canUseViewport: canUseDOM && !!window.screen\n};\n\nmodule.exports = ExecutionEnvironment;\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex-theme/src/utils.js?");
/***/ }),
/***/ "../stylex/index.js":
/*!**************************!*\
!*** ../stylex/index.js ***!
\**************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/*\n * Copyright 2020-present Ladifire & Ladifire open source team. All rights reserved.\n * This file is licensed to you under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License. You may obtain a copy\n * of the License at http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software distributed under\n * the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS\n * OF ANY KIND, either express or implied. See the License for the specific language\n * governing permissions and limitations under the License.\n */\n\nmodule.exports = __webpack_require__(/*! ./stylex.js */ \"../stylex/stylex.js\");\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex/index.js?");
/***/ }),
/***/ "../stylex/stylex.js":
/*!***************************!*\
!*** ../stylex/stylex.js ***!
\***************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Copyright (c) Ladifire, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n */\n\nconst CometStyleXSheet = __webpack_require__(/*! @ladifire-opensource/stylex-theme */ \"../stylex-theme/index.js\");\n\nconsole.log(\"CometStyleXSheet\", CometStyleXSheet);\n\nCometStyleXSheet.rootStyleSheet.injectTheme();\nconsole.log(\"\\n\\n\\n111\", CometStyleXSheet.rootStyleSheet);\nCometStyleXSheet.rootStyleSheet.injectThemeVariables({\n \"--black\": \"rgba(0, 0, 0, 0)\"\n});\n\nvar g = false;\n\nfunction h(a) {\n a = a.reverse();\n const b = {};\n while (a.length) {\n var c = a.pop();\n if (Array.isArray(c)) {\n for (var d = c.length - 1; d >= 0; d--) a.push(c[d]);\n continue\n }\n d = c;\n if (d != null && typeof d === \"object\")\n for (var e in d) {\n c = d[e];\n if (typeof c === \"string\") b[e] = c;\n else if (typeof c === \"object\") {\n var f;\n b[e] = (f = b[e]) != null ? f : {};\n Object.assign(b[e], c)\n }\n }\n }\n return b\n}\n\nfunction stylex(...args) {\n for (var a = arguments.length, b = new Array(a), c = 0; c < a; c++) b[c] = arguments[c];\n var d = h(b),\n e = \"\";\n for (var f in d)\n if (Boolean(d[f]))\n if (typeof d[f] === \"string\") e += e ? \" \" + d[f] : d[f];\n else if (typeof d[f] === \"object\") {\n var g = d[f];\n for (var i in g) {\n var j = g[i];\n e += e ? \" \" + j : j\n }\n }\n return e\n}\n\n/**\n * Create an stylex object, this is done by compiled and will caused error\n * if it exits in runtime code\n *\n * e.g:\n * const styles = stylex.create({\n * button: {\n * color: \"var(--accent)\",\n * backgroundColor: \"var(--secondary-color)\",\n * ...\n * },\n * })\n *\n * */\nstylex.create = function(...args) {\n throw new Error(\"stylex.create should never be called. It should be compiled away.\")\n};\n\n/**\n * Override a style property of style object, given by logic condition\n * It's mean: If true => color = \"red\", otherwise color = \"blue\"\n *\n * e.g:\n *\n * const {color} = props;\n *\n * return (\n * <div\n * className={stylex.dedupe(\n * {\n * position: \"relative\",\n * color: \"red\",\n * },\n * color === \"blue\" ? {\n * color: \"red\",\n * } : {},\n * )}\n * >\n * Component\n * </div>\n * )\n * */\nstylex.dedupe = function() {\n return stylex.apply(undefined, arguments)\n};\n\n/**\n * Compose multiple styles object into one\n * */\nstylex.compose = function () {\n for (var a = arguments.length, b = new Array(a), c = 0; c < a; c++) b[c] = arguments[c];\n return h(b)\n};\n\n/**\n * Create a keyframes animation\n * e.g:\n * const styles = stylex.create({\n * root: {\n * position: \"relative\",\n * animationName: stylex.keyframes({\n * '0%': {\n * transform: 'translateY(0)'\n * },\n * '28%': {\n * transform: 'translateY(-5px)'\n * },\n * '44%': {\n * transform: 'translateY(0)',\n * },\n * })\n * }\n * })\n * => will be transformed to:\n * ...animationName: \"sdert25s\", <== animation name\n * and an keyframes animation with name \"sdert25s\"\n * */\nstylex.keyframes = function (a) {\n throw new Error(\"stylex.keyframes should never be called. 
It should be compiled away.\")\n};\n\n/**\n * Inject compiled styles to css stylesheet if need (if it's never injected before)\n * */\nstylex.inject = function (a, c, d = null) {\n !g && a.indexOf(\"@keyframes\") === -1 && (g = !0), CometStyleXSheet.rootStyleSheet.insert(a, c, d)\n};\n\n/**\n * For quick uses\n * */\nstylex.absoluteFill = {\n bottom: 0,\n boxSizing: \"border-box\",\n right: 0,\n position: \"absolute\",\n left: 0,\n top: 0\n};\n\n/**\n * For quick uses\n * */\nstylex.absoluteCenter = {\n boxSizing: \"border-box\",\n left: \"50%\",\n position: \"absolute\",\n top: \"50%\",\n transform: \"translate(-50%, -50%)\"\n};\n\n/**\n * For quick uses\n * */\nstylex.blockBase = {\n borderStyle: \"solid\",\n borderWidth: 0,\n boxSizing: \"border-box\",\n display: \"block\",\n flexGrow: 1,\n flexShrink: 1,\n margin: 0,\n padding: 0,\n position: \"relative\",\n zIndex: 0\n};\n\n/**\n * For quick uses\n * */\nstylex.inlineBase = Object.assign({}, stylex.blockBase, {\n display: \"inline\"\n});\n\n/**\n * For quick uses\n * */\nstylex.buttonBase = {\n appearance: \"none\",\n backgroundColor: \"transparent\",\n borderStyle: \"solid\",\n borderWidth: 0,\n boxSizing: \"border-box\",\n margin: 0,\n padding: 0,\n position: \"relative\",\n textAlign: \"inherit\",\n zIndex: 0\n};\n\n/**\n * For quick uses\n * */\nstylex.flexBase = {\n alignItems: \"stretch\",\n borderStyle: \"solid\",\n borderWidth: 0,\n boxSizing: \"border-box\",\n display: \"flex\",\n flexDirection: \"column\",\n flexGrow: 1,\n flexShrink: 1,\n justifyContent: \"space-between\",\n margin: 0,\n minHeight: 0,\n minWidth: 0,\n padding: 0,\n position: \"relative\",\n zIndex: 0\n};\n\n/**\n * For quick uses\n * */\nstylex.flexInlineBase = Object.assign({}, stylex.flexBase, {\n display: \"inline-flex\"\n});\n\n/**\n * For quick uses\n * */\nstylex.linkBase = {\n backgroundColor: \"transparent\",\n backgroundImage: \"none\",\n boxSizing: \"border-box\",\n color: \"inherit\",\n cursor: \"pointer\",\n position: \"relative\",\n textDecoration: \"none\",\n zIndex: 0\n};\n\n/**\n * For quick uses\n * */\nstylex.listBase = {\n boxSizing: \"border-box\",\n listStyle: \"none\",\n marginBottom: 0,\n marginTop: 0,\n paddingLeft: 0\n};\n\n/**\n * For quick uses\n * */\nstylex.visuallyHidden = {\n clip: \"rect(0, 0, 0, 0)\",\n clipPath: \"inset(50%)\",\n height: 1,\n overflow: \"hidden\",\n position: \"absolute\",\n width: 1\n};\n\nmodule.exports = stylex;\n\n\n//# sourceURL=webpack://@ladifire-opensource/stylex-reactjs-examples/../stylex/stylex.js?");
/***/ })
/******/ });
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(__webpack_module_cache__[moduleId]) {
/******/ return __webpack_module_cache__[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = __webpack_modules__;
/******/
/******/ // the startup function
/******/ // It's empty as some runtime module handles the default behavior
/******/ __webpack_require__.x = x => {}
/************************************************************************/
/******/ /* webpack/runtime/hasOwnProperty shorthand */
/******/ (() => {
/******/ __webpack_require__.o = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
/******/ })();
/******/
/******/ /* webpack/runtime/jsonp chunk loading */
/******/ (() => {
/******/ // no baseURI
/******/
/******/ // object to store loaded and loading chunks
/******/ // undefined = chunk not loaded, null = chunk preloaded/prefetched
/******/ // Promise = chunk loading, 0 = chunk loaded
/******/ var installedChunks = {
/******/ "index": 0
/******/ };
/******/
/******/ var deferredModules = [
/******/ ["./src/index.tsx","vendors-node_modules_babel_runtime_helpers_interopRequireDefault_js-node_modules_babel_runtim-834677"]
/******/ ];
/******/ // no chunk on demand loading
/******/
/******/ // no prefetching
/******/
/******/ // no preloaded
/******/
/******/ // no HMR
/******/
/******/ // no HMR manifest
/******/
/******/ var checkDeferredModules = x => {};
/******/
/******/ // install a JSONP callback for chunk loading
/******/ var webpackJsonpCallback = (parentChunkLoadingFunction, data) => {
/******/ var [chunkIds, moreModules, runtime, executeModules] = data;
/******/ // add "moreModules" to the modules object,
/******/ // then flag all "chunkIds" as loaded and fire callback
/******/ var moduleId, chunkId, i = 0, resolves = [];
/******/ for(;i < chunkIds.length; i++) {
/******/ chunkId = chunkIds[i];
/******/ if(__webpack_require__.o(installedChunks, chunkId) && installedChunks[chunkId]) {
/******/ resolves.push(installedChunks[chunkId][0]);
/******/ }
/******/ installedChunks[chunkId] = 0;
/******/ }
/******/ for(moduleId in moreModules) {
/******/ if(__webpack_require__.o(moreModules, moduleId)) {
/******/ __webpack_require__.m[moduleId] = moreModules[moduleId];
/******/ }
/******/ }
/******/ if(runtime) runtime(__webpack_require__);
/******/ if(parentChunkLoadingFunction) parentChunkLoadingFunction(data);
/******/ while(resolves.length) {
/******/ resolves.shift()();
/******/ }
/******/
/******/ // add entry modules from loaded chunk to deferred list
/******/ if(executeModules) deferredModules.push.apply(deferredModules, executeModules);
/******/
/******/ // run deferred modules when all chunks ready
/******/ return checkDeferredModules();
/******/ }
/******/
/******/ var chunkLoadingGlobal = self["webpackChunk_ladifire_opensource_stylex_reactjs_examples"] = self["webpackChunk_ladifire_opensource_stylex_reactjs_examples"] || [];
/******/ chunkLoadingGlobal.forEach(webpackJsonpCallback.bind(null, 0));
/******/ chunkLoadingGlobal.push = webpackJsonpCallback.bind(null, chunkLoadingGlobal.push.bind(chunkLoadingGlobal));
/******/
/******/ function checkDeferredModulesImpl() {
/******/ var result;
/******/ for(var i = 0; i < deferredModules.length; i++) {
/******/ var deferredModule = deferredModules[i];
/******/ var fulfilled = true;
/******/ for(var j = 1; j < deferredModule.length; j++) {
/******/ var depId = deferredModule[j];
/******/ if(installedChunks[depId] !== 0) fulfilled = false;
/******/ }
/******/ if(fulfilled) {
/******/ deferredModules.splice(i--, 1);
/******/ result = __webpack_require__(__webpack_require__.s = deferredModule[0]);
/******/ }
/******/ }
/******/ if(deferredModules.length === 0) {
/******/ __webpack_require__.x();
/******/ __webpack_require__.x = x => {};
/******/ }
/******/ return result;
/******/ }
/******/ var startup = __webpack_require__.x;
/******/ __webpack_require__.x = () => {
/******/ // reset startup function so it can be called again when more startup code is added
/******/ __webpack_require__.x = startup || (x => {});
/******/ return (checkDeferredModules = checkDeferredModulesImpl)();
/******/ };
/******/ })();
/******/
/************************************************************************/
/******/ // run startup
/******/ return __webpack_require__.x();
/******/ })()
;
|
var expect = require('chai').expect,
path = require('path'),
EventEmitter = require('events').EventEmitter,
ScriptManager = require('../lib/ScriptManager').ScriptManager;
describe('ScriptManager', function() {
var manager;
var realMainAppScript = 'folder' + path.sep + 'App.js';
var mainAppScript = 'folder' + path.sep + 'app.js';
beforeEach(function() {
var frontendClientStub = {
sendEvent: function() { }
};
var debuggerClientStub = new EventEmitter();
manager = new ScriptManager([], frontendClientStub, debuggerClientStub);
manager.realMainAppScript = realMainAppScript;
manager.mainAppScript = mainAppScript;
});
describe('findScriptByID()', function() {
it('returns stored source', function() {
manager._sources['id'] = 'a-source';
expect(manager.findScriptByID('id')).to.equal('a-source');
});
it('returns undefined for unknown id', function() {
expect(manager.findScriptByID('unknown-id')).to.equal(undefined);
});
});
describe('reset()', function() {
it('removes all stored scripts', function() {
manager._sources['id'] = 'a-source';
manager.reset();
expect(manager.findScriptByID('id')).to.equal(undefined);
});
});
describe('normalizeName()', function() {
if (process.platform == 'win32') {
it('returns case sensitive name for main script on Windows', function() {
var name = manager.normalizeName(realMainAppScript);
expect(name).to.equal(mainAppScript);
});
} else {
it('returns unchanged name for main script on Linux', function() {
var normalized_name = manager.normalizeName(realMainAppScript);
expect(normalized_name).to.equal(realMainAppScript);
});
}
it('returns unchanged name for not main scripts', function() {
var name = 'folder/app1.js';
var normalized_name = manager.normalizeName(name);
expect(normalized_name).to.equal(name);
});
});
});
|
# Copyright 2016-2018 Dirk Thomas
# Licensed under the Apache License, Version 2.0
from collections import OrderedDict
from concurrent.futures import CancelledError
import locale
import os
from pathlib import Path
import re
import traceback
from colcon_core.environment_variable import EnvironmentVariable
from colcon_core.location import get_relative_package_index_path
from colcon_core.logging import colcon_logger
from colcon_core.plugin_system import instantiate_extensions
from colcon_core.plugin_system import order_extensions_grouped_by_priority
from colcon_core.plugin_system import SkipExtensionException
from colcon_core.subprocess import check_output
logger = colcon_logger.getChild(__name__)
"""Environment variable to enable all shell extensions."""
ALL_SHELLS_ENVIRONMENT_VARIABLE = EnvironmentVariable(
'COLCON_ALL_SHELLS', 'Flag to enable all shell extensions')
use_all_shell_extensions = os.environ.get(
ALL_SHELLS_ENVIRONMENT_VARIABLE.name, False)
class ShellExtensionPoint:
"""
The interface for shell extensions.
A shell extension generates the scripts for a specific shell to set up the
environment.
For each instance the attribute `SHELL_NAME` is being set to the basename
of the entry point registering the extension.
"""
"""The version of the shell extension interface."""
EXTENSION_POINT_VERSION = '2.0'
"""
The default priority of shell extensions.
A shell extension must use a higher priority than the default if and only
if it is a "primary" shell.
A "primary" shell does not depend on another shell to setup the
environment, e.g. `sh`.
An example for a "non-primary" shell would be `bash` which relies on the
`sh` shell extension to setup environment variables and only contributes
additional information like completion.
All "non-primary" shell extensions must use a priority equal to or lower
than the default.
"""
PRIORITY = 100
def get_file_extensions(self):
"""
Get the file extensions provided by this extension.
By default the extension name will be returned.
The method is intended to be overridden in a subclass.
:returns: the file extensions
:rtype: tuple
"""
return (self.SHELL_NAME, )
def create_prefix_script(self, prefix_path, merge_install):
"""
Create a script in the install prefix path.
The script should call each package specific script in order.
This method must be overridden in a subclass.
:param Path prefix_path: The path of the install prefix
:param bool merge_install: The flag if all packages share the same
install prefix
"""
raise NotImplementedError()
def _get_prefix_util_path(self):
"""
Get the absolute path of the `prefix_util.py` module.
:returns: The path of the module file
:rtype: Path
"""
return Path(__file__).parent / 'template' / 'prefix_util.py'
def create_package_script(self, prefix_path, pkg_name, hooks):
"""
Create a script for a specific package.
The script should call each hook script in order.
This method must be overridden in a subclass.
:param Path prefix_path: The package specific install prefix
:param str pkg_name: The package name
:param list hooks: The relative paths to the hook scripts
"""
raise NotImplementedError()
def create_hook_set_value(
self, env_hook_name, prefix_path, pkg_name, name, value,
):
"""
Create a hook script to set an environment variable value.
This method must be overridden in a subclass.
:param str env_hook_name: The name of the hook script
:param Path prefix_path: The path of the install prefix
:param str pkg_name: The package name
:param str name: The name of the environment variable
:param str value: The value to be set
:returns: The relative path to the created hook script
:rtype: Path
"""
raise NotImplementedError()
def create_hook_append_value(
self, env_hook_name, prefix_path, pkg_name, name, value,
):
"""
Create a hook script to append a value to an environment variable.
This method must be overridden in a subclass.
:param str env_hook_name: The name of the hook script
:param Path prefix_path: The path of the install prefix
:param str pkg_name: The package name
:param str name: The name of the environment variable
:param str value: The value to be appended
:returns: The relative path to the created hook script
:rtype: Path
"""
raise NotImplementedError()
def create_hook_prepend_value(
self, env_hook_name, prefix_path, pkg_name, name, subdirectory,
):
"""
Create a hook script to prepend a value to an environment variable.
This method must be overridden in a subclass.
:param str env_hook_name: The name of the hook script
:param Path prefix_path: The path of the install prefix
:param str pkg_name: The package name
:param str name: The name of the environment variable
:param str subdirectory: The subdirectory of the prefix path
:returns: The relative path to the created hook script
:rtype: Path
"""
raise NotImplementedError()
def create_hook_include_file(
self, env_hook_name, prefix_path, pkg_name, relative_path,
):
"""
Create a hook script to include another script.
This method must be overridden in a subclass.
:param str env_hook_name: The name of the hook script
:param Path prefix_path: The path of the install prefix
:param str pkg_name: The package name
:param str relative_path: The path of the included scripts
:returns: The relative path to the created hook script
:rtype: Path
"""
raise NotImplementedError()
async def generate_command_environment(
self, task_name, build_base, dependencies,
):
"""
Get the environment variables to invoke commands.
The method must be overridden in a subclass if and only if the shell
extension represents a "primary" shell (as defined in
:attribute:`ShellExtensionPoint.PRIORITY`).
:param str task_name: The name of the task
:param Path build_base: The base path of the build directory
:param OrderedDict dependencies: The ordered dictionary mapping
dependency names to their paths
:returns: The environment
:rtype: dict
:raises SkipExtensionException: if the shell is not usable on the
current platform
"""
raise NotImplementedError()
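# Illustrative sketch (hypothetical subclass, not part of this module): a
# "primary" shell extension overrides the script creation methods and uses a
# priority above the default, e.g.:
#
#   class ShShellExtension(ShellExtensionPoint):
#       PRIORITY = 200  # above ShellExtensionPoint.PRIORITY (100)
#
#       def create_prefix_script(self, prefix_path, merge_install):
#           ...  # e.g. write a setup script into prefix_path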
def get_shell_extensions():
"""
Get the available shell extensions.
The extensions are grouped by their priority and each group is ordered by
the entry point name.
:rtype: OrderedDict
"""
extensions = instantiate_extensions(__name__)
for name, extension in extensions.items():
extension.SHELL_NAME = name
return order_extensions_grouped_by_priority(extensions)
async def get_command_environment(task_name, build_base, dependencies):
"""
Get the environment variables to invoke commands.
:param str task_name: The task name identifying a group of task extensions
:param str build_base: The path of the build base
:param dependencies: The ordered dictionary mapping dependency names to
their paths
"""
extensions = get_shell_extensions()
for priority in extensions.keys():
extensions_same_prio = extensions[priority]
for extension in extensions_same_prio.values():
try:
# use the environment of the first successful shell extension
return await extension.generate_command_environment(
task_name, Path(build_base), dependencies)
except NotImplementedError:
# skip extension, continue with next one
logger.debug(
"Skip shell extension '{extension.SHELL_NAME}' for "
'command environment'.format_map(locals()))
except SkipExtensionException as e: # noqa: F841
# skip extension, continue with next one
logger.info(
"Skip shell extension '{extension.SHELL_NAME}' for "
'command environment: {e}'.format_map(locals()))
except (CancelledError, RuntimeError):
# re-raise same exception to handle it in the executor
# without a traceback
raise
except Exception as e: # noqa: F841
# catch exceptions raised in shell extension
exc = traceback.format_exc()
logger.error(
'Exception in shell extension '
"'{extension.SHELL_NAME}': {e}\n{exc}"
.format_map(locals()))
# skip failing extension, continue with next one
raise RuntimeError(
'Could not find a shell extension for the command environment')
async def get_environment_variables(cmd, *, cwd=None, shell=True):
"""
Get the environment variables from the output of the command.
:param cmd: the sequence of program arguments
:param cwd: the working directory for the subprocess
:param shell: whether to use the shell as the program to execute
:rtype: dict
"""
output = await check_output(cmd, cwd=cwd, shell=shell)
env = OrderedDict()
for line in output.splitlines():
line = line.rstrip()
if not line:
continue
encoding = locale.getpreferredencoding()
try:
line = line.decode(encoding)
except UnicodeDecodeError:
line_replaced = line.decode(encoding=encoding, errors='replace')
logger.warning(
'Failed to decode line from the environment using the '
"encoding '{encoding}': {line_replaced}".format_map(locals()))
continue
parts = line.split('=', 1)
if len(parts) == 2 and re.match('^[a-zA-Z0-9_%]+$', parts[0]):
# add new environment variable
env[parts[0]] = parts[1]
else:
# assume a line without an equal sign or with a "key" which is not
# a valid name is a continuation of the previous line
if env:
env[list(env.keys())[-1]] += '\n' + line
assert len(env) > 0, "The environment shouldn't be empty"
return env
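# Illustrative usage (a sketch, assuming a POSIX `env` utility which prints
# `NAME=value` lines):
#
#   environment = await get_environment_variables(['env'], shell=False)
#   environment.get('PATH')  # e.g. '/usr/local/bin:/usr/bin'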
def create_environment_hook(
env_hook_name, prefix_path, pkg_name, name, subdirectory, *, mode='prepend'
):
"""
Create a hook script for each primary shell.
:param str env_hook_name: The name of the hook script
:param Path prefix_path: The path of the install prefix
:param str pkg_name: The package name
:param str name: The name of the environment variable
:param str subdirectory: The subdirectory of the prefix path to be prepended
:param str mode: The mode how the new value should be combined with an
existing value, currently only the value `prepend` is supported
:returns: The relative paths to the created hook scripts
:rtype: list
"""
logger.log(
1, "create_environment_hook('%s', '%s')" % (pkg_name, env_hook_name))
hooks = []
extensions = get_shell_extensions()
for priority in extensions.keys():
# only consider primary shell extensions
if priority <= ShellExtensionPoint.PRIORITY:
break
extensions_same_prio = extensions[priority]
for extension in extensions_same_prio.values():
if mode == 'prepend':
try:
hook = extension.create_hook_prepend_value(
env_hook_name, prefix_path, pkg_name, name,
subdirectory)
assert isinstance(hook, Path), \
'create_hook_prepend_value() should return a Path ' \
'object'
except Exception as e: # noqa: F841
# catch exceptions raised in shell extension
exc = traceback.format_exc()
logger.error(
'Exception in shell extension '
"'{extension.SHELL_NAME}': {e}\n{exc}"
.format_map(locals()))
# skip failing extension, continue with next one
continue
hooks.append(hook)
else:
raise NotImplementedError()
if not hooks:
raise RuntimeError(
'Could not find a primary shell extension for creating an '
'environment hook')
return hooks
_get_colcon_prefix_path_warnings = set()
def get_colcon_prefix_path(*, skip=None):
"""
Get the paths from the COLCON_PREFIX_PATH environment variable.
    For paths which do not exist a warning is printed and the path is
    skipped.
    Even across repeated invocations a warning is only printed once for each
    non-existing path.
:param skip: The current prefix path to be skipped and not be included in
the return value
:returns: The list of prefix paths
:rtype: list
"""
global _get_colcon_prefix_path_warnings
prefix_path = []
colcon_prefix_path = os.environ.get('COLCON_PREFIX_PATH', '')
for path in colcon_prefix_path.split(os.pathsep):
if not path:
continue
if skip is not None and path == str(skip):
continue
if not os.path.exists(path):
if path not in _get_colcon_prefix_path_warnings:
logger.warning(
"The path '{path}' in the environment variable "
"COLCON_PREFIX_PATH doesn't exist".format_map(locals()))
_get_colcon_prefix_path_warnings.add(path)
continue
prefix_path.append(path)
return prefix_path
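# Illustrative sketch (paths are made up): on a POSIX system (os.pathsep ':'),
# with COLCON_PREFIX_PATH='/opt/ws1:/opt/ws2' and skip='/opt/ws2' the function
# returns ['/opt/ws1'] as long as that path exists; non-existing entries are
# dropped with a one-time warning.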
def check_dependency_availability(dependencies, *, script_filename):
"""
Check if all dependencies are available.
    First the install base of the workspace is checked.
    Second all prefix paths set in the environment are considered.
    If a dependency is only found in the environment a warning is logged to
    notify the user.
:param dependencies: The ordered dictionary mapping dependency names to
their paths. Packages which have been found in the environment are being
removed from the dictionary.
:param str script_filename: The filename of the package specific script to
check for
:raises RuntimeError: if any package isn't found in either of the locations
"""
missing = OrderedDict()
# check if the dependency exists in the install base of this workspace
for pkg_name, pkg_install_base in dependencies.items():
pkg_script = Path(
pkg_install_base) / 'share' / pkg_name / script_filename
if not pkg_script.exists():
missing[pkg_name] = pkg_script
# check if the dependency exists in any other prefix path
packages_in_env = find_installed_packages_in_environment()
env_packages = OrderedDict()
for pkg_name, pkg_install_base in list(missing.items()):
if pkg_name in packages_in_env:
env_packages[pkg_name] = packages_in_env[pkg_name]
# no need to source any script for this package
del dependencies[pkg_name]
del missing[pkg_name]
# warn about using packages from the environment
if env_packages:
logger.warning(
"The following packages are in the workspace but haven't been "
'built:' +
''.join('\n- %s' % name for name in env_packages.keys()) +
'\nThey are being used from the following locations instead:' +
''.join('\n- %s' % path for path in env_packages.values()) +
'\nTo suppress this warning ignore these packages in the ' +
'workspace:\n--packages-ignore ' + ' '.join(env_packages.keys()))
# raise error in case any dependencies are not matched
if missing:
raise RuntimeError(
'Failed to find the following files:' +
''.join('\n- %s' % path for path in missing.values()) +
'\nCheck that the following packages have been built:' +
''.join('\n- %s' % name for name in missing.keys()))
def find_installed_packages_in_environment():
"""
Find packages under the COLCON_PREFIX_PATH.
    For each prefix path the package index is read and the first time a
    package is found its install prefix is added to the result.
:returns: The mapping from a package name to the prefix path
:rtype: OrderedDict
"""
packages = OrderedDict()
for prefix_path in get_colcon_prefix_path():
prefix_path = Path(prefix_path)
pkgs = find_installed_packages(prefix_path)
if pkgs is None:
logger.debug(
"Ignoring prefix path '{prefix_path}'".format_map(locals()))
continue
for pkg_name in sorted(pkgs.keys()):
# ignore packages with the same name in "lower" prefix path
if pkg_name in packages:
continue
packages[pkg_name] = pkgs[pkg_name]
return packages
def find_installed_packages(install_base):
"""
    Find installed packages under the install base path.
    The path must contain a marker file with the install layout.
    Based on the install layout the packages are discovered in different
    locations.
:param Path install_base: The base path to find installed packages in
:returns: The mapping from a package name to the prefix path, None if the
path doesn't exist or doesn't contain a marker file with a valid install
layout
:rtype: OrderedDict or None
"""
marker_file = install_base / '.colcon_install_layout'
if not marker_file.is_file():
return None
install_layout = marker_file.read_text().rstrip()
if install_layout not in ('isolated', 'merged'):
return None
packages = {}
if install_layout == 'isolated':
# for each subdirectory look for the package specific file
for p in install_base.iterdir():
if not p.is_dir():
continue
if p.name.startswith('.'):
continue
marker = p / get_relative_package_index_path() / p.name
if marker.is_file():
packages[p.name] = p
else:
# find all files in the subdirectory
if (install_base / get_relative_package_index_path()).is_dir():
package_index = install_base / get_relative_package_index_path()
for p in package_index.iterdir():
if not p.is_file():
continue
if p.name.startswith('.'):
continue
packages[p.name] = install_base
return packages
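# Illustrative sketch of the two supported layouts, with <index> standing for
# get_relative_package_index_path():
#   isolated: <install_base>/<pkg>/<index>/<pkg>  -> packages[<pkg>] = <install_base>/<pkg>
#   merged:   <install_base>/<index>/<pkg>        -> packages[<pkg>] = <install_base>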
|
#!/usr/bin/env python
# Wenchang Yang (wenchang@princeton.edu)
# Fri Aug 23 11:53:11 EDT 2019
#import os, os.path, sys
#import xarray as xr, numpy as np, pandas as pd
#import matplotlib.pyplot as plt
import xarray as xr
from numpy import exp, log
# physical parameters
Lv = 2.555e6 # J/kg
g = 9.81 # m/s**2
c_p = 1005.7 # J/K/kg
Rd = 287 # J/K/kg
Rv = 461 # J/K/kg
epsilon = Rd/Rv
T0 = 273.15 # K
def saturated_vapor_pressure(T):
    '''calculate saturated water vapor pressure (in Pa) given temperature T (in Kelvin)'''
return 610.94*exp(17.625*(T-T0)/(T-T0+243.04))
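# sanity check (approximate, Magnus-type formula): saturated_vapor_pressure(300.0)
# is roughly 3.5e3 Pa, consistent with tabulated values near 27 degrees Celsius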
def mixing_ratio(p, e):
    '''calculate mixing ratio given air pressure (p) and water vapor pressure (e)'''
return epsilon*e/(p-e)
def mixing_ratio_by_q(q):
'''calculate mixing ratio given specific humidity q'''
return q/(1-q)
def vapor_pressure_by_mixing_ratio(p, r_v):
'''calculate water vapor pressure given air pressure and mixing ratio'''
return p*r_v/(epsilon+r_v)
def relative_humidity(q, p, T):
"""calculate relative humidity (0-1) given specific humidity q (kg/kg), air pressure p (Pa) and temperature T (K)"""
r_v = mixing_ratio_by_q(q)
e = vapor_pressure_by_mixing_ratio(p, r_v)
es = saturated_vapor_pressure(T)
RH = e/es
return RH
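# worked example (values rounded): q=0.01 kg/kg, p=1e5 Pa, T=300 K gives
# r_v ~ 0.0101, e ~ 1.6e3 Pa, es ~ 3.5e3 Pa, hence RH ~ 0.45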
def moist_entropy(T, p, RH=None, q=None):
'''calculate moist entropy given air temperature (T), pressure (p) and relative humidity (RH, 0-1) or specific humidity (q).
The equation is: s = c_p*log(T) - Rd*log(p_d) + Lv*r_v/T - Rv*r_v*log(RH)'''
    if RH is None:
        assert q is not None, 'at least one of the two variables must be specified: relative humidity/specific humidity'
        # r_v and e are needed in the entropy formula below, so derive them
        # from q here instead of calling relative_humidity(), which would
        # leave them undefined
        r_v = mixing_ratio_by_q(q)
        e = vapor_pressure_by_mixing_ratio(p, r_v)
        RH = e/saturated_vapor_pressure(T)
else: # RH is from input directly
e = saturated_vapor_pressure(T) * RH
r_v = mixing_ratio(p, e)
s = c_p*log(T) - Rd*log(p-e) + Lv*r_v/T - Rv*r_v*log(RH)
s.attrs['long_name'] = 'moist entropy'
s.attrs['units'] = 'J/K/kg'
return s
def entropy_deficit(sst, slp, Tb, RHb, p_m, Tm, RHm, forGPI2010=False):
    '''calculate entropy deficit as defined in Tang and Emanuel, 2012.
sst: sea surface temperature (in Kelvin);
slp: sea level pressure (in Pa);
Tb: boundary layer air temperature;
RHb: boundary layer relative humidity (0-1);
p_m: middle troposphere pressure level (usually 6e4 Pa);
Tm: middle troposphere air temperature;
RHm: middle troposphere relative humidity (0-1);
    forGPI2010 (default=False): if True, calculate the entropy deficit used in GPI (Emanuel 2010, using s_b instead of s_m_star in the numerator);
if False (default), calculate entropy deficit used in ventilation index from Tang and Emanuel 2012.'''
s_sst_star = moist_entropy(T=sst, p=slp, RH=1)
s_b = moist_entropy(T=Tb, p=slp, RH=RHb)
s_m_star = moist_entropy(T=Tm, p=p_m, RH=1)
s_m = moist_entropy(T=Tm, p=p_m, RH=RHm)
if forGPI2010:
chi = (s_b - s_m)/(s_sst_star - s_b).pipe(lambda x: x.where(x>0)) # exclude values <= 0
chi.attrs['long_name'] = 'entropy deficit in GPI from Emanuel 2010'
else:
chi = (s_m_star - s_m)/(s_sst_star - s_b).pipe(lambda x: x.where(x>0)) # exclude values <= 0
chi.attrs['long_name'] = 'entropy deficit in ventilation index from Tang and Emanuel 2012'
return chi
if __name__ == '__main__':
ifile = '/tigress/wenchang/MODEL_OUT/CTL1860_noleap_tigercpu_intelmpi_18_576PE/POSTP/10000101.atmos_month.nc'
ofile = 'test.nc'
ds = xr.open_dataset(ifile)
land_mask = ds.land_mask.load()
is_ocean = land_mask < 0.1
p_m = 6e4 # 600hPa
chi = entropy_deficit(sst=ds.t_surf,
slp=ds.ps,
Tb=ds.t_ref,
RHb=ds.rh_ref/100,
p_m=p_m,
Tm=ds.temp.interp(pfull=p_m/100).drop('pfull'),
RHm=ds.rh.interp(pfull=p_m/100).drop('pfull')/100)
|
# graphicsDisplay.py
# ------------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
from graphicsUtils import *
import math, time
from game import Directions
###########################
# GRAPHICS DISPLAY CODE #
###########################
# Most code by Dan Klein and John Denero written or rewritten for cs188, UC Berkeley.
# Some code from a Pacman implementation by LiveWires, and used / modified with permission.
DEFAULT_GRID_SIZE = 30.0
INFO_PANE_HEIGHT = 35
BACKGROUND_COLOR = formatColor(0,0,0)
WALL_COLOR = formatColor(0.0/255.0, 51.0/255.0, 255.0/255.0)
INFO_PANE_COLOR = formatColor(.4,.4,0)
SCORE_COLOR = formatColor(.9, .9, .9)
PACMAN_OUTLINE_WIDTH = 2
PACMAN_CAPTURE_OUTLINE_WIDTH = 4
GHOST_COLORS = []
GHOST_COLORS.append(formatColor(.9,0,0)) # Red
GHOST_COLORS.append(formatColor(0,.3,.9)) # Blue
GHOST_COLORS.append(formatColor(.98,.41,.07)) # Orange
GHOST_COLORS.append(formatColor(.1,.75,.7)) # Green
GHOST_COLORS.append(formatColor(1.0,0.6,0.0)) # Yellow
GHOST_COLORS.append(formatColor(.4,0.13,0.91)) # Purple
TEAM_COLORS = GHOST_COLORS[:2]
GHOST_SHAPE = [
( 0, 0.3 ),
( 0.25, 0.75 ),
( 0.5, 0.3 ),
( 0.75, 0.75 ),
( 0.75, -0.5 ),
( 0.5, -0.75 ),
(-0.5, -0.75 ),
(-0.75, -0.5 ),
(-0.75, 0.75 ),
(-0.5, 0.3 ),
(-0.25, 0.75 )
]
GHOST_SIZE = 0.65
SCARED_COLOR = formatColor(1,1,1)
GHOST_VEC_COLORS = [colorToVector(c) for c in GHOST_COLORS]
PACMAN_COLOR = formatColor(255.0/255.0,255.0/255.0,61.0/255)
PACMAN_SCALE = 0.5
#pacman_speed = 0.25
# Food
FOOD_COLOR = formatColor(1,1,1)
FOOD_SIZE = 0.1
# Laser
LASER_COLOR = formatColor(1,0,0)
LASER_SIZE = 0.02
# Capsule graphics
CAPSULE_COLOR = formatColor(1,1,1)
CAPSULE_SIZE = 0.25
# Drawing walls
WALL_RADIUS = 0.15
class InfoPane:
def __init__(self, layout, gridSize):
self.gridSize = gridSize
self.width = (layout.width) * gridSize
self.base = (layout.height + 1) * gridSize
self.height = INFO_PANE_HEIGHT
self.fontSize = 24
self.textColor = PACMAN_COLOR
self.drawPane()
def toScreen(self, pos, y = None):
"""
Translates a point relative from the bottom left of the info pane.
"""
        if y is None:
x,y = pos
else:
x = pos
x = self.gridSize + x # Margin
y = self.base + y
return x,y
def drawPane(self):
self.scoreText = text( self.toScreen(0, 0 ), self.textColor, "SCORE: 0", "Times", self.fontSize, "bold")
def initializeGhostDistances(self, distances):
self.ghostDistanceText = []
size = 20
if self.width < 240:
size = 12
if self.width < 160:
size = 10
for i, d in enumerate(distances):
t = text( self.toScreen(self.width//2 + self.width//8 * i, 0), GHOST_COLORS[i+1], d, "Times", size, "bold")
self.ghostDistanceText.append(t)
def updateScore(self, score):
changeText(self.scoreText, "SCORE: % 4d" % score)
    def setTeam(self, isBlue):
        # use a local name that does not shadow the text() drawing helper
        teamText = "RED TEAM"
        if isBlue: teamText = "BLUE TEAM"
        self.teamText = text( self.toScreen(300, 0 ), self.textColor, teamText, "Times", self.fontSize, "bold")
def updateGhostDistances(self, distances):
if len(distances) == 0: return
if 'ghostDistanceText' not in dir(self): self.initializeGhostDistances(distances)
else:
for i, d in enumerate(distances):
changeText(self.ghostDistanceText[i], d)
def drawGhost(self):
pass
def drawPacman(self):
pass
def drawWarning(self):
pass
def clearIcon(self):
pass
def updateMessage(self, message):
pass
def clearMessage(self):
pass
class PacmanGraphics:
def __init__(self, zoom=1.0, frameTime=0.0, capture=False):
self.have_window = 0
self.currentGhostImages = {}
self.pacmanImage = None
self.zoom = zoom
self.gridSize = DEFAULT_GRID_SIZE * zoom
self.capture = capture
self.frameTime = frameTime
def checkNullDisplay(self):
return False
def initialize(self, state, isBlue = False):
self.isBlue = isBlue
self.startGraphics(state)
# self.drawDistributions(state)
self.distributionImages = None # Initialized lazily
self.drawStaticObjects(state)
self.drawAgentObjects(state)
# Information
self.previousState = state
def startGraphics(self, state):
self.layout = state.layout
layout = self.layout
self.width = layout.width
self.height = layout.height
self.make_window(self.width, self.height)
self.infoPane = InfoPane(layout, self.gridSize)
self.currentState = layout
def drawDistributions(self, state):
walls = state.layout.walls
dist = []
for x in range(walls.width):
distx = []
dist.append(distx)
for y in range(walls.height):
( screen_x, screen_y ) = self.to_screen( (x, y) )
block = square( (screen_x, screen_y),
0.5 * self.gridSize,
color = BACKGROUND_COLOR,
filled = 1, behind=2)
distx.append(block)
self.distributionImages = dist
def drawStaticObjects(self, state):
layout = self.layout
self.drawWalls(layout.walls)
self.food = self.drawFood(layout.food)
self.capsules = self.drawCapsules(layout.capsules)
refresh()
def drawAgentObjects(self, state):
self.agentImages = [] # (agentState, image)
for index, agent in enumerate(state.agentStates):
if agent.isPacman:
image = self.drawPacman(agent, index)
self.agentImages.append( (agent, image) )
else:
image = self.drawGhost(agent, index)
self.agentImages.append( (agent, image) )
refresh()
def swapImages(self, agentIndex, newState):
"""
        Changes an image from a ghost to a pacman or vice versa (for capture)
"""
prevState, prevImage = self.agentImages[agentIndex]
for item in prevImage: remove_from_screen(item)
if newState.isPacman:
image = self.drawPacman(newState, agentIndex)
self.agentImages[agentIndex] = (newState, image )
else:
image = self.drawGhost(newState, agentIndex)
self.agentImages[agentIndex] = (newState, image )
refresh()
def update(self, newState):
agentIndex = newState._agentMoved
agentState = newState.agentStates[agentIndex]
if self.agentImages[agentIndex][0].isPacman != agentState.isPacman: self.swapImages(agentIndex, agentState)
prevState, prevImage = self.agentImages[agentIndex]
if agentState.isPacman:
self.animatePacman(agentState, prevState, prevImage)
else:
self.moveGhost(agentState, agentIndex, prevState, prevImage)
self.agentImages[agentIndex] = (agentState, prevImage)
        if newState._foodEaten is not None:
            self.removeFood(newState._foodEaten, self.food)
        if newState._capsuleEaten is not None:
            self.removeCapsule(newState._capsuleEaten, self.capsules)
self.infoPane.updateScore(newState.score)
if 'ghostDistances' in dir(newState):
self.infoPane.updateGhostDistances(newState.ghostDistances)
def make_window(self, width, height):
grid_width = (width-1) * self.gridSize
grid_height = (height-1) * self.gridSize
screen_width = 2*self.gridSize + grid_width
screen_height = 2*self.gridSize + grid_height + INFO_PANE_HEIGHT
begin_graphics(screen_width,
screen_height,
BACKGROUND_COLOR,
"CS188 Pacman")
def drawPacman(self, pacman, index):
position = self.getPosition(pacman)
screen_point = self.to_screen(position)
endpoints = self.getEndpoints(self.getDirection(pacman))
width = PACMAN_OUTLINE_WIDTH
outlineColor = PACMAN_COLOR
fillColor = PACMAN_COLOR
if self.capture:
outlineColor = TEAM_COLORS[index % 2]
fillColor = GHOST_COLORS[index]
width = PACMAN_CAPTURE_OUTLINE_WIDTH
return [circle(screen_point, PACMAN_SCALE * self.gridSize,
fillColor = fillColor, outlineColor = outlineColor,
endpoints = endpoints,
width = width)]
def getEndpoints(self, direction, position=(0,0)):
x, y = position
pos = x - int(x) + y - int(y)
width = 30 + 80 * math.sin(math.pi* pos)
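        # the mouth sweeps between 30 and 110 degrees of arc as Pacman crosses
        # a cell: at integer positions sin(pi*pos)=0 so width=30 (nearly
        # closed); at the cell midpoint sin(pi*0.5)=1 so width=110 (wide open)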
delta = width / 2
if (direction == 'West'):
endpoints = (180+delta, 180-delta)
elif (direction == 'North'):
endpoints = (90+delta, 90-delta)
elif (direction == 'South'):
endpoints = (270+delta, 270-delta)
else:
endpoints = (0+delta, 0-delta)
return endpoints
def movePacman(self, position, direction, image):
screenPosition = self.to_screen(position)
endpoints = self.getEndpoints( direction, position )
r = PACMAN_SCALE * self.gridSize
moveCircle(image[0], screenPosition, r, endpoints)
refresh()
def animatePacman(self, pacman, prevPacman, image):
if self.frameTime < 0:
print('Press any key to step forward, "q" to play')
keys = wait_for_keys()
if 'q' in keys:
self.frameTime = 0.1
if self.frameTime > 0.01 or self.frameTime < 0:
start = time.time()
fx, fy = self.getPosition(prevPacman)
px, py = self.getPosition(pacman)
frames = 4.0
for i in range(1,int(frames) + 1):
pos = px*i/frames + fx*(frames-i)/frames, py*i/frames + fy*(frames-i)/frames
self.movePacman(pos, self.getDirection(pacman), image)
refresh()
sleep(abs(self.frameTime) / frames)
else:
self.movePacman(self.getPosition(pacman), self.getDirection(pacman), image)
refresh()
def getGhostColor(self, ghost, ghostIndex):
if ghost.scaredTimer > 0:
return SCARED_COLOR
else:
return GHOST_COLORS[ghostIndex]
def drawGhost(self, ghost, agentIndex):
pos = self.getPosition(ghost)
dir = self.getDirection(ghost)
(screen_x, screen_y) = (self.to_screen(pos) )
coords = []
for (x, y) in GHOST_SHAPE:
coords.append((x*self.gridSize*GHOST_SIZE + screen_x, y*self.gridSize*GHOST_SIZE + screen_y))
colour = self.getGhostColor(ghost, agentIndex)
body = polygon(coords, colour, filled = 1)
WHITE = formatColor(1.0, 1.0, 1.0)
BLACK = formatColor(0.0, 0.0, 0.0)
dx = 0
dy = 0
if dir == 'North':
dy = -0.2
if dir == 'South':
dy = 0.2
if dir == 'East':
dx = 0.2
if dir == 'West':
dx = -0.2
leftEye = circle((screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2, WHITE, WHITE)
rightEye = circle((screen_x+self.gridSize*GHOST_SIZE*(0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2, WHITE, WHITE)
leftPupil = circle((screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08, BLACK, BLACK)
rightPupil = circle((screen_x+self.gridSize*GHOST_SIZE*(0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08, BLACK, BLACK)
ghostImageParts = []
ghostImageParts.append(body)
ghostImageParts.append(leftEye)
ghostImageParts.append(rightEye)
ghostImageParts.append(leftPupil)
ghostImageParts.append(rightPupil)
return ghostImageParts
def moveEyes(self, pos, dir, eyes):
(screen_x, screen_y) = (self.to_screen(pos) )
dx = 0
dy = 0
if dir == 'North':
dy = -0.2
if dir == 'South':
dy = 0.2
if dir == 'East':
dx = 0.2
if dir == 'West':
dx = -0.2
moveCircle(eyes[0],(screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2)
moveCircle(eyes[1],(screen_x+self.gridSize*GHOST_SIZE*(0.3+dx/1.5), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy/1.5)), self.gridSize*GHOST_SIZE*0.2)
moveCircle(eyes[2],(screen_x+self.gridSize*GHOST_SIZE*(-0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08)
moveCircle(eyes[3],(screen_x+self.gridSize*GHOST_SIZE*(0.3+dx), screen_y-self.gridSize*GHOST_SIZE*(0.3-dy)), self.gridSize*GHOST_SIZE*0.08)
def moveGhost(self, ghost, ghostIndex, prevGhost, ghostImageParts):
old_x, old_y = self.to_screen(self.getPosition(prevGhost))
new_x, new_y = self.to_screen(self.getPosition(ghost))
delta = new_x - old_x, new_y - old_y
for ghostImagePart in ghostImageParts:
move_by(ghostImagePart, delta)
refresh()
if ghost.scaredTimer > 0:
color = SCARED_COLOR
else:
color = GHOST_COLORS[ghostIndex]
edit(ghostImageParts[0], ('fill', color), ('outline', color))
self.moveEyes(self.getPosition(ghost), self.getDirection(ghost), ghostImageParts[-4:])
refresh()
def getPosition(self, agentState):
        if agentState.configuration is None: return (-1000, -1000)
return agentState.getPosition()
def getDirection(self, agentState):
        if agentState.configuration is None: return Directions.STOP
return agentState.configuration.getDirection()
def finish(self):
end_graphics()
def to_screen(self, point):
( x, y ) = point
#y = self.height - y
x = (x + 1)*self.gridSize
y = (self.height - y)*self.gridSize
return ( x, y )
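    # example: with the default gridSize of 30.0 and a layout height of 10,
    # grid point (0, 0) maps to screen point (30.0, 300.0); the y axis is
    # flipped so row 0 is drawn at the bottom of the window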
# Fixes some TK issue with off-center circles
def to_screen2(self, point):
( x, y ) = point
#y = self.height - y
x = (x + 1)*self.gridSize
y = (self.height - y)*self.gridSize
return ( x, y )
def drawWalls(self, wallMatrix):
wallColor = WALL_COLOR
for xNum, x in enumerate(wallMatrix):
if self.capture and (xNum * 2) < wallMatrix.width: wallColor = TEAM_COLORS[0]
if self.capture and (xNum * 2) >= wallMatrix.width: wallColor = TEAM_COLORS[1]
for yNum, cell in enumerate(x):
if cell: # There's a wall here
pos = (xNum, yNum)
screen = self.to_screen(pos)
screen2 = self.to_screen2(pos)
# draw each quadrant of the square based on adjacent walls
wIsWall = self.isWall(xNum-1, yNum, wallMatrix)
eIsWall = self.isWall(xNum+1, yNum, wallMatrix)
nIsWall = self.isWall(xNum, yNum+1, wallMatrix)
sIsWall = self.isWall(xNum, yNum-1, wallMatrix)
nwIsWall = self.isWall(xNum-1, yNum+1, wallMatrix)
swIsWall = self.isWall(xNum-1, yNum-1, wallMatrix)
neIsWall = self.isWall(xNum+1, yNum+1, wallMatrix)
seIsWall = self.isWall(xNum+1, yNum-1, wallMatrix)
# NE quadrant
if (not nIsWall) and (not eIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (0,91), 'arc')
if (nIsWall) and (not eIsWall):
# vertical line
line(add(screen, (self.gridSize*WALL_RADIUS, 0)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-0.5)-1)), wallColor)
if (not nIsWall) and (eIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
if (nIsWall) and (eIsWall) and (not neIsWall):
# outer circle
circle(add(screen2, (self.gridSize*2*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (180,271), 'arc')
line(add(screen, (self.gridSize*2*WALL_RADIUS-1, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS+1)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(-0.5))), wallColor)
# NW quadrant
if (not nIsWall) and (not wIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (90,181), 'arc')
if (nIsWall) and (not wIsWall):
# vertical line
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, 0)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-0.5)-1)), wallColor)
if (not nIsWall) and (wIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5)-1, self.gridSize*(-1)*WALL_RADIUS)), wallColor)
if (nIsWall) and (wIsWall) and (not nwIsWall):
# outer circle
circle(add(screen2, (self.gridSize*(-2)*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (270,361), 'arc')
line(add(screen, (self.gridSize*(-2)*WALL_RADIUS+1, self.gridSize*(-1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5), self.gridSize*(-1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-2)*WALL_RADIUS+1)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(-0.5))), wallColor)
# SE quadrant
if (not sIsWall) and (not eIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (270,361), 'arc')
if (sIsWall) and (not eIsWall):
# vertical line
line(add(screen, (self.gridSize*WALL_RADIUS, 0)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(0.5)+1)), wallColor)
if (not sIsWall) and (eIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5+1, self.gridSize*(1)*WALL_RADIUS)), wallColor)
if (sIsWall) and (eIsWall) and (not seIsWall):
# outer circle
circle(add(screen2, (self.gridSize*2*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (90,181), 'arc')
line(add(screen, (self.gridSize*2*WALL_RADIUS-1, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*0.5, self.gridSize*(1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS-1)), add(screen, (self.gridSize*WALL_RADIUS, self.gridSize*(0.5))), wallColor)
# SW quadrant
if (not sIsWall) and (not wIsWall):
# inner circle
circle(screen2, WALL_RADIUS * self.gridSize, wallColor, wallColor, (180,271), 'arc')
if (sIsWall) and (not wIsWall):
# vertical line
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, 0)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(0.5)+1)), wallColor)
if (not sIsWall) and (wIsWall):
# horizontal line
line(add(screen, (0, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5)-1, self.gridSize*(1)*WALL_RADIUS)), wallColor)
if (sIsWall) and (wIsWall) and (not swIsWall):
# outer circle
circle(add(screen2, (self.gridSize*(-2)*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS)), WALL_RADIUS * self.gridSize-1, wallColor, wallColor, (0,91), 'arc')
line(add(screen, (self.gridSize*(-2)*WALL_RADIUS+1, self.gridSize*(1)*WALL_RADIUS)), add(screen, (self.gridSize*(-0.5), self.gridSize*(1)*WALL_RADIUS)), wallColor)
line(add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(2)*WALL_RADIUS-1)), add(screen, (self.gridSize*(-1)*WALL_RADIUS, self.gridSize*(0.5))), wallColor)
def isWall(self, x, y, walls):
if x < 0 or y < 0:
return False
if x >= walls.width or y >= walls.height:
return False
return walls[x][y]
def drawFood(self, foodMatrix ):
foodImages = []
color = FOOD_COLOR
for xNum, x in enumerate(foodMatrix):
if self.capture and (xNum * 2) <= foodMatrix.width: color = TEAM_COLORS[0]
if self.capture and (xNum * 2) > foodMatrix.width: color = TEAM_COLORS[1]
imageRow = []
foodImages.append(imageRow)
for yNum, cell in enumerate(x):
if cell: # There's food here
screen = self.to_screen((xNum, yNum ))
dot = circle( screen,
FOOD_SIZE * self.gridSize,
outlineColor = color, fillColor = color,
width = 1)
imageRow.append(dot)
else:
imageRow.append(None)
return foodImages
def drawCapsules(self, capsules ):
capsuleImages = {}
for capsule in capsules:
( screen_x, screen_y ) = self.to_screen(capsule)
dot = circle( (screen_x, screen_y),
CAPSULE_SIZE * self.gridSize,
outlineColor = CAPSULE_COLOR,
fillColor = CAPSULE_COLOR,
width = 1)
capsuleImages[capsule] = dot
return capsuleImages
def removeFood(self, cell, foodImages ):
x, y = cell
remove_from_screen(foodImages[x][y])
def removeCapsule(self, cell, capsuleImages ):
x, y = cell
remove_from_screen(capsuleImages[(x, y)])
def drawExpandedCells(self, cells):
"""
Draws an overlay of expanded grid positions for search agents
"""
n = float(len(cells))
baseColor = [1.0, 0.0, 0.0]
self.clearExpandedCells()
self.expandedCells = []
for k, cell in enumerate(cells):
screenPos = self.to_screen( cell)
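            # earlier-expanded cells are drawn in brighter red (~0.75) and
            # fade toward gray (~0.25) for the most recently expanded ones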
cellColor = formatColor(*[(n-k) * c * .5 / n + .25 for c in baseColor])
block = square(screenPos,
0.5 * self.gridSize,
color = cellColor,
filled = 1, behind=2)
self.expandedCells.append(block)
if self.frameTime < 0:
refresh()
def clearExpandedCells(self):
if 'expandedCells' in dir(self) and len(self.expandedCells) > 0:
for cell in self.expandedCells:
remove_from_screen(cell)
def updateDistributions(self, distributions):
"Draws an agent's belief distributions"
# copy all distributions so we don't change their state
        distributions = [x.copy() for x in distributions]  # a list can be re-iterated; a map object would be exhausted after the first (x, y) pass
        if self.distributionImages is None:
self.drawDistributions(self.previousState)
for x in range(len(self.distributionImages)):
for y in range(len(self.distributionImages[0])):
image = self.distributionImages[x][y]
weights = [dist[ (x,y) ] for dist in distributions]
if sum(weights) != 0:
pass
# Fog of war
color = [0.0,0.0,0.0]
colors = GHOST_VEC_COLORS[1:] # With Pacman
if self.capture: colors = GHOST_VEC_COLORS
for weight, gcolor in zip(weights, colors):
color = [min(1.0, c + 0.95 * g * weight ** .3) for c,g in zip(color, gcolor)]
changeColor(image, formatColor(*color))
refresh()
class FirstPersonPacmanGraphics(PacmanGraphics):
def __init__(self, zoom = 1.0, showGhosts = True, capture = False, frameTime=0):
PacmanGraphics.__init__(self, zoom, frameTime=frameTime)
self.showGhosts = showGhosts
self.capture = capture
def initialize(self, state, isBlue = False):
self.isBlue = isBlue
PacmanGraphics.startGraphics(self, state)
# Initialize distribution images
walls = state.layout.walls
dist = []
self.layout = state.layout
# Draw the rest
self.distributionImages = None # initialize lazily
self.drawStaticObjects(state)
self.drawAgentObjects(state)
# Information
self.previousState = state
def lookAhead(self, config, state):
if config.getDirection() == 'Stop':
return
else:
pass
# Draw relevant ghosts
allGhosts = state.getGhostStates()
visibleGhosts = state.getVisibleGhosts()
for i, ghost in enumerate(allGhosts):
if ghost in visibleGhosts:
self.drawGhost(ghost, i)
else:
self.currentGhostImages[i] = None
def getGhostColor(self, ghost, ghostIndex):
return GHOST_COLORS[ghostIndex]
def getPosition(self, ghostState):
if not self.showGhosts and not ghostState.isPacman and ghostState.getPosition()[1] > 1:
return (-1000, -1000)
else:
return PacmanGraphics.getPosition(self, ghostState)
def add(x, y):
return (x[0] + y[0], x[1] + y[1])
# Saving graphical output
# -----------------------
# Note: to make an animated gif from this postscript output, try the command:
# convert -delay 7 -loop 1 -compress lzw -layers optimize frame* out.gif
# convert is part of imagemagick (freeware)
SAVE_POSTSCRIPT = False
POSTSCRIPT_OUTPUT_DIR = 'frames'
FRAME_NUMBER = 0
import os
def saveFrame():
"Saves the current graphical output as a postscript file"
global SAVE_POSTSCRIPT, FRAME_NUMBER, POSTSCRIPT_OUTPUT_DIR
if not SAVE_POSTSCRIPT: return
if not os.path.exists(POSTSCRIPT_OUTPUT_DIR): os.mkdir(POSTSCRIPT_OUTPUT_DIR)
name = os.path.join(POSTSCRIPT_OUTPUT_DIR, 'frame_%08d.ps' % FRAME_NUMBER)
FRAME_NUMBER += 1
writePostscript(name) # writes the current canvas
|
import React from 'react';
import './MoodSelect.css';
import Typing from 'react-typing-animation';
import EMA from './../../img/EMAhappy.png';
function SadGrief (){
return (
<div className="std=msg">
<img className="EMAs_Image" src= {EMA} alt="empathy-protocol" />
<Typing>
<p className="std-msg">
Ugh! Being bored is SOO lame!
<br />
(ಠ ∩ ಠ)
<br />
Let me know if you want me to
keep you company!
</p>
</Typing>
</div>
)
}
export default SadGrief;
|
import Skeleton from './Skeleton';
export {
Skeleton,
};
|
from enum import IntEnum, unique
from django.contrib.auth import get_user_model
from django.db import models
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from django.utils import timezone
from comment.models import Comment
from comment.managers import ReactionManager, ReactionInstanceManager
class Reaction(models.Model):
comment = models.OneToOneField(Comment, on_delete=models.CASCADE)
likes = models.PositiveIntegerField(default=0)
dislikes = models.PositiveIntegerField(default=0)
objects = ReactionManager()
def _increase_count(self, field):
self.refresh_from_db()
setattr(self, field, models.F(field) + 1)
self.save(update_fields=[field])
def _decrease_count(self, field):
self.refresh_from_db()
setattr(self, field, models.F(field) - 1)
self.save(update_fields=[field])
def increase_reaction_count(self, reaction):
if reaction == ReactionInstance.ReactionType.LIKE.value:
self._increase_count('likes')
else:
self._increase_count('dislikes')
def decrease_reaction_count(self, reaction):
if reaction == ReactionInstance.ReactionType.LIKE.value:
self._decrease_count('likes')
else:
self._decrease_count('dislikes')
class ReactionInstance(models.Model):
@unique
class ReactionType(IntEnum):
LIKE = 1
DISLIKE = 2
CHOICES = [(r.value, r.name) for r in ReactionType]
reaction = models.ForeignKey(Reaction, related_name='reactions', on_delete=models.CASCADE)
user = models.ForeignKey(get_user_model(), related_name='reactions', on_delete=models.CASCADE)
reaction_type = models.SmallIntegerField(choices=CHOICES)
    date_reacted = models.DateTimeField(auto_now=True)  # auto_now expects a boolean; the previous timezone.now() call evaluated once at import time
objects = ReactionInstanceManager()
class Meta:
unique_together = ['user', 'reaction']
@receiver(post_delete, sender=ReactionInstance)
def delete_reaction_instance(sender, instance, using, **kwargs):
instance.reaction.decrease_reaction_count(instance.reaction_type)
@receiver(post_save, sender=ReactionInstance)
def add_count(sender, instance, created, raw, using, update_fields, **kwargs):
if created:
instance.reaction.increase_reaction_count(instance.reaction_type)
@receiver(post_save, sender=Comment)
def add_reaction(sender, instance, created, raw, using, update_fields, **kwargs):
if created:
Reaction.objects.create(comment=instance)
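# Minimal usage sketch (illustrative; assumes a saved Comment `comment` and a
# saved user `user`). The add_reaction receiver above guarantees that
# `comment.reaction` exists, and the post_save receiver on ReactionInstance
# increments the denormalized counter:
#
#   ReactionInstance.objects.create(
#       reaction=comment.reaction,
#       user=user,
#       reaction_type=ReactionInstance.ReactionType.LIKE.value,
#   )
#   comment.reaction.refresh_from_db()  # comment.reaction.likes is now incremented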
|
""" Morphological Post-Processing for Turkish
(c) 2018 by Mehmet Dolgun, m.dolgun@yahoo.com
All functionality is provided by main class: TurkishPostProcessor
special characters
Vowel Harmony:
^ : make previous vowel front, i.e. aIuo are treated as eiUO, e.g. kol-A -> kola, rol^-A -> role
H : high vowels, using harmony rules aI->I, ei->i, uo->u, UO->U e.g ev-Hm -> evim, yol-Hm -> yolum
A : low,unrounded vowels, using harmony rules aIuo->a, eiUO->e e.g. ev-lAr -> evler yol-lAr -> yollar
Consonants
N : used as the last char of some suffixes (e.g. 3rd person singular possessive), realized as n only if it is followed by a vowel e.g. tereyağıN-ı->tereyağını, tereyağıN-ım ->tereyağım
@ : drops the next vowel if it takes a vowel-starting suffix e.g. bur@un-u -> burnu, bur@un-da -> burunda
+ : duplicates previous letter if it takes a vowel-starting suffix e.g. hak+-ı -> hakkı, hak+-ta -> hakta
? : soften previous consonant e.g. kitap?-ı -> kitabı, hap-ı -> hapı
Note that the ^, @, +, ?, N symbols are used only in word roots; N is required for compound nouns
"""
import re
class PostProcessError(Exception):
""" Raised when a suffix cannot be applied to word stem """
pass
class TurkishPostProcessor:
rules = [
( "(y\?)(?=[ZNHAY])", "y"), # su[y?]ZHN=>su[y]ZHN, su[y?]NHn=>su[y]NHn, su[y?]Hm=>su[y]um
( "(nk\?)(?=[ZNHA]|Y[aIeiouOUHA])", "ng"), # re[nk?]ZHN=>re[ng]i, re[nk?]NHn=>re[ng]in, re[nk?]Hm=>re[ng]im, re[nk?]YA=>re[ng]e
( "([pCtk]\?)(?=[ZNHAV]|Y[aIeiouOUHA])", {'p?':'b', 'C?':'c', 't?':'d', 'k?':'G'} ), # kita[p?]ZHN=>kita[b]ZHN, kita[p?]NHn=>kita[b]NHn, kita[p?]Hm=>kita[b]Hm, kita[p?]YA=>kita[b]?YA
( "(?<=[pCtkSfsh])([?+^]?Y?D)","t"), # ip[D]A=>ipte, ip[YD]H=>ipti, kitap[?D]A=>kitapta, kitap[?YD]H=>kitaptı, hak[+D]A=>hakta, hak[+YD]H=>haktı
( "([pbctdk]\+)(?=[ZNHA]|Y[aIeiouOUHA])", {'p+':'bb', 'b+':'bb', 'c+':'cc', 't+':'tt', 'd+':'dd', 'k+':'kk' } ),
( "([aIeiouOUA]V)(?=yor)" , "H" ),
( "([VD])" , {"V":"H", "D":"d"} ), # sev[V]yor=>sev[H]yor, ev[D]A=>ev[d]A
( "(y?[?+])", ""), # su(y?)=>su, kitap(?)=>kitap(), su(y?)DA=>su()DA, kitap(?)DA
( "(?<=[aIeiouOUA])(H)" , "" ), # oda(H)m=> oda()m
( "(NY)(?=[^aIeiouOUAH])", "y" ),
( "(NY)(?=[AH])", "n" ),
( "(?<=[aIeiouOUAH])([NZY])(?!$| )", {'N':'n', 'Z':'s', 'Y':'y'} ), # oda[Z]HN=>oda[s]HN, oda[N]Hn=>oda[n]Hn, oda[Y]H=>oda[y]H
( "(@[IiuU])(?=[^aIeiouOU](?:[ZNHA]|Y[aIeiouOUHA]))", ""), # bur(@u)nZHN=>bur()nZHN, bur(@u)nNHn=>bur()nNHn, bur(@u)nHm=>bur()nHm, bur(@u)nYA=>bur()nYA
( "([@NZY])", "") # ev(Z)HN=>ev()HN, ev(N)Hn=>ev()Hn, ev(Y)H=>ev()H, bur(@)un=>bur()un
]
vowel = { 'a': 0, 'I':0 , 'e':1, 'i':1, 'o':2, 'u':2 , 'O':3, 'U':3 }
H = [ 'I', 'i', 'u', 'U' ]
A = [ 'a', 'e', 'a', 'e' ]
outtab = { 71:'ğ', 85:'ü', 79:'ö', 67:'ç', 83:'ş', 73:'ı' } # internal to unicode
#outtab = { 71:'ð', 85:'ü', 79:'ö', 67:'ç', 83:'þ', 73:'ý' } # internal to codepage
intab = { # to internal
252:'U', 220:'U', 246:'O', 214:'O', 231:'C', 199:'C', # from common unicode & codepage
        240:'G', 208:'G', 254:'S', 222:'S', 253:'I', 221:'i', # from code page (254 assumed for lowercase ş in Latin-5; the original had a duplicate 222 key)
287:'G', 286:'G', 351:'S', 350:'S', 305:'I', 304:'i' # from unicode
}
    def __init__(self):
        relist, self.replist = zip(*TurkishPostProcessor.rules)
        self.rx = re.compile('|'.join(relist))
        self.suff_dict = {}  # suffix lookup used by "+suffix" tokens in __call__; left empty here, assumed to be populated elsewhere
def handle_match(self,match):
for idx,val in enumerate(match.groups()):
if val:
repl = self.replist[idx]
if type(repl) == dict:
return repl[val]
else:
return repl
    @staticmethod
    def vowel_harmony(word):
voweltype = 0
out = []
for char in word:
if char in TurkishPostProcessor.vowel:
voweltype = TurkishPostProcessor.vowel[char]
elif char == 'H':
char = TurkishPostProcessor.H[voweltype]
elif char == 'A':
char = TurkishPostProcessor.A[voweltype]
voweltype &= 1
elif char == '^':
voweltype |= 1
continue
out.append(char)
return "".join(out)
def __call__(self,text):
#print(text)
items = text.split()
previdx = None
for idx,item in enumerate(items):
if item == "+copy":
back_word = items[idx-1]
items[idx] = None
elif item == "+paste":
items[idx] = back_word
previdx = idx
elif item.startswith("+"):
if previdx is None:
raise PostProcessError("sent: %s Word stem not found" % text)
#try:
# dicidx,sufidx = self.suff_idxs[item[1:]]
#except KeyError:
# raise PostProcessError("Postprocess Error: sent: %s Unknown suffix: %s" % (text, item))
#try:
# prev = items[previdx]
# items[previdx] = self.suff_dict_list[dicidx][prev][sufidx]
# items[idx] = None
#except KeyError:
# try:
# items[idx] = self.suff_dict_list[dicidx][""][sufidx]
# except KeyError:
# raise PostProcessError("Postprocess Error: sent: %s word: %s, suffix: %s" % (text, prev, item))
try:
prev = items[previdx]
items[previdx] = self.suff_dict[prev+item]
items[idx] = None
except KeyError:
try:
items[idx] = self.suff_dict[item]
except KeyError:
raise PostProcessError("sent: %s word: %s, suffix: %s" % (text, prev, item))
else:
previdx = idx
text = " ".join(item for item in items if item)
#return TurkishPostProcessor.vowel_harmony(
# self.rx.sub(
# self.handle_match,
# text.replace(' -','').translate(self.intab)
# )
#).translate(self.outtab)
inp = text.replace(' -','').translate(self.intab)
result = self.rx.sub(self.handle_match, inp)
result = TurkishPostProcessor.vowel_harmony(result)
result = result.translate(self.outtab).replace("' "," ")
return result
def main():
morpher = TurkishPostProcessor()
words = [ "ev", "araba", "kitap?", "hak+", "tank", "yatak?", "bur@un", "renk?", "rol^", "suy?" ]
suffixes = [ "Hm", "YH", "YlA", "DA", "NHn", "ZHN", "YDH"]
for word in words:
for suffix in suffixes:
nword = word + suffix
print(nword, morpher(nword))
words = [ "oda" ]
suffixes = [ "ZHNYH", "ZHNYlA", "ZHNDA", "ZHNNHn" ]
for word in words:
for suffix in suffixes:
nword = word + suffix
print(nword, morpher(nword))
words = [ "gel", "git?", "oku", "ara" ]
suffixes = [ "DHm", "VyorYHm", "YAcAk?YHm", "mAlHYHm", "mHşYDHm", "DHYDHm"]
for word in words:
for suffix in suffixes:
nword = word + suffix
nword = nword.replace("-","")
print(nword, morpher(nword))
if __name__ == "__main__":
# execute only if run as a script
main()
|
var class_ext_1_1_net_1_1_j_s_o_n_date_time_json_converter =
[
[ "CanConvert", "db/dbb/class_ext_1_1_net_1_1_j_s_o_n_date_time_json_converter.html#a39c32e35b626711bcec0d22d444c53c3", null ],
[ "ReadJson", "db/dbb/class_ext_1_1_net_1_1_j_s_o_n_date_time_json_converter.html#ad832406a54dd397ce04ff1285470c96d", null ],
[ "WriteJson", "db/dbb/class_ext_1_1_net_1_1_j_s_o_n_date_time_json_converter.html#ada7996288d668abd8543922eca9e9f90", null ],
[ "RenderMilliseconds", "db/dbb/class_ext_1_1_net_1_1_j_s_o_n_date_time_json_converter.html#a08a8d025e8044e5608d6ac89eb7e48c0", null ]
];
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "python_private.h" // must be first!
#include "qpid/dispatch/python_embedded.h"
#include "qpid/dispatch/server.h"
#include "config.h"
#include "dispatch_private.h"
#include "entity.h"
#include "policy.h"
#include "server_private.h"
#include "timer_private.h"
#include "qpid/dispatch/alloc.h"
#include "qpid/dispatch/amqp.h"
#include "qpid/dispatch/ctools.h"
#include "qpid/dispatch/failoverlist.h"
#include "qpid/dispatch/log.h"
#include "qpid/dispatch/platform.h"
#include "qpid/dispatch/proton_utils.h"
#include "qpid/dispatch/threading.h"
#include <proton/event.h>
#include <proton/listener.h>
#include <proton/netaddr.h>
#include <proton/proactor.h>
#include <proton/raw_connection.h>
#include <proton/sasl.h>
#include <inttypes.h>
#include <stdio.h>
#include <string.h>
struct qd_server_t {
qd_dispatch_t *qd;
const int thread_count; /* Immutable */
const char *container_name;
const char *sasl_config_path;
const char *sasl_config_name;
pn_proactor_t *proactor;
qd_container_t *container;
qd_log_source_t *log_source;
qd_log_source_t *protocol_log_source; // Log source for the PROTOCOL module
void *start_context;
sys_cond_t *cond;
sys_mutex_t *lock;
qd_connection_list_t conn_list;
int pause_requests;
int threads_paused;
int pause_next_sequence;
int pause_now_serving;
uint64_t next_connection_id;
void *py_displayname_obj;
qd_http_server_t *http;
sys_mutex_t *conn_activation_lock;
};
#define HEARTBEAT_INTERVAL 1000
ALLOC_DEFINE(qd_listener_t);
ALLOC_DEFINE(qd_connector_t);
ALLOC_DEFINE(qd_deferred_call_t);
ALLOC_DEFINE(qd_connection_t);
const char *MECH_EXTERNAL = "EXTERNAL";
//Allowed uidFormat fields.
const char CERT_COUNTRY_CODE = 'c';
const char CERT_STATE = 's';
const char CERT_CITY_LOCALITY = 'l';
const char CERT_ORGANIZATION_NAME = 'o';
const char CERT_ORGANIZATION_UNIT = 'u';
const char CERT_COMMON_NAME = 'n';
const char CERT_FINGERPRINT_SHA1 = '1';
const char CERT_FINGERPRINT_SHA256 = '2';
const char CERT_FINGERPRINT_SHA512 = '5';
char *COMPONENT_SEPARATOR = ";";
static const int BACKLOG = 50; /* Listening backlog */
static bool setup_ssl_sasl_and_open(qd_connection_t *ctx); // true if ssl, sasl, and open succeeded
static qd_failover_item_t *qd_connector_get_conn_info(qd_connector_t *ct);
/**
* This function is set as the pn_transport->tracer and is invoked when proton tries to write the log message to pn_transport->tracer
*/
void transport_tracer(pn_transport_t *transport, const char *message)
{
qd_connection_t *ctx = (qd_connection_t*) pn_transport_get_context(transport);
if (ctx) {
// The PROTOCOL module is used exclusively for logging protocol related tracing. The protocol could be AMQP, HTTP, TCP etc.
qd_log(ctx->server->protocol_log_source, QD_LOG_TRACE, "[C%"PRIu64"]:%s", ctx->connection_id, message);
}
}
void connection_transport_tracer(pn_transport_t *transport, const char *message)
{
qd_connection_t *ctx = (qd_connection_t*) pn_transport_get_context(transport);
if (ctx) {
// Unconditionally write the log at TRACE level to the log file.
qd_log_impl_v1(ctx->server->protocol_log_source, QD_LOG_TRACE, __FILE__, __LINE__, "[C%"PRIu64"]:%s", ctx->connection_id, message);
}
}
/**
* Save displayNameService object instance and ImportModule address
* Called with qd_python_lock held
*/
qd_error_t qd_register_display_name_service(qd_dispatch_t *qd, void *displaynameservice)
{
if (displaynameservice) {
qd->server->py_displayname_obj = displaynameservice;
Py_XINCREF((PyObject *)qd->server->py_displayname_obj);
return QD_ERROR_NONE;
}
else {
return qd_error(QD_ERROR_VALUE, "displaynameservice is not set");
}
}
/**
* Returns a char pointer to a user id which is constructed from components specified in the config->ssl_uid_format.
* Parses through each component and builds a semi-colon delimited string which is returned as the user id.
*/
static const char *transport_get_user(qd_connection_t *conn, pn_transport_t *tport)
{
const qd_server_config_t *config =
conn->connector ? &conn->connector->config : &conn->listener->config;
if (config->ssl_uid_format) {
        // The ssl_uid_format length cannot be greater than 7
assert(strlen(config->ssl_uid_format) < 8);
//
// The tokens in the uidFormat strings are delimited by comma. Load the individual components of the uidFormat
// into the components[] array. The maximum number of components that are allowed are 7 namely, c, s, l, o, u, n, (1 or 2 or 5)
//
char components[8];
        // Copy at most 7 format characters; strncpy() does not null-terminate
        // when the source is exactly 7 characters long, so terminate explicitly.
        strncpy(components, config->ssl_uid_format, 7);
        components[7] = '\0';
const char *country_code = 0;
const char *state = 0;
const char *locality_city = 0;
const char *organization = 0;
const char *org_unit = 0;
const char *common_name = 0;
//
// SHA1 is 20 octets (40 hex characters); SHA256 is 32 octets (64 hex characters).
// SHA512 is 64 octets (128 hex characters)
//
char fingerprint[129] = "\0";
int uid_length = 0;
int semi_colon_count = -1;
int component_count = strlen(components);
for (int x = 0; x < component_count ; x++) {
// accumulate the length into uid_length on each pass so we definitively know the number of octets to malloc.
if (components[x] == CERT_COUNTRY_CODE) {
country_code = pn_ssl_get_remote_subject_subfield(pn_ssl(tport), PN_SSL_CERT_SUBJECT_COUNTRY_NAME);
if (country_code) {
uid_length += strlen((const char *)country_code);
semi_colon_count++;
}
}
else if (components[x] == CERT_STATE) {
state = pn_ssl_get_remote_subject_subfield(pn_ssl(tport), PN_SSL_CERT_SUBJECT_STATE_OR_PROVINCE);
if (state) {
uid_length += strlen((const char *)state);
semi_colon_count++;
}
}
else if (components[x] == CERT_CITY_LOCALITY) {
locality_city = pn_ssl_get_remote_subject_subfield(pn_ssl(tport), PN_SSL_CERT_SUBJECT_CITY_OR_LOCALITY);
if (locality_city) {
uid_length += strlen((const char *)locality_city);
semi_colon_count++;
}
}
else if (components[x] == CERT_ORGANIZATION_NAME) {
organization = pn_ssl_get_remote_subject_subfield(pn_ssl(tport), PN_SSL_CERT_SUBJECT_ORGANIZATION_NAME);
if(organization) {
uid_length += strlen((const char *)organization);
semi_colon_count++;
}
}
else if (components[x] == CERT_ORGANIZATION_UNIT) {
org_unit = pn_ssl_get_remote_subject_subfield(pn_ssl(tport), PN_SSL_CERT_SUBJECT_ORGANIZATION_UNIT);
if(org_unit) {
uid_length += strlen((const char *)org_unit);
semi_colon_count++;
}
}
else if (components[x] == CERT_COMMON_NAME) {
common_name = pn_ssl_get_remote_subject_subfield(pn_ssl(tport), PN_SSL_CERT_SUBJECT_COMMON_NAME);
if(common_name) {
uid_length += strlen((const char *)common_name);
semi_colon_count++;
}
}
else if (components[x] == CERT_FINGERPRINT_SHA1 || components[x] == CERT_FINGERPRINT_SHA256 || components[x] == CERT_FINGERPRINT_SHA512) {
// Allocate the memory for message digest
int out = 0;
int fingerprint_length = 0;
if(components[x] == CERT_FINGERPRINT_SHA1) {
fingerprint_length = 40;
out = pn_ssl_get_cert_fingerprint(pn_ssl(tport), fingerprint, fingerprint_length + 1, PN_SSL_SHA1);
}
else if (components[x] == CERT_FINGERPRINT_SHA256) {
fingerprint_length = 64;
out = pn_ssl_get_cert_fingerprint(pn_ssl(tport), fingerprint, fingerprint_length + 1, PN_SSL_SHA256);
}
else if (components[x] == CERT_FINGERPRINT_SHA512) {
fingerprint_length = 128;
out = pn_ssl_get_cert_fingerprint(pn_ssl(tport), fingerprint, fingerprint_length + 1, PN_SSL_SHA512);
}
(void) out; // avoid 'out unused' compiler warnings if NDEBUG undef'ed
assert (out != PN_ERR);
uid_length += fingerprint_length;
semi_colon_count++;
}
else {
// This is an unrecognized component. log a critical error
qd_log(conn->server->log_source, QD_LOG_CRITICAL, "[C%"PRIu64"] Unrecognized component '%c' in uidFormat ", conn->connection_id, components[x]);
return 0;
}
}
if(uid_length > 0) {
char *user_id = malloc((uid_length + semi_colon_count + 1) * sizeof(char)); // the +1 is for the '\0' character
//
// We have allocated memory for user_id. We are responsible for freeing this memory. Set conn->free_user_id
// to true so that we know that we have to free the user_id
//
conn->free_user_id = true;
memset(user_id, 0, uid_length + semi_colon_count + 1);
// The components in the user id string must appear in the same order as it appears in the component string. that is
// why we have this loop
for (int x=0; x < component_count ; x++) {
if (components[x] == CERT_COUNTRY_CODE) {
if (country_code) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) country_code);
}
}
else if (components[x] == CERT_STATE) {
if (state) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) state);
}
}
else if (components[x] == CERT_CITY_LOCALITY) {
if (locality_city) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) locality_city);
}
}
else if (components[x] == CERT_ORGANIZATION_NAME) {
if (organization) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) organization);
}
}
else if (components[x] == CERT_ORGANIZATION_UNIT) {
if (org_unit) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) org_unit);
}
}
else if (components[x] == CERT_COMMON_NAME) {
if (common_name) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) common_name);
}
}
else if (components[x] == CERT_FINGERPRINT_SHA1 || components[x] == CERT_FINGERPRINT_SHA256 || components[x] == CERT_FINGERPRINT_SHA512) {
if (strlen((char *) fingerprint) > 0) {
if(*user_id != '\0')
strcat(user_id, COMPONENT_SEPARATOR);
strcat(user_id, (char *) fingerprint);
}
}
}
if (config->ssl_uid_name_mapping_file) {
// Translate extracted id into display name
qd_python_lock_state_t lock_state = qd_python_lock();
PyObject *result = PyObject_CallMethod((PyObject *)conn->server->py_displayname_obj, "query", "(ss)", config->ssl_profile, user_id );
if (result) {
free(user_id);
user_id = py_string_2_c(result);
Py_XDECREF(result);
} else {
qd_log(conn->server->log_source, QD_LOG_DEBUG, "[C%"PRIu64"] Internal: failed to read displaynameservice query result", conn->connection_id);
}
qd_python_unlock(lock_state);
}
qd_log(conn->server->log_source, QD_LOG_DEBUG, "User id is '%s' ", user_id);
return user_id;
}
}
else //config->ssl_uid_format not specified, just return the username provided by the proton transport.
return pn_transport_get_user(tport);
return 0;
}
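/*
 * Illustrative example (values are made up): with uidFormat "cou" and a peer
 * certificate carrying C=US, O=Example Inc, OU=Engineering, the constructed
 * user id would be "US;Example Inc;Engineering".
 */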
void qd_connection_set_user(qd_connection_t *conn)
{
pn_transport_t *tport = pn_connection_transport(conn->pn_conn);
pn_sasl_t *sasl = pn_sasl(tport);
if (sasl) {
const char *mech = pn_sasl_get_mech(sasl);
conn->user_id = pn_transport_get_user(tport);
        // Default to the user name reported by the transport; override it with the certificate-derived user id when the selected sasl mechanism is EXTERNAL
if (mech && strcmp(mech, MECH_EXTERNAL) == 0) {
const char *user_id = transport_get_user(conn, tport);
if (user_id)
conn->user_id = user_id;
}
}
}
qd_error_t qd_entity_refresh_sslProfile(qd_entity_t* entity, void *impl)
{
return QD_ERROR_NONE;
}
qd_error_t qd_entity_refresh_authServicePlugin(qd_entity_t* entity, void *impl)
{
return QD_ERROR_NONE;
}
static qd_error_t listener_setup_ssl(qd_connection_t *ctx, const qd_server_config_t *config, pn_transport_t *tport)
{
pn_ssl_domain_t *domain = pn_ssl_domain(PN_SSL_MODE_SERVER);
if (!domain) return qd_error(QD_ERROR_RUNTIME, "No SSL support");
// setup my identifying cert:
if (pn_ssl_domain_set_credentials(domain,
config->ssl_certificate_file,
config->ssl_private_key_file,
config->ssl_password)) {
pn_ssl_domain_free(domain);
return qd_error(QD_ERROR_RUNTIME, "Cannot set SSL credentials");
}
// for peer authentication:
if (config->ssl_trusted_certificate_db) {
if (pn_ssl_domain_set_trusted_ca_db(domain, config->ssl_trusted_certificate_db)) {
pn_ssl_domain_free(domain);
return qd_error(QD_ERROR_RUNTIME, "Cannot set trusted SSL CA" );
}
}
if (config->ssl_ciphers) {
if (pn_ssl_domain_set_ciphers(domain, config->ssl_ciphers)) {
pn_ssl_domain_free(domain);
return qd_error(QD_ERROR_RUNTIME, "Cannot set ciphers. The ciphers string might be invalid. Use openssl ciphers -v <ciphers> to validate");
}
}
if (config->ssl_protocols) {
if (pn_ssl_domain_set_protocols(domain, config->ssl_protocols)) {
pn_ssl_domain_free(domain);
return qd_error(QD_ERROR_RUNTIME, "Cannot set protocols. The protocols string might be invalid. This list is a space separated string of the allowed TLS protocols (TLSv1 TLSv1.1 TLSv1.2)");
}
}
const char *trusted = config->ssl_trusted_certificate_db;
// do we force the peer to send a cert?
if (config->ssl_require_peer_authentication) {
if (!trusted || pn_ssl_domain_set_peer_authentication(domain, PN_SSL_VERIFY_PEER, trusted)) {
pn_ssl_domain_free(domain);
return qd_error(QD_ERROR_RUNTIME, "Cannot set peer authentication");
}
}
ctx->ssl = pn_ssl(tport);
if (!ctx->ssl || pn_ssl_init(ctx->ssl, domain, 0)) {
pn_ssl_domain_free(domain);
return qd_error(QD_ERROR_RUNTIME, "Cannot initialize SSL");
}
    // By default adding ssl to a transport forces encryption to be required, so if it's not required clear that here
if (!config->ssl_required) {
pn_transport_require_encryption(tport, false);
}
pn_ssl_domain_free(domain);
return QD_ERROR_NONE;
}
static void decorate_connection(qd_server_t *qd_server, pn_connection_t *conn, const qd_server_config_t *config)
{
//
// Set the container name
//
pn_connection_set_container(conn, qd_server->container_name);
//
// Advertise our container capabilities.
//
{
// offered: extension capabilities this router supports
pn_data_t *ocaps = pn_connection_offered_capabilities(conn);
pn_data_put_array(ocaps, false, PN_SYMBOL);
pn_data_enter(ocaps);
pn_data_put_symbol(ocaps, pn_bytes(strlen(QD_CAPABILITY_ANONYMOUS_RELAY), (char*) QD_CAPABILITY_ANONYMOUS_RELAY));
pn_data_put_symbol(ocaps, pn_bytes(strlen(QD_CAPABILITY_STREAMING_LINKS), (char*) QD_CAPABILITY_STREAMING_LINKS));
pn_data_exit(ocaps);
// The desired-capability list defines which extension capabilities the
// sender MAY use if the receiver offers them (i.e., they are in the
// offered-capabilities list received by the sender of the
// desired-capabilities). The sender MUST NOT attempt to use any
// capabilities it did not declare in the desired-capabilities
// field.
ocaps = pn_connection_desired_capabilities(conn);
pn_data_put_array(ocaps, false, PN_SYMBOL);
pn_data_enter(ocaps);
pn_data_put_symbol(ocaps, pn_bytes(strlen(QD_CAPABILITY_ANONYMOUS_RELAY), (char*) QD_CAPABILITY_ANONYMOUS_RELAY));
pn_data_put_symbol(ocaps, pn_bytes(strlen(QD_CAPABILITY_STREAMING_LINKS), (char*) QD_CAPABILITY_STREAMING_LINKS));
pn_data_exit(ocaps);
}
//
// Create the connection properties map
//
pn_data_put_map(pn_connection_properties(conn));
pn_data_enter(pn_connection_properties(conn));
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_PRODUCT_KEY), QD_CONNECTION_PROPERTY_PRODUCT_KEY));
pn_data_put_string(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_PRODUCT_VALUE), QD_CONNECTION_PROPERTY_PRODUCT_VALUE));
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_VERSION_KEY), QD_CONNECTION_PROPERTY_VERSION_KEY));
pn_data_put_string(pn_connection_properties(conn),
pn_bytes(strlen(QPID_DISPATCH_VERSION), QPID_DISPATCH_VERSION));
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_CONN_ID), QD_CONNECTION_PROPERTY_CONN_ID));
qd_connection_t *qd_conn = pn_connection_get_context(conn);
pn_data_put_int(pn_connection_properties(conn), qd_conn->connection_id);
if (config && config->inter_router_cost > 1) {
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_COST_KEY), QD_CONNECTION_PROPERTY_COST_KEY));
pn_data_put_int(pn_connection_properties(conn), config->inter_router_cost);
}
if (config) {
qd_failover_list_t *fol = config->failover_list;
if (fol) {
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_FAILOVER_LIST_KEY), QD_CONNECTION_PROPERTY_FAILOVER_LIST_KEY));
pn_data_put_list(pn_connection_properties(conn));
pn_data_enter(pn_connection_properties(conn));
int fol_count = qd_failover_list_size(fol);
for (int i = 0; i < fol_count; i++) {
pn_data_put_map(pn_connection_properties(conn));
pn_data_enter(pn_connection_properties(conn));
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_FAILOVER_NETHOST_KEY), QD_CONNECTION_PROPERTY_FAILOVER_NETHOST_KEY));
pn_data_put_string(pn_connection_properties(conn),
pn_bytes(strlen(qd_failover_list_host(fol, i)), qd_failover_list_host(fol, i)));
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_FAILOVER_PORT_KEY), QD_CONNECTION_PROPERTY_FAILOVER_PORT_KEY));
pn_data_put_string(pn_connection_properties(conn),
pn_bytes(strlen(qd_failover_list_port(fol, i)), qd_failover_list_port(fol, i)));
if (qd_failover_list_scheme(fol, i)) {
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_FAILOVER_SCHEME_KEY), QD_CONNECTION_PROPERTY_FAILOVER_SCHEME_KEY));
pn_data_put_string(pn_connection_properties(conn),
pn_bytes(strlen(qd_failover_list_scheme(fol, i)), qd_failover_list_scheme(fol, i)));
}
if (qd_failover_list_hostname(fol, i)) {
pn_data_put_symbol(pn_connection_properties(conn),
pn_bytes(strlen(QD_CONNECTION_PROPERTY_FAILOVER_HOSTNAME_KEY), QD_CONNECTION_PROPERTY_FAILOVER_HOSTNAME_KEY));
pn_data_put_string(pn_connection_properties(conn),
pn_bytes(strlen(qd_failover_list_hostname(fol, i)), qd_failover_list_hostname(fol, i)));
}
pn_data_exit(pn_connection_properties(conn));
}
pn_data_exit(pn_connection_properties(conn));
}
// Append any user-configured properties. conn_props is a pn_data_t PN_MAP
// type. Append the map elements - not the map itself!
//
if (config->conn_props) {
pn_data_t *outp = pn_connection_properties(conn);
pn_data_rewind(config->conn_props);
pn_data_next(config->conn_props);
assert(pn_data_type(config->conn_props) == PN_MAP);
const size_t count = pn_data_get_map(config->conn_props);
pn_data_enter(config->conn_props);
for (size_t i = 0; i < count / 2; ++i) {
// key: the key must be of type Symbol. The python agent has
// validated the keys as ASCII strings, but the JSON converter does
// not provide a Symbol type so all the keys in conn_props are
// PN_STRING.
pn_data_next(config->conn_props);
assert(pn_data_type(config->conn_props) == PN_STRING);
pn_data_put_symbol(outp, pn_data_get_string(config->conn_props));
// put value
pn_data_next(config->conn_props);
qdpn_data_insert(outp, config->conn_props);
}
}
}
pn_data_exit(pn_connection_properties(conn));
}
/* Wake function for proactor-managed connections */
static void connection_wake(qd_connection_t *ctx) {
if (ctx->pn_conn) pn_connection_wake(ctx->pn_conn);
}
/* Construct a new qd_connection. Thread safe.
* Does not allocate any managed objects and therefore
* does not take ENTITY_CACHE lock.
*/
qd_connection_t *qd_server_connection_impl(qd_server_t *server, qd_server_config_t *config, qd_connection_t *ctx)
{
assert(ctx);
ZERO(ctx);
ctx->pn_conn = pn_connection();
ctx->deferred_call_lock = sys_mutex();
ctx->role = strdup(config->role);
if (!ctx->pn_conn || !ctx->deferred_call_lock || !ctx->role) {
if (ctx->pn_conn) pn_connection_free(ctx->pn_conn);
if (ctx->deferred_call_lock) sys_mutex_free(ctx->deferred_call_lock);
free(ctx->role);
free_qd_connection_t(ctx);
return NULL;
}
ctx->server = server;
ctx->wake = connection_wake; /* Default, over-ridden for HTTP connections */
pn_connection_set_context(ctx->pn_conn, ctx);
DEQ_ITEM_INIT(ctx);
DEQ_INIT(ctx->deferred_calls);
DEQ_INIT(ctx->free_link_session_list);
sys_mutex_lock(server->lock);
ctx->connection_id = server->next_connection_id++;
DEQ_INSERT_TAIL(server->conn_list, ctx);
sys_mutex_unlock(server->lock);
decorate_connection(ctx->server, ctx->pn_conn, config);
return ctx;
}
/* Construct a new qd_connection. Thread safe.
* Allocates a qd_connection_t object and therefore
* takes ENTITY_CACHE lock.
*/
qd_connection_t *qd_server_connection(qd_server_t *server, qd_server_config_t *config)
{
qd_connection_t *ctx = new_qd_connection_t();
if (!ctx) return NULL;
return qd_server_connection_impl(server, config, ctx);
}
static void on_accept(pn_event_t *e, qd_listener_t *listener)
{
assert(pn_event_type(e) == PN_LISTENER_ACCEPT);
pn_listener_t *pn_listener = pn_event_listener(e);
qd_connection_t *ctx = qd_server_connection(listener->server, &listener->config);
if (!ctx) {
qd_log(listener->server->log_source, QD_LOG_CRITICAL,
"Allocation failure during accept to %s", listener->config.host_port);
return;
}
ctx->listener = listener;
qd_log(listener->server->log_source, QD_LOG_TRACE,
"[C%"PRIu64"]: Accepting incoming connection to '%s'",
ctx->connection_id, ctx->listener->config.host_port);
/* Asynchronous accept, configure the transport on PN_CONNECTION_BOUND */
pn_listener_accept(pn_listener, ctx->pn_conn);
}
/* Log the description, set the transport condition (name, description) and close the transport tail. */
void connect_fail(qd_connection_t *ctx, const char *name, const char *description, ...)
{
va_list ap;
va_start(ap, description);
qd_verror(QD_ERROR_RUNTIME, description, ap);
va_end(ap);
if (ctx->pn_conn) {
pn_transport_t *t = pn_connection_transport(ctx->pn_conn);
/* Normally this closes the transport, but if it is not bound, close the connection. */
pn_condition_t *cond = t ? pn_transport_condition(t) : pn_connection_condition(ctx->pn_conn);
if (cond && !pn_condition_is_set(cond)) {
va_start(ap, description);
pn_condition_vformat(cond, name, description, ap);
va_end(ap);
}
if (t) {
pn_transport_close_tail(t);
} else {
pn_connection_close(ctx->pn_conn);
}
}
}
/* Get the host IP address for the remote end */
static void set_rhost_port(qd_connection_t *ctx) {
pn_transport_t *tport = pn_connection_transport(ctx->pn_conn);
const struct sockaddr* sa = pn_netaddr_sockaddr(pn_transport_remote_addr(tport));
size_t salen = pn_netaddr_socklen(pn_transport_remote_addr(tport));
if (sa && salen) {
char rport[NI_MAXSERV] = "";
int err = getnameinfo(sa, salen,
ctx->rhost, sizeof(ctx->rhost), rport, sizeof(rport),
NI_NUMERICHOST | NI_NUMERICSERV);
if (!err) {
snprintf(ctx->rhost_port, sizeof(ctx->rhost_port), "%s:%s", ctx->rhost, rport);
}
}
}
/* Configure the transport once it is bound to the connection */
static void on_connection_bound(qd_server_t *server, pn_event_t *e) {
pn_connection_t *pn_conn = pn_event_connection(e);
qd_connection_t *ctx = pn_connection_get_context(pn_conn);
pn_transport_t *tport = pn_connection_transport(pn_conn);
pn_transport_set_context(tport, ctx); /* for transport_tracer */
//
// Proton pushes out its trace to transport_tracer(), which in turn writes a
// trace message to the qdrouter log.
// If trace level logging is enabled on the PROTOCOL module, set PN_TRACE_FRM as the transport trace
// and also set the transport tracer callback.
// Note here that if trace level logging is enabled on the DEFAULT module, all modules log at trace level too.
//
if (qd_log_enabled(ctx->server->protocol_log_source, QD_LOG_TRACE)) {
pn_transport_trace(tport, PN_TRACE_FRM);
pn_transport_set_tracer(tport, transport_tracer);
}
const qd_server_config_t *config = NULL;
if (ctx->listener) { /* Accepting an incoming connection */
config = &ctx->listener->config;
const char *name = config->host_port;
pn_transport_set_server(tport);
set_rhost_port(ctx);
sys_mutex_lock(server->lock); /* Policy check is not thread safe */
ctx->policy_counted = qd_policy_socket_accept(server->qd->policy, ctx->rhost);
sys_mutex_unlock(server->lock);
if (!ctx->policy_counted) {
pn_transport_close_tail(tport);
pn_transport_close_head(tport);
return;
}
// Set up SSL
if (config->ssl_profile) {
qd_log(ctx->server->log_source, QD_LOG_TRACE, "[C%"PRIu64"] Configuring SSL on %s", ctx->connection_id, name);
if (listener_setup_ssl(ctx, config, tport) != QD_ERROR_NONE) {
connect_fail(ctx, QD_AMQP_COND_INTERNAL_ERROR, "%s on %s", qd_error_message(), name);
return;
}
}
//
// Set up SASL
//
sys_mutex_lock(ctx->server->lock);
pn_sasl_t *sasl = pn_sasl(tport);
if (ctx->server->sasl_config_path)
pn_sasl_config_path(sasl, ctx->server->sasl_config_path);
pn_sasl_config_name(sasl, ctx->server->sasl_config_name);
if (config->sasl_mechanisms)
pn_sasl_allowed_mechs(sasl, config->sasl_mechanisms);
pn_transport_require_auth(tport, config->requireAuthentication);
pn_transport_require_encryption(tport, config->requireEncryption);
pn_sasl_set_allow_insecure_mechs(sasl, config->allowInsecureAuthentication);
sys_mutex_unlock(ctx->server->lock);
qd_log(ctx->server->log_source, QD_LOG_INFO, "[C%"PRIu64"] Accepted connection to %s from %s",
ctx->connection_id, name, ctx->rhost_port);
} else if (ctx->connector) { /* Establishing an outgoing connection */
config = &ctx->connector->config;
if (!setup_ssl_sasl_and_open(ctx)) {
qd_log(ctx->server->log_source, QD_LOG_ERROR, "[C%"PRIu64"] Connection aborted due to internal setup error",
ctx->connection_id);
pn_transport_close_tail(tport);
pn_transport_close_head(tport);
return;
}
} else { /* No connector and no listener */
connect_fail(ctx, QD_AMQP_COND_INTERNAL_ERROR, "unknown Connection");
return;
}
//
// Common transport configuration.
//
pn_transport_set_max_frame(tport, config->max_frame_size);
pn_transport_set_channel_max(tport, config->max_sessions - 1);
pn_transport_set_idle_timeout(tport, config->idle_timeout_seconds * 1000);
}
static void invoke_deferred_calls(qd_connection_t *conn, bool discard)
{
if (!conn)
return;
// Lock access to deferred_calls, other threads may concurrently add to it. Invoke
// the calls outside of the critical section.
//
sys_mutex_lock(conn->deferred_call_lock);
qd_deferred_call_t *dc;
while ((dc = DEQ_HEAD(conn->deferred_calls))) {
DEQ_REMOVE_HEAD(conn->deferred_calls);
sys_mutex_unlock(conn->deferred_call_lock);
dc->call(dc->context, discard);
free_qd_deferred_call_t(dc);
sys_mutex_lock(conn->deferred_call_lock);
}
sys_mutex_unlock(conn->deferred_call_lock);
}
void qd_container_handle_event(qd_container_t *container, pn_event_t *event, pn_connection_t *pn_conn, qd_connection_t *qd_conn);
void qd_conn_event_batch_complete(qd_container_t *container, qd_connection_t *qd_conn, bool conn_closed);
static void handle_event_with_context(pn_event_t *e, qd_server_t *qd_server, qd_handler_context_t *context)
{
if (context && context->handler) {
context->handler(e, qd_server, context->context);
}
}
static void do_handle_raw_connection_event(pn_event_t *e, qd_server_t *qd_server)
{
handle_event_with_context(e, qd_server, (qd_handler_context_t*) pn_raw_connection_get_context(pn_event_raw_connection(e)));
}
static void do_handle_listener(pn_event_t *e, qd_server_t *qd_server)
{
handle_event_with_context(e, qd_server, (qd_handler_context_t*) pn_listener_get_context(pn_event_listener(e)));
}
pn_proactor_t *qd_server_proactor(qd_server_t *qd_server)
{
return qd_server->proactor;
}
static void handle_listener(pn_event_t *e, qd_server_t *qd_server, void *context) {
qd_log_source_t *log = qd_server->log_source;
qd_listener_t *li = (qd_listener_t*) context;
const char *host_port = li->config.host_port;
const char *port = li->config.port;
switch (pn_event_type(e)) {
case PN_LISTENER_OPEN: {
if (strcmp(port, "0") == 0) {
// If a 0 (zero) is specified for a port, get the actual listening port from the listener.
pn_listener_t *l = pn_event_listener(e);
const pn_netaddr_t *na = pn_listener_addr(l);
char str[PN_MAX_ADDR] = "";
pn_netaddr_str(na, str, sizeof(str));
// "str" contains the host and port on which this listener is listening.
if (li->config.name)
qd_log(log, QD_LOG_NOTICE, "Listening on %s (%s)", str, li->config.name);
else
qd_log(log, QD_LOG_NOTICE, "Listening on %s", str);
}
else {
qd_log(log, QD_LOG_NOTICE, "Listening on %s", host_port);
}
break;
}
case PN_LISTENER_ACCEPT:
qd_log(log, QD_LOG_TRACE, "Accepting connection on %s", host_port);
on_accept(e, li);
break;
case PN_LISTENER_CLOSE:
if (li->pn_listener) {
pn_condition_t *cond = pn_listener_condition(li->pn_listener);
if (pn_condition_is_set(cond)) {
qd_log(log, QD_LOG_ERROR, "Listener error on %s: %s (%s)", host_port,
pn_condition_get_description(cond),
pn_condition_get_name(cond));
if (li->exit_on_error) {
qd_log(log, QD_LOG_CRITICAL, "Shutting down, required listener failed %s",
host_port);
exit(1);
}
} else {
qd_log(log, QD_LOG_TRACE, "Listener closed on %s", host_port);
}
pn_listener_set_context(li->pn_listener, 0);
li->pn_listener = 0;
qd_listener_decref(li);
}
break;
default:
break;
}
}
bool qd_connector_has_failover_info(qd_connector_t* ct)
{
if (ct && DEQ_SIZE(ct->conn_info_list) > 1)
return true;
return false;
}
static void qd_connection_free(qd_connection_t *qd_conn)
{
qd_server_t *qd_server = qd_conn->server;
qd_connector_t *connector = qd_conn->connector;
// If this is a dispatch connector uncouple it from the
// connection and restart the re-connect timer if necessary
if (connector) {
sys_mutex_lock(connector->lock);
connector->qd_conn = 0; // this connection to be freed
if (connector->state != CXTR_STATE_DELETED) {
// Increment the connection index so that we can try connecting to the failover url (if any).
bool has_failover = qd_connector_has_failover_info(connector);
long delay = connector->delay;
if (has_failover) {
// Go through the failover list round-robin.
// IMPORTANT: Note here that we set the retry timer to 1 second.
// We want to quickly keep cycling through the failover urls every second.
delay = 1000;
}
connector->state = CXTR_STATE_CONNECTING;
qd_timer_schedule(connector->timer, delay);
}
sys_mutex_unlock(connector->lock);
qd_connector_decref(connector); // drop connection's reference
}
sys_mutex_lock(qd_server->lock);
DEQ_REMOVE(qd_server->conn_list, qd_conn);
sys_mutex_unlock(qd_server->lock);
// If counted for policy enforcement, notify it has closed
if (qd_conn->policy_counted) {
qd_policy_socket_close(qd_server->qd->policy, qd_conn);
}
invoke_deferred_calls(qd_conn, true); // Discard any pending deferred calls
sys_mutex_free(qd_conn->deferred_call_lock);
qd_policy_settings_free(qd_conn->policy_settings);
if (qd_conn->free_user_id) free((char*)qd_conn->user_id);
if (qd_conn->timer) qd_timer_free(qd_conn->timer);
free(qd_conn->name);
free(qd_conn->role);
sys_mutex_lock(qd_server->conn_activation_lock);
free_qd_connection_t(qd_conn);
sys_mutex_unlock(qd_server->conn_activation_lock);
/* Note: pn_conn is freed by the proactor */
}
static void timeout_on_handshake(void *context, bool discard)
{
if (discard)
return;
qd_connection_t *ctx = (qd_connection_t*) context;
pn_transport_t *tport = pn_connection_transport(ctx->pn_conn);
pn_transport_close_head(tport);
connect_fail(ctx, QD_AMQP_COND_NOT_ALLOWED, "Timeout waiting for initial handshake");
}
static void startup_timer_handler(void *context)
{
//
// This timer fires for a connection if it has not received a REMOTE_OPEN
// event within a set interval after the CONNECTION_INIT event. Close
// down the transport in an IO thread reserved for that connection.
//
qd_connection_t *ctx = (qd_connection_t*) context;
qd_timer_free(ctx->timer);
ctx->timer = 0;
qd_connection_invoke_deferred(ctx, timeout_on_handshake, context);
}
static void qd_increment_conn_index(qd_connection_t *ctx)
{
if (ctx->connector) {
qd_failover_item_t *item = qd_connector_get_conn_info(ctx->connector);
if (item->retries == 1) {
ctx->connector->conn_index += 1;
if (ctx->connector->conn_index > DEQ_SIZE(ctx->connector->conn_info_list))
ctx->connector->conn_index = 1;
item->retries = 0;
}
else
item->retries += 1;
}
}
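/* Illustrative walk-through of qd_increment_conn_index: item->retries starts
 * at 0, so each failover entry is attempted twice (retries goes 0 -> 1, then
 * the next failure advances conn_index) before moving on, wrapping from the
 * last entry of conn_info_list back to index 1.
 */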
/* Events involving a connection or listener are serialized by the proactor so
* only one event per connection / listener will be processed at a time.
*/
static bool handle(qd_server_t *qd_server, pn_event_t *e, pn_connection_t *pn_conn, qd_connection_t *ctx)
{
switch (pn_event_type(e)) {
case PN_PROACTOR_INTERRUPT:
/* Interrupt the next thread */
pn_proactor_interrupt(qd_server->proactor);
/* Stop the current thread */
return false;
case PN_PROACTOR_TIMEOUT:
qd_timer_visit();
break;
case PN_LISTENER_OPEN:
case PN_LISTENER_ACCEPT:
case PN_LISTENER_CLOSE:
do_handle_listener(e, qd_server);
break;
case PN_CONNECTION_INIT: {
const qd_server_config_t *config = ctx && ctx->listener ? &ctx->listener->config : 0;
if (config && config->initial_handshake_timeout_seconds > 0) {
ctx->timer = qd_timer(qd_server->qd, startup_timer_handler, ctx);
qd_timer_schedule(ctx->timer, config->initial_handshake_timeout_seconds * 1000);
}
break;
}
case PN_CONNECTION_BOUND:
on_connection_bound(qd_server, e);
break;
case PN_CONNECTION_REMOTE_OPEN:
// If we are transitioning to the open state, notify the client via callback.
if (ctx && ctx->timer) {
qd_timer_free(ctx->timer);
ctx->timer = 0;
}
if (ctx && !ctx->opened) {
ctx->opened = true;
if (ctx->connector) {
ctx->connector->delay = 2000; // Delay re-connect in case there is a recurring error
qd_failover_item_t *item = qd_connector_get_conn_info(ctx->connector);
if (item)
item->retries = 0;
}
}
break;
case PN_CONNECTION_WAKE:
invoke_deferred_calls(ctx, false);
break;
case PN_TRANSPORT_ERROR:
{
pn_transport_t *transport = pn_event_transport(e);
pn_condition_t* condition = transport ? pn_transport_condition(transport) : NULL;
if (ctx && ctx->connector) { /* Outgoing connection */
qd_increment_conn_index(ctx);
const qd_server_config_t *config = &ctx->connector->config;
// note: will transition back to STATE_CONNECTING when ctx is freed (pn_connection_free)
ctx->connector->state = CXTR_STATE_FAILED;
char conn_msg[300];
if (condition && pn_condition_is_set(condition)) {
qd_format_string(conn_msg, 300, "[C%"PRIu64"] Connection to %s failed: %s %s", ctx->connection_id, config->host_port,
pn_condition_get_name(condition), pn_condition_get_description(condition));
strcpy(ctx->connector->conn_msg, conn_msg);
qd_log(qd_server->log_source, QD_LOG_INFO, "%s", conn_msg);
} else {
qd_format_string(conn_msg, 300, "[C%"PRIu64"] Connection to %s failed", ctx->connection_id, config->host_port);
strcpy(ctx->connector->conn_msg, conn_msg);
qd_log(qd_server->log_source, QD_LOG_INFO, "%s", conn_msg);
}
} else if (ctx && ctx->listener) { /* Incoming connection */
if (condition && pn_condition_is_set(condition)) {
qd_log(ctx->server->log_source, QD_LOG_INFO, "[C%"PRIu64"] Connection from %s (to %s) failed: %s %s",
ctx->connection_id, ctx->rhost_port, ctx->listener->config.host_port, pn_condition_get_name(condition),
pn_condition_get_description(condition));
}
}
}
break;
case PN_RAW_CONNECTION_CONNECTED:
case PN_RAW_CONNECTION_CLOSED_READ:
case PN_RAW_CONNECTION_CLOSED_WRITE:
case PN_RAW_CONNECTION_DISCONNECTED:
case PN_RAW_CONNECTION_NEED_READ_BUFFERS:
case PN_RAW_CONNECTION_NEED_WRITE_BUFFERS:
case PN_RAW_CONNECTION_READ:
case PN_RAW_CONNECTION_WRITTEN:
case PN_RAW_CONNECTION_WAKE:
do_handle_raw_connection_event(e, qd_server);
break;
default:
break;
} // Switch event type
if (ctx)
qd_container_handle_event(qd_server->container, e, pn_conn, ctx);
return true;
}
static void *thread_run(void *arg)
{
qd_server_t *qd_server = (qd_server_t*)arg;
bool running = true;
while (running) {
pn_event_batch_t *events = pn_proactor_wait(qd_server->proactor);
pn_event_t * e;
qd_connection_t *qd_conn = 0;
pn_connection_t *pn_conn = 0;
while (running && (e = pn_event_batch_next(events))) {
pn_connection_t *conn = pn_event_connection(e);
if (!pn_conn)
pn_conn = conn;
assert(pn_conn == conn);
if (!qd_conn)
qd_conn = !!pn_conn ? (qd_connection_t*) pn_connection_get_context(pn_conn) : 0;
running = handle(qd_server, e, conn, qd_conn);
/* Free the connection after all other processing is complete */
if (qd_conn && pn_event_type(e) == PN_TRANSPORT_CLOSED) {
qd_conn_event_batch_complete(qd_server->container, qd_conn, true);
pn_connection_set_context(pn_conn, NULL);
qd_connection_free(qd_conn);
qd_conn = 0;
}
}
//
// Notify the container that the batch is complete so it can do after-batch
// processing.
//
if (qd_conn)
qd_conn_event_batch_complete(qd_server->container, qd_conn, false);
pn_proactor_done(qd_server->proactor, events);
}
return NULL;
}
static qd_failover_item_t *qd_connector_get_conn_info(qd_connector_t *ct) {
qd_failover_item_t *item = DEQ_HEAD(ct->conn_info_list);
if (DEQ_SIZE(ct->conn_info_list) > 1) {
for (int i=1; i < ct->conn_index; i++) {
item = DEQ_NEXT(item);
}
}
return item;
}
/* Timer callback to try/retry connection open */
static void try_open_lh(qd_connector_t *connector, qd_connection_t *connection)
{
// Nothing else can be referencing this connection until pn_proactor_connect is called below
qd_connection_t *qd_conn = qd_server_connection_impl(connector->server, &connector->config, connection);
if (!qd_conn) { /* Try again later */
qd_log(connector->server->log_source, QD_LOG_CRITICAL, "Allocation failure connecting to %s",
connector->config.host_port);
connector->delay = 10000;
connector->state = CXTR_STATE_CONNECTING;
qd_timer_schedule(connector->timer, connector->delay);
return;
}
qd_conn->connector = connector;
sys_atomic_inc(&connector->ref_count);
connector->qd_conn = qd_conn;
connector->state = CXTR_STATE_OPEN;
connector->delay = 5000;
//
// Set the hostname on the pn_connection. This hostname will be used by proton as the
// hostname in the open frame.
//
qd_failover_item_t *item = qd_connector_get_conn_info(connector);
char *current_host = item->host;
char *host_port = item->host_port;
pn_connection_set_hostname(qd_conn->pn_conn, current_host);
// Set the sasl user name and password on the proton connection object. This has to be
// done before pn_proactor_connect which will bind a transport to the connection
const qd_server_config_t *config = &connector->config;
if(config->sasl_username)
pn_connection_set_user(qd_conn->pn_conn, config->sasl_username);
if (config->sasl_password)
pn_connection_set_password(qd_conn->pn_conn, config->sasl_password);
qd_log(connector->server->log_source, QD_LOG_TRACE,
"[C%"PRIu64"] Connecting to %s", qd_conn->connection_id, host_port);
/* Note: the transport is configured in the PN_CONNECTION_BOUND event */
pn_proactor_connect(connector->server->proactor, qd_conn->pn_conn, host_port);
// at this point the qd_conn may now be scheduled on another thread
}
static bool setup_ssl_sasl_and_open(qd_connection_t *ctx)
{
qd_connector_t *ct = ctx->connector;
const qd_server_config_t *config = &ct->config;
pn_transport_t *tport = pn_connection_transport(ctx->pn_conn);
//
// Set up SSL if appropriate
//
if (config->ssl_profile) {
pn_ssl_domain_t *domain = pn_ssl_domain(PN_SSL_MODE_CLIENT);
if (!domain) {
qd_error(QD_ERROR_RUNTIME, "SSL domain allocation failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
return false;
}
bool failed = false;
// set our trusted database for checking the peer's cert:
if (config->ssl_trusted_certificate_db) {
if (pn_ssl_domain_set_trusted_ca_db(domain, config->ssl_trusted_certificate_db)) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"SSL CA configuration failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
}
// peer must provide a cert
if (pn_ssl_domain_set_peer_authentication(domain,
PN_SSL_VERIFY_PEER,
config->ssl_trusted_certificate_db)) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"SSL peer auth configuration failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
// configure our certificate if the peer requests one:
if (config->ssl_certificate_file) {
if (pn_ssl_domain_set_credentials(domain,
config->ssl_certificate_file,
config->ssl_private_key_file,
config->ssl_password)) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"SSL local certificate configuration failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
}
if (config->ssl_ciphers) {
if (pn_ssl_domain_set_ciphers(domain, config->ssl_ciphers)) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"SSL cipher configuration failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
}
if (config->ssl_protocols) {
if (pn_ssl_domain_set_protocols(domain, config->ssl_protocols)) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"Permitted TLS protocols configuration failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
}
// If ssl is enabled and verify_host_name is true, instruct proton to verify the peer name
if (config->verify_host_name) {
if (pn_ssl_domain_set_peer_authentication(domain, PN_SSL_VERIFY_PEER_NAME, NULL)) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"SSL peer host name verification configuration failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
}
if (!failed) {
ctx->ssl = pn_ssl(tport);
if (pn_ssl_init(ctx->ssl, domain, 0) != 0) {
qd_log(ct->server->log_source, QD_LOG_ERROR,
"SSL domain internal initialization failed for connection [C%"PRIu64"] to %s:%s",
ctx->connection_id, config->host, config->port);
failed = true;
}
}
pn_ssl_domain_free(domain);
if (failed) {
return false;
}
}
//
// Set up SASL
//
sys_mutex_lock(ct->server->lock);
pn_sasl_t *sasl = pn_sasl(tport);
if (config->sasl_mechanisms)
pn_sasl_allowed_mechs(sasl, config->sasl_mechanisms);
pn_sasl_set_allow_insecure_mechs(sasl, config->allowInsecureAuthentication);
sys_mutex_unlock(ct->server->lock);
pn_connection_open(ctx->pn_conn);
return true;
}
// (re)connection timer callback used by connector
//
static void try_open_cb(void *context)
{
qd_connector_t *ct = (qd_connector_t*) context;
// Allocate connection before taking connector lock to avoid
// CONNECTOR - ENTITY_CACHE lock inversion deadlock window.
qd_connection_t *ctx = new_qd_connection_t();
if (!ctx) {
qd_log(ct->server->log_source, QD_LOG_CRITICAL, "Allocation failure connecting to %s",
ct->config.host_port);
ct->delay = 10000;
ct->state = CXTR_STATE_CONNECTING;
qd_timer_schedule(ct->timer, ct->delay);
return;
}
sys_mutex_lock(ct->lock);
if (ct->state == CXTR_STATE_CONNECTING || ct->state == CXTR_STATE_INIT) {
// else deleted or failed - on failed wait until after connection is freed
// and state is set to CXTR_STATE_CONNECTING (timer is rescheduled then)
try_open_lh(ct, ctx);
ctx = 0; // owned by ct
}
sys_mutex_unlock(ct->lock);
free_qd_connection_t(ctx); // noop if ctx == 0
}
qd_server_t *qd_server(qd_dispatch_t *qd, int thread_count, const char *container_name,
const char *sasl_config_path, const char *sasl_config_name)
{
/* Initialize const members, 0 initialize all others. */
qd_server_t tmp = { .thread_count = thread_count };
qd_server_t *qd_server = NEW(qd_server_t);
if (qd_server == 0)
return 0;
memcpy(qd_server, &tmp, sizeof(tmp));
qd_server->qd = qd;
qd_server->log_source = qd_log_source("SERVER");
qd_server->protocol_log_source = qd_log_source("PROTOCOL");
qd_server->container_name = container_name;
qd_server->sasl_config_path = sasl_config_path;
qd_server->sasl_config_name = sasl_config_name;
qd_server->proactor = pn_proactor();
qd_server->container = 0;
qd_server->start_context = 0;
qd_server->lock = sys_mutex();
qd_server->conn_activation_lock = sys_mutex();
qd_server->cond = sys_cond();
DEQ_INIT(qd_server->conn_list);
qd_timer_initialize();
qd_server->pause_requests = 0;
qd_server->threads_paused = 0;
qd_server->pause_next_sequence = 0;
qd_server->pause_now_serving = 0;
qd_server->next_connection_id = 1;
qd_server->py_displayname_obj = 0;
qd_server->http = qd_http_server(qd_server, qd_server->log_source);
qd_log(qd_server->log_source, QD_LOG_INFO, "Container Name: %s", qd_server->container_name);
return qd_server;
}
qd_http_server_t *qd_server_http(qd_server_t *qd_server) {
return qd_server->http;
}
void qd_server_free(qd_server_t *qd_server)
{
if (!qd_server) return;
qd_connection_t *ctx = DEQ_HEAD(qd_server->conn_list);
while (ctx) {
qd_log(qd_server->log_source, QD_LOG_INFO,
"[C%"PRIu64"] Closing connection on shutdown",
ctx->connection_id);
DEQ_REMOVE_HEAD(qd_server->conn_list);
if (ctx->pn_conn) {
pn_transport_t *tport = pn_connection_transport(ctx->pn_conn);
if (tport)
pn_transport_set_context(tport, 0); /* for transport_tracer */
qd_session_cleanup(ctx);
pn_connection_set_context(ctx->pn_conn, 0);
}
invoke_deferred_calls(ctx, true); // Discard any pending deferred calls
if (ctx->free_user_id)
free((char*)ctx->user_id);
sys_mutex_free(ctx->deferred_call_lock);
free(ctx->name);
free(ctx->role);
if (ctx->policy_settings)
free_qd_policy_settings_t(ctx->policy_settings);
if (ctx->connector) {
ctx->connector->qd_conn = 0;
qd_connector_decref(ctx->connector);
}
free_qd_connection_t(ctx);
ctx = DEQ_HEAD(qd_server->conn_list);
}
pn_proactor_free(qd_server->proactor);
qd_timer_finalize();
sys_mutex_free(qd_server->lock);
sys_mutex_free(qd_server->conn_activation_lock);
sys_cond_free(qd_server->cond);
Py_XDECREF((PyObject *)qd_server->py_displayname_obj);
free(qd_server);
}
void qd_server_set_container(qd_dispatch_t *qd, qd_container_t *container)
{
qd->server->container = container;
}
void qd_server_trace_all_connections()
{
qd_dispatch_t *qd = qd_dispatch_get_dispatch();
if (qd->server) {
sys_mutex_lock(qd->server->lock);
qd_connection_list_t conn_list = qd->server->conn_list;
qd_connection_t *conn = DEQ_HEAD(conn_list);
while(conn) {
//
// If there is already a tracer on the transport, nothing to do, move on to the next connection.
//
pn_transport_t *tport = pn_connection_transport(conn->pn_conn);
if (! pn_transport_get_tracer(tport)) {
pn_transport_trace(tport, PN_TRACE_FRM);
pn_transport_set_tracer(tport, transport_tracer);
}
conn = DEQ_NEXT(conn);
}
sys_mutex_unlock(qd->server->lock);
}
}
static double normalize_memory_size(const uint64_t bytes, const char **suffix)
{
static const char * const units[] = {"B", "KiB", "MiB", "GiB", "TiB"};
const int units_ct = 5;
const double base = 1024.0;
double value = (double)bytes;
for (int i = 0; i < units_ct; ++i) {
if (value < base) {
if (suffix)
*suffix = units[i];
return value;
}
value /= base;
}
if (suffix)
*suffix = units[units_ct - 1];
return value;
}
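/* Illustrative example: normalize_memory_size(1572864, &suffix) walks
 * 1572864 B -> 1536 KiB -> 1.5 MiB and returns 1.5 with *suffix == "MiB".
 */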
void qd_server_run(qd_dispatch_t *qd)
{
qd_server_t *qd_server = qd->server;
int i;
assert(qd_server);
assert(qd_server->container); // Server can't run without a container
qd_log(qd_server->log_source,
QD_LOG_NOTICE, "Operational, %d Threads Running (process ID %ld)",
qd_server->thread_count, (long)getpid());
const uintmax_t ram_size = qd_platform_memory_size();
const uint64_t vm_size = qd_router_memory_usage();
if (ram_size && vm_size) {
const char *suffix_vm = 0;
const char *suffix_ram = 0;
double vm = normalize_memory_size(vm_size, &suffix_vm);
double ram = normalize_memory_size(ram_size, &suffix_ram);
qd_log(qd_server->log_source, QD_LOG_NOTICE,
"Process VmSize %.2f %s (%.2f %s available memory)",
vm, suffix_vm, ram, suffix_ram);
}
#ifndef NDEBUG
qd_log(qd_server->log_source, QD_LOG_INFO, "Running in DEBUG Mode");
#endif
int n = qd_server->thread_count - 1; /* Start count-1 threads + use current thread */
sys_thread_t **threads = (sys_thread_t **)qd_calloc(n, sizeof(sys_thread_t*));
for (i = 0; i < n; i++) {
threads[i] = sys_thread(thread_run, qd_server);
}
thread_run(qd_server); /* Use the current thread */
for (i = 0; i < n; i++) {
sys_thread_join(threads[i]);
sys_thread_free(threads[i]);
}
free(threads);
qd_log(qd_server->log_source, QD_LOG_NOTICE, "Shut Down");
}
void qd_server_stop(qd_dispatch_t *qd)
{
/* Interrupt the proactor, async-signal-safe */
pn_proactor_interrupt(qd->server->proactor);
}
void qd_server_activate(qd_connection_t *ctx)
{
if (ctx) ctx->wake(ctx);
}
void qd_connection_set_context(qd_connection_t *conn, void *context)
{
conn->user_context = context;
}
void *qd_connection_get_context(qd_connection_t *conn)
{
return conn->user_context;
}
void *qd_connection_get_config_context(qd_connection_t *conn)
{
return conn->context;
}
void qd_connection_set_link_context(qd_connection_t *conn, void *context)
{
conn->link_context = context;
}
void *qd_connection_get_link_context(qd_connection_t *conn)
{
return conn->link_context;
}
pn_connection_t *qd_connection_pn(qd_connection_t *conn)
{
return conn->pn_conn;
}
bool qd_connection_inbound(qd_connection_t *conn)
{
return conn->listener != 0;
}
uint64_t qd_connection_connection_id(qd_connection_t *conn)
{
return conn->connection_id;
}
const qd_server_config_t *qd_connection_config(const qd_connection_t *conn)
{
if (conn->listener)
return &conn->listener->config;
if (conn->connector)
return &conn->connector->config;
return NULL;
}
void qd_connection_invoke_deferred(qd_connection_t *conn, qd_deferred_t call, void *context)
{
if (!conn)
return;
qd_connection_invoke_deferred_impl(conn, call, context, new_qd_deferred_call_t());
}
void qd_connection_invoke_deferred_impl(qd_connection_t *conn, qd_deferred_t call, void *context, void *dct)
{
if (!conn)
return;
qd_deferred_call_t *dc = (qd_deferred_call_t*)dct;
DEQ_ITEM_INIT(dc);
dc->call = call;
dc->context = context;
sys_mutex_lock(conn->deferred_call_lock);
DEQ_INSERT_TAIL(conn->deferred_calls, dc);
sys_mutex_unlock(conn->deferred_call_lock);
sys_mutex_lock(conn->server->conn_activation_lock);
qd_server_activate(conn);
sys_mutex_unlock(conn->server->conn_activation_lock);
}
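/* A minimal usage sketch (handler and context names are hypothetical):
 *
 *     qd_connection_invoke_deferred(conn, my_handler, my_context);
 *
 * queues my_handler and wakes the connection; for proactor-managed
 * connections, my_handler(my_context, discard) then runs on the connection's
 * IO thread when the resulting PN_CONNECTION_WAKE event is handled.
 */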
void *qd_connection_new_qd_deferred_call_t()
{
return new_qd_deferred_call_t();
}
void qd_connection_free_qd_deferred_call_t(void *dct)
{
free_qd_deferred_call_t((qd_deferred_call_t *)dct);
}
qd_listener_t *qd_server_listener(qd_server_t *server)
{
qd_listener_t *li = new_qd_listener_t();
if (!li) return 0;
ZERO(li);
sys_atomic_init(&li->ref_count, 1);
li->server = server;
li->http = NULL;
li->type.context = li;
li->type.handler = &handle_listener;
return li;
}
static bool qd_listener_listen_pn(qd_listener_t *li) {
li->pn_listener = pn_listener();
if (li->pn_listener) {
pn_listener_set_context(li->pn_listener, &li->type);
pn_proactor_listen(li->server->proactor, li->pn_listener, li->config.host_port,
BACKLOG);
sys_atomic_inc(&li->ref_count); /* In use by proactor, PN_LISTENER_CLOSE will dec */
/* Listen is asynchronous, log "listening" message on PN_LISTENER_OPEN event */
} else {
qd_log(li->server->log_source, QD_LOG_CRITICAL, "No memory listening on %s",
li->config.host_port);
}
return li->pn_listener;
}
static bool qd_listener_listen_http(qd_listener_t *li) {
if (li->server->http) {
/* qd_lws_listener holds a reference to li, will decref when closed */
qd_http_server_listen(li->server->http, li);
return li->http;
} else {
qd_log(li->server->log_source, QD_LOG_ERROR, "No HTTP support to listen on %s",
li->config.host_port);
return false;
}
}
bool qd_listener_listen(qd_listener_t *li) {
if (li->pn_listener || li->http) /* Already listening */
return true;
return li->config.http ? qd_listener_listen_http(li) : qd_listener_listen_pn(li);
}
void qd_listener_decref(qd_listener_t* li)
{
if (li && sys_atomic_dec(&li->ref_count) == 1) {
qd_server_config_free(&li->config);
free_qd_listener_t(li);
}
}
qd_connector_t *qd_server_connector(qd_server_t *server)
{
qd_connector_t *connector = new_qd_connector_t();
if (!connector) return 0;
ZERO(connector);
sys_atomic_init(&connector->ref_count, 1);
DEQ_INIT(connector->conn_info_list);
connector->lock = sys_mutex();
if (!connector->lock)
goto error;
connector->timer = qd_timer(server->qd, try_open_cb, connector);
if (!connector->timer)
goto error;
connector->conn_msg = (char*) malloc(300);
if (!connector->conn_msg)
goto error;
memset(connector->conn_msg, 0, 300);
connector->server = server;
connector->conn_index = 1;
connector->state = CXTR_STATE_INIT;
return connector;
error:
connector->state = CXTR_STATE_DELETED;
qd_connector_decref(connector);
return 0;
}
const char *qd_connector_policy_vhost(qd_connector_t* ct)
{
return ct->policy_vhost;
}
bool qd_connector_connect(qd_connector_t *ct)
{
sys_mutex_lock(ct->lock);
// expect: do not attempt to connect an already connected qd_connection
assert(ct->qd_conn == 0);
ct->qd_conn = 0;
ct->delay = 0;
ct->state = CXTR_STATE_CONNECTING;
qd_timer_schedule(ct->timer, ct->delay);
sys_mutex_unlock(ct->lock);
return true;
}
void qd_connector_decref(qd_connector_t* connector)
{
if (!connector) return;
if (sys_atomic_dec(&connector->ref_count) == 1) {
// expect both mgmt and qd_connection no longer reference this
assert(connector->state == CXTR_STATE_DELETED);
assert(connector->qd_conn == 0);
qd_server_config_free(&connector->config);
qd_timer_free(connector->timer);
if (connector->lock) sys_mutex_free(connector->lock);
qd_failover_item_t *item = DEQ_HEAD(connector->conn_info_list);
while (item) {
DEQ_REMOVE_HEAD(connector->conn_info_list);
free(item->scheme);
free(item->host);
free(item->port);
free(item->hostname);
free(item->host_port);
free(item);
item = DEQ_HEAD(connector->conn_info_list);
}
if (connector->policy_vhost) free(connector->policy_vhost);
free(connector->conn_msg);
free_qd_connector_t(connector);
}
}
__attribute__((weak)) // permit replacement by dummy implementation in unit_tests
void qd_server_timeout(qd_server_t *server, qd_duration_t duration) {
pn_proactor_set_timeout(server->proactor, duration);
}
qd_dispatch_t* qd_server_dispatch(qd_server_t *server) { return server->qd; }
const char* qd_connection_name(const qd_connection_t *c) {
if (c->connector) {
return c->connector->config.host_port;
} else {
return c->rhost_port;
}
}
qd_connector_t* qd_connection_connector(const qd_connection_t *c) {
return c->connector;
}
const qd_server_config_t *qd_connector_config(const qd_connector_t *c) {
return &c->config;
}
qd_lws_listener_t *qd_listener_http(qd_listener_t *li) {
return li->http;
}
const char* qd_connection_remote_ip(const qd_connection_t *c) {
return c->rhost;
}
/* Expose event handling for HTTP connections */
bool qd_connection_handle(qd_connection_t *c, pn_event_t *e) {
if (!c)
return false;
pn_connection_t *pn_conn = pn_event_connection(e);
qd_connection_t *qd_conn = !!pn_conn ? (qd_connection_t*) pn_connection_get_context(pn_conn) : 0;
handle(c->server, e, pn_conn, qd_conn);
if (qd_conn && pn_event_type(e) == PN_TRANSPORT_CLOSED) {
pn_connection_set_context(pn_conn, NULL);
qd_connection_free(qd_conn);
return false;
}
return true;
}
uint64_t qd_server_allocate_connection_id(qd_server_t *server)
{
uint64_t id;
sys_mutex_lock(server->lock);
id = server->next_connection_id++;
sys_mutex_unlock(server->lock);
return id;
}
bool qd_connection_strip_annotations_in(const qd_connection_t *c) {
return c->strip_annotations_in;
}
sys_mutex_t *qd_server_get_activation_lock(qd_server_t * server)
{
return server->conn_activation_lock;
}
uint64_t qd_connection_max_message_size(const qd_connection_t *c) {
return (c && c->policy_settings) ? c->policy_settings->spec.maxMessageSize : 0;
}
""" from https://github.com/keithito/tacotron """
import re
valid_symbols = [
"AA",
"AA0",
"AA1",
"AA2",
"AE",
"AE0",
"AE1",
"AE2",
"AH",
"AH0",
"AH1",
"AH2",
"AO",
"AO0",
"AO1",
"AO2",
"AW",
"AW0",
"AW1",
"AW2",
"AY",
"AY0",
"AY1",
"AY2",
"B",
"CH",
"D",
"DH",
"EH",
"EH0",
"EH1",
"EH2",
"ER",
"ER0",
"ER1",
"ER2",
"EY",
"EY0",
"EY1",
"EY2",
"F",
"G",
"HH",
"IH",
"IH0",
"IH1",
"IH2",
"IY",
"IY0",
"IY1",
"IY2",
"JH",
"K",
"L",
"M",
"N",
"NG",
"OW",
"OW0",
"OW1",
"OW2",
"OY",
"OY0",
"OY1",
"OY2",
"P",
"R",
"S",
"SH",
"T",
"TH",
"UH",
"UH0",
"UH1",
"UH2",
"UW",
"UW0",
"UW1",
"UW2",
"V",
"W",
"Y",
"Z",
"ZH",
]
_valid_symbol_set = set(valid_symbols)
class CMUDict:
"""Thin wrapper around CMUDict data. http://www.speech.cs.cmu.edu/cgi-bin/cmudict"""
def __init__(self, file_or_path, keep_ambiguous=True):
if isinstance(file_or_path, str):
with open(file_or_path, encoding="latin-1") as f:
entries = _parse_cmudict(f)
else:
entries = _parse_cmudict(file_or_path)
if not keep_ambiguous:
entries = {word: pron for word, pron in entries.items() if len(pron) == 1}
self._entries = entries
def __len__(self):
return len(self._entries)
def lookup(self, word):
"""Returns list of ARPAbet pronunciations of the given word."""
return self._entries.get(word.upper())
_alt_re = re.compile(r"\([0-9]+\)")
def _parse_cmudict(file):
cmudict = {}
for line in file:
if len(line) and (line[0] >= "A" and line[0] <= "Z" or line[0] == "'"):
parts = line.split("  ")  # word and pronunciation are separated by two spaces
word = re.sub(_alt_re, "", parts[0])
pronunciation = _get_pronunciation(parts[1])
if pronunciation:
if word in cmudict:
cmudict[word].append(pronunciation)
else:
cmudict[word] = [pronunciation]
return cmudict
def _get_pronunciation(s):
parts = s.strip().split(" ")
for part in parts:
if part not in _valid_symbol_set:
return None
return " ".join(parts)
/*
IITH Compilers
authors: S Venkata Keerthy, D Tharun
email: {cs17mtech11018, cs15mtech11002}@iith.ac.in
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef _FRONTENDS_P4_EMITLLVMIR_H_
#define _FRONTENDS_P4_EMITLLVMIR_H_
#include "ir/ir.h"
#include "ir/visitor.h"
#include "frontends/common/resolveReferences/referenceMap.h"
#include "frontends/p4/typeChecking/typeChecker.h"
#include "llvm/ADT/APFloat.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/LegacyPassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Transforms/IPO/PassManagerBuilder.h"
#include "llvm/Transforms/Scalar.h"
#include "frontends/p4/methodInstance.h"
#include <algorithm>
#include <cctype>
#include <cstdio>
#include <cstdlib>
#include <map>
#include <iterator>
#include <memory>
#include <string>
#include <vector>
#include "iostream"
#include "lib/nullstream.h"
#include "llvm/ADT/APInt.h"
using namespace llvm;
#define VERBOSE 1
#endif /* _FRONTENDS_P4_EMITLLVMIR_H_ */
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#Created on Thu Nov 9 21:38:24 2017
#@author: ryanday
#MIT License
#Copyright (c) 2018 Ryan Patrick Day
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import numpy as np
import chinook.SlaterKoster as SK
import chinook.rotation_lib as rot_lib
import chinook.Ylm as Ylm
hb = 6.626*10**-34/(2*np.pi)  # reduced Planck constant, hbar (J*s)
c = 3.0*10**8  # speed of light (m/s)
q = 1.602*10**-19  # elementary charge (C)
A = 10.0**-10  # one Angstrom (m)
me = 9.11*10**-31  # electron mass (kg)
mn = 1.67*10**-27  # neutron mass (kg)
kb = 1.38*10**-23  # Boltzmann constant (J/K)
def txt_build(filename,cutoff,renorm,offset,tol,Nonsite):
'''
Build Hamiltonian from a textfile. Each input row has the form
o1,o2,x12,y12,z12,t12; output rows have the form [o1,o2,x12,y12,z12,t12].
To be explicit, each row of the textfile is used to generate a
k-space Hamiltonian matrix element of the form:
.. math::
H_{1,2}(k) = t_{1,2} e^{i (k_x x_{1,2} + k_y y_{1,2} + k_z z_{1,2})}
*args*:
- **filename**: string, name of file
- **cutoff**: float, maximum distance of hopping allowed, Angstrom
- **renorm**: float, renormalization of the bandstructure
- **offset**: float, energy offset of chemical potential, electron volts
- **tol**: float, minimum Hamiltonian matrix element amplitude
- **Nonsite**: int, number of basis states, use to apply **offset**
*return*:
- **Hlist**: the list of Hamiltonian matrix elements
***
'''
Hlist = []
with open(filename,'r') as origin:
for line in origin:
spl = line.split(',')
try:
R = np.array([float(spl[2]),float(spl[3]),float(spl[4])])
Hval = complex(spl[5])
except:
print(line,spl)
continue
if len(spl)>6:
Hval+=1.0j*float(spl[6])
if abs(Hval)>tol and np.linalg.norm(R)<cutoff:
Hval*=renorm
tmp = [int(spl[0]),int(spl[1]),R[0],R[1],R[2],Hval]
Hlist.append(tmp)
if abs(offset)>0:
for ii in range(Nonsite):
Hlist.append([ii,ii,0,0,0,-offset])
origin.close()
return Hlist
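# A minimal usage sketch of txt_build (filename and values are hypothetical).
# Given a file "hoppings.txt" whose rows look like "0,1,1.5,0.0,0.0,-1.2", a
# two-orbital model with no chemical-potential offset could be built as:
#
#   Hlist = txt_build("hoppings.txt", cutoff=5.0, renorm=1.0, offset=0.0,
#                     tol=1e-6, Nonsite=2)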
def sk_build(avec,basis,Vdict,cutoff,tol,renorm,offset):
'''
Build an SK model using D-matrices, rather than a list of SK terms from a table.
This can handle orbitals of arbitrary orbital angular momentum in principle,
but is right now implemented for up to and including f-electrons.
NOTE: f-hoppings require thorough testing
*args*:
- **avec**: numpy array 3x3 float, lattice vectors
- **basis**: list of orbital objects
- **Vdict**: dictionary, or list of dictionaries, of Slater-Koster integrals/ on-site energies
- **cutoff**: float or list of float, indicating range where Vdict is applicable
- **tol**: float, threshold value below which hoppings are neglected
- **offset**: float, offset for Fermi level
*return*:
- **H_raw**: list of Hamiltonian matrix elements, in form [o1,o2,x12,y12,z12,t12]
***
'''
Vdict,cutoff,pts = cluster_init(Vdict,cutoff,avec) #build region of lattice points, containing at least the cutoff distance
V = Vdict[0]
if basis[0].spin!=basis[-1].spin: #only calculate explicitly for a single spin species
brange = int(len(basis)/2)
else:
brange = len(basis)
SK_matrices = SK.SK_full(basis[:brange]) #generate the generalized Slater-Koster matrices, as functions of R and potential V
index_orbitals = index_ordering(basis[:brange]) #define the indices associated with the various orbital shells in the basis,
H_raw = on_site(basis[:brange],V,offset) #fill in the on-site energies
for i1 in index_orbitals:
for i2 in index_orbitals:
if index_orbitals[i1][index_orbitals[i1]>-1].min()<=index_orbitals[i2][index_orbitals[i2]>-1].min():
o1o2 = (i1[0],i2[0],i1[1],i2[1],i1[2],i2[2])
R12 = (np.array(i2[3:6])-np.array(i1[3:6]))
SKmat = SK_matrices[o1o2]
for p in pts: #iterate over the points in the cluster
Rij = R12 + np.dot(p,avec)
Rijn = np.linalg.norm(Rij) #compute norm of the vector
#
if 0<Rijn<cutoff[-1]: #only proceed if within the cutoff distance
V = Vdict[np.where(Rijn>=cutoff)[0][-1]]
Vlist = Vlist_gen(V,o1o2)
if Vlist is None:
continue
elif len(Vlist)==0:
continue
Euler_A,Euler_B,Euler_y = rot_lib.Euler(rot_lib.rotate_v1v2(Rij,np.array([0,0,1])))
SKvals = mirror_SK([vi for vi in Vlist])
SKmat_num = SKmat(Euler_A,Euler_B,Euler_y,SKvals) #explicitly compute the relevant Hopping matrix for this vector and these shells
if abs(SKmat_num).max()>tol:
append = mat_els(Rij,SKmat_num,tol,index_orbitals[i1],index_orbitals[i2])
H_raw = H_raw + append
return H_raw #finally return the list of Hamiltonian matrix elements
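# A hedged sketch of the Vdict format consumed above (see on_site and
# Vlist_gen): on-site energies are keyed as "{atom}{n}{l}" and hopping
# integrals as "{a1}{a2}{n1}{n2}{l1}{l2}" plus one of "S", "P", "D", "F".
# For example, a single-species s-band (atom 0, n=1, l=0) with a -1.0 eV
# sigma hopping might use:
#
#   Vdict = {"010": 0.0, "001100S": -1.0}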
def on_site(basis,V,offset):
'''
On-site matrix element calculation. Try both anl and alabel formats,
if neither is defined, default the onsite energy to 0.0 eV
*args*:
- **basis**: list of orbitals defining the tight-binding basis
- **V**: dictionary, Slater Koster terms
- **offset**: float, EF shift
*return*:
- **Ho**: list of Hamiltonian matrix elements
***
'''
Ho = []
for oi in basis:
try:
H = V['{:d}{:d}{:d}'.format(oi.atom,oi.n,oi.l)]
except KeyError:
try:
H = V['{:d}{:s}'.format(oi.atom,oi.label)]
except KeyError:
H = 0.0
Ho.append([oi.index,oi.index,0.0,0.0,0.0,float(H-offset)])
return Ho
def mat_els(Rij,SKmat,tol,i1,i2):
'''
Extract the pertinent, and non-zero elements of the Slater-Koster matrix
and transform to the conventional form of Hamiltonian list entries
(o1,o2,Rij0,Rij1,Rij2,H12(Rij))
*args*:
- **Rij**: numpy array of 3 float, relevant connecting vector
- **SKmat**: numpy array of float, matrix of hopping elements
for the coupling of two orbital shells
- **tol**: float, minimum hopping included in model
- **i1**, **i2**: int,int, proper index ordering for the relevant
instance of the orbital shells involved in hopping
*return*:
- **out**: list of Hamiltonian matrix elements, extracted from the
ordered SKmat, in form [[o1,o2,x12,y12,z12,H12],...]
***
'''
inds = np.where(abs(SKmat)>tol)
out = []
for ii in range(len(inds[0])):
i_1 = i1[inds[0][ii]]
i_2 = i2[inds[1][ii]]
if -1<i_1<=i_2:
out.append([i_1,i_2,*Rij,SKmat[inds[0][ii],inds[1][ii]]])
return out
def index_ordering(basis):
'''
We use a universal ordering convention for defining the Slater-Koster matrices
which may (and most likely will) not match the ordering chosen by the user.
To account for this, we define a dictionary which gives the ordering, relative
to the normal order convention defined here, associated with a given a-n-l shell
at each site in the lattice basis.
*args*:
- **basis**: list of orbital objects
*return*:
- **indexing**: dictionary of key-value pairs (a,n,l,x,y,z):numpy.array([...])
***
'''
normal_order = {0:{'':0},1:{'x':0,'y':1,'z':2},2:{'xz':0,'yz':1,'xy':2,'ZR':3,'XY':4},3:{'z3':0,'xz2':1,'yz2':2,'xzy':3,'zXY':4,'xXY':5,'yXY':6}}
indexing = {}
for b in basis:
anl = (b.atom,b.n,b.l,*np.around(b.pos,4))
if anl not in indexing.keys():
indexing[anl] = -1*np.ones(2*b.l+1)
indexing[anl][normal_order[b.l][b.label[2:]]] = b.index
return indexing
def Vlist_gen(V,pair):
'''
Select the relevant hopping matrix elements to be used in defining the value
of the Slater-Koster matrix elements for a given pair of orbitals. Handles the
situation where insufficient parameters have been passed to the system.
*args*:
- **V**: dictionary of Slater-Koster hopping terms
- **pair**: tuple of int defining the orbitals to be paired, (a1,a2,n1,n2,l1,l2)
*return*:
- **Vvals**: numpy array of Vllx related to a given pairing, e.g. for s-p np.array([Vsps,Vspp])
***
'''
order = {'S':0,'P':1,'D':2,'F':3,0:'S',1:'P',2:'D',3:'F'}
vstring = '{:d}{:d}{:d}{:d}{:d}{:d}'.format(*pair[:6])
l = max(pair[4],pair[5])
if len(V.keys())<(l+1):
print('WARNING, insufficient number of Slater-Koster parameters passed: filling missing values with zeros.')
for l_index in range(l+1):
hopping_type = vstring+order[l_index]
if hopping_type not in V.keys():
V[hopping_type] = 0
try:
Vkeys = np.array(sorted([[l-order[vi[-1]],vi] for vi in V if vi[:-1]==vstring]))[:,1]
Vvals = np.array([V[vk] for vk in Vkeys])
except IndexError:
vstring = '{:d}{:d}{:d}{:d}{:d}{:d}'.format(pair[1],pair[0],pair[3],pair[2],pair[5],pair[4])
try:
Vkeys = np.array(sorted([[l-order[vi[-1]],vi] for vi in V if vi[:-1]==vstring]))[:,1]
pre = (-1)**(pair[4]+pair[5]) #relative parity of the two coupled states
Vvals = pre*np.array([V[vk] for vk in Vkeys])
except IndexError:
return None
return Vvals
def mirror_SK(SK_in):
'''
Generate a list of values which is the input appended with its mirror
reflection. The mirror boundary condition suppresses the duplicate of the
last value. e.g. [0,1,2,3,4] --> [0,1,2,3,4,3,2,1,0],
['r','a','c','e','c','a','r'] --> ['r','a','c','e','c','a','r','a','c','e','c','a','r']
Intended here to take an array of Slater-Koster hopping terms and reflect about
its last entry i.e. [Vsps,Vspp] -> [Vsps,Vspp,Vsps]
*args*:
- **SK_in**: iterable, of arbitrary length and data-type
*return*:
- list of values with same data-type as input
***
'''
return list(SK_in) + (SK_in[-2::-1])
def cluster_init(Vdict,cutoff,avec):
'''
Generate a cluster of neighbouring lattice points to use
in defining the hopping paths--ensuring that it extends
sufficiently far enough to capture even the largest hopping vectors.
Also reforms the SK dictionary and cutoff lengths to be in list format.
Returns an array of lattice points which go safely to the edge of the cutoff range.
*args*:
- **Vdict**: dictionary, or list of dictionaries of Slater Koster matrix elements
- **cutoff**: float, or list of float
- **avec**: numpy array of 3x3 float
*return*:
- **Vdict**: list of length 1 if a single dictionary passed, else unmodified
- **cutoff**: numpy array, append 0 to the beginning of the cutoff list,
else leave it alone.
- **pts**: numpy array of lattice vector indices for a region of lattice points around
the origin.
***
'''
if isinstance(cutoff,(int,float)) and not isinstance(cutoff,bool):
cutoff = np.array([0.0,cutoff])
Vdict = [Vdict]
else:
if cutoff[0]>0:
cutoff.insert(0,0)
cutoff = np.array(cutoff)
else:
cutoff = np.array(cutoff)
pt_max = np.ceil(np.array([(cutoff).max()/np.linalg.norm(avec[i]) for i in range(len(avec))]).max())
pts = region(int(pt_max)+1)
return Vdict,cutoff,pts
###############################################################################
#########################Spin Orbit Coupling###################################
###############################################################################
def spin_double(H,lb):
'''
Duplicate the kinetic Hamiltonian terms to extend over the spin-duplicated
orbitals, which are by construction in the same order and appended to the end of the
original basis.
*args*:
- **H**: list, Hamiltonian matrix elements [[o1,o2,x,y,z,H12],...]
- **lb**: int, length of basis before spin duplication
*return*:
- **h2** modified copy of **H**, filled with kinetic terms for both
spin species
***
'''
lenb = int(lb/2)
h2 = []
for i in range(len(H)):
h2.append([H[i][0]+lenb,H[i][1]+lenb,H[i][2],H[i][3],H[i][4],H[i][5]])
return h2
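# Example: with lb=4 (two spatial orbitals, spin-doubled), the kinetic element
# [0, 1, x, y, z, t] is duplicated as [2, 3, x, y, z, t] for the second spin
# species.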
def SO(basis):
'''
Generate L.S matrix-elements for a given basis.
This is generic to all l, except for normal_order, which is defined here up to
and including the f electrons; otherwise this method applies to any orbital
angular momentum.
The factors dictionary defined here indicates the weight of the
different :math:`L_iS_i` terms. The keys are tuples of (L+/-/z,S+/-/z),
encoded somewhat cryptically: for L, (0,1,2) -> (-1,0,1) and
for S, (-1,0,1) = S1-S2 with S1,2 = +/- 1 here.
L+,L-,Lz matrices are defined for each l shell in the basis,
transformed into the basis of the tight-binding model.
The nonzero terms will then just be used along with the spin and
weighted by the factor value, and slotted into a len(**basis**)xlen(**basis**) matrix **HSO**
*args*:
- **basis**: list of orbital objects
*return*:
- **HSO**: list of matrix elements in standard format [o1,o2,0,0,0,H12]
***
'''
Md = Ylm.Yproj(basis)
normal_order = {0:{'':0},1:{'x':0,'y':1,'z':2},2:{'xz':0,'yz':1,'xy':2,'ZR':3,'XY':4},3:{'z3':0,'xz2':1,'yz2':2,'xzy':3,'zXY':4,'xXY':5,'yXY':6}}
factors = {(2,-1):0.5,(0,1):0.5,(1,0):1.0}
L,al = {},[]
HSO = []
for o in basis[:int(len(basis)/2)]:
if (o.atom,o.n,o.l) not in al:
al.append((o.atom,o.n,o.l))
Mdn = Md[(o.atom,o.n,o.l,-1)]
Mup = Md[(o.atom,o.n,o.l,1)]
Mdnp = np.linalg.inv(Mdn)
Mupp = np.linalg.inv(Mup)
L[(o.atom,o.n,o.l)] = [np.dot(Mupp,np.dot(Lm(o.l),Mdn)),np.dot(Mupp,np.dot(Lz(o.l),Mup)),np.dot(Mdnp,np.dot(Lp(o.l),Mup))]
for o1 in basis:
for o2 in basis:
if o1.index<=o2.index:
LS_val = 0.0
if np.linalg.norm(o1.pos-o2.pos)<0.0001 and o1.l==o2.l and o1.n==o2.n:
inds = (normal_order[o1.l][o1.label[2:]],normal_order[o2.l][o2.label[2:]])
ds = (o1.spin-o2.spin)/2.
if ds==0:
s=0.5*np.sign(o1.spin)
else:
s=1.0
for f in factors:
if f[1]==ds:
LS_val+=o1.lam*factors[f]*L[(o1.atom,o1.n,o1.l)][f[0]][inds]*s
HSO.append([o1.index,o2.index,0.,0.,0.,LS_val])
return HSO
def Lp(l):
'''
L+ operator in the :math:`l`, :math:`m_l` basis, organized with
(0,0) = |l,l>... (2l,2l) = |l,-l>
The nonzero elements are on the upper diagonal
*arg*:
- **l**: int orbital angular momentum
*return*:
- **M**: numpy array (2l+1,2l+1) of real float
***
'''
M = np.zeros((2*l+1,2*l+1))
r = np.arange(0,2*l,1)
M[r,r+1]=1.0
vals = [0]+[np.sqrt(l*(l+1)-(l-m)*(l-m+1)) for m in range(1,2*l+1)]
M = M*vals
return M
def Lm(l):
'''
L- operator in the l,m_l basis, organized with
(0,0) = |l,l>... (2l,2l) = |l,-l>
The nonzero elements are on the lower diagonal
*arg*:
- **l**: int orbital angular momentum
*return*:
- **M**: numpy array (2l+1,2l+1) of real float
***
'''
M = np.zeros((2*l+1,2*l+1))
r = np.arange(1,2*l+1,1)
M[r,r-1]=1.0
vals = [np.sqrt(l*(l+1)-(l-m)*(l-m-1)) for m in range(0,2*l)]+[0]
M = M*vals
return M
def Lz(l):
'''
Lz operator in the l,:math:`m_l` basis
*arg*:
- **l**: int orbital angular momentum
*return*:
- numpy array (2*l+1,2*l+1)
***
'''
return np.identity(2*l+1)*np.array([l-m for m in range(2*l+1)])
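# Worked example for l=1, with the basis ordered |1,1>, |1,0>, |1,-1>:
#
#   Lp(1) -> [[0, sqrt(2), 0],
#             [0, 0, sqrt(2)],
#             [0, 0, 0]]
#   Lz(1) -> diag(1, 0, -1)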
def AFM_order(basis,dS,p_up,p_dn):
'''
Add antiferromagnetism to the tight-binding model, by adding a different on-site energy to
orbitals of different spin character, on the designated sites.
*args*:
- **basis**: list, orbital objects
- **dS**: float, size of spin-splitting (eV)
- **p_up**, **p_dn**: numpy array of float indicating the orbital positions
for the AFM order
*return*:
- **h_AF**: list of matrix elements, as conventionally arranged [[o1,o2,0,0,0,H12],...]
***
'''
h_AF = []
for bi in basis:
if np.linalg.norm(bi.pos-p_up)==0:
if bi.spin<0:
h_AF.append([bi.index,bi.index,0,0,0,dS])
else:
h_AF.append([bi.index,bi.index,0,0,0,-dS])
elif np.linalg.norm(bi.pos-p_dn)==0:
if bi.spin<0:
h_AF.append([bi.index,bi.index,0,0,0,-dS])
else:
h_AF.append([bi.index,bi.index,0,0,0,dS])
return h_AF
def FM_order(basis,dS):
'''
Add ferromagnetism to the system. A positive dS puts
spin-up lower in energy by dS, and vice versa for spin-down. This directly
modifies the *TB_model*'s **mat_els** attribute
*args*:
- **basis**: list, of orbital objects in basis
- **dS**: float, energy of the spin splitting (eV)
*return*:
- list of matrix elements [[o1,o2,0,0,0,H12],...]
***
'''
return [[bi.index,bi.index,0,0,0,-np.sign(bi.spin)*dS] for bi in basis]
#def Efield(basis,field,orbital_type='Slater'):
'''
Define a set of matrix elements which introduce an electric field, treated at the level of a dipole operator.
TODO
'''
# return None
def region(num):
'''
Generate a symmetric grid of points in number of lattice vectors.
*args*:
- **num**: int, grid will have size 2*num+1 in each direction
*return*:
- numpy array of size ((2*num+1)**3,3) with centre value of first entry
of (-num,-num,-num),...,(0,0,0),...,(num,num,num)
***
'''
num_symm = 2*num+1
return np.array([[int(i/num_symm**2)-num,int(i/num_symm)%num_symm-num,i%num_symm-num] for i in range((num_symm)**3)])
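# Example: region(1) returns the 27 integer triplets running from (-1,-1,-1)
# through (1,1,1), i.e. a (27, 3) array centred on the origin.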