text
stringlengths 3
1.05M
|
|---|
#!/usr/bin/env python3
"""
Move a motor back and forth using the TMCM6212 module.

Created on 28.02.2019
@author: JM
"""
import time

import PyTrinamic
from PyTrinamic.connections.ConnectionManager import ConnectionManager
from PyTrinamic.modules.TMCM6212.TMCM_6212 import TMCM_6212

PyTrinamic.showInfo()
# If no interface is selected, the default interface is usb_tmcl.
connectionManager = ConnectionManager()
myInterface = connectionManager.connect()
Module_6212 = TMCM_6212(myInterface)
DEFAULT_MOTOR = 0

print("Preparing parameters")
Module_6212.setMaxAcceleration(9000)

print("Rotating")
Module_6212.rotate(40000)
time.sleep(5)

print("Stopping")
Module_6212.stop()

print("ActualPosition")
print(Module_6212.getActualPosition())
time.sleep(5)

print("Doubling moved distance")
# moveBy() is relative, so moving by the current position doubles the
# distance from zero.  (A leftover getAxisParameter() call whose result was
# discarded has been removed.)
Module_6212.moveBy(Module_6212.getActualPosition(), 50000)
# Busy-wait until the module reports the target position was reached.
while not Module_6212.positionReached():
    pass

print("Furthest point reached")
print(Module_6212.getActualPosition())
time.sleep(5)

print("Moving back to 0")
Module_6212.moveTo(0, 100000)
# Wait until position 0 is reached
while not Module_6212.positionReached():
    pass

print("Reached Position 0")
print()
myInterface.close()
|
import React from 'react'
function Popup({ selected, closePopup }) {
return (
<section className="popup">
<div className="content">
<h2>{ selected.Title } <span>({ selected.Year })</span></h2>
<p className="rating">Rating: {selected.imdbRating}</p>
<div className="plot">
<img src={selected.Poster} alt="Selected Poster" />
<p>{selected.Plot}</p>
</div>
<button className="close" onClick={closePopup}>Close</button>
</div>
</section>
)
}
export default Popup
|
var Products;
exports.setModel = function (modelo) {
Products = modelo;
};
exports.create = function(req, res){
res.render('save', {
title: 'Carro De Compras',
put: false,
action: '/additem',
products: new Products({
title: '',
description: '',
price: '',
pathImage: ''
})
});
};
exports.store = function(req, res){
var products = new Products({
title: req.body.title,
description: req.body.description,
price: req.body.price,
pathImage: req.body.pathImage
});
products.save(function(error, documento){
if(error){
res.send('Error al intentar agregar el item.');
}else{
res.redirect('/');
}
});
};
|
from flask import Flask, render_template, request, make_response, jsonify, url_for, redirect
from flask_cors import *
from HDUCal.schedule2ics import Schedule2ICS
from HDUCal.schedule2json import Schedule2JSON
from HDUCal.gain_schedule import GainSchedule
import os
import json
# Flask application with CORS enabled on all routes (credentials allowed).
app = Flask(__name__)
CORS(app, supports_credentials=True)
@app.route('/schedule', methods=['GET'])
def index():
    """Serve the schedule front-end page."""
    return render_template('index.html')
@app.route('/schedule/ics', methods=['POST'])
def icsschedule():
    """Fetch a student's schedule and return it as an iCalendar download.

    Expects form fields ``account``, ``password`` and ``date`` (the first
    day of the semester, used to anchor recurring events).
    """
    account = request.form['account']
    password = request.form['password']
    semester_start = request.form['date']
    # NOTE(review): unlike the JSON endpoint, no year/term is forwarded to
    # GainSchedule here, so the scraper's defaults apply.  (Unused local
    # `year`/`term` assignments were removed as dead code.)
    raw_schedule = GainSchedule(account, password).run()
    result = Schedule2ICS(raw_schedule, 1).run(semester_start)
    response = make_response(result)
    response.headers['Content-Type'] = 'text/calendar'
    response.headers['Content-Disposition'] = "attachment; filename=\"{}.ics\"".format(account)
    return response
@app.route('/schedule/json', methods=['GET'])
def jsonschedule():
    """Return a student's schedule as JSON, or redirect to a saved copy.

    Query args: ``xh`` (account), ``pwd`` (password), optional ``save``
    ('true' to persist and redirect), ``year`` and ``term``.
    """
    account = request.args.get('xh')
    password = request.args.get('pwd')
    # `get` already returns the default when the arg is absent.
    save = request.args.get('save', 'false')
    year = request.args.get('year')
    term = request.args.get('term')
    # Fall back to a default semester only when neither part was supplied.
    if year is None and term is None:
        year = '2020-2021'
        term = '2'
    if account is None or password is None:
        result = {
            "status": "error",
            "msg": "please input your account or password"
        }
        return make_response(jsonify(result))
    raw_schedule = GainSchedule(account, password, year, term).run()
    result = Schedule2JSON(account, raw_schedule).run(save)
    # Schedule2JSON returns False on a failed login.
    if isinstance(result, bool) and not result:
        return {"status": False, "msg": "登录失败,学号或密码出错"}
    if save == 'true':
        # 跳转到保存地址 (redirect to the stored file's route)
        print(result)
        return redirect('/schedule/json/' + result)
    # 直接返回 json 数据 (return the schedule inline)
    return make_response(jsonify(result))
@app.route('/schedule/json/<name>')
def jsonscheduleapi(name):
    """Serve a previously saved schedule JSON file from the data directory."""
    # `name` comes straight from the URL; reject anything that could escape
    # the data/ directory (e.g. ".." path traversal).  Flask's default
    # converter excludes "/" but not "..".
    if os.path.basename(name) != name or name.startswith('.'):
        return make_response(jsonify({"status": "error", "msg": "invalid name"}), 400)
    with open('data/' + name + '.json', 'r', encoding='utf8') as f:
        result = json.load(f)
    return make_response(jsonify(result))
if __name__ == "__main__":
    # Environment values are strings; Flask wants an int port.
    port = int(os.environ.get('HDUCPORT', 3000))
    # Bug fix: previously any non-empty HDUCDEBUG string (even "false" or
    # "0") was passed straight to `debug=` and treated as truthy, so debug
    # mode could never be switched off via the environment.
    isdebug = os.environ.get('HDUCDEBUG', 'true').lower() in ('1', 'true', 'yes')
    app.run(debug=isdebug, host='0.0.0.0', port=port)
|
from collections.abc import Iterable
from math import sqrt
from numbers import Real
from functools import partial
from warnings import warn
from operator import attrgetter
from openmc import (
XPlane, YPlane, Plane, ZCylinder, Quadric, Cylinder, XCylinder,
YCylinder, Material, Universe, Cell)
from openmc.checkvalue import (
check_type, check_value, check_length, check_less_than,
check_iterable_type)
import openmc.data
def borated_water(boron_ppm, temperature=293., pressure=0.1013, temp_unit='K',
                  press_unit='MPa', density=None, **kwargs):
    """Return a Material with the composition of boron dissolved in water.

    The water density is either looked up from the given temperature and
    pressure or set directly via `density`.  The boron concentration has no
    effect on the stoichiometric H:O ratio, which stays fixed at 2:1.

    Parameters
    ----------
    boron_ppm : float
        The weight fraction in parts-per-million of elemental boron in the
        water.
    temperature : float
        Temperature in [K] used to compute water density.
    pressure : float
        Pressure in [MPa] used to compute water density.
    temp_unit : {'K', 'C', 'F'}
        The units used for the `temperature` argument.
    press_unit : {'MPa', 'psi'}
        The units used for the `pressure` argument.
    density : float
        Water density in [g / cm^3]. If specified, this value overrides the
        temperature and pressure arguments.
    **kwargs
        All keyword arguments are passed to the created Material object.

    Returns
    -------
    openmc.Material
    """
    # Normalize the temperature to Kelvin.
    check_value('temperature unit', temp_unit, ('K', 'C', 'F'))
    if temp_unit == 'C':
        temp_K = temperature + 273.15
    elif temp_unit == 'F':
        temp_K = (temperature + 459.67) * 5.0 / 9.0
    else:
        temp_K = temperature

    # Normalize the pressure to MPa.
    check_value('pressure unit', press_unit, ('MPa', 'psi'))
    press_MPa = pressure * 0.006895 if press_unit == 'psi' else pressure

    # Water density: explicit value wins over the (T, P) lookup.
    if density is not None:
        water_density = density
    else:
        water_density = openmc.data.water_density(temp_K, press_MPa)

    # Density of the borated solution.
    solution_density = water_density / (1 - boron_ppm * 1e-6)

    def _molar_mass(element, atoms):
        # Sum isotope masses weighted by atom fraction for `atoms` atoms of
        # the element.
        return sum(frac * openmc.data.atomic_mass(iso)
                   for iso, frac, _ in element.expand(atoms, 'ao'))

    # Molar mass of pure water (2 H + 1 O) and of elemental boron.
    M_H2O = (_molar_mass(openmc.Element('H'), 2.0)
             + _molar_mass(openmc.Element('O'), 1.0))
    M_B = _molar_mass(openmc.Element('B'), 1.0)

    # Number fractions of each constituent.
    frac_H2O = (1 - boron_ppm * 1e-6) / M_H2O
    frac_B = boron_ppm * 1e-6 / M_B

    # Build the material; carry the temperature only when the density was
    # derived from it (i.e. not supplied explicitly).
    if density is None:
        out = openmc.Material(temperature=temp_K, **kwargs)
    else:
        out = openmc.Material(**kwargs)
    out.add_element('H', 2 * frac_H2O, 'ao')
    out.add_element('O', frac_H2O, 'ao')
    out.add_element('B', frac_B, 'ao')
    out.set_density('g/cc', solution_density)
    out.add_s_alpha_beta('c_H_in_H2O')
    return out
def rectangular_prism(width, height, axis='z', origin=(0., 0.),
                      boundary_type='transmission', corner_radius=0.):
    """Get an infinite rectangular prism from four planar surfaces.

    .. versionchanged:: 0.11
        This function was renamed from `get_rectangular_prism` to
        `rectangular_prism`.

    Parameters
    ----------
    width: float
        Prism width in units of cm. The width is aligned with the y, x,
        or x axes for prisms parallel to the x, y, or z axis, respectively.
    height: float
        Prism height in units of cm. The height is aligned with the z, z,
        or y axes for prisms parallel to the x, y, or z axis, respectively.
    axis : {'x', 'y', 'z'}
        Axis with which the infinite length of the prism should be aligned.
        Defaults to 'z'.
    origin: Iterable of two floats
        Origin of the prism. The two floats correspond to (y,z), (x,z) or
        (x,y) for prisms parallel to the x, y or z axis, respectively.
        Defaults to (0., 0.).
    boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic'}
        Boundary condition that defines the behavior for particles hitting the
        surfaces comprising the rectangular prism (default is 'transmission').
    corner_radius: float
        Prism corner radius in units of cm. Defaults to 0.

    Returns
    -------
    openmc.Region
        The inside of a rectangular prism
    """
    check_type('width', width, Real)
    check_type('height', height, Real)
    check_type('corner_radius', corner_radius, Real)
    check_value('axis', axis, ['x', 'y', 'z'])
    check_type('origin', origin, Iterable, Real)

    # Define function to create a plane on given axis
    def plane(axis, name, value):
        cls = globals()['{}Plane'.format(axis.upper())]
        return cls(name='{} {}'.format(name, axis),
                   boundary_type=boundary_type,
                   **{axis + '0': value})

    # The two axes perpendicular to the prism's infinite direction.
    if axis == 'x':
        x1, x2 = 'y', 'z'
    elif axis == 'y':
        x1, x2 = 'x', 'z'
    else:
        x1, x2 = 'x', 'y'

    # Get cylinder class corresponding to given axis
    cyl = globals()['{}Cylinder'.format(axis.upper())]

    # Create rectangular region
    min_x1 = plane(x1, 'minimum', -width/2 + origin[0])
    max_x1 = plane(x1, 'maximum', width/2 + origin[0])
    min_x2 = plane(x2, 'minimum', -height/2 + origin[1])
    max_x2 = plane(x2, 'maximum', height/2 + origin[1])
    prism = +min_x1 & -max_x1 & +min_x2 & -max_x2

    # Handle rounded corners if given
    if corner_radius > 0.:
        args = {'R': corner_radius, 'boundary_type': boundary_type}

        # One cylinder at each corner.  (Bug fix: the original code built
        # the min/min corner cylinder twice with identical arguments,
        # creating a redundant extra surface; the duplicate was removed.)
        args[x1 + '0'] = origin[0] - width/2 + corner_radius
        args[x2 + '0'] = origin[1] - height/2 + corner_radius
        x1_min_x2_min = cyl(name='{} min {} min'.format(x1, x2), **args)

        args[x1 + '0'] = origin[0] - width/2 + corner_radius
        args[x2 + '0'] = origin[1] + height/2 - corner_radius
        x1_min_x2_max = cyl(name='{} min {} max'.format(x1, x2), **args)

        args[x1 + '0'] = origin[0] + width/2 - corner_radius
        args[x2 + '0'] = origin[1] - height/2 + corner_radius
        x1_max_x2_min = cyl(name='{} max {} min'.format(x1, x2), **args)

        args[x1 + '0'] = origin[0] + width/2 - corner_radius
        args[x2 + '0'] = origin[1] + height/2 - corner_radius
        x1_max_x2_max = cyl(name='{} max {} max'.format(x1, x2), **args)

        # Planes bounding the square corner regions to be rounded off.
        x1_min = plane(x1, 'min', -width/2 + origin[0] + corner_radius)
        x1_max = plane(x1, 'max', width/2 + origin[0] - corner_radius)
        x2_min = plane(x2, 'min', -height/2 + origin[1] + corner_radius)
        x2_max = plane(x2, 'max', height/2 + origin[1] - corner_radius)

        # Region outside each corner cylinder but inside its corner square.
        corners = (+x1_min_x2_min & -x1_min & -x2_min) | \
                  (+x1_min_x2_max & -x1_min & +x2_max) | \
                  (+x1_max_x2_min & +x1_max & -x2_min) | \
                  (+x1_max_x2_max & +x1_max & +x2_max)

        prism = prism & ~corners

    return prism
def get_rectangular_prism(*args, **kwargs):
    """Deprecated alias for :func:`rectangular_prism`."""
    warn("get_rectangular_prism(...) has been renamed rectangular_prism(...). "
         "Future versions of OpenMC will not accept get_rectangular_prism.",
         FutureWarning)
    return rectangular_prism(*args, **kwargs)
def hexagonal_prism(edge_length=1., orientation='y', origin=(0., 0.),
                    boundary_type='transmission', corner_radius=0.):
    """Create a hexagon region from six surface planes.

    .. versionchanged:: 0.11
        This function was renamed from `get_hexagonal_prism` to
        `hexagonal_prism`.

    Parameters
    ----------
    edge_length : float
        Length of a side of the hexagon in cm
    orientation : {'x', 'y'}
        An 'x' orientation means that two sides of the hexagon are parallel to
        the x-axis and a 'y' orientation means that two sides of the hexagon are
        parallel to the y-axis.
    origin: Iterable of two floats
        Origin of the prism. Defaults to (0., 0.).
    boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic'}
        Boundary condition that defines the behavior for particles hitting the
        surfaces comprising the hexagonal prism (default is 'transmission').
    corner_radius: float
        Prism corner radius in units of cm. Defaults to 0.

    Returns
    -------
    openmc.Region
        The inside of a hexagonal prism
    """
    l = edge_length
    x, y = origin

    if orientation == 'y':
        # Flat faces on the left/right; the four slanted edges are planes
        # with slope +-1/sqrt(3).
        right = XPlane(x0=x + sqrt(3.)/2*l, boundary_type=boundary_type)
        left = XPlane(x0=x - sqrt(3.)/2*l, boundary_type=boundary_type)
        c = sqrt(3.)/3.
        # Upper-right edge: y = -x/sqrt(3) + a
        upper_right = Plane(a=c, b=1., d=l+x*c+y, boundary_type=boundary_type)
        # Upper-left edge: y = x/sqrt(3) + a
        upper_left = Plane(a=-c, b=1., d=l-x*c+y, boundary_type=boundary_type)
        # Lower-right edge: y = x/sqrt(3) - a
        lower_right = Plane(a=-c, b=1., d=-l-x*c+y, boundary_type=boundary_type)
        # Lower-left edge: y = -x/sqrt(3) - a
        lower_left = Plane(a=c, b=1., d=-l+x*c+y, boundary_type=boundary_type)
        prism = -right & +left & -upper_right & -upper_left & \
            +lower_right & +lower_left
        if boundary_type == 'periodic':
            # Pair up opposite faces for periodic boundary conditions.
            right.periodic_surface = left
            upper_right.periodic_surface = lower_left
            lower_right.periodic_surface = upper_left

    elif orientation == 'x':
        # Flat faces on the top/bottom; slanted edges have slope +-sqrt(3).
        top = YPlane(y0=y + sqrt(3.)/2*l, boundary_type=boundary_type)
        bottom = YPlane(y0=y - sqrt(3.)/2*l, boundary_type=boundary_type)
        c = sqrt(3.)
        # Upper-right edge: y = -sqrt(3)*(x - x0) + sqrt(3)*l + y0
        upper_right = Plane(a=c, b=1., d=c*l+x*c+y, boundary_type=boundary_type)
        # Lower-right edge: y = sqrt(3)*(x - x0) - sqrt(3)*l + y0
        lower_right = Plane(a=-c, b=1., d=-c*l-x*c+y,
                            boundary_type=boundary_type)
        # Lower-left edge: y = -sqrt(3)*(x - x0) - sqrt(3)*l + y0
        lower_left = Plane(a=c, b=1., d=-c*l+x*c+y, boundary_type=boundary_type)
        # Upper-left edge: y = sqrt(3)*(x - x0) + sqrt(3)*l + y0
        upper_left = Plane(a=-c, b=1., d=c*l-x*c+y, boundary_type=boundary_type)
        prism = -top & +bottom & -upper_right & +lower_right & \
            +lower_left & -upper_left
        if boundary_type == 'periodic':
            top.periodic_surface = bottom
            upper_right.periodic_surface = lower_left
            lower_right.periodic_surface = upper_left

    # Handle rounded corners if given
    if corner_radius > 0.:
        if boundary_type == 'periodic':
            raise ValueError('Periodic boundary conditions not permitted when '
                             'rounded corners are used.')

        c = sqrt(3.)/2
        # Edge length of the shrunken hexagon whose vertices are the centers
        # of the inner corner cylinders.
        t = l - corner_radius/c

        # Cylinder with corner radius and boundary type pre-applied
        cyl1 = partial(ZCylinder, r=corner_radius, boundary_type=boundary_type)
        cyl2 = partial(ZCylinder, r=corner_radius/(2*c),
                       boundary_type=boundary_type)

        if orientation == 'x':
            # Inner cylinders at the six (shrunken) hexagon vertices.
            x_min_y_min_in = cyl1(name='x min y min in', x0=x-t/2, y0=y-c*t)
            x_min_y_max_in = cyl1(name='x min y max in', x0=x+t/2, y0=y-c*t)
            x_max_y_min_in = cyl1(name='x max y min in', x0=x-t/2, y0=y+c*t)
            x_max_y_max_in = cyl1(name='x max y max in', x0=x+t/2, y0=y+c*t)
            x_min_in = cyl1(name='x min in', x0=x-t, y0=y)
            x_max_in = cyl1(name='x max in', x0=x+t, y0=y)

            # Outer cylinders at the original hexagon vertices.
            x_min_y_min_out = cyl2(name='x min y min out', x0=x-l/2, y0=y-c*l)
            x_min_y_max_out = cyl2(name='x min y max out', x0=x+l/2, y0=y-c*l)
            x_max_y_min_out = cyl2(name='x max y min out', x0=x-l/2, y0=y+c*l)
            x_max_y_max_out = cyl2(name='x max y max out', x0=x+l/2, y0=y+c*l)
            x_min_out = cyl2(name='x min out', x0=x-l, y0=y)
            x_max_out = cyl2(name='x max out', x0=x+l, y0=y)

            # Corner material to cut away: outside the inner cylinder but
            # inside the outer one, at each of the six vertices.
            corners = (+x_min_y_min_in & -x_min_y_min_out |
                       +x_min_y_max_in & -x_min_y_max_out |
                       +x_max_y_min_in & -x_max_y_min_out |
                       +x_max_y_max_in & -x_max_y_max_out |
                       +x_min_in & -x_min_out |
                       +x_max_in & -x_max_out)

        elif orientation == 'y':
            x_min_y_min_in = cyl1(name='x min y min in', x0=x-c*t, y0=y-t/2)
            x_min_y_max_in = cyl1(name='x min y max in', x0=x-c*t, y0=y+t/2)
            x_max_y_min_in = cyl1(name='x max y min in', x0=x+c*t, y0=y-t/2)
            x_max_y_max_in = cyl1(name='x max y max in', x0=x+c*t, y0=y+t/2)
            y_min_in = cyl1(name='y min in', x0=x, y0=y-t)
            y_max_in = cyl1(name='y max in', x0=x, y0=y+t)

            x_min_y_min_out = cyl2(name='x min y min out', x0=x-c*l, y0=y-l/2)
            x_min_y_max_out = cyl2(name='x min y max out', x0=x-c*l, y0=y+l/2)
            x_max_y_min_out = cyl2(name='x max y min out', x0=x+c*l, y0=y-l/2)
            x_max_y_max_out = cyl2(name='x max y max out', x0=x+c*l, y0=y+l/2)
            y_min_out = cyl2(name='y min out', x0=x, y0=y-l)
            y_max_out = cyl2(name='y max out', x0=x, y0=y+l)

            corners = (+x_min_y_min_in & -x_min_y_min_out |
                       +x_min_y_max_in & -x_min_y_max_out |
                       +x_max_y_min_in & -x_max_y_min_out |
                       +x_max_y_max_in & -x_max_y_max_out |
                       +y_min_in & -y_min_out |
                       +y_max_in & -y_max_out)

        prism = prism & ~corners

    return prism
def get_hexagonal_prism(*args, **kwargs):
    """Deprecated alias for :func:`hexagonal_prism`."""
    warn("get_hexagonal_prism(...) has been renamed hexagonal_prism(...). "
         "Future versions of OpenMC will not accept get_hexagonal_prism.",
         FutureWarning)
    return hexagonal_prism(*args, **kwargs)
def cylinder_from_points(p1, p2, r, **kwargs):
    """Return cylinder defined by two points passing through its center.

    Parameters
    ----------
    p1, p2 : 3-tuples
        Coordinates of two points that pass through the center of the cylinder
    r : float
        Radius of the cylinder
    kwargs : dict
        Keyword arguments passed to the :class:`openmc.Quadric` constructor

    Returns
    -------
    openmc.Quadric
        Quadric surface representing the cylinder.
    """
    x1, y1, z1 = p1
    x2, y2, z2 = p2

    # Direction vector along the cylinder axis.
    dx, dy, dz = x2 - x1, y2 - y1, z2 - z1
    # Components of the cross product p1 x p2.
    cx = y1*z2 - y2*z1
    cy = x2*z1 - x1*z2
    cz = x1*y2 - x2*y1

    # Given p=(x,y,z), p1=(x1, y1, z1), p2=(x2, y2, z2), the equation for
    # the cylinder can be derived as r = |(p - p1) x (p - p2)| / |p2 - p1|.
    # Expanding out all terms and grouping according to what Quadric expects
    # gives the coefficients below.
    kwargs.update(
        a=dy*dy + dz*dz,
        b=dx*dx + dz*dz,
        c=dx*dx + dy*dy,
        d=-2*dx*dy,
        e=-2*dy*dz,
        f=-2*dx*dz,
        g=2*(cy*dz - cz*dy),
        h=2*(cz*dx - cx*dz),
        j=2*(cx*dy - cy*dx),
        k=cx*cx + cy*cy + cz*cz - (dx*dx + dy*dy + dz*dz)*r*r)
    return Quadric(**kwargs)
def subdivide(surfaces):
    """Create regions separated by a series of surfaces.

    Given n ordered surfaces, build the n+1 half-space regions they carve
    out.  For example, four :class:`openmc.ZPlane` instances at z=-10,
    z=-5, z=5 and z=10 produce regions for z < -10, -10 < z < -5,
    -5 < z < 5, 5 < z < 10 and 10 < z.

    Parameters
    ----------
    surfaces : sequence of openmc.Surface
        Surfaces separating regions

    Returns
    -------
    list of openmc.Region
        Regions formed by the given surfaces
    """
    inner = [+lower & -upper for lower, upper in zip(surfaces, surfaces[1:])]
    return [-surfaces[0]] + inner + [+surfaces[-1]]
def pin(surfaces, items, subdivisions=None, divide_vols=True,
        **kwargs):
    """Convenience function for building a fuel pin

    Parameters
    ----------
    surfaces : iterable of :class:`openmc.Cylinder`
        Cylinders used to define boundaries
        between items. All cylinders must be
        concentric and of the same orientation, e.g.
        all :class:`openmc.ZCylinder`
    items : iterable
        Objects to go between ``surfaces``. These can be anything
        that can fill a :class:`openmc.Cell`, including
        :class:`openmc.Material`, or other :class:`openmc.Universe`
        objects. There must be one more item than surfaces,
        which will span all space outside the final ring.
    subdivisions : None or dict of int to int
        Dictionary describing which rings to subdivide and how
        many times. Keys are indexes of the annular rings
        to be divided. Will construct equal area rings
    divide_vols : bool
        If this evaluates to ``True``, then volumes of subdivided
        :class:`openmc.Material` instances will also be divided by the
        number of divisions. Otherwise the volume of the
        original material will not be modified before subdivision
    kwargs:
        Additional key-word arguments to be passed to
        :class:`openmc.Universe`, like ``name="Fuel pin"``

    Returns
    -------
    :class:`openmc.Universe`
        Universe of concentric cylinders filled with the desired
        items
    """
    # `cells` is built internally; refuse to let callers inject it.
    if "cells" in kwargs:
        raise SyntaxError(
            "Cells will be set by this function, not from input arguments.")
    check_type("items", items, Iterable)
    # There must be exactly one more item than there are surfaces.
    check_length("surfaces", surfaces, len(items) - 1, len(items) - 1)
    # Check that all surfaces are of similar orientation
    check_type("surface", surfaces[0], Cylinder)
    surf_type = type(surfaces[0])
    check_iterable_type("surfaces", surfaces[1:], surf_type)
    # Check for increasing radii and equal centers
    if surf_type is ZCylinder:
        center_getter = attrgetter("x0", "y0")
    elif surf_type is YCylinder:
        center_getter = attrgetter("x0", "z0")
    elif surf_type is XCylinder:
        center_getter = attrgetter("z0", "y0")
    else:
        raise TypeError(
            "Not configured to interpret {} surfaces".format(
                surf_type.__name__))

    centers = set()
    prev_rad = 0
    for ix, surf in enumerate(surfaces):
        cur_rad = surf.r
        if cur_rad <= prev_rad:
            raise ValueError(
                "Surfaces do not appear to be increasing in radius. "
                "Surface {} at index {} has radius {:7.3e} compared to "
                "previous radius of {:7.5e}".format(
                    surf.id, ix, cur_rad, prev_rad))
        prev_rad = cur_rad
        centers.add(center_getter(surf))

    # All cylinders must share a common axis.
    if len(centers) > 1:
        raise ValueError(
            "Surfaces do not appear to be concentric. The following "
            "centers were found: {}".format(centers))

    if subdivisions is not None:
        check_length("subdivisions", subdivisions, 1, len(surfaces))
        orig_indexes = list(subdivisions.keys())
        check_iterable_type("ring indexes", orig_indexes, int)
        check_iterable_type(
            "number of divisions", list(subdivisions.values()), int)
        # Normalize negative ring indexes to their positive equivalents.
        for ix in orig_indexes:
            if ix < 0:
                subdivisions[len(surfaces) + ix] = subdivisions.pop(ix)
        # Disallow subdivision on the outermost, infinite region
        check_less_than(
            "outer ring", max(subdivisions), len(surfaces), equality=True)

        # ensure ability to concatenate
        if not isinstance(items, list):
            items = list(items)
        if not isinstance(surfaces, list):
            surfaces = list(surfaces)

        # Generate equal-area divisions.  For each subdivided ring with nr
        # divisions, nr - 1 new surfaces are inserted and nr - 1 clones of
        # the ring's fill are spliced in; the original fill now occupies
        # the last (outermost) sub-ring.  Iterating in reverse index order
        # keeps the not-yet-processed indexes valid as the lists grow.
        for ring_index in reversed(sorted(subdivisions.keys())):
            nr = subdivisions[ring_index]
            new_surfs = []
            lower_rad = 0.0 if ring_index == 0 else surfaces[ring_index - 1].r
            upper_rad = surfaces[ring_index].r
            # Constant area per sub-ring: (r_out^2 - r_in^2) / nr.
            area_term = (upper_rad ** 2 - lower_rad ** 2) / nr
            for new_index in range(nr - 1):
                lower_rad = sqrt(area_term + lower_rad ** 2)
                new_surfs.append(surf_type(r=lower_rad))
            surfaces = (
                surfaces[:ring_index] + new_surfs + surfaces[ring_index:])
            filler = items[ring_index]
            # Optionally split the material volume evenly across the clones.
            if (divide_vols and hasattr(filler, "volume")
                    and filler.volume is not None):
                filler.volume /= nr
            items[ring_index:ring_index] = [
                filler.clone() for _i in range(nr - 1)]

    # Build the universe
    regions = subdivide(surfaces)
    cells = [Cell(fill=f, region=r) for r, f in zip(regions, items)]
    return Universe(cells=cells, **kwargs)
|
/**
* AnyChart is a lightweight, robust charting library with a great API and docs; it works with your stack and has tons of chart types and features.
*
* Modules: pareto, theme-pareto
* Version: 8.4.0.1922 (2018-10-04)
* License: https://www.anychart.com/buy/
* Contact: sales@anychart.com
* Copyright: AnyChart.com 2018. All rights reserved.
*/
(function(global,factory){if(typeof module==='object'&&typeof module.exports==='object'){var wrapper=function(w){if(!w.document){throw Error('AnyChart requires a window with a document');}factory.call(w,w,w.document);try{w.acgraph.isNodeJS=Object.prototype.toString.call(global.process)=="[object process]";}catch(e){};return w.anychart;};module.exports=global.document?wrapper(global):wrapper;}else{factory.call(global,window,document)}})(typeof window!=='undefined'?window:this,function(window,document,opt_noGlobal){var $,_,$_=this.anychart;if($_&&(_=$_._)){$=$_.$}else{throw Error('anychart-base.min.js module should be included first. See modules explanation at https://docs.anychart.com/Quick_Start/Modules for details');$={};_={}}if(!_.pareto){_.pareto=1;(function($){var jQ=function(a){$.cq.call(this,a);this.o=[];this.f=[];this.xg=[];this.b=0},kQ=function(a,b,c,d,e){$.Gy.call(this,a,b,c,d,e)},mQ=function(a,b,c){if($.n(c)&&-1<c){var d=null;a=a.data();$.n(a)&&(a=a.Uk(c),$.K(a,jQ)||$.K(a,lQ))&&(d={},d.cf=$.Bl(a.ZA(c),2),d.rf=$.Bl(a.FE(c),2))}d&&(b.cf=d.cf,b.rf=d.rf)},lQ=function(a){$.cq.call(this,a)},nQ=function(){$.fz.call(this);this.hb("pareto");this.K=$.ur();this.K.Eh(0).ki(100);this.Id="pareto"},oQ=function(a){if($.n(a.f)){var b=a.f;$.Xp(b);b=b.b;var c=a.Sh(0),
d;c?d=c.Xa():d=a.Xa();d.Eh(0);b?d.ki(b):d.ki(1)}},vfa=function(a,b){return $.en(a)?$.en(b)?-1:1:$.en(b)?-1:b-a},pQ=function(a){var b=new nQ;b.ca.defaultSeriesType="column";b.mi();b.tf();b.data(a);return b};$.H(jQ,$.cq);$.g=jQ.prototype;$.g.oa=1;$.g.kj=function(a,b,c){a=jQ.B.kj.call(this,a,b,c);"value"==c&&(a=$.O(a),(0,window.isNaN)(a)||0>a)&&(a=0);return a};
$.g.sD=function(){this.o=[];this.f=[];this.xg=[];this.b=0;this.I(1);for(var a=this.aa(),b;a.advance();)b=a.get("value"),this.xg.push(b),this.b+=b;if(this.xg.length)if(0==this.b)for(a=0;a<this.xg.length;a++)this.o[a]=0,this.f[a]=0;else for(this.o[0]=this.f[0]=100*this.xg[0]/this.b,a=1;a<this.xg.length;a++)this.o[a]=100*this.xg[a]/this.b,this.f[a]=this.f[a-1]+100*this.xg[a]/this.b;return null};$.g.WF=function(a){this.Vr=null;$.W(a,16)&&this.u(1,16)};$.g.ZA=function(a){return this.f[a]};$.g.FE=function(a){return this.o[a]};
$.g.Pj=function(a){return this.xg[a]};$.H(kQ,$.Gy);$.g=kQ.prototype;$.g.JC={"%BubbleSize":"size","%RangeStart":"low","%RangeEnd":"high","%XValue":"x","%CF":"cf","%RF":"rf"};$.g.Ro=function(a,b){var c=kQ.B.Ro.call(this,a,b),d=this.data(),e;$.n(d)&&$.n(c.index)&&-1<(e=Number(c.index.value))&&(d=d.Uk(e),$.K(d,jQ)||$.K(d,lQ))&&(c.cf={value:$.Bl(d.ZA(e),2),type:"number"},c.rf={value:$.Bl(d.FE(e),2),type:"number"});return c};$.g.Ee=function(a,b){var c=kQ.B.Ee.call(this,a,b);mQ(this,c,c.index);return c};
$.g.Qh=function(a){a=kQ.B.Qh.call(this,a);mQ(this,a,a.index);return a};$.g.ud=function(a){var b=kQ.B.ud.call(this,a);mQ(this,b,a);return b};$.H(lQ,$.cq);lQ.prototype.kj=function(a,b,c){return"value"==c?this.Ad.ZA(b):lQ.B.kj.call(this,a,b,c)};lQ.prototype.ZA=function(a){return this.Ad.ZA(a)};lQ.prototype.FE=function(a){return this.Ad.FE(a)};$.H(nQ,$.fz);
nQ.prototype.data=function(a,b){if($.n(a)){if(a){var c=a.title||a.caption;c&&this.title(c);a.rows&&(a=a.rows)}if(this.cG!==a){this.cG=a;$.Zc(this.bl);$.K(a,$.Wp)?this.Ad=this.bl=a.Ll():$.K(a,$.fq)?this.Ad=this.bl=a.we():this.Ad=(this.bl=new $.fq($.A(a)||$.z(a)?a:null,b)).we();$.V(this);this.f&&$.Fp(this.f,this.Ke,this);$.Zc(this.f);this.f=new jQ(this.Ad.sort("value",vfa));$.U(this.f,this.Ke,this);oQ(this);c=this.Sh(0);var d=this.Sh(1);this.Ha&&$.Zc(this.Ha);this.Ha=this.f.Ll();c||(c=this.column());
c.data(this.Ha);this.Ya&&$.Zc(this.Ya);this.Ya=new lQ(this.f);d||(d=this.line().clip(!1).Bb(!0).Xa(this.K));d.data(this.Ya);this.ba(!0)}return this}return this.f};nQ.prototype.Ke=function(a){$.W(a,16)&&oQ(this)};var qQ={},rQ=$.Hy|7864320;qQ.area={xb:1,Cb:1,Ib:[$.HD,$.$D,$.VD,$.PD,$.GD,$.aE,$.WD,$.OD,$.JD,$.bE,$.XD,$.cE],Gb:null,zb:null,sb:rQ,vb:"value",ub:"zero"};qQ.bar={xb:6,Cb:2,Ib:[$.jE,$.JD,$.RD,$.cE,$.TD,$.XD,$.YD,$.bE],Gb:null,zb:null,sb:rQ,vb:"value",ub:"zero"};
qQ.box={xb:3,Cb:2,Ib:[$.jE,$.JD,$.MK,$.NK,$.OK],Gb:null,zb:null,sb:rQ,vb:"highest",ub:"lowest"};qQ.bubble={xb:4,Cb:2,Ib:[$.MD,$.ND,$.QD,$.SD],Gb:null,zb:null,sb:rQ,vb:"value",ub:"value"};qQ.candlestick={xb:5,Cb:2,Ib:[$.TD,$.XD,$.YD,$.bE],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};qQ.column={xb:6,Cb:2,Ib:[$.jE,$.JD,$.RD,$.cE,$.TD,$.XD,$.YD,$.bE],Gb:null,zb:null,sb:rQ,vb:"value",ub:"zero"};qQ["jump-line"]={xb:19,Cb:2,Ib:[$.HD,$.ZD,$.UD,$.ID],Gb:null,zb:null,sb:rQ,vb:"value",ub:"value"};
qQ.stick={xb:20,Cb:2,Ib:[$.HD,$.ZD,$.UD,$.ID],Gb:null,zb:null,sb:rQ,vb:"value",ub:"zero"};qQ.line={xb:8,Cb:1,Ib:[$.HD,$.$D,$.VD,$.PD],Gb:null,zb:null,sb:rQ,vb:"value",ub:"value"};qQ.marker={xb:9,Cb:2,Ib:[$.jE,$.JD,$.RD,$.cE,$.TD,$.XD,$.YD,$.bE],Gb:null,zb:null,sb:$.Hy|3670016,vb:"value",ub:"value"};qQ.ohlc={xb:10,Cb:2,Ib:[$.UD,$.ZD],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};qQ["range-area"]={xb:11,Cb:1,Ib:[$.GD,$.iE,$.eE,$.JD,$.dE,$.hE,$.LD,$.KD],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};
qQ["range-bar"]={xb:12,Cb:2,Ib:[$.jE,$.JD,$.fE,$.gE,$.LD,$.KD],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};qQ["range-column"]={xb:12,Cb:2,Ib:[$.jE,$.JD,$.fE,$.gE,$.LD,$.KD],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};qQ["range-spline-area"]={xb:13,Cb:1,Ib:[$.GD,$.eE,$.iE,$.JD,$.dE,$.hE,$.LD,$.KD],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};qQ["range-step-area"]={xb:14,Cb:1,Ib:[$.GD,$.JD,$.eE,$.iE],Gb:null,zb:null,sb:rQ,vb:"high",ub:"low"};
qQ.spline={xb:15,Cb:1,Ib:[$.HD,$.$D,$.VD,$.PD],Gb:null,zb:null,sb:rQ,vb:"value",ub:"value"};qQ["spline-area"]={xb:16,Cb:1,Ib:[$.HD,$.$D,$.VD,$.PD,$.GD,$.aE,$.WD,$.OD,$.JD,$.bE,$.XD,$.cE],Gb:null,zb:null,sb:rQ,vb:"value",ub:"zero"};qQ["step-area"]={xb:17,Cb:1,Ib:[$.HD,$.$D,$.VD,$.PD,$.GD,$.aE,$.WD,$.OD,$.JD,$.bE,$.XD,$.cE],Gb:null,zb:null,sb:rQ,vb:"value",ub:"zero"};qQ["step-line"]={xb:18,Cb:1,Ib:[$.HD,$.$D,$.VD,$.PD],Gb:null,zb:null,sb:rQ,vb:"value",ub:"value"};nQ.prototype.Ei=qQ;$.Zw(nQ,nQ.prototype.Ei);
$.g=nQ.prototype;$.g.St=function(a,b){return new kQ(this,this,a,b,!0)};$.g.Ey=function(){return $.qr};$.g.fB=function(){return["Pareto chart xScale","ordinal"]};$.g.IE=function(){return 3};$.g.lJ=function(){return["Pareto chart yScale","scatter","linear, log"]};$.g.uy=function(){return[this]};$.g.YI=function(){return["value","CF","RF"]};$.g.yZ=function(a,b,c){b=c.ja();c=this.f.Uk(b);a[1]=c.Pj(b);a[2]=c.ZA(b);a[3]=c.FE(b)};$.g.kb=function(){this.J(131072)&&oQ(this);nQ.B.kb.call(this)};
$.g.Mj=function(a){this.J(131072)&&oQ(this);return nQ.B.Mj.call(this,a)};$.g.lX=function(){return this.Qa()};$.g.R=function(){$.$c(this.f,this.Ad,this.bl,this.Ha,this.Ya);this.Ya=this.Ha=this.bl=this.Ad=this.f=null;$.Zc(this.K);this.K=null;nQ.B.R.call(this)};$.g.F=function(){var a=nQ.B.F.call(this);$.n(this.data())&&(a.chart.data=this.data().F());return a};$.g.mi=function(){nQ.B.mi.call(this);this.Ki(1).scale(this.K)};
$.g.Y=function(a,b){nQ.B.Y.call(this,a,b);b&&this.Ki(1).scale(this.K);"data"in a&&this.data(a.data)};var sQ=nQ.prototype;sQ.data=sQ.data;sQ.xScale=sQ.Qa;sQ.yScale=sQ.Xa;sQ.crosshair=sQ.ig;sQ.xGrid=sQ.zn;sQ.yGrid=sQ.Bn;sQ.xMinorGrid=sQ.uo;sQ.yMinorGrid=sQ.wo;sQ.xAxis=sQ.Hh;sQ.getXAxesCount=sQ.eB;sQ.yAxis=sQ.Ki;sQ.getYAxesCount=sQ.gB;sQ.getSeries=sQ.Te;sQ.lineMarker=sQ.Hm;sQ.rangeMarker=sQ.Om;sQ.textMarker=sQ.Um;sQ.palette=sQ.Yb;sQ.markerPalette=sQ.ef;sQ.hatchFillPalette=sQ.Qd;sQ.getType=sQ.La;
sQ.addSeries=sQ.Gk;sQ.getSeriesAt=sQ.Sh;sQ.getSeriesCount=sQ.Tl;sQ.removeSeries=sQ.no;sQ.removeSeriesAt=sQ.sn;sQ.removeAllSeries=sQ.pp;sQ.getPlotBounds=sQ.Ff;sQ.xZoom=sQ.jl;sQ.xScroller=sQ.vo;sQ.getStat=sQ.Tf;sQ.annotations=sQ.Ej;sQ.getXScales=sQ.xw;sQ.getYScales=sQ.yw;$.no.pareto=pQ;$.G("anychart.pareto",pQ);}).call(this,$)}
if(!_.theme_pareto){_.theme_pareto=1;(function($){$.sa($.fa.anychart.themes.defaultTheme,{pareto:{defaultSeriesType:"column",tooltip:{displayMode:"union"},interactivity:{hoverMode:"by-x"},yAxes:[{orientation:"left"},{orientation:"right",labels:{format:"{%Value}%"}}]}});}).call(this,$)}
$_=window.anychart;$_.$=$;$_._=_});
|
import scrapy
class BaiduSpider(scrapy.Spider):
    """Minimal spider that fetches the Baidu homepage and prints its title.

    Run with: ``scrapy crawl baidu``
    """

    # Spider name (defaults to the file name); used to launch the crawl.
    name = 'baidu'
    # Domains the spider is allowed to crawl.
    allowed_domains = ['www.baidu.com']
    # Starting URL(s) — a list: the first page (or first batch) to request.
    start_urls = ['http://www.baidu.com/']

    def parse(self, response):
        """Extract the page <title> text and print the resulting selector."""
        title = response.xpath('/html/head/title/text()')
        print(title)
|
from typing import Union
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.models.modelv2 import ModelV2
from ray.rllib.utils.annotations import override
from ray.rllib.utils.exploration.exploration import Exploration
from ray.rllib.utils.exploration.random import Random
from ray.rllib.utils.framework import try_import_tf, try_import_torch, \
get_variable, TensorType
from ray.rllib.utils.schedules.piecewise_schedule import PiecewiseSchedule
# Framework handles are resolved lazily; either may be None when the
# corresponding framework is not installed.
tf = try_import_tf()
torch, _ = try_import_torch()
class GaussianNoise(Exploration):
    """An exploration that adds white noise to continuous actions.

    If explore=True, returns actions plus scale (<-annealed over time) x
    Gaussian noise. Also, some completely random period is possible at the
    beginning.
    If explore=False, returns the deterministic action.
    """

    def __init__(self,
                 action_space,
                 *,
                 framework: str,
                 model: ModelV2,
                 random_timesteps=1000,
                 stddev=0.1,
                 initial_scale=1.0,
                 final_scale=0.02,
                 scale_timesteps=10000,
                 scale_schedule=None,
                 **kwargs):
        """Initializes a GaussianNoise Exploration object.

        Args:
            random_timesteps (int): The number of timesteps for which to act
                completely randomly. Only after this number of timesteps, the
                `self.scale` annealing process will start (see below).
            stddev (float): The stddev (sigma) to use for the
                Gaussian noise to be added to the actions.
            initial_scale (float): The initial scaling weight to multiply
                the noise with.
            final_scale (float): The final scaling weight to multiply
                the noise with.
            scale_timesteps (int): The timesteps over which to linearly anneal
                the scaling factor (after(!) having used random actions for
                `random_timesteps` steps).
            scale_schedule (Optional[Schedule]): An optional Schedule object
                to use (instead of constructing one from the given parameters).
        """
        assert framework is not None
        super().__init__(
            action_space, model=model, framework=framework, **kwargs)

        self.random_timesteps = random_timesteps
        # Helper producing purely random actions for the initial
        # `random_timesteps`-long exploration phase.
        self.random_exploration = Random(
            action_space, model=self.model, framework=self.framework, **kwargs)
        self.stddev = stddev
        # The `scale` annealing schedule: linear from `initial_scale` down to
        # `final_scale` once the random phase is over, constant afterwards.
        self.scale_schedule = scale_schedule or PiecewiseSchedule(
            endpoints=[(random_timesteps, initial_scale),
                       (random_timesteps + scale_timesteps, final_scale)],
            outside_value=final_scale,
            framework=self.framework)

        # The current timestep value (tf-var or python int).
        self.last_timestep = get_variable(
            0, framework=self.framework, tf_name="timestep")

    @override(Exploration)
    def get_exploration_action(self,
                               *,
                               action_distribution: ActionDistribution,
                               timestep: Union[int, TensorType],
                               explore: bool = True):
        # Adds IID Gaussian noise for exploration, TD3-style.
        if self.framework == "torch":
            return self._get_torch_exploration_action(action_distribution,
                                                      explore, timestep)
        else:
            return self._get_tf_exploration_action_op(action_distribution,
                                                      explore, timestep)

    def _get_tf_exploration_action_op(self, action_dist, explore, timestep):
        """Returns a tf-op computing (action, logp) for the given timestep.

        Random actions during the first `random_timesteps` steps, then
        deterministic action + annealed-scale Gaussian noise (clipped to the
        action space), or the plain deterministic action if explore=False.
        """
        ts = timestep if timestep is not None else self.last_timestep

        # The deterministic actions (if explore=False).
        deterministic_actions = action_dist.deterministic_sample()

        # Take a Gaussian sample with our stddev (mean=0.0) and scale it.
        gaussian_sample = self.scale_schedule(ts) * tf.random_normal(
            tf.shape(deterministic_actions), stddev=self.stddev)

        # Stochastic actions could either be: random OR action + noise.
        random_actions, _ = \
            self.random_exploration.get_tf_exploration_action_op(
                action_dist, explore)
        stochastic_actions = tf.cond(
            pred=ts <= self.random_timesteps,
            true_fn=lambda: random_actions,
            false_fn=lambda: tf.clip_by_value(
                deterministic_actions + gaussian_sample,
                self.action_space.low * tf.ones_like(deterministic_actions),
                self.action_space.high * tf.ones_like(deterministic_actions))
        )

        # Chose by `explore` (main exploration switch).
        batch_size = tf.shape(deterministic_actions)[0]
        action = tf.cond(
            pred=tf.constant(explore, dtype=tf.bool)
            if isinstance(explore, bool) else explore,
            true_fn=lambda: stochastic_actions,
            false_fn=lambda: deterministic_actions)
        # Logp=always zero.
        logp = tf.zeros(shape=(batch_size, ), dtype=tf.float32)
        # Increment `last_timestep` by 1 (or set to `timestep`).
        assign_op = \
            tf.assign_add(self.last_timestep, 1) if timestep is None else \
            tf.assign(self.last_timestep, timestep)
        with tf.control_dependencies([assign_op]):
            return action, logp

    def _get_torch_exploration_action(self, action_dist, explore, timestep):
        """Torch counterpart of `_get_tf_exploration_action_op`."""
        # Set last timestep or (if not given) increase by one.
        self.last_timestep = timestep if timestep is not None else \
            self.last_timestep + 1

        # Apply exploration.
        if explore:
            # Random exploration phase.
            if self.last_timestep <= self.random_timesteps:
                action, _ = \
                    self.random_exploration.get_torch_exploration_action(
                        action_dist, True)
            # Take a Gaussian sample with our stddev (mean=0.0) and scale it.
            else:
                det_actions = action_dist.deterministic_sample()
                scale = self.scale_schedule(self.last_timestep)
                # Bugfix: `torch.normal` takes the keyword `std`, not
                # `stddev` -- the old keyword raised a TypeError at runtime.
                gaussian_sample = scale * torch.normal(
                    mean=0.0, std=self.stddev, size=det_actions.size())
                # NOTE(review): assumes `action_space.low/high` broadcast
                # against torch tensors -- confirm for this action space.
                action = torch.clamp(
                    det_actions + gaussian_sample,
                    self.action_space.low * torch.ones_like(det_actions),
                    self.action_space.high * torch.ones_like(det_actions))
        # No exploration -> Return deterministic actions.
        else:
            action = action_dist.deterministic_sample()

        # Logp=always zero.
        # Bugfix: `torch.zeros` takes sizes positionally; `shape=` is not a
        # valid keyword and raised a TypeError.
        logp = torch.zeros(action.size()[0], dtype=torch.float32)
        return action, logp

    @override(Exploration)
    def get_info(self):
        """Returns the current scale value.

        Returns:
            Union[float,tf.Tensor[float]]: The current scale value.
        """
        scale = self.scale_schedule(self.last_timestep)
        return {"cur_scale": scale}
|
#include "modes.h"
/* Switch the editor into tab mode (tab-navigation keybindings active).
 * No-op when there are no open tabs. */
void enter_tab_mode(void)
{
    if (!editor.tabs_length)
        return;
    SDL_Log("Mode_tab");
    editor.mode = Mode_tab;
    editor.refresh_needed = true;
}
/* Leave tab mode and return to normal editing mode. */
void exit_tab_mode(void)
{
    SDL_Log("Mode_normal");
    editor.mode = Mode_normal;
    editor.refresh_needed = true;
}
/* Move the active view one slot to the left within the current tab.
 * NOTE(review): only the guard and the redraw flag exist -- the actual
 * reordering is not implemented.  The guard also tests the *tab* index
 * (active_tab_index == 0), which looks copied from the tab-move variant;
 * a view move would presumably test active_view_index -- confirm. */
void move_active_view_to_the_left(void)
{
    if (editor.tabs_length < 2 || editor.active_tab_index == 0)
        return;
    editor.refresh_needed = true;
}
/* Move the active view one slot to the right within the current tab.
 * NOTE(review): the reordering itself is missing -- the function only
 * validates bounds and requests a redraw. */
void move_active_view_to_the_right(void)
{
    if (!editor.tabs_length)
        return;
    Tab* const tab = &editor.tabs[editor.active_tab_index];
    /* Already the right-most view: nothing to do. */
    if (tab->active_view_index == tab->views_length - 1)
        return;
    editor.refresh_needed = true;
}
void move_active_tab_to_the_left(void)
{
if (editor.tabs_length < 2 || editor.active_tab_index == 0)
return;
const Tab temp = editor.tabs[editor.active_tab_index - 1];
editor.tabs[editor.active_tab_index - 1] = editor.tabs[editor.active_tab_index];
editor.tabs[editor.active_tab_index] = temp;
measure_tabs();
set_editor_active_tab(editor.active_tab_index - 1);
editor.refresh_needed = true;
}
void move_active_tab_to_the_right(void)
{
if (editor.tabs_length < 2 || editor.active_tab_index == editor.tabs_length - 1)
return;
const Tab temp = editor.tabs[editor.active_tab_index + 1];
editor.tabs[editor.active_tab_index + 1] = editor.tabs[editor.active_tab_index];
editor.tabs[editor.active_tab_index] = temp;
measure_tabs();
set_editor_active_tab(editor.active_tab_index + 1);
editor.refresh_needed = true;
}
/* Detach the active view into a brand-new tab inserted to the left of the
 * current tab.  No-op when there are no tabs, or the active tab holds only
 * one view (the move would be pointless). */
void put_active_view_in_new_tab_to_the_left(void)
{
    if (!editor.tabs_length)
        return;
    if (editor.tabs[editor.active_tab_index].views_length == 1)
        return;
    const uint8_t new_tab_index = insert_tab_before_active_tab();
    Tab* const restrict new_tab = &editor.tabs[new_tab_index];
    /* Inserting before the active tab shifted it one slot to the right,
     * hence the pre-increment of active_tab_index here. */
    Tab* const restrict active_tab = &editor.tabs[++editor.active_tab_index];
    const uint8_t view_index_in_new_tab = add_view(new_tab_index);
    /* Copy the view record into the new tab, then drop it from the old one. */
    new_tab->views[view_index_in_new_tab] = active_tab->views[active_tab->active_view_index];
    remove_view(editor.active_tab_index, active_tab->active_view_index);
    /* NOTE(review): set_editor_active_tab() is passed a *view* index
     * (active_view_index), not a tab index -- new_tab_index looks like the
     * intended argument; confirm.  Also, unlike the right-hand variant, the
     * old tab's active_view_index is not clamped after the removal. */
    set_editor_active_tab(active_tab->active_view_index);
    measure_tabs();
    editor.refresh_needed = true;
}
/* Detach the active view into a brand-new tab inserted to the right of the
 * current tab.  No-op when there are no tabs, or the active tab holds only
 * one view. */
void put_active_view_in_new_tab_to_the_right(void)
{
    if (!editor.tabs_length)
        return;
    Tab* const restrict active_tab = &editor.tabs[editor.active_tab_index];
    if (active_tab->views_length == 1)
        return;
    const uint8_t new_tab_index = insert_tab_after_active_tab();
    Tab* const restrict new_tab = &editor.tabs[new_tab_index];
    const uint8_t view_index_in_new_tab = add_view(new_tab_index);
    /* Copy the view record into the new tab, then drop it from the old one. */
    new_tab->views[view_index_in_new_tab] = active_tab->views[active_tab->active_view_index];
    remove_view(editor.active_tab_index, active_tab->active_view_index);
    /* Keep the old tab's selection in range after the removal. */
    if (active_tab->active_view_index)
        active_tab->active_view_index--;
    /* NOTE(review): set_editor_active_tab() is passed a *view* index, not a
     * tab index -- new_tab_index looks like the intended argument; confirm. */
    set_editor_active_tab(active_tab->active_view_index);
    measure_tabs();
    editor.refresh_needed = true;
}
/* Move the active view into the existing tab on the left.
 * NOTE(review): unimplemented stub -- only sets the redraw flag.  The guard
 * rejects the *last* tab (tabs_length - 1) while the "right" variant rejects
 * tab 0; the two guards look swapped -- confirm intent before implementing. */
void put_active_view_in_tab_to_the_left(void)
{
    if (editor.tabs_length < 2 || editor.active_tab_index == editor.tabs_length - 1)
        return;
    editor.refresh_needed = true;
}
/* Move the active view into the existing tab on the right.
 * NOTE(review): unimplemented stub -- only sets the redraw flag.  The guard
 * rejects tab 0, which matches a "move left" precondition; see the matching
 * note on put_active_view_in_tab_to_the_left -- confirm intent. */
void put_active_view_in_tab_to_the_right(void)
{
    if (editor.tabs_length < 2 || editor.active_tab_index == 0)
        return;
    editor.refresh_needed = true;
}
/* Focus the view immediately to the left of the active one and show its
 * buffer path in the window title. */
void activate_left_hand_side_view(void)
{
    if (editor.tabs_length == 0)
        return;

    Tab* const tab = &editor.tabs[editor.active_tab_index];
    if (tab->active_view_index == 0)
        return;

    tab->active_view_index -= 1;
    set_editor_title_to_buffer_path(tab->views[tab->active_view_index].buffer);
    editor.refresh_needed = true;
}
/* Focus the view immediately to the right of the active one and show its
 * buffer path in the window title. */
void activate_right_hand_side_view(void)
{
    if (editor.tabs_length == 0)
        return;

    Tab* const tab = &editor.tabs[editor.active_tab_index];
    if (tab->active_view_index + 1 == tab->views_length)
        return;

    tab->active_view_index += 1;
    set_editor_title_to_buffer_path(tab->views[tab->active_view_index].buffer);
    editor.refresh_needed = true;
}
/* Focus the tab immediately to the left of the active one. */
void activate_left_hand_side_tab(void)
{
    if (editor.active_tab_index == 0 || editor.tabs_length < 2)
        return;

    editor.active_tab_index -= 1;
    set_editor_active_tab(editor.active_tab_index);
    editor.refresh_needed = true;
}
/* Focus the tab immediately to the right of the active one. */
void activate_right_hand_side_tab(void)
{
    if (editor.active_tab_index == editor.tabs_length - 1 || editor.tabs_length < 2)
        return;

    editor.active_tab_index += 1;
    set_editor_active_tab(editor.active_tab_index);
    editor.refresh_needed = true;
}
/* Close the active tab.  Not implemented yet. */
void close_active_tab(void)
{
    // TODO: Check that all buffers that have a single reference have
    // no unsaved changes.
}
|
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
import json
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from starthinker_ui.recipe.models import Recipe
class Command(BaseCommand):
  """Export active (or one chosen) recipe either to pub/sub or to disk."""

  help = 'Moves database recipes to json files or pub/sub topic'

  def add_arguments(self, parser):
    """Register the --remote, --recipe and --force CLI flags."""
    parser.add_argument('--remote', action='store_true', dest='remote',
                        default=False, help='Run jobs remotely using pub sub.')
    parser.add_argument('--recipe', action='store', dest='recipe',
                        default=None, help='Run a specific recipe.')
    parser.add_argument('--force', action='store_true', dest='force',
                        default=False,
                        help='Force execution regardless of schedule.')

  def handle(self, *args, **kwargs):
    """Dispatch each selected recipe remotely, or write its JSON to UI_CRON."""
    if kwargs['recipe']:
      recipes = Recipe.objects.filter(pk=kwargs['recipe'])
    else:
      recipes = Recipe.objects.filter(active=True)

    for recipe in recipes:
      try:
        if kwargs['remote']:
          print('Dispatch: %s' % recipe.uid())
          if kwargs['force']:
            recipe.force()
        elif settings.UI_CRON:
          print('Write: %s/recipe_%d.json' % (settings.UI_CRON, recipe.pk))
          with open(settings.UI_CRON + '/recipe_%d.json' % recipe.pk, 'w') as f:
            f.write(json.dumps(recipe.get_json()))
        else:
          raise Exception('Neither UI_CRON configured nor remote specified.')
      # Let deliberate interrupts propagate; log everything else and go on.
      except (KeyboardInterrupt, SystemExit):
        raise
      except Exception as e:
        print('DEPLOY ERROR:', str(e))
|
from enum import Enum
from tortoise import Model, Tortoise, fields
class UserRole(str, Enum):
    """Access level of a User account (persisted as its string value)."""

    PARTICIPANT = "participant"
    CAPTAIN = "captain"
    ORGANIZER = "organizer"
    ADMIN = "admin"
class User(Model):
    """Login account; role-specific data lives in the one-to-one
    Participant/Captain/Organizer/Admin profile rows below."""

    id = fields.IntField(pk=True)
    email = fields.CharField(max_length=128, unique=True)
    hashed_password = fields.CharField(max_length=512)
    # Full name; presumably "FIO" = surname/first name/patronymic -- confirm.
    fio = fields.CharField(null=True, max_length=128)
    role = fields.CharEnumField(UserRole, default=UserRole.PARTICIPANT)
    # Avatar image reference; optional.
    avatar = fields.CharField(max_length=128, null=True)

    # Reverse sides of the per-role one-to-one profiles.
    as_participant: fields.ReverseRelation["Participant"]
    as_captain: fields.ReverseRelation["Captain"]
    as_organizer: fields.ReverseRelation["Organizer"]
    as_admin: fields.ReverseRelation["Admin"]

    def __repr__(self):
        return str(self.email)

    class PydanticMeta:
        # Never expose the password hash through generated Pydantic models.
        exclude = ["hashed_password"]
class Participant(Model):
    """Participant profile attached one-to-one to a User."""

    id = fields.IntField(pk=True)
    user = fields.OneToOneField("models.User", related_name="as_participant")
    # Reverse side of Team.participants (many-to-many).
    teams: fields.ManyToManyRelation["Team"]
class Captain(Model):
    """Captain profile attached one-to-one to a User."""

    id = fields.IntField(pk=True)
    user = fields.OneToOneField("models.User", related_name="as_captain")
    # Reverse side of the Team.capitan foreign key (related_name="teams").
    teams: fields.ReverseRelation["Team"]
class Organizer(Model):
    """Organizer profile attached one-to-one to a User."""

    id = fields.IntField(pk=True)
    user = fields.OneToOneField("models.User", related_name="as_organizer")
    # NOTE(review): Hackathon.organizers is a ManyToManyField with
    # related_name="hackathons", so this reverse side is a many-to-many
    # relation; the ForeignKeyRelation annotation looks wrong -- confirm.
    hackathons: fields.ForeignKeyRelation["Hackathon"]
class Admin(Model):
    """Admin profile attached one-to-one to a User."""

    id = fields.IntField(pk=True)
    user = fields.OneToOneField("models.User", related_name="as_admin")
class Team(Model):
    """A hackathon team: one captain plus participant members."""

    id = fields.IntField(pk=True)
    name = fields.CharField(max_length=128)
    participants: fields.ManyToManyRelation[Participant] = fields.ManyToManyField(
        "models.Participant"
    )
    # Shareable token used to join the team.
    invite_link = fields.CharField(max_length=64, unique=True)
    # NOTE(review): "capitan" is a typo for "captain", but renaming it would
    # change the DB column and break callers -- keep as is.
    capitan = fields.ForeignKeyField("models.Captain", related_name="teams")
    # Reverse side of Hackathon.teams (many-to-many).
    hackathons: fields.ManyToManyRelation["Hackathon"]

    async def team_size(self) -> int:
        """Number of participants currently on the team."""
        return len(await self.participants)

    def __repr__(self):
        return str(self.name)
class Sponsor(Model):
    """Company or brand sponsoring one or more hackathons."""

    id = fields.IntField(pk=True)
    name = fields.CharField(max_length=128)
    link = fields.CharField(max_length=128)
    image = fields.CharField(max_length=128)
    # Reverse side of Hackathon.sponsors (many-to-many).
    hackathons: fields.ManyToManyRelation["Hackathon"]
class Publication(Model):
    """News/announcement post attached to a hackathon."""

    id = fields.IntField(pk=True)
    title = fields.CharField(max_length=128)
    text = fields.TextField()
    # NOTE(review): auto_now=True updates the timestamp on *every* save;
    # for a publication date auto_now_add=True may have been intended.
    date = fields.DatetimeField(auto_now=True)
    hackathon = fields.ForeignKeyField("models.Hackathon", related_name="publications")
class HackathonTag(Model):
    """Tags to identify type of hackathon, for example: Web, VR, AR, etc"""

    id = fields.IntField(pk=True)
    name = fields.CharField(max_length=128)
    # Reverse side of Hackathon.tags (many-to-many).
    hackathons: fields.ManyToManyRelation["Hackathon"]
class Location(Model):
    """City-level location a hackathon can be attached to."""

    id = fields.IntField(pk=True)
    city = fields.CharField(max_length=128, null=True)
    # NOTE(review): Hackathon.location uses related_name="hackathons", so the
    # reverse accessor is `hackathons` (plural); this annotation's singular
    # name does not match -- confirm.
    hackathon: fields.ForeignKeyRelation["Hackathon"]
class Hackathon(Model):
    """Central event entity tying together teams, sponsors, organizers,
    tags, publications and an optional location."""

    id = fields.IntField(pk=True)
    name = fields.CharField(max_length=128)
    description = fields.TextField()
    start_date = fields.DateField()
    end_date = fields.DateField()
    # Cover image reference; optional.
    image = fields.CharField(max_length=512, null=True)
    #: Hackathon website url
    url = fields.CharField(max_length=512, null=True)
    # Stand-alone coordinates, kept in addition to the Location relation.
    location_lon = fields.FloatField(null=True)
    location_lat = fields.FloatField(null=True)
    location: fields.ForeignKeyNullableRelation[Location] = fields.ForeignKeyField(
        "models.Location", related_name="hackathons", null=True
    )
    sponsors: fields.ManyToManyRelation[Sponsor] = fields.ManyToManyField(
        "models.Sponsor", related_name="hackathons"
    )
    teams: fields.ManyToManyRelation[Team] = fields.ManyToManyField(
        "models.Team", related_name="hackathons"
    )
    tags: fields.ManyToManyRelation[HackathonTag] = fields.ManyToManyField(
        "models.HackathonTag", related_name="hackathons"
    )
    organizers: fields.ManyToManyRelation[Organizer] = fields.ManyToManyField(
        "models.Organizer", related_name="hackathons"
    )
    # Reverse side of Publication.hackathon.
    publications: fields.ForeignKeyRelation["Publication"]

    async def participants_amount(self) -> int:
        """Total head count: every member of every team plus the organizers."""
        teams = await self.teams
        participants = sum([await team.team_size() for team in teams])
        return participants + len(await self.organizers.all().values_list())

    def __repr__(self):
        return str(self.name)
Tortoise.init_models(["models"], "models")
|
/* Generated by RuntimeBrowser
Image: /System/Library/Frameworks/CoreImage.framework/Versions/A/CoreImage
*/
/* Private CoreImage blend-mode filter; presumably implements the "lighten"
 * compositing operation (header recovered via RuntimeBrowser). */
@interface CILightenBlendMode : CIBlendModeFilter

/* Private (underscore-prefixed) kernel accessor -- not public API. */
- (id)_kernel;

@end
|
from __future__ import absolute_import, unicode_literals
import logging
import os
from mopidy import config, ext
__version__ = '0.1.0'
logger = logging.getLogger(__name__)
class Extension(ext.Extension):
    """Mopidy extension that wires the SiriusXM backend into Mopidy."""

    dist_name = 'Mopidy-SiriusXM'
    ext_name = 'siriusxm'
    version = __version__

    def get_default_config(self):
        """Return the defaults from the ext.conf shipped next to this file."""
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        """Declare every supported config key with its value type."""
        schema = super(Extension, self).get_config_schema()
        # (key, value type) pairs, in declaration order.
        options = [
            ('username', config.String),
            ('password', config.Secret),
            ('remember_me', config.Boolean),
            ('base_url', config.String),
            ('auth_url', config.String),
            ('device_make', config.String),
            ('oem', config.String),
            ('os_version', config.String),
            ('platform', config.String),
            ('device_id', config.String),
            ('sxm_app_version', config.String),
            ('carrier', config.String),
            ('app_region', config.String),
            ('device_model', config.String),
            ('result_template', config.String),
            ('allow_network', config.Boolean),
            ('allow_cache', config.Boolean),
        ]
        for key, value_type in options:
            schema[key] = value_type()
        return schema

    def setup(self, registry):
        """Register the SiriusXM backend class with Mopidy."""
        from .siriusXMBackend import SiriusXM
        registry.add('backend', SiriusXM)
|
import warnings

import numpy as np
from numpy.testing import assert_allclose
import pytest

from scipy.fft._fftlog import fht, ifht, fhtoffset
from scipy.special import poch
def test_fht_agrees_with_fftlog():
    """Compare fht output against reference values from Fortran FFTLog."""
    # check that fht numerically agrees with the output from Fortran FFTLog,
    # the results were generated with the provided `fftlogtest` program,
    # after fixing how the k array is generated (divide range by n-1, not n)

    # test function, analytical Hankel transform is of the same form
    def f(r, mu):
        return r**(mu+1)*np.exp(-r**2/2)

    r = np.logspace(-4, 4, 16)

    dln = np.log(r[1]/r[0])
    mu = 0.3
    offset = 0.0
    bias = 0.0

    a = f(r, mu)

    # test 1: compute as given
    ours = fht(a, dln, mu, offset=offset, bias=bias)
    theirs = [ -0.1159922613593045E-02, 0.1625822618458832E-02,
               -0.1949518286432330E-02, 0.3789220182554077E-02,
               0.5093959119952945E-03, 0.2785387803618774E-01,
               0.9944952700848897E-01, 0.4599202164586588 ,
               0.3157462160881342 , -0.8201236844404755E-03,
               -0.7834031308271878E-03, 0.3931444945110708E-03,
               -0.2697710625194777E-03, 0.3568398050238820E-03,
               -0.5554454827797206E-03, 0.8286331026468585E-03 ]
    assert_allclose(ours, theirs)

    # test 2: change to optimal offset
    offset = fhtoffset(dln, mu, bias=bias)
    ours = fht(a, dln, mu, offset=offset, bias=bias)
    theirs = [ 0.4353768523152057E-04, -0.9197045663594285E-05,
               0.3150140927838524E-03, 0.9149121960963704E-03,
               0.5808089753959363E-02, 0.2548065256377240E-01,
               0.1339477692089897 , 0.4821530509479356 ,
               0.2659899781579785 , -0.1116475278448113E-01,
               0.1791441617592385E-02, -0.4181810476548056E-03,
               0.1314963536765343E-03, -0.5422057743066297E-04,
               0.3208681804170443E-04, -0.2696849476008234E-04 ]
    assert_allclose(ours, theirs)

    # test 3: positive bias
    bias = 0.8
    offset = fhtoffset(dln, mu, bias=bias)
    ours = fht(a, dln, mu, offset=offset, bias=bias)
    theirs = [ -7.343667355831685 , 0.1710271207817100 ,
               0.1065374386206564 , -0.5121739602708132E-01,
               0.2636649319269470E-01, 0.1697209218849693E-01,
               0.1250215614723183 , 0.4739583261486729 ,
               0.2841149874912028 , -0.8312764741645729E-02,
               0.1024233505508988E-02, -0.1644902767389120E-03,
               0.3305775476926270E-04, -0.7786993194882709E-05,
               0.1962258449520547E-05, -0.8977895734909250E-06 ]
    assert_allclose(ours, theirs)

    # test 4: negative bias
    bias = -0.8
    offset = fhtoffset(dln, mu, bias=bias)
    ours = fht(a, dln, mu, offset=offset, bias=bias)
    theirs = [ 0.8985777068568745E-05, 0.4074898209936099E-04,
               0.2123969254700955E-03, 0.1009558244834628E-02,
               0.5131386375222176E-02, 0.2461678673516286E-01,
               0.1235812845384476 , 0.4719570096404403 ,
               0.2893487490631317 , -0.1686570611318716E-01,
               0.2231398155172505E-01, -0.1480742256379873E-01,
               0.1692387813500801 , 0.3097490354365797 ,
               2.759360718240186 , 10.52510750700458 ]
    assert_allclose(ours, theirs)
@pytest.mark.parametrize('optimal', [True, False])
@pytest.mark.parametrize('offset', [0.0, 1.0, -1.0])
@pytest.mark.parametrize('bias', [0, 0.1, -0.1])
@pytest.mark.parametrize('n', [64, 63])
def test_fht_identity(n, bias, offset, optimal):
    """Round-tripping fht -> ifht must reproduce the input array."""
    rng = np.random.RandomState(3491349965)

    a = rng.standard_normal(n)
    dln = rng.uniform(-1, 1)
    mu = rng.uniform(-2, 2)

    if optimal:
        offset = fhtoffset(dln, mu, initial=offset, bias=bias)

    restored = ifht(fht(a, dln, mu, offset=offset, bias=bias),
                    dln, mu, offset=offset, bias=bias)

    assert_allclose(a, restored)
def test_fht_special_cases():
    """Check warning behaviour of fht/ifht around poles of the kernel.

    Uses `warnings.catch_warnings(record=True)` instead of the deprecated
    `pytest.warns(None)` idiom (which raises a TypeError on pytest >= 7).
    """
    rng = np.random.RandomState(3491349965)

    a = rng.standard_normal(64)
    dln = rng.uniform(-1, 1)

    def _warnings_from(transform, mu, bias):
        # Run `transform` and return every warning it emitted.
        with warnings.catch_warnings(record=True) as record:
            warnings.simplefilter('always')
            transform(a, dln, mu, bias=bias)
        return record

    # let xp = (mu+1+q)/2, xm = (mu+1-q)/2, M = {0, -1, -2, ...}

    # case 1: xp in M, xm in M => well-defined transform
    mu, bias = -4.0, 1.0
    assert not _warnings_from(fht, mu, bias), \
        'fht warned about a well-defined transform'

    # case 2: xp not in M, xm in M => well-defined transform
    mu, bias = -2.5, 0.5
    assert not _warnings_from(fht, mu, bias), \
        'fht warned about a well-defined transform'

    # case 3: xp in M, xm not in M => singular transform
    mu, bias = -3.5, 0.5
    assert _warnings_from(fht, mu, bias), \
        'fht did not warn about a singular transform'

    # case 4: xp not in M, xm in M => singular inverse transform
    mu, bias = -2.5, 0.5
    assert _warnings_from(ifht, mu, bias), \
        'ifht did not warn about a singular transform'
@pytest.mark.parametrize('n', [64, 63])
def test_fht_exact(n):
    """Biased fht reproduces the analytical Hankel transform of a power law."""
    rng = np.random.RandomState(3491349965)

    # for a(r) a power law r^\gamma, the fast Hankel transform produces the
    # exact continuous Hankel transform if biased with q = \gamma
    mu = rng.uniform(0, 3)

    # convergence of HT: -1-mu < gamma < 1/2
    gamma = rng.uniform(-1 - mu, 1 / 2)

    r = np.logspace(-2, 2, n)
    dln = np.log(r[1] / r[0])

    offset = fhtoffset(dln, mu, initial=0.0, bias=gamma)
    numeric = fht(r**gamma, dln, mu, offset=offset, bias=gamma)

    k = np.exp(offset) / r[::-1]

    # analytical result
    analytic = (2 / k)**gamma * poch((mu + 1 - gamma) / 2, gamma)

    assert_allclose(numeric, analytic)
|
/**
* Created by shunchen_yang on 16/10/25.
*/
// Register a custom UEditor toolbar button that opens the XIUMI ("秀米")
// rich-content editor inside a dialog.
UE.registerUI('dialog', function (editor, uiName) {
    var btn = new UE.ui.Button({
        name : 'xiumi-connect',
        title : '秀米',
        onclick: function () {
            // Size the dialog to the window minus a 60px margin on each axis.
            var dialog = new UE.ui.Dialog({
                iframeUrl: 'xiumi/xiumi-ue-dialog-v5.html',
                editor : editor,
                name : 'xiumi-connect',
                title : "秀米图文消息助手",
                cssRules : "width: " + (window.innerWidth - 60) + "px;" + "height: " + (window.innerHeight - 60) + "px;",
            });
            dialog.render();
            dialog.open();
        }
    });
    return btn;
});
|
var data=[['20061208',3.99],
['20061211',3.965],
['20061212',3.9],
['20061213',3.8],
['20061214',3.78],
['20061215',3.685],
['20061218',3.635],
['20061219',3.63],
['20061220',3.8],
['20061221',3.8],
['20061222',3.785],
['20061227',3.715],
['20061228',3.75],
['20061229',3.885],
['20070102',3.95],
['20070103',4.07],
['20070104',3.885],
['20070105',3.895],
['20070108',3.76],
['20070109',3.785],
['20070110',3.75],
['20070111',3.78],
['20070112',3.81],
['20070115',3.93],
['20070116',3.95],
['20070117',3.955],
['20070118',3.985],
['20070119',3.98],
['20070122',3.95],
['20070123',4],
['20070124',4.085],
['20070125',3.985],
['20070126',3.985],
['20070129',4.005],
['20070130',3.96],
['20070131',3.915],
['20070201',3.95],
['20070202',4],
['20070205',3.93],
['20070206',3.895],
['20070207',3.985],
['20070208',4],
['20070209',3.995],
['20070212',4.015],
['20070213',3.935],
['20070214',3.955],
['20070215',3.97],
['20070216',3.975],
['20070221',3.93],
['20070222',4.01],
['20070223',3.96],
['20070226',4.245],
['20070227',4.225],
['20070228',4.16],
['20070301',3.96],
['20070302',3.875],
['20070305',3.52],
['20070306',3.75],
['20070307',3.745],
['20070308',3.785],
['20070309',3.75],
['20070312',3.795],
['20070313',3.76],
['20070314',3.675],
['20070315',3.73],
['20070316',3.695],
['20070319',3.825],
['20070320',3.78],
['20070321',3.825],
['20070322',3.8],
['20070323',3.825],
['20070326',3.82],
['20070327',3.92],
['20070328',3.96],
['20070329',3.975],
['20070330',3.97],
['20070402',4.005],
['20070403',3.91],
['20070404',3.95],
['20070410',4],
['20070411',3.975],
['20070412',3.945],
['20070413',3.925],
['20070416',3.935],
['20070417',4],
['20070418',3.955],
['20070419',3.83],
['20070420',3.82],
['20070423',3.89],
['20070424',3.85],
['20070425',3.85],
['20070426',3.805],
['20070427',3.785],
['20070430',3.77],
['20070502',3.74],
['20070503',3.75],
['20070504',3.71],
['20070507',3.75],
['20070508',3.75],
['20070509',3.72],
['20070510',3.745],
['20070511',3.755],
['20070514',3.755],
['20070515',3.75],
['20070516',3.75],
['20070517',3.72],
['20070518',3.69],
['20070521',3.73],
['20070522',3.695],
['20070523',3.66],
['20070525',3.64],
['20070528',3.61],
['20070529',3.58],
['20070530',3.575],
['20070531',3.625],
['20070601',3.63],
['20070604',3.64],
['20070605',3.6],
['20070606',3.635],
['20070607',3.575],
['20070608',3.545],
['20070611',3.545],
['20070612',3.55],
['20070613',3.605],
['20070614',3.79],
['20070615',3.835],
['20070618',3.81],
['20070620',3.805],
['20070621',3.77],
['20070622',3.7],
['20070625',3.745],
['20070626',3.7],
['20070627',3.625],
['20070628',3.72],
['20070629',3.73],
['20070703',3.835],
['20070704',3.845],
['20070705',3.82],
['20070706',3.82],
['20070709',3.925],
['20070710',3.97],
['20070711',3.9],
['20070712',3.87],
['20070713',3.955],
['20070716',4],
['20070717',3.985],
['20070718',3.915],
['20070719',4.01],
['20070720',4.025],
['20070723',4.255],
['20070724',4.385],
['20070725',4.25],
['20070726',4.26],
['20070727',4.025],
['20070730',4.04],
['20070731',4.21],
['20070801',4.065],
['20070802',4.06],
['20070803',4.135],
['20070806',4.085],
['20070807',3.925],
['20070808',3.97],
['20070809',3.875],
['20070810',3.68],
['20070813',3.635],
['20070814',3.64],
['20070815',3.635],
['20070816',3.4],
['20070817',3.25],
['20070820',3.41],
['20070821',3.305],
['20070822',3.335],
['20070823',3.525],
['20070824',3.56],
['20070827',3.82],
['20070828',3.74],
['20070829',3.56],
['20070830',3.545],
['20070831',3.58],
['20070903',3.61],
['20070904',3.64],
['20070905',3.75],
['20070906',4.125],
['20070907',4.225],
['20070910',4.695],
['20070911',4.855],
['20070912',4.945],
['20070913',4.85],
['20070914',4.73],
['20070917',5.5375],
['20070918',5.75],
['20070919',6.1],
['20070920',7.05],
['20070921',7.4875],
['20070924',7.65],
['20070925',7.6375],
['20070927',7.25],
['20070928',7.675],
['20071002',8.2],
['20071003',7.7875],
['20071004',7.4],
['20071005',8.175],
['20071008',8.25],
['20071009',8.6],
['20071010',8.75],
['20071011',9.45],
['20071012',10.0375],
['20071015',10.45],
['20071016',10.2],
['20071017',9.875],
['20071018',10.0375],
['20071022',9.475],
['20071023',9.475],
['20071024',9.375],
['20071025',8.75],
['20071026',9.4375],
['20071029',9.875],
['20071030',9.625],
['20071031',9.4],
['20071101',9.7125],
['20071102',9.25],
['20071105',9.25],
['20071106',9.9],
['20071107',10.3125],
['20071108',10.0125],
['20071109',10.0125],
['20071112',9.3125],
['20071113',8.6375],
['20071114',9.075],
['20071115',8.975],
['20071116',8.55],
['20071119',8.175],
['20071120',7.8125],
['20071121',7.725],
['20071122',7.175],
['20071123',6.85],
['20071126',7.5625],
['20071127',7.5],
['20071128',7.4125],
['20071129',7.6625],
['20071130',7.675],
['20071203',7.375],
['20071204',7.275],
['20071205',7.5],
['20071206',7.3375],
['20071207',7.425],
['20071210',7.275],
['20071211',7.5625],
['20071212',7.325],
['20071213',7.2625],
['20071214',7.15],
['20071217',6.6],
['20071218',6.875],
['20071219',6.6375],
['20071220',6.7],
['20071221',6.825],
['20071224',7.25],
['20071227',7.65],
['20071228',7.675],
['20071231',8.1375],
['20080102',8.2375],
['20080103',8.65],
['20080104',9.3875],
['20080107',9.45],
['20080108',9.375],
['20080109',10.25],
['20080110',10.2375],
['20080111',10.0625],
['20080114',10.0875],
['20080115',10.1],
['20080116',9.4375],
['20080117',8.75],
['20080118',9.25],
['20080121',9.075],
['20080122',7.4],
['20080123',7.9875],
['20080124',8.15],
['20080125',8.725],
['20080128',8.5625],
['20080129',8.675],
['20080130',8.125],
['20080131',7.7125],
['20080201',8.15],
['20080204',8.25],
['20080205',7.95],
['20080206',7.3875],
['20080211',7.7],
['20080212',7.625],
['20080213',7.5],
['20080214',7.45],
['20080215',7.5375],
['20080218',7.4875],
['20080219',8.075],
['20080220',8.5],
['20080221',8.65],
['20080222',8.4625],
['20080225',8.4125],
['20080226',7.925],
['20080227',8.475],
['20080228',8.4875],
['20080229',8.95],
['20080303',9.1],
['20080304',8.825],
['20080305',8.25],
['20080306',8.7],
['20080307',8.575],
['20080310',8.1125],
['20080311',7.7],
['20080312',7.5],
['20080313',7.5],
['20080314',7.3],
['20080317',7.15],
['20080318',6.45],
['20080319',6.2125],
['20080320',5.35],
['20080325',5.5],
['20080326',5.5],
['20080327',5.7],
['20080328',6.4],
['20080331',6.3],
['20080401',5.975],
['20080402',5.75],
['20080403',5.8875],
['20080407',6.375],
['20080408',6.575],
['20080409',6.125],
['20080410',6.35],
['20080411',6.3875],
['20080414',6.3375],
['20080415',6.5],
['20080416',6.44],
['20080417',6.51],
['20080418',6.51],
['20080421',6.47],
['20080422',6.7],
['20080423',6.7],
['20080424',6.75],
['20080425',6.55],
['20080428',6.41],
['20080429',6.3],
['20080430',6.14],
['20080502',5.87],
['20080505',6.13],
['20080506',6.29],
['20080507',6.08],
['20080508',5.9],
['20080509',5.97],
['20080513',5.98],
['20080514',5.95],
['20080515',6],
['20080516',6],
['20080519',6.4],
['20080520',6.3],
['20080521',6.93],
['20080522',6.86],
['20080523',6.67],
['20080526',6.66],
['20080527',6.65],
['20080528',6.54],
['20080529',6.49],
['20080530',6.44],
['20080602',6.38],
['20080603',6.24],
['20080604',6.06],
['20080605',5.87],
['20080606',5.8],
['20080610',5.65],
['20080611',5.45],
['20080612',5.25],
['20080613',5.25],
['20080616',5.15],
['20080617',5.06],
['20080618',5.21],
['20080619',5.07],
['20080620',4.955],
['20080623',4.885],
['20080624',4.705],
['20080625',4.595],
['20080626',4.5],
['20080627',4.485],
['20080630',4.8],
['20080702',5.02],
['20080703',4.86],
['20080704',4.67],
['20080707',4.61],
['20080708',4.475],
['20080709',4.5],
['20080710',4.475],
['20080711',4.7],
['20080714',4.81],
['20080715',4.68],
['20080716',4.775],
['20080717',4.725],
['20080718',4.64],
['20080721',4.75],
['20080722',4.85],
['20080723',4.67],
['20080724',4.65],
['20080725',4.58],
['20080728',4.58],
['20080729',4.54],
['20080730',4.5],
['20080731',4.55],
['20080801',4.44],
['20080804',4.375],
['20080805',3.9],
['20080807',3.545],
['20080808',3.4],
['20080811',3.175],
['20080812',2.875],
['20080813',2.78],
['20080814',2.84],
['20080815',2.7],
['20080818',2.575],
['20080819',2.545],
['20080820',2.795],
['20080821',2.63],
['20080825',2.6],
['20080826',2.575],
['20080827',2.625],
['20080828',2.605],
['20080829',2.7],
['20080901',2.595],
['20080902',2.5],
['20080903',2.365],
['20080904',2.29],
['20080905',2.15],
['20080908',2.205],
['20080909',2.03],
['20080910',1.85],
['20080911',1.675],
['20080912',1.695],
['20080916',1.6],
['20080917',1.53],
['20080918',2.01],
['20080919',2.355],
['20080922',2.36],
['20080923',2.52],
['20080924',2.425],
['20080925',2.495],
['20080926',2.355],
['20080929',2.295],
['20080930',2.5],
['20081002',2.475],
['20081003',2.285],
['20081006',2.09],
['20081008',2.065],
['20081009',2.15],
['20081010',2],
['20081013',1.91],
['20081014',1.93],
['20081015',1.9],
['20081016',1.765],
['20081017',1.655],
['20081020',1.625],
['20081021',1.55],
['20081022',1.275],
['20081023',1.175],
['20081024',1.085],
['20081027',1.015],
['20081028',1.05],
['20081029',1.125],
['20081030',1.27],
['20081031',1.38],
['20081103',1.39],
['20081104',1.39],
['20081105',1.59],
['20081106',1.495],
['20081107',1.51],
['20081110',1.675],
['20081111',1.555],
['20081112',1.485],
['20081113',1.385],
['20081114',1.455],
['20081117',1.495],
['20081118',1.415],
['20081119',1.455],
['20081120',1.435],
['20081121',1.52],
['20081124',1.6],
['20081125',1.715],
['20081126',1.74],
['20081127',1.74],
['20081128',1.765],
['20081201',1.895],
['20081202',1.75],
['20081203',1.85],
['20081204',1.85],
['20081205',1.885],
['20081208',2.1],
['20081209',2.1],
['20081210',2.275],
['20081211',2.605],
['20081212',2.5],
['20081215',2.54],
['20081216',2.6],
['20081217',2.85],
['20081218',3.1],
['20081219',2.995],
['20081222',2.7],
['20081223',2.6],
['20081224',2.625],
['20081229',2.89],
['20081230',3.01],
['20081231',3],
['20090102',3.015],
['20090105',3.045],
['20090106',2.925],
['20090107',2.935],
['20090108',2.665],
['20090109',2.79],
['20090112',2.62],
['20090113',2.525],
['20090114',2.65],
['20090115',2.695],
['20090116',2.87],
['20090119',3.085],
['20090120',2.995],
['20090121',3.16],
['20090122',3.08],
['20090123',3.05],
['20090129',2.96],
['20090130',3.08],
['20090202',3.55],
['20090203',3.53],
['20090204',3.45],
['20090205',3.5],
['20090206',3.88],
['20090209',3.91],
['20090210',3.9],
['20090211',4.175],
['20090212',4.27],
['20090213',4.25],
['20090216',4.125],
['20090217',4.285],
['20090218',4.45],
['20090219',4.68],
['20090220',4.6],
['20090223',4.595],
['20090224',4.55],
['20090225',4.41],
['20090226',4.25],
['20090227',4.13],
['20090302',4.2],
['20090303',3.95],
['20090304',3.895],
['20090305',3.955],
['20090306',4.015],
['20090309',4.315],
['20090310',4.175],
['20090311',4.19],
['20090312',4.225],
['20090313',4.295],
['20090316',4.38],
['20090317',4.4],
['20090318',4.225],
['20090319',4.825],
['20090320',5.65],
['20090323',5.75],
['20090324',5.35],
['20090325',5.53],
['20090326',6],
['20090327',5.94],
['20090330',5.53],
['20090331',5.5],
['20090401',5.36],
['20090402',5.49],
['20090403',5.01],
['20090406',4.83],
['20090407',5.02],
['20090408',5.2],
['20090409',5.2],
['20090414',5.18],
['20090415',5.07],
['20090416',5],
['20090417',4.8],
['20090420',4.705],
['20090421',5.12],
['20090422',5.01],
['20090423',4.995],
['20090424',5.45],
['20090427',5.31],
['20090428',4.945],
['20090429',4.99],
['20090430',5.02],
['20090504',5.12],
['20090505',5.23],
['20090506',5.17],
['20090507',5.29],
['20090508',5.35],
['20090511',5.1],
['20090512',5.1],
['20090513',5.54],
['20090514',5.37],
['20090515',5.46],
['20090518',5.53],
['20090519',5.35],
['20090520',5.4],
['20090521',5.83],
['20090522',5.95],
['20090525',6.15],
['20090526',5.94],
['20090527',6.09],
['20090529',6.26],
['20090601',7],
['20090602',6.9],
['20090603',7.28],
['20090604',7.11],
['20090605',6.98],
['20090608',6.48],
['20090609',6.35],
['20090610',6.78],
['20090611',6.75],
['20090612',6.64],
['20090615',6.23],
['20090616',6.18],
['20090617',6.3],
['20090618',6.25],
['20090619',6.33],
['20090622',6.26],
['20090623',5.97],
['20090624',6.1],
['20090625',6.11],
['20090626',6.44],
['20090629',6.31],
['20090630',6.29],
['20090702',6.17],
['20090703',6.07],
['20090706',6],
['20090707',6.03],
['20090708',6.08],
['20090709',5.94],
['20090710',5.85],
['20090713',5.8],
['20090714',5.95],
['20090715',6.21],
['20090716',6.18],
['20090717',6.25],
['20090720',6.42],
['20090721',6.51],
['20090722',6.3],
['20090723',6.35],
['20090724',6.33],
['20090727',6.74],
['20090728',6.89],
['20090729',6.4],
['20090730',6.23],
['20090731',6.43],
['20090803',6.66],
['20090804',6.56],
['20090805',6.64],
['20090806',6.48],
['20090807',6.15],
['20090810',6.24],
['20090811',6.14],
['20090812',5.86],
['20090813',6],
['20090814',6],
['20090817',5.56],
['20090818',5.71],
['20090819',5.47],
['20090820',5.7],
['20090821',5.75],
['20090824',5.9],
['20090825',5.8],
['20090826',5.73],
['20090827',5.66],
['20090828',5.69],
['20090831',5.52],
['20090901',5.54],
['20090902',5.51],
['20090903',6],
['20090904',6.19],
['20090907',6.36],
['20090908',7.14],
['20090909',6.86],
['20090910',6.76],
['20090911',7.12],
['20090914',7.04],
['20090915',6.75],
['20090916',7.4],
['20090917',7.48],
['20090918',7.41],
['20090921',7.1],
['20090922',7.35],
['20090923',7.23],
['20090924',7.13],
['20090925',6.84],
['20090928',6.51],
['20090929',6.6],
['20090930',6.6],
['20091002',6.57],
['20091005',6.63],
['20091006',7.1],
['20091007',7.45],
['20091008',7.49],
['20091009',7.28],
['20091012',6.97],
['20091013',7.14],
['20091014',7.3],
['20091015',7.02],
['20091016',6.95],
['20091019',7.13],
['20091020',7.05],
['20091021',7.04],
['20091022',6.9],
['20091023',7.13],
['20091027',6.89],
['20091028',6.76],
['20091029',6.89],
['20091030',6.88],
['20091102',6.83],
['20091103',6.88],
['20091104',7.2],
['20091105',7.2],
['20091106',7.19],
['20091109',7.39],
['20091110',7.09],
['20091111',7.24],
['20091112',7.2],
['20091113',7.07],
['20091116',7.36],
['20091117',7.12],
['20091118',7.03],
['20091119',7.15],
['20091120',7.02],
['20091123',7.4],
['20091124',7.3],
['20091125',7.58],
['20091126',7.6],
['20091127',7.01],
['20091130',7.3],
['20091201',7.81],
['20091202',8.36],
['20091203',8.48],
['20091204',8.28],
['20091207',7.92],
['20091208',8.12],
['20091209',7.95],
['20091210',7.91],
['20091211',8.28],
['20091214',8.25],
['20091215',8.19],
['20091216',8.1],
['20091217',7.95],
['20091218',7.88],
['20091221',7.75],
['20091222',7.59],
['20091223',7.54],
['20091224',7.8],
['20091228',8],
['20091229',7.88],
['20091230',7.71],
['20091231',7.74],
['20100104',7.93],
['20100105',8.75],
['20100106',8.75],
['20100107',8.44],
['20100108',8.4],
['20100111',8.65],
['20100112',8.75],
['20100113',8.37],
['20100114',8.49],
['20100115',8.35],
['20100118',8.17],
['20100119',8.27],
['20100120',7.96],
['20100121',7.8],
['20100122',7.72],
['20100125',7.6],
['20100126',7.21],
['20100127',6.95],
['20100128',7.2],
['20100129',6.98],
['20100201',7],
['20100202',7.46],
['20100203',7.57],
['20100204',7.4],
['20100205',6.96],
['20100208',7],
['20100209',7.26],
['20100210',7.34],
['20100211',7.41],
['20100212',7.7],
['20100217',7.85],
['20100218',7.77],
['20100219',7.52],
['20100222',7.78],
['20100223',7.65],
['20100224',7.65],
['20100225',7.55],
['20100226',7.75],
['20100301',7.94],
['20100302',8.05],
['20100303',8.05],
['20100304',7.97],
['20100305',8.07],
['20100308',8.1],
['20100309',8],
['20100310',8],
['20100311',7.92],
['20100312',7.96],
['20100315',7.67],
['20100316',7.6],
['20100317',7.74],
['20100318',7.67],
['20100319',7.8],
['20100322',7.59],
['20100323',7.56],
['20100324',7.49],
['20100325',7.28],
['20100326',7.31],
['20100329',7.48],
['20100330',7.59],
['20100331',7.6],
['20100401',7.81],
['20100407',8.24],
['20100408',8.11],
['20100409',8.34],
['20100412',8.26],
['20100413',8.05],
['20100414',7.96],
['20100415',7.93],
['20100416',7.96],
['20100419',7.88],
['20100420',7.81],
['20100421',7.78],
['20100422',7.8],
['20100423',7.53],
['20100426',7.65],
['20100427',7.6],
['20100428',7.66],
['20100429',7.72],
['20100430',7.63],
['20100503',7.59],
['20100504',7.55],
['20100505',7.89],
['20100506',7.82],
['20100507',8.11],
['20100510',7.98],
['20100511',8.18],
['20100512',8.42],
['20100513',8.33],
['20100514',8.47],
['20100517',8.32],
['20100518',8.02],
['20100519',7.82],
['20100520',7.65],
['20100524',7.61],
['20100525',7.72],
['20100526',7.85],
['20100527',7.95],
['20100528',7.96],
['20100531',8.05],
['20100601',7.99],
['20100602',8.03],
['20100603',7.93],
['20100604',7.97],
['20100607',7.92],
['20100608',8.33],
['20100609',8.41],
['20100610',8.34],
['20100611',8.42],
['20100614',8.4],
['20100615',8.39],
['20100617',8.6],
['20100618',9.07],
['20100621',9.17],
['20100622',9.16],
['20100623',9.29],
['20100624',9.18],
['20100625',9.24],
['20100628',9.16],
['20100629',9.15],
['20100630',9.18],
['20100702',8.98],
['20100705',8.91],
['20100706',8.89],
['20100707',8.65],
['20100708',8.96],
['20100709',9],
['20100712',9.25],
['20100713',8.94],
['20100714',9.15],
['20100715',8.89],
['20100716',8.96],
['20100719',8.86],
['20100720',8.77],
['20100721',8.96],
['20100722',8.85],
['20100723',8.96],
['20100726',8.89],
['20100727',8.72],
['20100728',8.46],
['20100729',8.4],
['20100730',8.4],
['20100802',8.4],
['20100803',8.55],
['20100804',8.88],
['20100805',8.95],
['20100806',8.94],
['20100809',9.19],
['20100810',9.17],
['20100811',9.04],
['20100812',8.99],
['20100813',9.42],
['20100816',9.57],
['20100817',9.54],
['20100818',9.18],
['20100819',9.56],
['20100820',9.49],
['20100823',9.38],
['20100824',9.38],
['20100825',9.38],
['20100826',9.58],
['20100827',9.41],
['20100830',9.71],
['20100831',10.2],
['20100901',10.6],
['20100902',10.55],
['20100903',10.225],
['20100906',10.375],
['20100907',10.25],
['20100908',10.425],
['20100909',10.375],
['20100910',10.175],
['20100913',10.225],
['20100914',10.4],
['20100915',10.775],
['20100916',10.65],
['20100917',10.9],
['20100920',11.425],
['20100921',11.25],
['20100922',11.5],
['20100924',11.675],
['20100927',11.675],
['20100928',11.425],
['20100929',11.725],
['20100930',11.95],
['20101004',12.375],
['20101005',12.525],
['20101006',12.925],
['20101007',13.1],
['20101008',12.65],
['20101011',13],
['20101012',13],
['20101013',13],
['20101014',13.175],
['20101015',12.975],
['20101018',12.1],
['20101019',12.325],
['20101020',11.925],
['20101021',12.125],
['20101022',12.025],
['20101025',12.425],
['20101026',12.3],
['20101027',11.75],
['20101028',11.825],
['20101029',12.05],
['20101101',12.725],
['20101102',12.725],
['20101103',12.625],
['20101104',12.825],
['20101105',13.35],
['20101108',13.85],
['20101109',14.6],
['20101110',14.55],
['20101111',14.375],
['20101112',13.8],
['20101115',13.55],
['20101116',13.425],
['20101117',12.6],
['20101118',13.575],
['20101119',13.85],
['20101122',14],
['20101123',13.875],
['20101124',14.15],
['20101125',14.575],
['20101126',13.975],
['20101129',14.3],
['20101130',15.1],
['20101201',14.725],
['20101202',15.3],
['20101203',15.85],
['20101206',16.2],
['20101207',16.5],
['20101208',15.7],
['20101209',15.95],
['20101210',15.55],
['20101213',15.8],
['20101214',16.125],
['20101215',16.3],
['20101216',15.7],
['20101217',15.9],
['20101220',15.95],
['20101221',16.1],
['20101222',16.25],
['20101223',16.075],
['20101224',16],
['20101228',15.25],
['20101229',15.825],
['20101230',15.9],
['20101231',15.9],
['20110103',16.625],
['20110104',17.225],
['20110105',16.775],
['20110106',16.125],
['20110107',15.8],
['20110110',15.85],
['20110111',16.175],
['20110112',16.65],
['20110113',16.275],
['20110114',16.2],
['20110117',15.75],
['20110118',15.65],
['20110119',15.9],
['20110120',15.325],
['20110121',14.825],
['20110124',14.45],
['20110125',14.375],
['20110126',14.475],
['20110127',14.8],
['20110128',13.975],
['20110131',14.25],
['20110201',14.125],
['20110202',14.15],
['20110207',14.075],
['20110208',13.9],
['20110209',13.45],
['20110210',13.5],
['20110211',14.975],
['20110214',14.75],
['20110215',14.975],
['20110216',15.25],
['20110217',15.55],
['20110218',15.75],
['20110221',15.675],
['20110222',16.35],
['20110223',16.275],
['20110224',16.2],
['20110225',16.425],
['20110228',16.6],
['20110301',16.55],
['20110302',17.15],
['20110303',16.75],
['20110304',17.1],
['20110307',18],
['20110308',18.6],
['20110309',18.325],
['20110310',17.15],
['20110311',17],
['20110314',17.8],
['20110315',17.625],
['20110316',17.375],
['20110317',16.9],
['20110318',17.05],
['20110321',17.5],
['20110322',17.025],
['20110323',17.05],
['20110324',17.45],
['20110325',17.375],
['20110328',17.075],
['20110329',17.3],
['20110330',17.475],
['20110331',17.425],
['20110401',17.625],
['20110404',18.125],
['20110406',18.3],
['20110407',18.025],
['20110408',18.125],
['20110411',18.225],
['20110412',18.3],
['20110413',18.125],
['20110414',18],
['20110415',18.225],
['20110418',18.625],
['20110419',18.725],
['20110420',18.9],
['20110421',19.125],
['20110426',19],
['20110427',18.975],
['20110428',18.775],
['20110429',18],
['20110503',17.375],
['20110504',16.475],
['20110505',16.625],
['20110506',15.9],
['20110509',16.275],
['20110511',17.075],
['20110512',16.64],
['20110513',16.58],
['20110516',16.18],
['20110517',15.94],
['20110518',16.18],
['20110519',15.92],
['20110520',15.64],
['20110523',15.78],
['20110524',16.56],
['20110525',16.5],
['20110526',17.16],
['20110527',17.52],
['20110530',17.9],
['20110531',17.8],
['20110601',17.56],
['20110602',17.26],
['20110603',17.38],
['20110607',17.5],
['20110608',17.5],
['20110609',16.98],
['20110610',16.72],
['20110613',16.8],
['20110614',16.9],
['20110615',17.08],
['20110616',17.16],
['20110617',16.82],
['20110620',16.24],
['20110621',16.44],
['20110622',16.62],
['20110623',16.6],
['20110624',16.4],
['20110627',15.82],
['20110628',15.84],
['20110629',16],
['20110630',16],
['20110704',15.28],
['20110705',14.66],
['20110706',15.12],
['20110707',15.64],
['20110708',15.44],
['20110711',14.86],
['20110712',14.68],
['20110713',15.4],
['20110714',16.14],
['20110715',16.2],
['20110718',15.66],
['20110719',15.18],
['20110720',14.9],
['20110721',15.18],
['20110722',15],
['20110725',14.98],
['20110726',15.16],
['20110727',15.66],
['20110728',15.86],
['20110729',15.56],
['20110801',15.6],
['20110802',15.48],
['20110803',16.42],
['20110804',16.92],
['20110805',16.16],
['20110808',17.16],
['20110809',16.42],
['20110810',16.64],
['20110811',17.58],
['20110812',17.16],
['20110815',16.58],
['20110816',17],
['20110817',17],
['20110818',16.78],
['20110819',17.38],
['20110822',17.52],
['20110823',17.78],
['20110824',17.3],
['20110825',16.42],
['20110826',17],
['20110829',17.36],
['20110830',17.32],
['20110831',18.26],
['20110901',18.32],
['20110902',18.3],
['20110905',18.1],
['20110906',18.1],
['20110907',17.94],
['20110908',17.68],
['20110909',17.74],
['20110912',17.06],
['20110914',16.46],
['20110915',16.2],
['20110916',15.52],
['20110919',15.32],
['20110920',15.6],
['20110921',16.2],
['20110922',15.82],
['20110923',14.08],
['20110926',11.54],
['20110927',12.74],
['20110928',12.26],
['20110930',12.96],
['20111003',12.76],
['20111004',12.58],
['20111006',13.18],
['20111007',14.86],
['20111010',13.9],
['20111011',14.66],
['20111012',15.1],
['20111013',15.3],
['20111014',15.06],
['20111017',15.12],
['20111018',13.98],
['20111019',14.1],
['20111020',13],
['20111021',12.78],
['20111024',13.46],
['20111025',13.8],
['20111026',14.14],
['20111027',14.5],
['20111028',14.4],
['20111031',13.98],
['20111101',13.68],
['20111102',13.82],
['20111103',13.88],
['20111104',14.32],
['20111107',14.54],
['20111108',14.92],
['20111109',14.66],
['20111110',13.94],
['20111111',14.3],
['20111114',14.34],
['20111115',14.02],
['20111116',13.28],
['20111117',12.88],
['20111118',12.3],
['20111121',12.3],
['20111122',12.24],
['20111123',12.12],
['20111124',12.78],
['20111125',13.02],
['20111128',13.3],
['20111129',13.72],
['20111130',13.26],
['20111201',13.88],
['20111202',14],
['20111205',14.1],
['20111206',13.88],
['20111207',14.12],
['20111208',14.36],
['20111209',13.96],
['20111212',13.76],
['20111213',13.36],
['20111214',13.22],
['20111215',12.12],
['20111216',12.9],
['20111219',12.44],
['20111220',12.16],
['20111221',12.5],
['20111222',12.4],
['20111223',12.8],
['20111228',12.4],
['20111229',12.28],
['20111230',12.32],
['20120103',12.7],
['20120104',12.5],
['20120105',12.72],
['20120106',12.6],
['20120109',12.4],
['20120110',12.44],
['20120111',12.72],
['20120112',12.8],
['20120113',12.7],
['20120116',12.54],
['20120117',13.24],
['20120118',13.04],
['20120119',12.84],
['20120120',12.6],
['20120126',13.82],
['20120127',13.98],
['20120130',13.46],
['20120131',13.88],
['20120201',14.16],
['20120202',14.92],
['20120203',15.5],
['20120206',15.6],
['20120207',15.1],
['20120208',15.44],
['20120209',15.68],
['20120210',15.34],
['20120213',15.54],
['20120214',15.4],
['20120215',15.68],
['20120216',15.02],
['20120217',15.18],
['20120220',15.02],
['20120221',15.06],
['20120222',15],
['20120223',15.28],
['20120224',15.4],
['20120227',15.28],
['20120228',15.4],
['20120229',16],
['20120301',15.24],
['20120302',15.4],
['20120305',15.12],
['20120306',14.46],
['20120307',14.5],
['20120308',14.74],
['20120309',15.02],
['20120312',14.98],
['20120313',14.74],
['20120314',14.24],
['20120315',13.42],
['20120316',13.38],
['20120319',13.36],
['20120320',13.08],
['20120321',13.06],
['20120322',13.32],
['20120323',13.28],
['20120326',13.12],
['20120327',13.76],
['20120328',13],
['20120329',12.96],
['20120330',13.04],
['20120402',12.7],
['20120403',12.7],
['20120405',12.28],
['20120410',12.12],
['20120411',12.4],
['20120412',12.12],
['20120413',12.36],
['20120416',12.04],
['20120417',12.06],
['20120418',12.14],
['20120419',12.24],
['20120420',12.16],
['20120423',11.9],
['20120424',11.64],
['20120425',11.44],
['20120426',11.06],
['20120427',10.5],
['20120430',10.84],
['20120502',10.94],
['20120503',10.62],
['20120504',10.26],
['20120507',10.3],
['20120508',10.12],
['20120509',9.63],
['20120510',9.55],
['20120511',9.56],
['20120514',9.51],
['20120515',9.26],
['20120516',8.68],
['20120517',8.74],
['20120518',9.07],
['20120521',9.33],
['20120522',9.61],
['20120523',8.93],
['20120524',9.2],
['20120525',9.34],
['20120528',9.47],
['20120529',9.63],
['20120530',9.36],
['20120531',9.2],
['20120601',8.82],
['20120604',9.38],
['20120605',9.39],
['20120606',10.38],
['20120607',10.26],
['20120608',10.02],
['20120611',10.46],
['20120612',10.82],
['20120613',10.94],
['20120614',10.78],
['20120615',10.94],
['20120618',10.92],
['20120619',11.04],
['20120620',11],
['20120621',10.28],
['20120622',10],
['20120625',9.9],
['20120626',9.84],
['20120627',9.68],
['20120628',9.59],
['20120629',10.1],
['20120703',10.28],
['20120704',10.6],
['20120705',10.78],
['20120706',10.7],
['20120709',10.18],
['20120710',9.94],
['20120711',9.91],
['20120712',9.51],
['20120713',9.29],
['20120716',9.66],
['20120717',9.93],
['20120718',9.71],
['20120719',9.72],
['20120720',9.75],
['20120723',9.25],
['20120724',9.08],
['20120725',8.87],
['20120726',9.12],
['20120727',9.58],
['20120730',9.88],
['20120731',9.7],
['20120801',9.67],
['20120802',9.52],
['20120803',9.45],
['20120806',9.9],
['20120807',10.28],
['20120808',9.99],
['20120809',10.16],
['20120810',9.36],
['20120813',9.46],
['20120814',9.4],
['20120815',9.25],
['20120816',9.25],
['20120817',9.3],
['20120820',9.41],
['20120821',9.6],
['20120822',10],
['20120823',10.96],
['20120824',11.1],
['20120827',11.02],
['20120828',10.64],
['20120829',10.38],
['20120830',10.44],
['20120831',10.28],
['20120903',11.08],
['20120904',10.98],
['20120905',10.94],
['20120906',11.54],
['20120907',11.34],
['20120910',11.78],
['20120911',11.8],
['20120912',11.78],
['20120913',11.58],
['20120914',13.38],
['20120917',13.64],
['20120918',13.06],
['20120919',13.66],
['20120920',13.4],
['20120921',13.64],
['20120924',13.68],
['20120925',14.04],
['20120926',13.7],
['20120927',13.84],
['20120928',14.02],
['20121003',14.1],
['20121004',14.16],
['20121005',14.4],
['20121008',13.6],
['20121009',13.6],
['20121010',13.7],
['20121011',13.9],
['20121012',14.08],
['20121015',13.56],
['20121016',13.76],
['20121017',13.72],
['20121018',13.66],
['20121019',13.52],
['20121022',13.3],
['20121024',13.12],
['20121025',12.8],
['20121026',12.34],
['20121029',13.08],
['20121030',12.8],
['20121031',13],
['20121101',13.28],
['20121102',13.2],
['20121105',12.78],
['20121106',12.9],
['20121107',13.7],
['20121108',13.36],
['20121109',13.48],
['20121112',13.44],
['20121113',13.36],
['20121114',13.48],
['20121115',13.3],
['20121116',13.04],
['20121119',13.24],
['20121120',13.04],
['20121121',13.2],
['20121122',13.14],
['20121123',13.26],
['20121126',13.46],
['20121127',13.58],
['20121128',13.24],
['20121129',13.06],
['20121130',13.1],
['20121203',13.18],
['20121204',13.18],
['20121205',13.3],
['20121206',12.8],
['20121207',12.82],
['20121210',12.98],
['20121211',12.82],
['20121212',12.94],
['20121213',12.66],
['20121214',12.52],
['20121217',12.22],
['20121218',12.58],
['20121219',12.38],
['20121220',12.38],
['20121221',11.96],
['20121224',12.06],
['20121227',12.14],
['20121228',12.2],
['20121231',12.1],
['20130102',12.66],
['20130103',12.88],
['20130104',12.22],
['20130107',12],
['20130108',12],
['20130109',11.86],
['20130110',11.62],
['20130111',11.78],
['20130114',11.62],
['20130115',11.68],
['20130116',12.14],
['20130117',11.94],
['20130118',12.1],
['20130121',12.08],
['20130122',12.1],
['20130123',12.26],
['20130124',12.04],
['20130125',11.6],
['20130128',11.56],
['20130129',11.7],
['20130130',11.6],
['20130131',11.7],
['20130201',11.52],
['20130204',11.5],
['20130205',11.38],
['20130206',11.36],
['20130207',11.26],
['20130208',11.22],
['20130214',11.14],
['20130215',11],
['20130218',10.64],
['20130219',10.54],
['20130220',10.46],
['20130221',10.04],
['20130222',10.42],
['20130225',10.18],
['20130226',10.36],
['20130227',10.4],
['20130228',10.2],
['20130301',10.04],
['20130304',10.14],
['20130305',10.18],
['20130306',10],
['20130307',10.24],
['20130308',10.12],
['20130311',10.3],
['20130312',10.2],
['20130313',10.34],
['20130314',10.06],
['20130315',10.04],
['20130318',10.16],
['20130319',10.6],
['20130320',11.08],
['20130321',10.76],
['20130322',10.98],
['20130325',10.4],
['20130326',10.58],
['20130327',10.48],
['20130328',10.42],
['20130402',10.24],
['20130403',9.79],
['20130405',9.29],
['20130408',9.62],
['20130409',9.46],
['20130410',9.92],
['20130411',9.74],
['20130412',9.52],
['20130415',8.6],
['20130416',8.31],
['20130417',8.23],
['20130418',8.13],
['20130419',8.4],
['20130422',8.35],
['20130423',8.19],
['20130424',8.4],
['20130425',8.75],
['20130426',8.74],
['20130429',8.6],
['20130430',8.62],
['20130502',8.43],
['20130503',8.38],
['20130506',8.48],
['20130507',8.43],
['20130508',8.53],
['20130509',8.68],
['20130510',8.5],
['20130513',8.26],
['20130514',8.07],
['20130515',7.96],
['20130516',7.66],
['20130520',7.33],
['20130521',7.52],
['20130522',7.55],
['20130523',7.46],
['20130524',7.48],
['20130527',7.58],
['20130528',7.77],
['20130529',7.38],
['20130530',7.38],
['20130531',7.7],
['20130603',7.52],
['20130604',7.62],
['20130605',7.44],
['20130606',7.31],
['20130607',7.33],
['20130610',7.05],
['20130611',6.76],
['20130613',6.76],
['20130614',6.78],
['20130617',6.66],
['20130618',6.61],
['20130619',6.44],
['20130620',6.09],
['20130621',5.92],
['20130624',5.56],
['20130625',5.15],
['20130626',5.09],
['20130627',5.11],
['20130628',4.99],
['20130702',5.1],
['20130703',4.8],
['20130704',4.97],
['20130705',4.85],
['20130708',4.62],
['20130709',4.61],
['20130710',4.57],
['20130711',5.02],
['20130712',4.85],
['20130715',4.85],
['20130716',4.84],
['20130717',4.82],
['20130718',4.79],
['20130719',4.87],
['20130722',5.09],
['20130723',5.3],
['20130724',5.68],
['20130725',5.48],
['20130726',5.55],
['20130729',5.27],
['20130730',5.18],
['20130731',5.24],
['20130801',5.2],
['20130802',5.1],
['20130805',5.12],
['20130806',4.96],
['20130807',4.95],
['20130808',5.03],
['20130809',5.17],
['20130812',6.07],
['20130813',6.34],
['20130815',6.57],
['20130816',6.72],
['20130819',6.68],
['20130820',6.12],
['20130821',6.27],
['20130822',6.4],
['20130823',6.51],
['20130826',7.02],
['20130827',6.98],
['20130828',7.33],
['20130829',7.25],
['20130830',7.34],
['20130902',7.19],
['20130903',7.1],
['20130904',7.5],
['20130905',7.36],
['20130906',7.15],
['20130909',7.25],
['20130910',7.14],
['20130911',6.97],
['20130912',6.75],
['20130913',6.45],
['20130916',6.82],
['20130917',6.72],
['20130918',6.61],
['20130919',7.61],
['20130923',7.05],
['20130924',6.99],
['20130925',7.17],
['20130926',7.05],
['20130927',6.93],
['20130930',6.9],
['20131002',6.72],
['20131003',6.81],
['20131004',6.76],
['20131007',6.76],
['20131008',6.74],
['20131009',6.65],
['20131010',6.62],
['20131011',6.62],
['20131015',6.5],
['20131016',6.44],
['20131017',6.44],
['20131018',6.47],
['20131021',6.46],
['20131022',6.4],
['20131023',6.44],
['20131024',6.26],
['20131025',6.29],
['20131028',6.32],
['20131029',6.36],
['20131030',6.38],
['20131031',6.25],
['20131101',6.23],
['20131104',6.19],
['20131105',6.11],
['20131106',5.84],
['20131107',5.9],
['20131108',5.71],
['20131111',5.49],
['20131112',5.29],
['20131113',5.19],
['20131114',5.19],
['20131115',5.56],
['20131118',5.47],
['20131119',5.3],
['20131120',5.41],
['20131121',5.32],
['20131122',5.35],
['20131125',5.25],
['20131126',5.2],
['20131127',5.15],
['20131128',5.03],
['20131129',5.11],
['20131202',5.13],
['20131203',4.87],
['20131204',4.91],
['20131205',4.98],
['20131206',4.88],
['20131209',4.85],
['20131210',4.885],
['20131211',4.86],
['20131212',4.81],
['20131213',4.69],
['20131216',4.63],
['20131217',4.57],
['20131218',4.55],
['20131219',4.5],
['20131220',4.31],
['20131223',4.46],
['20131224',4.48],
['20131227',4.54],
['20131230',4.45],
['20131231',4.42],
['20140102',4.55],
['20140103',4.67],
['20140106',4.61],
['20140107',4.6],
['20140108',4.48],
['20140109',4.49],
['20140110',4.57],
['20140113',4.51],
['20140114',4.55],
['20140115',4.52],
['20140116',4.54],
['20140117',4.49],
['20140120',4.66],
['20140121',4.63],
['20140122',4.69],
['20140123',4.55],
['20140124',4.65],
['20140127',4.78],
['20140128',4.59],
['20140129',4.55],
['20140130',4.63],
['20140204',4.46],
['20140205',4.4],
['20140206',4.43],
['20140207',4.54],
['20140210',4.5],
['20140211',5.05],
['20140212',5.15],
['20140213',5.1],
['20140214',5.43],
['20140217',5.55],
['20140218',5.3],
['20140219',5.48],
['20140220',5.29],
['20140221',5.24],
['20140224',5.29],
['20140225',5.26],
['20140226',5.4],
['20140227',5.12],
['20140228',5.14],
['20140303',5.46],
['20140304',5.32],
['20140305',5.22],
['20140306',5.3],
['20140307',5.36],
['20140310',5.14],
['20140311',5.14],
['20140312',5.26],
['20140313',5.51],
['20140314',5.49],
['20140317',5.44],
['20140318',5.1],
['20140319',4.95],
['20140320',4.83],
['20140321',4.71],
['20140324',4.75],
['20140325',4.7],
['20140326',4.67],
['20140327',4.57],
['20140328',4.67],
['20140331',4.61],
['20140401',4.6],
['20140402',4.75],
['20140403',4.81],
['20140404',4.76],
['20140407',4.74],
['20140408',4.82],
['20140409',4.84],
['20140410',5.11],
['20140411',4.95],
['20140414',5.06],
['20140415',4.85],
['20140416',4.78],
['20140417',4.83],
['20140422',4.77],
['20140423',4.74],
['20140424',4.75],
['20140425',4.81],
['20140428',4.74],
['20140429',4.71],
['20140430',4.66],
['20140502',4.57],
['20140505',4.56],
['20140507',4.52],
['20140508',4.42],
['20140509',4.42],
['20140512',4.39],
['20140513',4.44],
['20140514',4.38],
['20140515',4.4],
['20140516',4.35],
['20140519',4.31],
['20140520',4.25],
['20140521',4.25],
['20140522',4.34],
['20140523',4.31],
['20140526',4.28],
['20140527',4.3],
['20140528',4.15],
['20140529',4.17],
['20140530',4.2],
['20140603',4.17],
['20140604',4.18],
['20140605',4.17],
['20140606',4.19],
['20140609',4.19],
['20140610',4.23],
['20140611',4.26],
['20140612',4.3],
['20140613',4.34],
['20140616',4.44],
['20140617',4.31],
['20140618',4.25],
['20140619',4.26],
['20140620',4.51],
['20140623',4.43],
['20140624',4.43],
['20140625',4.31],
['20140626',4.31],
['20140627',4.38],
['20140630',4.44],
['20140702',4.47],
['20140703',4.73],
['20140704',4.76],
['20140707',4.75],
['20140708',4.64],
['20140709',4.72],
['20140710',4.81],
['20140711',4.91],
['20140714',4.84],
['20140715',4.86],
['20140716',4.73],
['20140717',4.72],
['20140718',4.74],
['20140721',4.7],
['20140722',4.75],
['20140723',4.84],
['20140724',4.8],
['20140725',4.76],
['20140728',4.91],
['20140729',4.85],
['20140730',4.78],
['20140731',4.87],
['20140801',4.8],
['20140804',4.87],
['20140805',4.87],
['20140806',4.92],
['20140807',5.11],
['20140808',5.29],
['20140811',5.24],
['20140812',5.19],
['20140813',5.22],
['20140814',5.19],
['20140815',5.11],
['20140818',5.1],
['20140819',4.99],
['20140820',4.89],
['20140821',4.9],
['20140822',4.9],
['20140825',4.9],
['20140826',4.96],
['20140827',4.88],
['20140828',4.84],
['20140829',4.81],
['20140901',4.83],
['20140902',4.78],
['20140903',4.75],
['20140904',4.76],
['20140905',4.75],
['20140908',4.75],
['20140910',4.72],
['20140911',4.68],
['20140912',4.6],
['20140915',4.59],
['20140916',4.54],
['20140917',4.5],
['20140918',4.44],
['20140919',4.52],
['20140922',4.45],
['20140923',4.49],
['20140924',4.48],
['20140925',4.46],
['20140926',4.39],
['20140929',4.36],
['20140930',4.3],
['20141003',4.33],
['20141006',4.37],
['20141007',4.44],
['20141008',4.35],
['20141009',4.48],
['20141010',4.39],
['20141013',4.34],
['20141014',4.39],
['20141015',4.36],
['20141016',4.36],
['20141017',4.37],
['20141020',4.29],
['20141021',4.26],
['20141022',4.3],
['20141023',4.28],
['20141024',4.2],
['20141027',4.16],
['20141028',4.16],
['20141029',4.23],
['20141030',4.2],
['20141031',4.12],
['20141103',3.99],
['20141104',4],
['20141105',3.93],
['20141106',3.86],
['20141107',3.89],
['20141110',3.98],
['20141111',3.92],
['20141112',3.97],
['20141113',3.95],
['20141114',4],
['20141117',4.08],
['20141118',4.01],
['20141119',4.04],
['20141120',3.97],
['20141121',3.99],
['20141124',4.1],
['20141125',4.11],
['20141126',4.16],
['20141127',4.1],
['20141128',4.06],
['20141201',3.91],
['20141202',3.97],
['20141203',3.9],
['20141204',3.97],
['20141205',3.96],
['20141208',3.89],
['20141209',3.88],
['20141210',4.01],
['20141211',3.99],
['20141212',3.98],
['20141215',4.04],
['20141216',3.93],
['20141217',3.85],
['20141218',3.89],
['20141219',3.94],
['20141222',3.92],
['20141223',3.78],
['20141224',3.76],
['20141229',3.85],
['20141230',3.81],
['20141231',3.91],
['20150102',3.9],
['20150105',4.01],
['20150106',4.16],
['20150107',4.3],
['20150108',4.28],
['20150109',4.28],
['20150112',4.31],
['20150113',4.48],
['20150114',4.34],
['20150115',4.27],
['20150116',4.44],
['20150119',4.53],
['20150120',4.54],
['20150121',4.81],
['20150122',4.88],
['20150123',4.8],
['20150126',4.83],
['20150127',4.62],
['20150128',4.57],
['20150129',4.58],
['20150130',4.67],
['20150202',4.82],
['20150203',4.8],
['20150204',4.71],
['20150205',4.81],
['20150206',4.79],
['20150209',4.65],
['20150210',4.67],
['20150211',4.65],
['20150212',4.51],
['20150213',4.64],
['20150216',4.68],
['20150217',4.64],
['20150218',4.63],
['20150223',4.66],
['20150224',4.66],
['20150225',4.67],
['20150226',4.68],
['20150227',4.7],
['20150302',4.71],
['20150303',4.65],
['20150304',4.62],
['20150305',4.58],
['20150306',4.51],
['20150309',4.28],
['20150310',4.2],
['20150311',4.06],
['20150312',4.15],
['20150313',4.09],
['20150316',4.1],
['20150317',4.11],
['20150318',4.14],
['20150319',4.23],
['20150320',4.29],
['20150323',4.16],
['20150324',4.11],
['20150325',4.13],
['20150326',4.18],
['20150327',4.11],
['20150330',4.13],
['20150331',4.14],
['20150401',4.17],
['20150402',4.28],
['20150408',4.56],
['20150409',4.53],
['20150410',4.55],
['20150413',4.76],
['20150414',4.65],
['20150415',4.65],
['20150416',4.8],
['20150417',4.75],
['20150420',4.56],
['20150421',4.58],
['20150422',4.63],
['20150423',4.64],
['20150424',4.76],
['20150427',4.8],
['20150428',5.05],
['20150429',5.67],
['20150430',5.54],
['20150504',5.76],
['20150505',5.66],
['20150506',5.45],
['20150507',5.13],
['20150508',5.23],
['20150511',5.15],
['20150512',5.11],
['20150513',5.25],
['20150514',5.37],
['20150515',5.35],
['20150518',5.62],
['20150519',5.52],
['20150520',5.29],
['20150521',5.34],
['20150522',5.54],
['20150526',5.67],
['20150527',5.86],
['20150528',5.98],
['20150529',6.46],
['20150601',5.95],
['20150602',5.65],
['20150603',5.78],
['20150604',5.76],
['20150605',5.75],
['20150608',5.61],
['20150609',5.44],
['20150610',5.45],
['20150611',5.36],
['20150612',5.26],
['20150615',5.21],
['20150616',5.15],
['20150617',5.24],
['20150618',5.26],
['20150619',5.32],
['20150622',5.25],
['20150623',5.27],
['20150624',5.26],
['20150625',5.19],
['20150626',5.05],
['20150629',4.78],
['20150630',4.85],
['20150702',4.48],
['20150703',4.26],
['20150706',4.08],
['20150707',3.94],
['20150708',3.5],
['20150709',4.03],
['20150710',4.2],
['20150713',4.27],
['20150714',4.13],
['20150715',3.91],
['20150716',3.92],
['20150717',3.94],
['20150720',3.77],
['20150721',3.7],
['20150722',3.7],
['20150723',3.76],
['20150724',3.78],
['20150727',3.77],
['20150728',3.67],
['20150729',3.72],
['20150730',3.63],
['20150731',3.62],
['20150803',3.53],
['20150804',3.49],
['20150805',3.47],
['20150806',3.6],
['20150807',3.66],
['20150810',3.71],
['20150811',3.87],
['20150812',3.95],
['20150813',4.27],
['20150814',4.02],
['20150817',3.98],
['20150818',4.01],
['20150819',4.24],
['20150820',4.1],
['20150821',4.32],
['20150824',4],
['20150825',3.83],
['20150826',3.87],
['20150827',3.84],
['20150828',4],
['20150831',3.86],
['20150901',3.76],
['20150902',3.7],
['20150904',3.85],
['20150907',4],
['20150908',3.99],
['20150909',3.99],
['20150910',4.01],
['20150911',4.06],
['20150914',4],
['20150915',4],
['20150916',4.09],
['20150917',4.15],
['20150918',4.52],
['20150921',4.47],
['20150922',4.28],
['20150923',4.12],
['20150924',4.13],
['20150925',4.16],
['20150929',4.05],
['20150930',4.05],
['20151002',4.07],
['20151005',4.16],
['20151006',4.14],
['20151007',4.32],
['20151008',4.25],
['20151009',4.28],
['20151012',4.68],
['20151013',4.66],
['20151014',4.65],
['20151015',5.05],
['20151016',4.92],
['20151019',4.8],
['20151020',4.61],
['20151022',4.61],
['20151023',4.68],
['20151026',4.55],
['20151027',4.46],
['20151028',4.65],
['20151029',4.53],
['20151030',4.4],
['20151102',4.35],
['20151103',4.32],
['20151104',4.49],
['20151105',4.42],
['20151106',4.46],
['20151109',4.3],
['20151110',4.52],
['20151111',4.35],
['20151112',4.39],
['20151113',4.4],
['20151116',4.32],
['20151117',4.34],
['20151118',4.22],
['20151119',4.19],
['20151120',4.3],
['20151123',4.23],
['20151124',4.2],
['20151125',4.33],
['20151126',4.24],
['20151127',4.2],
['20151130',4.12],
['20151201',4.2],
['20151202',4.15],
['20151203',4.14],
['20151204',4.12],
['20151207',4.27],
['20151208',4.18],
['20151209',4.28],
['20151210',4.22],
['20151211',4.04],
['20151214',4.12],
['20151215',4.08],
['20151216',4.2],
['20151217',4.28],
['20151218',4.07],
['20151221',4.2],
['20151222',4.22],
['20151223',4.23],
['20151224',4.28],
['20151228',4.29],
['20151229',4.34],
['20151230',4.32],
['20151231',4.38],
['20160104',4.21],
['20160105',4.32],
['20160106',4.5],
['20160107',4.46],
['20160108',4.56],
['20160111',4.47],
['20160112',4.47],
['20160113',4.48],
['20160114',4.58],
['20160115',4.52],
['20160118',4.49],
['20160119',4.8],
['20160120',4.59],
['20160121',4.61],
['20160122',4.69],
['20160125',4.73],
['20160126',4.5],
['20160127',4.5],
['20160128',4.58],
['20160129',4.39],
['20160201',4.25],
['20160202',4.17],
['20160203',4.07],
['20160204',4.21],
['20160205',4.62],
['20160211',5.08],
['20160212',5.32],
['20160215',5.25],
['20160216',5.16],
['20160217',5.29],
['20160218',5.21],
['20160219',5.69],
['20160222',5.71],
['20160223',6.16],
['20160224',6.1],
['20160225',6.25],
['20160226',6.13],
['20160229',6.11],
['20160301',6.6],
['20160302',6.39],
['20160303',6.48],
['20160304',6.82],
['20160307',6.54],
['20160308',6.51],
['20160309',6.17],
['20160310',6.07],
['20160311',6.59],
['20160314',6.39],
['20160315',6.12],
['20160316',5.95],
['20160317',6.35],
['20160318',6.55],
['20160321',6.29],
['20160322',6.45],
['20160323',6.18],
['20160324',6.04],
['20160329',5.98],
['20160330',6.17],
['20160331',6.02],
['20160401',6.02],
['20160405',5.83],
['20160406',5.71],
['20160407',5.82],
['20160408',5.9],
['20160411',6.41],
['20160412',6.42],
['20160413',6.29],
['20160414',6.2],
['20160415',6.11],
['20160418',6.19],
['20160419',6.51],
['20160420',6.38],
['20160421',6.49],
['20160422',6.25],
['20160425',6.06],
['20160426',6.07],
['20160427',6.25],
['20160428',6.4],
['20160429',6.63],
['20160503',7.03],
['20160504',6.76],
['20160505',6.84],
['20160506',6.86],
['20160509',6.98],
['20160510',7],
['20160511',7.16],
['20160512',6.95],
['20160513',7.05],
['20160516',7.17],
['20160517',7.2],
['20160518',7.14],
['20160519',6.64],
['20160520',6.64],
['20160523',6.67],
['20160524',6.55],
['20160525',6.37],
['20160526',6.52],
['20160527',6.46],
['20160530',6.18],
['20160531',6.18],
['20160601',6.38],
['20160602',6.33],
['20160603',6.6],
['20160606',6.96],
['20160607',7.04],
['20160608',7],
['20160610',7.17],
['20160613',7.4],
['20160614',7.35],
['20160615',7.4],
['20160616',7.96],
['20160617',7.73],
['20160620',7.27],
['20160621',7.33],
['20160622',7.31],
['20160623',7.42],
['20160624',7.95],
['20160627',8.24],
['20160628',7.93],
['20160629',8.22],
['20160630',8.19],
['20160704',8.56],
['20160705',8.46],
['20160706',9.17],
['20160707',9.08],
['20160708',9.31],
['20160711',9.62],
['20160712',9.5],
['20160713',9.36],
['20160714',9.27],
['20160715',9.14],
['20160718',8.84],
['20160719',8.91],
['20160720',8.78],
['20160721',8.55],
['20160722',8.66],
['20160725',8.51],
['20160726',8.62],
['20160727',8.48],
['20160728',9.06],
['20160729',8.9],
['20160801',9.18],
['20160803',9.44],
['20160804',9.14],
['20160805',9.38],
['20160808',9.01],
['20160809',8.89],
['20160810',9.21],
['20160811',9.12],
['20160812',9.03],
['20160815',8.98],
['20160816',9.01],
['20160817',8.71],
['20160818',8.61],
['20160819',8.59],
['20160822',8.42],
['20160823',8.73],
['20160824',8.61],
['20160825',8.39],
['20160826',8.56],
['20160829',8.21],
['20160830',8.34],
['20160831',7.94],
['20160901',7.81],
['20160902',8.12],
['20160905',8.37],
['20160906',8.37],
['20160907',8.45],
['20160908',8.19],
['20160909',8.01],
['20160912',8.13],
['20160913',7.95],
['20160914',7.99],
['20160915',8.1],
['20160919',8.18],
['20160920',8.18],
['20160921',8.28],
['20160922',8.3],
['20160923',8.16],
['20160926',8.04],
['20160927',8.01],
['20160928',7.75],
['20160929',7.8],
['20160930',7.83],
['20161003',7.86],
['20161004',7.86],
['20161005',7.5],
['20161006',7.5],
['20161007',7.42],
['20161011',7.37],
['20161012',7.28],
['20161013',7.38],
['20161014',7.46],
['20161017',7.36],
['20161018',7.4],
['20161019',7.5],
['20161020',7.77],
['20161024',7.62],
['20161025',7.49],
['20161026',7.78],
['20161027',7.66],
['20161028',7.54],
['20161031',7.84],
['20161101',7.87],
['20161102',8.45],
['20161103',8.45],
['20161104',8.39],
['20161107',8.07],
['20161108',8.09],
['20161109',8.74],
['20161110',8.12],
['20161111',7.86],
['20161114',7.24],
['20161115',7.23],
['20161116',7.36],
['20161117',7.2],
['20161118',6.92],
['20161121',7.03],
['20161122',7.09],
['20161123',7.11],
['20161124',6.82],
['20161125',6.93],
['20161128',6.96],
['20161129',7.03],
['20161130',7.06],
['20161201',7.05],
['20161202',7.03],
['20161205',6.85],
['20161206',6.9],
['20161207',6.97],
['20161208',7.09],
['20161209',6.92],
['20161212',6.65],
['20161213',6.75],
['20161214',6.95],
['20161215',6.69],
['20161216',6.29],
['20161219',6.2],
['20161220',6.16],
['20161221',6.19],
['20161222',6.08],
['20161223',6.19],
['20161228',6.3],
['20161229',6.34],
['20161230',6.67],
['20170103',6.73],
['20170104',6.84],
['20170105',6.97],
['20170106',6.99],
['20170109',6.86],
['20170110',7.07],
['20170111',7.15],
['20170112',7.24],
['20170113',7.13],
['20170116',7.19],
['20170117',7.35],
['20170118',7.28],
['20170119',7.14],
['20170120',7.18],
['20170123',7.2],
['20170124',7.21],
['20170125',7.11],
['20170126',6.87],
['20170127',6.85],
['20170201',7.03],
['20170202',7.12],
['20170203',7.42],
['20170206',7.57],
['20170207',7.91],
['20170208',7.76],
['20170209',7.9],
['20170210',7.57],
['20170213',7.74],
['20170214',7.59],
['20170215',7.51],
['20170216',7.56],
['20170217',7.65],
['20170220',7.68],
['20170221',7.58],
['20170222',7.57],
['20170223',7.49],
['20170224',7.8],
['20170227',7.72],
['20170228',7.6],
['20170301',7.54],
['20170302',7.54],
['20170303',7.36],
['20170306',7.44],
['20170307',7.39],
['20170308',7.33],
['20170309',7.17],
['20170310',7.03],
['20170313',7.3],
['20170314',7.25],
['20170315',7.25],
['20170316',7.55],
['20170317',7.51],
['20170320',7.53],
['20170321',7.39],
['20170322',7.56],
['20170323',7.61],
['20170324',7.6],
['20170327',7.7],
['20170328',7.5],
['20170329',7.1],
['20170330',6.96],
['20170331',6.8],
['20170403',6.91],
['20170405',6.94],
['20170406',6.94],
['20170407',7.06],
['20170410',6.97],
['20170411',7.13],
['20170412',7.22],
['20170413',7.34],
['20170418',7.24],
['20170419',7.19],
['20170420',7.01],
['20170421',7.02],
['20170424',6.9],
['20170425',6.96],
['20170426',6.79],
['20170427',6.82],
['20170428',6.76],
['20170502',6.57],
['20170504',6.46],
['20170505',6.35],
['20170508',6.46],
['20170509',6.43],
['20170510',6.33],
['20170511',6.41],
['20170512',6.35],
['20170515',6.44],
['20170516',6.48],
['20170517',6.68],
['20170518',6.72],
['20170519',6.6],
['20170522',6.62],
['20170523',6.65],
['20170524',6.58],
['20170525',6.54],
['20170526',6.53],
['20170529',6.52],
['20170531',6.37],
['20170601',6.42],
['20170602',6.42],
['20170605',6.46],
['20170606',6.71],
['20170607',6.82],
['20170608',6.76],
['20170609',6.59],
['20170612',6.5],
['20170613',6.49],
['20170614',6.48],
['20170615',6.44],
['20170616',6.36],
['20170619',6.32],
['20170620',6.27],
['20170621',6.3],
['20170622',6.42],
['20170623',6.42],
['20170626',6.4],
['20170627',6.43],
['20170628',6.34],
['20170629',6.34],
['20170630',6.39],
['20170703',6.31],
['20170704',6.28],
['20170705',6.22],
['20170706',6.2],
['20170707',6.18],
['20170710',6.15],
['20170711',6.21],
['20170712',6.18],
['20170713',6.27],
['20170714',6.27],
['20170717',6.21],
['20170718',6.39],
['20170719',6.42],
['20170720',6.32],
['20170721',6.3],
['20170724',6.32],
['20170725',6.28],
['20170726',6.28],
['20170727',6.31],
['20170728',6.26],
['20170731',6.41],
['20170801',6.38],
['20170802',6.45],
['20170803',6.41],
['20170804',6.62],
['20170807',6.5],
['20170808',6.5],
['20170809',6.59],
['20170810',6.69],
['20170811',6.48],
['20170814',6.54],
['20170815',6.4],
['20170816',6.39],
['20170817',6.42],
['20170818',6.48],
['20170821',6.38],
['20170822',6.4],
['20170824',6.41],
['20170825',6.42],
['20170828',6.63],
['20170829',6.77],
['20170830',6.7],
['20170831',6.8],
['20170901',6.83],
['20170904',7.13],
['20170905',7.12],
['20170906',7.2],
['20170907',7.12],
['20170908',7.28],
['20170911',7],
['20170912',6.76],
['20170913',6.79],
['20170914',6.78],
['20170915',6.84],
['20170918',6.63],
['20170919',6.71],
['20170920',6.8],
['20170921',6.55],
['20170922',6.7],
['20170925',6.66],
['20170926',6.66],
['20170927',6.66],
['20170928',6.55],
['20170929',6.6],
['20171003',6.59],
['20171004',6.67],
['20171006',6.59],
['20171009',6.65],
['20171010',6.63],
['20171011',6.68],
['20171012',6.67],
['20171013',6.73],
['20171016',6.74],
['20171017',6.64],
['20171018',6.65],
['20171019',6.62],
['20171020',6.71],
['20171023',6.6],
['20171024',6.57],
['20171025',6.52],
['20171026',6.51],
['20171027',6.46],
['20171030',6.45],
['20171031',6.41],
['20171101',6.39],
['20171102',6.45],
['20171103',6.43],
['20171106',6.42],
['20171107',6.36],
['20171108',6.37],
['20171109',6.4],
['20171110',6.45],
['20171113',6.37],
['20171114',6.34],
['20171115',6.42],
['20171116',6.34],
['20171117',6.35],
['20171120',6.19],
['20171121',5.99],
['20171122',6.05],
['20171123',6.03],
['20171124',6.12],
['20171127',6.05],
['20171128',6.05],
['20171129',6.03],
['20171130',6.09],
['20171201',5.98],
['20171204',5.95],
['20171205',5.92],
['20171206',5.72],
['20171207',5.7],
['20171208',5.71],
['20171211',5.69],
['20171212',5.73],
['20171213',5.7],
['20171214',5.79],
['20171215',5.83],
['20171218',5.79],
['20171219',5.85],
['20171220',5.83],
['20171221',5.91],
['20171222',5.96],
['20171227',6.02],
['20171228',6.09],
['20171229',6.05],
['20180102',6.18],
['20180103',6.16],
['20180104',6.21],
['20180105',6.19],
['20180108',6.24],
['20180109',6.17],
['20180110',6.13],
['20180111',6.13],
['20180112',6.13],
['20180115',6.38],
['20180116',6.33],
['20180117',6.26],
['20180118',6.28],
['20180119',6.3],
['20180122',6.35],
['20180123',6.31],
['20180124',6.45],
['20180125',6.62],
['20180126',6.67],
['20180129',6.69],
['20180130',6.53],
['20180131',6.66],
['20180201',6.52],
['20180202',6.69],
['20180205',6.48],
['20180206',6.22],
['20180207',6.11],
['20180208',6.11],
['20180209',5.98],
['20180212',5.98],
['20180213',6.1],
['20180214',6.19],
['20180215',6.34],
['20180220',6.24],
['20180221',6.27],
['20180222',6.17],
['20180223',6.18],
['20180226',6.25],
['20180227',6.12],
['20180228',6.05],
['20180301',6.05],
['20180302',6.05],
['20180305',5.99],
['20180306',6.05],
['20180307',6.01],
['20180308',5.97],
['20180309',5.89],
['20180312',5.89],
['20180313',5.9],
['20180314',5.93],
['20180315',5.88],
['20180316',5.77],
['20180319',5.81],
['20180320',5.88],
['20180321',5.95],
['20180322',5.94],
['20180323',6.15],
['20180326',6.24],
['20180327',6.12],
['20180328',6.12],
['20180329',6.17],
['20180403',6.2],
['20180404',6.18],
['20180406',6.2],
['20180409',6.19],
['20180410',6.25],
['20180411',6.4],
['20180412',6.38],
['20180413',6.3],
['20180416',6.3],
['20180417',6.29],
['20180418',6.37],
['20180419',6.43],
['20180420',6.35],
['20180423',6.22],
['20180424',6.17],
['20180425',6.17],
['20180426',6.18],
['20180427',6.18],
['20180430',6.18],
['20180502',6.11],
['20180503',6],
['20180504',6.01],
['20180507',6],
['20180508',6.01],
['20180509',6.06],
['20180510',6.01],
['20180511',6.04],
['20180514',6.05],
['20180515',6.15],
['20180516',6.14],
['20180517',6.14],
['20180518',6.24],
['20180521',6.21],
['20180523',6.29],
['20180524',6.4],
['20180525',6.52],
['20180528',6.65],
['20180529',6.59],
['20180530',6.67],
['20180531',6.55],
['20180601',6.43],
['20180604',6.58],
['20180605',6.44],
['20180606',6.71],
['20180607',6.62],
['20180608',6.6],
['20180611',6.67],
['20180612',6.57],
['20180613',6.46],
['20180614',6.4],
['20180615',6.19],
['20180619',6.02],
['20180620',5.99],
['20180621',5.93],
['20180622',6.01],
['20180625',5.96],
['20180626',5.89],
['20180627',5.88],
['20180628',5.88],
['20180629',5.99],
['20180703',6.16],
['20180704',6.06],
['20180705',6.06],
['20180706',6.07],
['20180709',6.16],
['20180710',6.12],
['20180711',6.04],
['20180712',6.11],
['20180713',6.11],
['20180716',6.1],
['20180717',6.05],
['20180718',6.06],
['20180719',6.04],
['20180720',6.01],
['20180723',6.1],
['20180724',6.17],
['20180725',6.2],
['20180726',6.31],
['20180727',6.35],
['20180730',6.26],
['20180731',6.33],
['20180801',6.35],
['20180802',6.33],
['20180803',6.23],
['20180806',6.34],
['20180807',6.33],
['20180808',6.37],
['20180809',6.35],
['20180810',6.29],
['20180813',6.21],
['20180814',6.27],
['20180815',6.04],
['20180816',5.98],
['20180817',5.94],
['20180820',6.06],
['20180821',6.18],
['20180822',6.17],
['20180823',6.2],
['20180824',6.22],
['20180827',6.26],
['20180828',6.31],
['20180829',6.11],
['20180830',6.1],
['20180831',6.12],
['20180903',5.97],
['20180904',6.02],
['20180905',6.01],
['20180906',5.94],
['20180907',6.05],
['20180910',5.98],
['20180911',6],
['20180912',5.93],
['20180913',6.04],
['20180914',6.07],
['20180917',6.01],
['20180918',5.93],
['20180919',5.96],
['20180920',6.04],
['20180921',6.11],
['20180924',6.05],
['20180926',6.07],
['20180927',6.03],
['20180928',6.06],
['20181002',6.01],
['20181003',6.03],
['20181004',6.04],
['20181005',6.06],
['20181008',6.08],
['20181009',6.09],
['20181010',5.96],
['20181011',6.28],
['20181012',6.7],
['20181015',7],
['20181016',6.94],
['20181018',6.81],
['20181019',6.92],
['20181022',6.91],
['20181023',7.05],
['20181024',7.03],
['20181025',7.08],
['20181026',7.09],
['20181029',7.17],
['20181030',7.01],
['20181031',6.92],
['20181101',6.9],
['20181102',6.85],
['20181105',7.03],
['20181106',7.07],
['20181107',7.07],
['20181108',6.92],
['20181109',6.83],
['20181112',6.82],
['20181113',6.83],
['20181114',6.87],
['20181115',6.92],
['20181116',6.96],
['20181119',7.06],
['20181120',7.12],
['20181121',7.06],
['20181122',7.13],
['20181123',7.24],
['20181126',7.2],
['20181127',7.2],
['20181128',7.2],
['20181129',7.5],
['20181130',7.45],
['20181203',7.31],
['20181204',7.51],
['20181205',7.3],
['20181206',7.42],
['20181207',7.42],
['20181210',7.65],
['20181211',7.65],
['20181212',7.59],
['20181213',7.6],
['20181214',7.62],
['20181217',8.01],
['20181218',7.99],
['20181219',8.2],
['20181220',7.98],
['20181221',8.05],
['20181224',7.97],
['20181227',8.08],
['20181228',8.19],
['20181231',7.95],
['20190102',8],
['20190103',8.05],
['20190104',8.18],
['20190107',8.05],
['20190108',7.82],
['20190109',7.72],
['20190110',7.9],
['20190111',7.76],
['20190114',7.74],
['20190115',7.8],
['20190116',7.34],
['20190117',7.44],
['20190118',7.52],
['20190121',7.29],
['20190122',7.43],
['20190123',7.33],
['20190124',7.51],
['20190125',7.52],
['20190128',7.85],
['20190129',7.96],
['20190130',8.09],
['20190131',8.2],
['20190201',8.34],
['20190204',8.27],
['20190208',8.41],
['20190211',8.26],
['20190212',8.21],
['20190213',8.21],
['20190214',8.12],
['20190215',8.15],
['20190218',8.44],
['20190219',8.59],
['20190220',8.88],
['20190221',8.79],
['20190222',8.67],
['20190225',8.7],
['20190226',8.61],
['20190227',8.55],
['20190228',8.37],
['20190301',8.35],
['20190304',8.29],
['20190305',8.22],
['20190306',8.2],
['20190307',8.31],
['20190308',8.12],
['20190311',8.24],
['20190312',8.34],
['20190313',8.39],
['20190314',8.32],
['20190315',8.41],
['20190318',8.27],
['20190319',8.17],
['20190320',8.23],
['20190321',8.63],
['20190322',8.63],
['20190325',8.8],
['20190326',8.69],
['20190327',8.54],
['20190328',8.46],
['20190329',7.86],
['20190401',7.7],
['20190402',7.66],
['20190403',7.83],
['20190404',7.7],
['20190408',7.69],
['20190409',7.63],
['20190410',7.72],
['20190411',7.67],
['20190412',7.48],
['20190415',7.24],
['20190416',7.28],
['20190417',7.24],
['20190418',7.22],
['20190423',7.1],
['20190424',7.08],
['20190425',7.09],
['20190426',7.21],
['20190429',7.15],
['20190430',6.75],
['20190502',6.65],
['20190503',6.71],
['20190506',7.06],
['20190507',6.98],
['20190508',7.15],
['20190509',7.07],
['20190510',7],
['20190514',7.11],
['20190515',7.12],
['20190516',7.06],
['20190517',6.93],
['20190520',6.78],
['20190521',6.82],
['20190522',6.86],
['20190523',6.64],
['20190524',6.72],
['20190527',6.75],
['20190528',6.85],
['20190529',6.92],
['20190530',6.83],
['20190531',7.2],
['20190603',7.6],
['20190604',7.69],
['20190605',7.9],
['20190606',7.74],
['20190610',7.54],
['20190611',7.55],
['20190612',7.95],
['20190613',7.86],
['20190614',8.25],
['20190617',7.96],
['20190618',8.04],
['20190619',7.83],
['20190620',8.42],
['20190621',8.44],
['20190624',8.45],
['20190625',8.88],
['20190626',8.65],
['20190627',8.64],
['20190628',8.75],
['20190702',8.65],
['20190703',8.77],
['20190704',8.7],
['20190705',8.65],
['20190708',8.58],
['20190709',8.4],
['20190710',8.51],
['20190711',8.67],
['20190712',8.53],
['20190715',8.48],
['20190716',8.55],
['20190717',8.46],
['20190718',8.85],
['20190719',9.25],
['20190722',9.22],
['20190723',9.12],
['20190724',9.09],
['20190725',9.12],
['20190726',9.08],
['20190729',9.06],
['20190730',9.2],
['20190731',9.15],
['20190801',8.58],
['20190802',8.77],
['20190805',9.36],
['20190806',9.2],
['20190807',9.89],
['20190808',9.94],
['20190809',10.32],
['20190812',10.44],
['20190813',11.28],
['20190814',10.76],
['20190815',10.7],
['20190816',10.68],
['20190819',10.18],
['20190820',10.3],
['20190821',10.22],
['20190822',10.16],
['20190823',9.95],
['20190826',10.26],
['20190827',10.18],
['20190828',10.2],
['20190829',10.46],
['20190830',10.26]];
// Provenance of the daily price series defined above.
var source='finance.yahoo.com';
|
/*******************************************************************************
*
* Copyright(c) 2015,2016 Intel Corporation.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Intel Corporation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
******************************************************************************/
/**
* @file
* @brief CDC ACM device class driver
*
* Driver for USB CDC ACM device class driver
*/
#include <nanokernel.h>
#include <init.h>
#include <uart.h>
#include <string.h>
#include <misc/byteorder.h>
#include "cdc_acm.h"
#include "usb_device.h"
#include "usb_common.h"
#ifndef CONFIG_UART_INTERRUPT_DRIVEN
#error "CONFIG_UART_INTERRUPT_DRIVEN must be set for CDC ACM driver"
#endif
/* definitions */

#define SYS_LOG_LEVEL CONFIG_SYS_LOG_USB_CDC_ACM_LEVEL
#include <misc/sys_log.h>

/* Fetch the driver-private data attached to a device instance */
#define DEV_DATA(dev) \
	((struct cdc_acm_dev_data_t * const)(dev)->driver_data)

/* UART API vtable, filled in at the bottom of the file and installed
 * on the device in cdc_acm_init()
 */
static struct uart_driver_api cdc_acm_driver_api;

/* 115200bps, no parity, 1 stop bit, 8bit char */
/* NOTE(review): "DEFAUL" misspelling kept intentionally - the macro name
 * may be referenced elsewhere in this file/project
 */
#define CDC_ACM_DEFAUL_BAUDRATE {sys_cpu_to_le32(115200), 0, 0, 8}

/* Size of the internal buffer used for storing received data */
#define CDC_ACM_BUFFER_SIZE (2 * CDC_BULK_EP_MPS)

/* Misc. macros. NOTE(review): HIGH_BYTE does not mask with 0xFF; callers
 * here only pass 16-bit values, so the result still fits in a byte.
 */
#define LOW_BYTE(x)  ((x) & 0xFF)
#define HIGH_BYTE(x) ((x) >> 8)

/* Singleton device pointer; the USB stack callbacks have no user argument,
 * so the handlers reach the device through this global.
 */
struct device *cdc_acm_dev;

/* Given by the bulk IN handler when a transfer completes */
static struct nano_sem poll_wait_sem;
/* Device data structure */
struct cdc_acm_dev_data_t {
	/* USB device status code, updated from cdc_acm_dev_status_cb() */
	enum usb_dc_status_code usb_status;
	/* Callback function pointer; invoked from the EP handlers when the
	 * matching (Tx/Rx) interrupt is enabled
	 */
	uart_irq_callback_t cb;
	/* Tx ready status. Signals when the bulk IN transfer completed */
	uint8_t tx_ready;
	uint8_t rx_ready;	/* Rx ready status */
	uint8_t tx_irq_ena;	/* Tx interrupt enable status */
	uint8_t rx_irq_ena;	/* Rx interrupt enable status */
	uint8_t rx_buf[CDC_ACM_BUFFER_SIZE];	/* Internal Rx ring buffer */
	uint32_t rx_buf_head;	/* Head of the internal Rx buffer */
	uint32_t rx_buf_tail;	/* Tail of the internal Rx buffer */
	/* Interface data buffer, handed to the USB stack for class
	 * request payloads (see cdc_acm_init)
	 */
	uint8_t interface_data[CDC_CLASS_REQ_MAX_DATA_SIZE];
	/* CDC ACM line coding properties. LE order */
	struct cdc_acm_line_coding line_coding;
	/* CDC ACM line state bitmap, DTE side */
	uint8_t line_state;
	/* CDC ACM serial state bitmap, DCE side */
	uint8_t serial_state;
	/* CDC ACM notification sent status */
	uint8_t notification_sent;
};
/* Structure representing the global USB description: device descriptor,
 * one configuration with a control interface (+ CDC functional
 * descriptors and the interrupt endpoint), a data interface with two
 * bulk endpoints, and the string descriptors.
 */
static const uint8_t cdc_acm_usb_description[] = {
	/* Device descriptor */
	USB_DEVICE_DESC_SIZE,		/* Descriptor size */
	USB_DEVICE_DESC,		/* Descriptor type */
	LOW_BYTE(USB_1_1),
	HIGH_BYTE(USB_1_1),		/* USB version in BCD format */
	COMMUNICATION_DEVICE_CLASS,	/* Class */
	0x00,				/* SubClass - Interface specific */
	0x00,				/* Protocol - Interface specific */
	MAX_PACKET_SIZE0,		/* Max Packet Size */
	LOW_BYTE(CDC_VENDOR_ID),
	HIGH_BYTE(CDC_VENDOR_ID),	/* Vendor Id */
	LOW_BYTE(CDC_PRODUCT_ID),
	HIGH_BYTE(CDC_PRODUCT_ID),	/* Product Id */
	LOW_BYTE(BCDDEVICE_RELNUM),
	HIGH_BYTE(BCDDEVICE_RELNUM),	/* Device Release Number */
	/* Index of Manufacturer String Descriptor */
	0x01,
	/* Index of Product String Descriptor */
	0x02,
	/* Index of Serial Number String Descriptor */
	0x03,
	CDC_NUM_CONF,			/* Number of Possible Configuration */

	/* Configuration descriptor */
	USB_CONFIGURATION_DESC_SIZE,	/* Descriptor size */
	USB_CONFIGURATION_DESC,		/* Descriptor type */
	/* Total length in bytes of data returned */
	LOW_BYTE(CDC_CONF_SIZE),
	HIGH_BYTE(CDC_CONF_SIZE),
	CDC_NUM_ITF,			/* Number of interfaces */
	0x01,				/* Configuration value */
	0x00,				/* Index of the Configuration string */
	USB_CONFIGURATION_ATTRIBUTES,	/* Attributes */
	MAX_LOW_POWER,			/* Max power consumption */

	/* Interface descriptor (0: communication/control) */
	USB_INTERFACE_DESC_SIZE,	/* Descriptor size */
	USB_INTERFACE_DESC,		/* Descriptor type */
	0x00,				/* Interface index */
	0x00,				/* Alternate setting */
	CDC1_NUM_EP,			/* Number of Endpoints */
	COMMUNICATION_DEVICE_CLASS,	/* Class */
	ACM_SUBCLASS,			/* SubClass */
	V25TER_PROTOCOL,		/* Protocol */
	/* Index of the Interface String Descriptor */
	0x00,

	/* Header Functional Descriptor */
	USB_HFUNC_DESC_SIZE,		/* Descriptor size */
	CS_INTERFACE,			/* Descriptor type */
	USB_HFUNC_SUBDESC,		/* Descriptor SubType */
	LOW_BYTE(USB_1_1),
	HIGH_BYTE(USB_1_1),		/* CDC Device Release Number */

	/* Call Management Functional Descriptor */
	USB_CMFUNC_DESC_SIZE,		/* Descriptor size */
	CS_INTERFACE,			/* Descriptor type */
	USB_CMFUNC_SUBDESC,		/* Descriptor SubType */
	0x00,				/* Capabilities */
	0x01,				/* Data Interface */

	/* ACM Functional Descriptor */
	USB_ACMFUNC_DESC_SIZE,		/* Descriptor size */
	CS_INTERFACE,			/* Descriptor type */
	USB_ACMFUNC_SUBDESC,		/* Descriptor SubType */
	/* Capabilities - Device supports the request combination of:
	 * Set_Line_Coding,
	 * Set_Control_Line_State,
	 * Get_Line_Coding
	 * and the notification Serial_State
	 */
	0x02,

	/* Union Functional Descriptor */
	USB_UFUNC_DESC_SIZE,		/* Descriptor size */
	CS_INTERFACE,			/* Descriptor type */
	USB_UFUNC_SUBDESC,		/* Descriptor SubType */
	0x00,				/* Master Interface */
	0x01,				/* Slave Interface */

	/* Endpoint INT */
	USB_ENDPOINT_DESC_SIZE,		/* Descriptor size */
	USB_ENDPOINT_DESC,		/* Descriptor type */
	CDC_ENDP_INT,			/* Endpoint address */
	USB_DC_EP_INTERRUPT,		/* Attributes */
	LOW_BYTE(CDC_INTERRUPT_EP_MPS),
	HIGH_BYTE(CDC_INTERRUPT_EP_MPS),/* Max packet size */
	0x0A,				/* Interval */

	/* Interface descriptor (1: data) */
	USB_INTERFACE_DESC_SIZE,	/* Descriptor size */
	USB_INTERFACE_DESC,		/* Descriptor type */
	0x01,				/* Interface index */
	0x00,				/* Alternate setting */
	CDC2_NUM_EP,			/* Number of Endpoints */
	COMMUNICATION_DEVICE_CLASS_DATA,/* Class */
	0x00,				/* SubClass */
	0x00,				/* Protocol */
	/* Index of the Interface String Descriptor */
	0x00,

	/* First Endpoint IN */
	USB_ENDPOINT_DESC_SIZE,		/* Descriptor size */
	USB_ENDPOINT_DESC,		/* Descriptor type */
	CDC_ENDP_IN,			/* Endpoint address */
	USB_DC_EP_BULK,			/* Attributes */
	LOW_BYTE(CDC_BULK_EP_MPS),
	HIGH_BYTE(CDC_BULK_EP_MPS),	/* Max packet size */
	0x00,				/* Interval */

	/* Second Endpoint OUT */
	USB_ENDPOINT_DESC_SIZE,		/* Descriptor size */
	USB_ENDPOINT_DESC,		/* Descriptor type */
	CDC_ENDP_OUT,			/* Endpoint address */
	USB_DC_EP_BULK,			/* Attributes */
	LOW_BYTE(CDC_BULK_EP_MPS),
	HIGH_BYTE(CDC_BULK_EP_MPS),	/* Max packet size */
	0x00,				/* Interval */

	/* String descriptor language, only one, so min size 4 bytes.
	 * 0x0409 English(US) language code used
	 */
	USB_STRING_DESC_SIZE,		/* Descriptor size */
	USB_STRING_DESC,		/* Descriptor type */
	0x09,
	0x04,

	/* Manufacturer String Descriptor "Intel" */
	0x0C,
	USB_STRING_DESC,
	'I', 0, 'n', 0, 't', 0, 'e', 0, 'l', 0,

	/* Product String Descriptor "CDC-ACM" */
	0x10,
	USB_STRING_DESC,
	'C', 0, 'D', 0, 'C', 0, '-', 0, 'A', 0, 'C', 0, 'M', 0,

	/* Serial Number String Descriptor "00.01" */
	0x0C,
	USB_STRING_DESC,
	'0', 0, '0', 0, '.', 0, '0', 0, '1', 0,
};
/**
 * @brief Handle CDC ACM class requests not handled by the USB stack.
 *
 * Supports Set/Get_Line_Coding and Set_Control_Line_State; anything
 * else is rejected.
 *
 * @param pSetup Information about the request to execute.
 * @param len Size of the buffer.
 * @param data Buffer containing the request result.
 *
 * @return 0 on success, negative errno code on fail.
 */
int cdc_acm_class_handle_req(struct usb_setup_packet *pSetup,
			     int32_t *len, uint8_t **data)
{
	struct cdc_acm_dev_data_t * const acm = DEV_DATA(cdc_acm_dev);

	if (pSetup->bRequest == CDC_SET_LINE_CODING) {
		/* Host pushes a new line coding (rate, framing, parity) */
		memcpy(&acm->line_coding, *data, sizeof(acm->line_coding));
		SYS_LOG_DBG("\nCDC_SET_LINE_CODING %d %d %d %d",
			    sys_le32_to_cpu(acm->line_coding.dwDTERate),
			    acm->line_coding.bCharFormat,
			    acm->line_coding.bParityType,
			    acm->line_coding.bDataBits);
		return 0;
	}

	if (pSetup->bRequest == CDC_SET_CONTROL_LINE_STATE) {
		/* DTE-side control line bitmap carried in wValue */
		acm->line_state = (uint8_t)sys_le16_to_cpu(pSetup->wValue);
		SYS_LOG_DBG("CDC_SET_CONTROL_LINE_STATE 0x%x",
			    acm->line_state);
		return 0;
	}

	if (pSetup->bRequest == CDC_GET_LINE_CODING) {
		/* Host reads back the current line coding in place */
		*data = (uint8_t *)(&acm->line_coding);
		*len = sizeof(acm->line_coding);
		SYS_LOG_DBG("\nCDC_GET_LINE_CODING %d %d %d %d",
			    sys_le32_to_cpu(acm->line_coding.dwDTERate),
			    acm->line_coding.bCharFormat,
			    acm->line_coding.bParityType,
			    acm->line_coding.bDataBits);
		return 0;
	}

	SYS_LOG_DBG("CDC ACM request 0x%x, value 0x%x",
		    pSetup->bRequest, pSetup->wValue);
	return -EINVAL;
}
/**
 * @brief EP Bulk IN handler, used to send data to the Host
 *
 * Marks the Tx path ready again, releases the poll wait semaphore and,
 * when the Tx interrupt is enabled, invokes the user callback.
 *
 * @param ep Endpoint address.
 * @param ep_status Endpoint status code.
 *
 * @return N/A.
 */
static void cdc_acm_bulk_in(uint8_t ep, enum usb_dc_ep_cb_status_code ep_status)
{
	struct cdc_acm_dev_data_t * const acm = DEV_DATA(cdc_acm_dev);

	acm->tx_ready = 1;
	nano_sem_give(&poll_wait_sem);

	/* Notify the user only if the Tx interrupt is enabled */
	if (acm->cb && acm->tx_irq_ena) {
		acm->cb(cdc_acm_dev);
	}
}
/**
 * @brief EP Bulk OUT handler, used to read the data received from the Host
 *
 * Drains the endpoint FIFO into the internal Rx ring buffer and, when
 * the Rx interrupt is enabled, invokes the user callback. Bytes that do
 * not fit in the ring buffer are dropped with an error log.
 *
 * @param ep Endpoint address.
 * @param ep_status Endpoint status code.
 *
 * @return N/A.
 */
static void cdc_acm_bulk_out(uint8_t ep,
			     enum usb_dc_ep_cb_status_code ep_status)
{
	struct cdc_acm_dev_data_t * const acm = DEV_DATA(cdc_acm_dev);
	uint32_t total, consumed, idx, head;
	uint8_t word[4];

	/* First query how many bytes are pending in the endpoint */
	usb_read(ep, NULL, 0, &total);

	head = acm->rx_buf_head;

	/*
	 * Quark SE USB controller is always storing data
	 * in the FIFOs per 32-bit words, so drain in chunks of 4.
	 */
	for (consumed = 0; consumed < total; consumed += 4) {
		usb_read(ep, word, 4, NULL);
		for (idx = 0; idx < 4; idx++) {
			if (consumed + idx == total) {
				/* Last word was only partially filled */
				break;
			}
			if (((head + 1) % CDC_ACM_BUFFER_SIZE) ==
			    acm->rx_buf_tail) {
				/* Ring buffer full, discard the byte */
				SYS_LOG_ERR("CDC buffer full!");
			} else {
				acm->rx_buf[head] = word[idx];
				head = (head + 1) % CDC_ACM_BUFFER_SIZE;
			}
		}
	}

	acm->rx_buf_head = head;
	acm->rx_ready = 1;

	/* Notify the user only if the Rx interrupt is enabled */
	if (acm->cb && acm->rx_irq_ena) {
		acm->cb(cdc_acm_dev);
	}
}
/**
 * @brief EP Interrupt IN handler
 *
 * Records that the pending serial-state notification has been sent.
 *
 * @param ep Endpoint address.
 * @param ep_status Endpoint status code.
 *
 * @return N/A.
 */
static void cdc_acm_int_in(uint8_t ep, enum usb_dc_ep_cb_status_code ep_status)
{
	struct cdc_acm_dev_data_t * const acm = DEV_DATA(cdc_acm_dev);

	acm->notification_sent = 1;
	SYS_LOG_DBG("CDC_IntIN EP[%x]\r", ep);
}
/**
 * @brief Callback used to track the USB connection status
 *
 * Stores the reported status in the device data (so other entry points,
 * e.g. cdc_acm_fifo_fill, can check whether the device is configured)
 * and logs the transition for debugging.
 *
 * @param status USB device status code.
 *
 * @return N/A.
 */
static void cdc_acm_dev_status_cb(enum usb_dc_status_code status)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(cdc_acm_dev);

	/* Store the new status */
	dev_data->usb_status = status;

	/* Check the USB status and do needed action if required */
	switch (status) {
	case USB_DC_ERROR:
		SYS_LOG_DBG("USB device error");
		break;
	case USB_DC_RESET:
		SYS_LOG_DBG("USB device reset detected");
		break;
	case USB_DC_CONNECTED:
		SYS_LOG_DBG("USB device connected");
		break;
	case USB_DC_CONFIGURED:
		SYS_LOG_DBG("USB device configured");
		break;
	case USB_DC_DISCONNECTED:
		SYS_LOG_DBG("USB device disconnected");
		break;
	case USB_DC_SUSPEND:
		/* Fixed log message typo: "supended" -> "suspended" */
		SYS_LOG_DBG("USB device suspended");
		break;
	case USB_DC_RESUME:
		SYS_LOG_DBG("USB device resumed");
		break;
	case USB_DC_UNKNOWN:
	default:
		SYS_LOG_DBG("USB unknown state");
		break;
	}
}
/* Describe EndPoints configuration: one interrupt IN (notifications),
 * one bulk OUT (host -> device) and one bulk IN (device -> host).
 */
static struct usb_ep_cfg_data cdc_acm_ep_data[] = {
	{
		.ep_cb = cdc_acm_int_in,
		.ep_addr = CDC_ENDP_INT
	},
	{
		.ep_cb = cdc_acm_bulk_out,
		.ep_addr = CDC_ENDP_OUT
	},
	{
		.ep_cb = cdc_acm_bulk_in,
		.ep_addr = CDC_ENDP_IN
	}
};
/* Configuration of the CDC-ACM Device sent to the USB Driver */
static struct usb_cfg_data cdc_acm_config = {
	.usb_device_description = cdc_acm_usb_description,
	.cb_usb_status = cdc_acm_dev_status_cb,
	.interface = {
		.class_handler = cdc_acm_class_handle_req,
		.custom_handler = NULL,
		/* Filled in at init time with the per-device buffer */
		.payload_data = NULL,
	},
	.num_endpoints = CDC1_NUM_EP + CDC2_NUM_EP,
	.endpoint = cdc_acm_ep_data
};
/**
 * @brief Set the baud rate
 *
 * Stores the given baud rate into the CDC line coding structure,
 * converted to the little-endian wire order.
 *
 * @param dev CDC ACM device struct.
 * @param baudrate Baud rate.
 *
 * @return N/A.
 */
static void cdc_acm_baudrate_set(struct device *dev, uint32_t baudrate)
{
	struct cdc_acm_dev_data_t * const acm = DEV_DATA(dev);
	uint32_t rate_le = sys_cpu_to_le32(baudrate);

	acm->line_coding.dwDTERate = rate_le;
}
/**
 * @brief Initialize UART channel
 *
 * This routine is called to reset the chip in a quiescent state.
 * It is assumed that this function is called only once per UART.
 * Registers the CDC ACM configuration with the USB stack, enables the
 * controller and installs the UART driver API on the device.
 *
 * @param dev CDC ACM device struct.
 *
 * @return 0 on success, negative errno code on USB setup failure.
 */
static int cdc_acm_init(struct device *dev)
{
	struct cdc_acm_dev_data_t * const acm = DEV_DATA(dev);
	int err;

	/* Hand the per-device buffer to the class-request machinery and
	 * remember the singleton device for the status/EP callbacks.
	 */
	cdc_acm_config.interface.payload_data = acm->interface_data;
	cdc_acm_dev = dev;

	/* Initialize the USB driver with the right configuration */
	err = usb_set_config(&cdc_acm_config);
	if (err < 0) {
		SYS_LOG_ERR("Failed to config USB");
		return err;
	}

	/* Enable USB driver */
	err = usb_enable(&cdc_acm_config);
	if (err < 0) {
		SYS_LOG_ERR("Failed to enable USB");
		return err;
	}

	dev->driver_api = &cdc_acm_driver_api;
	nano_sem_init(&poll_wait_sem);

	return 0;
}
/**
 * @brief Fill FIFO with data
 *
 * Queues @a len bytes on the bulk IN endpoint.  A no-op returning 0 when
 * the device has not reached the CONFIGURED state yet.
 *
 * @param dev CDC ACM device struct.
 * @param tx_data Data to transmit.
 * @param len Number of bytes to send.
 *
 * @return Number of bytes sent.
 */
static int cdc_acm_fifo_fill(struct device *dev,
			     const uint8_t *tx_data, int len)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
	uint32_t wrote = 0;

	if (dev_data->usb_status != USB_DC_CONFIGURED) {
		return 0;
	}

	/* Clear the ready flag; the IN-transfer completion re-raises it. */
	dev_data->tx_ready = 0;
	usb_write(CDC_ENDP_IN, tx_data, len, &wrote);

	return wrote;
}
/**
 * @brief Read data from FIFO
 *
 * Drains up to @a size bytes from the circular RX buffer that the bulk
 * OUT endpoint callback fills.
 *
 * @param dev CDC ACM device struct.
 * @param rx_data Pointer to data container.
 * @param size Container size.
 *
 * @return Number of bytes read.
 */
static int cdc_acm_fifo_read(struct device *dev, uint8_t *rx_data,
			     const int size)
{
	uint32_t avail_data, bytes_read, i;
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
	/* Ring-buffer occupancy; adding CDC_ACM_BUFFER_SIZE keeps the
	 * dividend non-negative when head has wrapped past tail.
	 */
	avail_data = (CDC_ACM_BUFFER_SIZE + dev_data->rx_buf_head -
		      dev_data->rx_buf_tail) % CDC_ACM_BUFFER_SIZE;
	if (avail_data > size)
		bytes_read = size;
	else
		bytes_read = avail_data;
	for (i = 0; i < bytes_read; i++)
		rx_data[i] = dev_data->rx_buf[(dev_data->rx_buf_tail + i) %
					      CDC_ACM_BUFFER_SIZE];
	/* Advance tail only after the copy so a concurrent producer
	 * cannot overwrite unread bytes we still reference.
	 */
	dev_data->rx_buf_tail = (dev_data->rx_buf_tail + bytes_read) %
				CDC_ACM_BUFFER_SIZE;
	if (dev_data->rx_buf_tail == dev_data->rx_buf_head) {
		/* Buffer empty */
		dev_data->rx_ready = 0;
	}
	return bytes_read;
}
/**
* @brief Enable TX interrupt
*
* @param dev CDC ACM device struct.
*
* @return N/A.
*/
static void cdc_acm_irq_tx_enable(struct device *dev)
{
struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
dev_data->tx_irq_ena = 1;
}
/**
* @brief Disable TX interrupt
*
* @param dev CDC ACM device struct.
*
* @return N/A.
*/
static void cdc_acm_irq_tx_disable(struct device *dev)
{
struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
dev_data->tx_irq_ena = 0;
}
/**
 * @brief Check if Tx IRQ has been raised
 *
 * Note the read is destructive: a pending flag is cleared as it is
 * reported (read-and-clear semantics).
 *
 * @param dev CDC ACM device struct.
 *
 * @return 1 if a Tx IRQ is pending, 0 otherwise.
 */
static int cdc_acm_irq_tx_ready(struct device *dev)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);

	if (!dev_data->tx_ready) {
		return 0;
	}

	dev_data->tx_ready = 0;
	return 1;
}
/**
* @brief Enable RX interrupt
*
* @param dev CDC ACM device struct.
*
* @return N/A
*/
static void cdc_acm_irq_rx_enable(struct device *dev)
{
struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
dev_data->rx_irq_ena = 1;
}
/**
* @brief Disable RX interrupt
*
* @param dev CDC ACM device struct.
*
* @return N/A.
*/
static void cdc_acm_irq_rx_disable(struct device *dev)
{
struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
dev_data->rx_irq_ena = 0;
}
/**
 * @brief Check if Rx IRQ has been raised
 *
 * Note the read is destructive: a pending flag is cleared as it is
 * reported (read-and-clear semantics).
 *
 * @param dev CDC ACM device struct.
 *
 * @return 1 if an IRQ is ready, 0 otherwise.
 */
static int cdc_acm_irq_rx_ready(struct device *dev)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);

	if (!dev_data->rx_ready) {
		return 0;
	}

	dev_data->rx_ready = 0;
	return 1;
}
/**
 * @brief Check if Tx or Rx IRQ is pending
 *
 * Non-destructive check: unlike the *_ready accessors, the flags are
 * left untouched.
 *
 * @param dev CDC ACM device struct.
 *
 * @return 1 if a Tx or Rx IRQ is pending, 0 otherwise.
 */
static int cdc_acm_irq_is_pending(struct device *dev)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);

	return (dev_data->tx_ready || dev_data->rx_ready) ? 1 : 0;
}
/**
 * @brief Update IRQ status
 *
 * Nothing to latch in this driver: the tx_ready/rx_ready flags are
 * maintained directly by the endpoint callbacks, so this is a no-op
 * kept only to satisfy the UART interrupt-driven API.
 *
 * @param dev CDC ACM device struct.
 *
 * @return Always 1
 */
static int cdc_acm_irq_update(struct device *dev)
{
	return 1;
}
/**
 * @brief Set the callback function pointer for IRQ.
 *
 * @param dev CDC ACM device struct.
 * @param cb Callback function pointer.
 *
 * @return N/A
 */
static void cdc_acm_irq_callback_set(struct device *dev,
				     uart_irq_callback_t cb)
{
	DEV_DATA(dev)->cb = cb;
}
#ifdef CONFIG_UART_LINE_CTRL
/**
 * @brief Send serial line state notification to the Host
 *
 * This routine sends asynchronous notification of UART status
 * on the interrupt endpoint, then busy-waits until the transfer
 * completion callback confirms delivery or a timeout elapses.
 *
 * @param dev CDC ACM device struct.
 * @param serial_state Serial state bitmap for the SERIAL_STATE notification.
 *
 * @return 0 on success, -EIO if the notification was not sent in time.
 */
static int cdc_acm_send_notification(struct device *dev, uint16_t serial_state)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
	struct cdc_acm_notification notification;
	uint32_t cnt = 0;
	/* Device-to-host class notification, SERIAL_STATE (0x20),
	 * per the USB CDC PSTN specification.
	 */
	notification.bmRequestType = 0xA1;
	notification.bNotificationType = 0x20;
	notification.wValue = 0;
	notification.wIndex = 0;
	notification.wLength = sys_cpu_to_le16(sizeof(serial_state));
	notification.data = sys_cpu_to_le16(serial_state);
	dev_data->notification_sent = 0;
	/* Was garbled to "¬ification" by an HTML-entity mangling
	 * (&not; + "ification"); restored to take the struct's address.
	 */
	usb_write(CDC_ENDP_INT, (const uint8_t *)&notification,
		  sizeof(notification), NULL);
	/* Wait for notification to be sent */
	while (!((volatile uint8_t)dev_data->notification_sent)) {
		sys_thread_busy_wait(1);
		if (++cnt > CDC_CONTROL_SERIAL_STATE_TIMEOUT_US) {
			SYS_LOG_DBG("CDC ACM notification timeout!");
			return -EIO;
		}
	}
	return 0;
}
/**
 * @brief Manipulate line control for UART.
 *
 * @param dev CDC ACM device struct
 * @param ctrl The line control to be manipulated
 * @param val Value to set the line control
 *
 * @return 0 if successful, -ENODEV for unsupported controls.
 */
static int cdc_acm_line_ctrl_set(struct device *dev,
				 uint32_t ctrl, uint32_t val)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);

	switch (ctrl) {
	case LINE_CTRL_BAUD_RATE:
		cdc_acm_baudrate_set(dev, val);
		return 0;
	case LINE_CTRL_DCD:
		dev_data->serial_state &= ~CDC_CONTROL_SERIAL_STATE_DCD;
		if (val)
			dev_data->serial_state |= CDC_CONTROL_SERIAL_STATE_DCD;
		/* Send the full current serial state, not the DCD bit mask:
		 * the previous code passed CDC_CONTROL_SERIAL_STATE_DCD
		 * itself, reporting DCD asserted even when val == 0 and
		 * dropping the DSR bit.  Now consistent with the DSR branch.
		 */
		cdc_acm_send_notification(dev, dev_data->serial_state);
		return 0;
	case LINE_CTRL_DSR:
		dev_data->serial_state &= ~CDC_CONTROL_SERIAL_STATE_DSR;
		if (val)
			dev_data->serial_state |= CDC_CONTROL_SERIAL_STATE_DSR;
		cdc_acm_send_notification(dev, dev_data->serial_state);
		return 0;
	default:
		/* Unreachable trailing "return -ENOTSUP" removed: every
		 * switch path, including this default, already returns.
		 */
		return -ENODEV;
	}
}
/**
 * @brief Read a line control value from the UART.
 *
 * (The previous doc block was copy-pasted from the setter and described
 * "set" semantics; this function only reads state.)
 *
 * @param dev CDC ACM device struct
 * @param ctrl The line control to query
 * @param val Output: current value of the line control
 *
 * @return 0 if successful, -ENOTSUP for unknown controls.
 */
static int cdc_acm_line_ctrl_get(struct device *dev,
				 uint32_t ctrl, uint32_t *val)
{
	struct cdc_acm_dev_data_t * const dev_data = DEV_DATA(dev);
	switch (ctrl) {
	case LINE_CTRL_BAUD_RATE:
		*val = sys_le32_to_cpu(dev_data->line_coding.dwDTERate);
		return 0;
	case LINE_CTRL_RTS:
		/* RTS/DTR reflect the host's SET_CONTROL_LINE_STATE request */
		*val =
		    (dev_data->line_state & CDC_CONTROL_LINE_STATE_RTS) ? 1 : 0;
		return 0;
	case LINE_CTRL_DTR:
		*val =
		    (dev_data->line_state & CDC_CONTROL_LINE_STATE_DTR) ? 1 : 0;
		return 0;
	}
	return -ENOTSUP;
}
#endif /* CONFIG_UART_LINE_CTRL */
/*
 * @brief Poll the device for input.
 *
 * @return -ENOTSUP Since underlying USB device controller always uses
 * interrupts, polled mode UART APIs are not implemented for the UART interface
 * exported by CDC ACM driver. Apps should use fifo_read API instead.
 */
static int cdc_acm_poll_in(struct device *dev, unsigned char *c)
{
	/* dev and c deliberately unused: polled RX is unsupported */
	return -ENOTSUP;
}
/*
 * @brief Output a character in polled mode.
 *
 * The UART poll method for USB UART is simulated by waiting till
 * we get the next BULK In upcall from the USB device controller or 100 ms.
 *
 * @return the same character which is sent
 */
static unsigned char cdc_acm_poll_out(struct device *dev,
				      unsigned char c)
{
	cdc_acm_fifo_fill(dev, &c, 1);
	/* Block until the IN transfer completes (sem given in the bulk-in
	 * callback) or the 100 ms timeout expires.
	 */
	nano_sem_take(&poll_wait_sem, MSEC(100));
	return c;
}
/* UART API vtable exported by the CDC ACM driver: polled stubs,
 * interrupt-driven FIFO access and (optionally) line control.
 */
static struct uart_driver_api cdc_acm_driver_api = {
	.poll_in = cdc_acm_poll_in,
	.poll_out = cdc_acm_poll_out,
	.fifo_fill = cdc_acm_fifo_fill,
	.fifo_read = cdc_acm_fifo_read,
	.irq_tx_enable = cdc_acm_irq_tx_enable,
	.irq_tx_disable = cdc_acm_irq_tx_disable,
	.irq_tx_ready = cdc_acm_irq_tx_ready,
	.irq_rx_enable = cdc_acm_irq_rx_enable,
	.irq_rx_disable = cdc_acm_irq_rx_disable,
	.irq_rx_ready = cdc_acm_irq_rx_ready,
	.irq_is_pending = cdc_acm_irq_is_pending,
	.irq_update = cdc_acm_irq_update,
	.irq_callback_set = cdc_acm_irq_callback_set,
#ifdef CONFIG_UART_LINE_CTRL
	.line_ctrl_set = cdc_acm_line_ctrl_set,
	.line_ctrl_get = cdc_acm_line_ctrl_get,
#endif /* CONFIG_UART_LINE_CTRL */
};
/* Per-instance run-time state; line coding starts at the default rate */
static struct cdc_acm_dev_data_t cdc_acm_dev_data = {
	.usb_status = USB_DC_UNKNOWN,
	.line_coding = CDC_ACM_DEFAUL_BAUDRATE,
};
/* Register the device at APPLICATION init level */
DEVICE_INIT(cdc_acm, CONFIG_CDC_ACM_PORT_NAME, &cdc_acm_init,
	    &cdc_acm_dev_data, NULL,
	    APPLICATION, CONFIG_KERNEL_INIT_PRIORITY_DEVICE);
|
from output.models.nist_data.atomic.short.schema_instance.nistschema_sv_iv_atomic_short_min_inclusive_2_xsd.nistschema_sv_iv_atomic_short_min_inclusive_2 import NistschemaSvIvAtomicShortMinInclusive2

# Re-export the generated model class as this package's public API.
__all__ = [
    "NistschemaSvIvAtomicShortMinInclusive2",
]
|
/**
 * First we will load all of this project's JavaScript dependencies which
 * includes Vue and other libraries. It is a great starting point when
 * building robust, powerful web applications using Vue and Laravel.
 */

require('./bootstrap');

// Expose Vue globally so globally-registered components and legacy
// scripts can reach it.
window.Vue = require('vue');

// NOTE(review): VueRouter is imported but never registered with
// Vue.use(VueRouter) and no router is passed to the Vue instance below —
// confirm whether routing is actually wired up elsewhere.
import VueRouter from 'vue-router'

/**
 * The following block of code may be used to automatically register your
 * Vue components. It will recursively scan this directory for the Vue
 * components and automatically register them with their "basename".
 *
 * Eg. ./components/ExampleComponent.vue -> <example-component></example-component>
 */

// const files = require.context('./', true, /\.vue$/i)
// files.keys().map(key => Vue.component(key.split('/').pop().split('.')[0], files(key).default))

Vue.component('example-component', require('./components/ExampleComponent.vue').default);

/**
 * Next, we will create a fresh Vue application instance and attach it to
 * the page. Then, you may begin adding components to this application
 * or customize the JavaScript scaffolding to fit your unique needs.
 */

const app = new Vue({
    el: '#app',
});
|
const NewsletterItem = require('../../models/NewsletterItem');
const tokenService = require('../../tokenService');
module.exports = {
registerRoutes(app) {
app.get('/api/newsletters', (req, res) => {
tokenService.isAuthenticated(req, res, () =>
NewsletterItem.find(
{},
(err, docs) => {
if (err) {
return res.send(500);
}
return res.send(docs);
}
)
);
});
app.get('/api/newsletters/:id/links', (req, res) => {
NewsletterItem.findOne(
{ _id: req.params.id },
{ _id: 0, links: 1 },
(err, docs) => {
if (err) res.sendStatus(500);
return res.send(docs.links);
}
);
});
app.post('/api/newsletters/:id/links', (req, res) => {
tokenService.isAuthenticated(req, res, () => {
NewsletterItem.findOne(
{ _id: req.params.id },
(err, doc) => { // eslint-disable-line consistent-return
if (err) return res.sendStatus(500);
let link = doc.links.find((docLink) => docLink.url === req.body.url);
if (!link) {
link = Object.assign({}, {
url: req.body.url,
description: req.body.description,
voteCount: 0,
voters: [],
});
doc.links.push(link);
}
// Get the link including it's _id
link = doc.links.find((docLink) => docLink.url === req.body.url);
doc.save((saveError) => {
if (saveError) return res.status(500).send(err);
return res.send(link);
});
});
});
});
},
};
|
"""
FI API
Allow clients to fetch Analytics through APIs. # noqa: E501
The version of the OpenAPI document: 1
Contact: analytics.api.support@factset.com
Generated by: https://openapi-generator.tech
"""
from setuptools import setup, find_packages # noqa: H301
import os
def read(filename):
    """Return the text of *filename*, resolved relative to this script.

    Uses a context manager so the file handle is closed deterministically
    (the original ``open(...).read()`` leaked the handle until garbage
    collection) and pins the encoding to UTF-8 so the long description is
    read identically on every platform.
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    with open(path, encoding="utf-8") as handle:
        return handle.read()
# Distribution name and version of the generated SDK package.
NAME = "fds.sdk.FixedIncomeCalculation"
VERSION = "0.8.2"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools

# Runtime dependencies; versions mirror what the OpenAPI generator emits.
REQUIRES = [
    "urllib3 >= 1.25.3",
    "python-dateutil",
    "fds.sdk.utils >= 1.0.0",
]

setup(
    name=NAME,
    version=VERSION,
    description="Fixed Income Calculation client library for Python",
    author="FactSet Research Systems",
    url="https://github.com/FactSet/enterprise-sdk/tree/main/code/python/FixedIncomeCalculation/v1",
    keywords=["FactSet", "API", "SDK"],
    python_requires=">=3.6",
    install_requires=REQUIRES,
    packages=find_packages(exclude=["test", "tests"]),
    include_package_data=True,
    license="Apache-2.0",
    long_description_content_type="text/markdown",
    # README.md is shipped as the PyPI long description.
    long_description=read("README.md")
)
|
import pytest
from meiga import Success
from petisco import controller_handler, INFO, ERROR
from petisco.controller.tokens.jwt_config import JwtConfig
from tests.unit.mocks.fake_logger import FakeLogger
from tests.unit.mocks.log_message_mother import LogMessageMother
@pytest.fixture
def given_any_token_type():
    """An arbitrary token type accepted by the controller under test."""
    return "TOKEN"


@pytest.fixture
def given_other_token_type():
    """A token type that does NOT match the decoded token (failure cases)."""
    return "REQUIRED_TOKEN"


@pytest.fixture
def given_any_token_type_with_user():
    """Token type used for tokens that carry a user id."""
    return "USER_TOKEN"


@pytest.fixture
def given_any_client_id():
    return "client_id"


@pytest.fixture
def given_any_user_id():
    return "user_id"


@pytest.fixture
def given_any_decoded_token_info(given_any_token_type, given_any_client_id):
    """Decoded JWT payload without a user (user_id is None)."""
    return {
        "user_id": None,
        "client_id": given_any_client_id,
        "token_type": given_any_token_type,
    }


@pytest.fixture
def given_any_decoded_token_info_with_user(
    given_any_token_type_with_user, given_any_client_id, given_any_user_id
):
    """Decoded JWT payload that carries a user id."""
    return {
        "user_id": given_any_user_id,
        "client_id": given_any_client_id,
        "token_type": given_any_token_type_with_user,
    }
@pytest.mark.unit
def test_should_execute_successfully_a_empty_controller_with_jwt_requirement_without_user(
    given_any_token_type, given_any_decoded_token_info
):
    """Happy path: token type matches JwtConfig and no user is required."""
    logger = FakeLogger()
    jwt_config = JwtConfig(token_type=given_any_token_type)

    @controller_handler(logger=logger, jwt_config=jwt_config)
    def my_controller(token_info):
        return Success("Hello Petisco")

    http_response = my_controller(token_info=given_any_decoded_token_info)
    assert http_response == ({"message": "OK"}, 200)
    # The handler logs a Start entry followed by the success Result entry.
    first_logging_message = logger.get_logging_messages()[0]
    second_logging_message = logger.get_logging_messages()[1]
    assert first_logging_message == (
        INFO,
        LogMessageMother.get_controller(
            operation="my_controller", message="Start"
        ).to_json(),
    )
    assert second_logging_message == (
        INFO,
        LogMessageMother.get_controller(
            operation="my_controller",
            message="Result[status: success | value: Hello Petisco]",
        ).to_json(),
    )
@pytest.mark.unit
def test_should_execute_successfully_a_empty_controller_with_jwt_requirement_with_user(
    given_any_token_type_with_user, given_any_decoded_token_info_with_user
):
    """Happy path with require_user=True: token carries a user_id which is
    injected into the controller."""
    logger = FakeLogger()
    jwt_config = JwtConfig(token_type=given_any_token_type_with_user, require_user=True)

    @controller_handler(logger=logger, jwt_config=jwt_config)
    def my_controller(token_info, user_id):
        return Success("Hello Petisco")

    http_response = my_controller(token_info=given_any_decoded_token_info_with_user)
    assert http_response == ({"message": "OK"}, 200)
    first_logging_message = logger.get_logging_messages()[0]
    second_logging_message = logger.get_logging_messages()[1]
    assert first_logging_message == (
        INFO,
        LogMessageMother.get_controller(
            operation="my_controller", message="Start"
        ).to_json(),
    )
    assert second_logging_message == (
        INFO,
        LogMessageMother.get_controller(
            operation="my_controller",
            message="Result[status: success | value: Hello Petisco]",
        ).to_json(),
    )
@pytest.mark.unit
def test_should_returns_an_error_when_a_empty_controller_do_not_get_a_required_jwt_token(
    given_other_token_type, given_any_decoded_token_info
):
    """Token type mismatch (config requires REQUIRED_TOKEN, token is TOKEN):
    the handler must answer 401 and log the failure."""
    logger = FakeLogger()
    jwt_config = JwtConfig(token_type=given_other_token_type)

    @controller_handler(logger=logger, jwt_config=jwt_config)
    def my_controller(token_info):
        return Success("Hello Petisco")

    http_response = my_controller(token_info=given_any_decoded_token_info)
    assert http_response == (
        {
            "error": {
                "message": "Access token is missing or invalid. This entry point expects a valid REQUIRED_TOKEN Token",
                "type": "InvalidTokenHttpError",
            }
        },
        401,
    )
    # Second log entry is at ERROR level with the failure Result.
    first_logging_message = logger.get_logging_messages()[0]
    second_logging_message = logger.get_logging_messages()[1]
    assert first_logging_message == (
        INFO,
        LogMessageMother.get_controller(
            operation="my_controller", message="Start"
        ).to_json(),
    )
    assert second_logging_message == (
        ERROR,
        LogMessageMother.get_controller(
            operation="my_controller",
            message="Result[status: failure | value: InvalidTokenError]",
        ).to_json(),
    )
@pytest.mark.unit
def test_should_returns_an_error_when_a_empty_controller_get_a_required_jwt_token_but_missing_user(
    given_any_token_type, given_any_decoded_token_info
):
    """Token type matches but require_user=True and the token has no
    user_id: the handler must answer 401 and log the failure."""
    logger = FakeLogger()
    jwt_config = JwtConfig(token_type=given_any_token_type, require_user=True)

    @controller_handler(logger=logger, jwt_config=jwt_config)
    def my_controller(token_info):
        return Success("Hello Petisco")

    http_response = my_controller(token_info=given_any_decoded_token_info)
    assert http_response == (
        {
            "error": {
                "message": "Access token is missing or invalid. This entry point expects a valid TOKEN Token",
                "type": "InvalidTokenHttpError",
            }
        },
        401,
    )
    first_logging_message = logger.get_logging_messages()[0]
    second_logging_message = logger.get_logging_messages()[1]
    assert first_logging_message == (
        INFO,
        LogMessageMother.get_controller(
            operation="my_controller", message="Start"
        ).to_json(),
    )
    assert second_logging_message == (
        ERROR,
        LogMessageMother.get_controller(
            operation="my_controller",
            message="Result[status: failure | value: InvalidTokenError]",
        ).to_json(),
    )
|
// Last GeoJSON layer added to the map (reassigned per feature in
// parse_map_data; only the final layer remains referenced).
var geojson;
var defaultStyle = {'weight': '0', fillColor: '#381f5e', fillOpacity: '1'};
L.mapbox.accessToken = 'pk.eyJ1IjoiZHVuZXIiLCJhIjoiaWkwMnJIZyJ9.2zMvIebbUOk9C5R2itT7Dg';
// Small campus map, locked to the Evanston bounds, scroll-zoom disabled.
var map = L.mapbox.map('small-map', 'duner.m3npglde', {
    minZoom: 15,
    maxBounds: [
        [42.07095890994855, -87.65922546386719],
        [42.039094188385945, -87.69158363342285]
    ],
    scrollWheelZoom: false
}).setView([42.05504447993239,-87.6753830909729], 16);

// Add every feature of the JSONP payload as a styled GeoJSON layer and
// center the map on it (the last feature wins when there are several).
function parse_map_data(data){
    $.each(data, function(key, val){
        geojson = new L.GeoJSON(val, {
            style: function(feature) {
                return defaultStyle;
            }
        }).addTo(map);
        var bounds = geojson.getBounds();
        map.setView(bounds.getCenter(), 17);
    });
}

$(document).ready(function() {
    $(".rslides").responsiveSlides();
    // dorm_url / dorm_video are expected to be defined globally by the
    // page template before this script runs.
    $.ajax({
        url: dorm_url,
        async: true,
        dataType: 'jsonp',
        jsonp: false,
        // Response must invoke the fixed callback name "myCallback".
        jsonpCallback:'myCallback',
        success:function(data) {
            parse_map_data(data);
        }
    });
    // Toggle between the embedded video and the photo slideshow.
    $('#video-tab').click(function() {
        $('#embed-container').append('<iframe src="' + dorm_video + '" frameborder="0" webkitAllowFullScreen mozallowfullscreen allowFullScreen></iframe>');
        $('#media-container img').css('display', 'none');
        $('#video-tab').css('background', '#381F5E');
        $('#photo-tab').css('background', '#c3bccf');
        $('#embed-container').css('display','block');
    });
    $('#photo-tab').click(function() {
        $('#embed-container iframe').remove();
        $('#embed-container').hide();
        $('#media-container img').css('display', 'block');
        $('#photo-tab').css('background', '#381F5E');
        $('#video-tab').css('background', '#c3bccf');
    });
});
|
from unittest.mock import patch
import pytest
from constants.jobs import JobLifeCycle
from db.models.build_jobs import BuildJob
from factories.factory_build_jobs import BuildJobFactory
from factories.factory_projects import ProjectFactory
from factories.factorycode_reference import CodeReferenceFactory
from polyaxon_schemas.polyaxonfile.specification import BuildSpecification
from scheduler import dockerizer_scheduler
from tests.utils import BaseTest
@pytest.mark.dockerizer_mark
class TestDockerizerScheduler(BaseTest):
    """Unit tests for dockerizer_scheduler.create_build_job.

    start_dockerizer and check_image are always patched, so no docker
    daemon or k8s scheduler is touched; assertions cover which of the two
    collaborators were invoked and whether a BuildJob row was (re)used.
    """

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.code_reference = CodeReferenceFactory()

    def test_scheduler_create_build_job(self):
        """Test the case when the job needs to be built and started."""
        assert BuildJob.objects.count() == 0
        with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start:
            with patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
                # Image does not exist yet -> a dockerizer must be started.
                mock_start.return_value = True
                mock_check.return_value = False
                _, image_exists, build_status = dockerizer_scheduler.create_build_job(
                    user=self.project.user,
                    project=self.project,
                    config={'image': 'bar:foo'},
                    code_reference=self.code_reference
                )
        assert mock_start.call_count == 1
        assert mock_check.call_count == 1
        assert image_exists is False
        assert build_status is True
        assert BuildJob.objects.count() == 1

    def test_scheduler_create_build_job_of_already_running_job(self):
        """Check the case when the job is already running and
        we just set the requesting service to running."""
        config = {'image': 'busybox:tag'}
        build_job = BuildJobFactory(project=self.project,
                                    user=self.project.user,
                                    code_reference=self.code_reference,
                                    config=BuildSpecification.create_specification(config))
        # A RUNNING job with the same spec must be reused, not restarted.
        build_job.set_status(JobLifeCycle.RUNNING)
        assert BuildJob.objects.count() == 1
        with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start:
            with patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
                mock_check.return_value = False
                build_job, image_exists, build_status = dockerizer_scheduler.create_build_job(
                    user=self.project.user,
                    project=self.project,
                    config=config,
                    code_reference=self.code_reference
                )
        # No new dockerizer started; the existing job is returned.
        assert mock_start.call_count == 0
        assert mock_check.call_count == 1
        assert image_exists is False
        assert build_status is True
        assert BuildJob.objects.count() == 1

    def test_scheduler_create_build_job_of_already_done_job(self):
        """Check the case when the job is already done and
        we need to create a new job."""
        config = {'image': 'busybox:tag'}
        build_job = BuildJobFactory(project=self.project,
                                    user=self.project.user,
                                    code_reference=self.code_reference,
                                    config=BuildSpecification.create_specification(config))
        # A finished (STOPPED) job cannot be reused -> a second row appears.
        build_job.set_status(JobLifeCycle.STOPPED)
        assert BuildJob.objects.count() == 1
        with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start:
            with patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
                mock_start.return_value = True
                mock_check.return_value = False
                build_job, image_exists, build_status = dockerizer_scheduler.create_build_job(
                    user=self.project.user,
                    project=self.project,
                    config=config,
                    code_reference=self.code_reference
                )
        assert mock_start.call_count == 1
        assert mock_check.call_count == 1
        assert image_exists is False
        assert build_status is True
        assert BuildJob.objects.count() == 2

    def test_scheduler_create_build_job_image_already_exists(self):
        """Check the case when the image is already built."""
        config = {'image': 'busybox:tag'}
        BuildJobFactory(project=self.project,
                        user=self.project.user,
                        code_reference=self.code_reference,
                        config=BuildSpecification.create_specification(config))
        assert BuildJob.objects.count() == 1
        with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start:
            with patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
                # Image already present -> nothing to build.
                mock_check.return_value = True
                _, image_exists, build_status = dockerizer_scheduler.create_build_job(
                    user=self.project.user,
                    project=self.project,
                    config=config,
                    code_reference=self.code_reference
                )
        assert mock_start.call_count == 0
        assert mock_check.call_count == 1
        assert image_exists is True
        assert build_status is False
        assert BuildJob.objects.count() == 1
|
/*
 Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved.
 For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
// French (Canada) localization strings for the "Show Blocks" plugin.
CKEDITOR.plugins.setLang( 'showblocks', 'fr-ca', {
	toolbar: 'Afficher les blocs'
} );
|
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
from pants.backend.jvm import argfile
from pants.backend.jvm.subsystems.java import Java
from pants.backend.jvm.subsystems.jvm_platform import JvmPlatform
from pants.backend.jvm.targets.annotation_processor import AnnotationProcessor
from pants.backend.jvm.targets.javac_plugin import JavacPlugin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.engine.fs import DirectoryToMaterialize
from pants.engine.isolated_process import ExecuteProcessRequest
from pants.java.distribution.distribution import DistributionLocator
from pants.util.dirutil import safe_open
from pants.util.process_handler import subprocess
# Well known metadata file to register javac plugins.
_JAVAC_PLUGIN_INFO_FILE = 'META-INF/services/com.sun.source.util.Plugin'
# Well known metadata file to register annotation processors with a java 1.6+ compiler.
_PROCESSOR_INFO_FILE = 'META-INF/services/javax.annotation.processing.Processor'
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
class JavacCompile(JvmCompile):
  """Compile Java code using Javac."""
  _name = 'java'

  @staticmethod
  def _write_javac_plugin_info(resources_dir, javac_plugin_target):
    """Write the ServiceLoader metadata file that registers a javac plugin."""
    javac_plugin_info_file = os.path.join(resources_dir, _JAVAC_PLUGIN_INFO_FILE)
    with safe_open(javac_plugin_info_file, 'w') as f:
      f.write(javac_plugin_target.classname)

  @classmethod
  def get_args_default(cls, bootstrap_option_values):
    return ('-encoding', 'UTF-8')

  @classmethod
  def get_warning_args_default(cls):
    return ('-deprecation', '-Xlint:all', '-Xlint:-serial', '-Xlint:-path')

  @classmethod
  def get_no_warning_args_default(cls):
    return ('-nowarn', '-Xlint:none', )

  @classmethod
  def get_fatal_warnings_enabled_args_default(cls):
    # Bug fix: the original `('-Werror')` is a parenthesized *string*, not a
    # tuple, so code iterating the result saw the characters '-', 'W', 'e', ...
    # instead of the single '-Werror' flag.  All sibling hooks return tuples.
    return ('-Werror',)

  @classmethod
  def get_fatal_warnings_disabled_args_default(cls):
    return ()

  @classmethod
  def register_options(cls, register):
    super(JavacCompile, cls).register_options(register)

  @classmethod
  def subsystem_dependencies(cls):
    return super(JavacCompile, cls).subsystem_dependencies() + (JvmPlatform,)

  @classmethod
  def prepare(cls, options, round_manager):
    super(JavacCompile, cls).prepare(options, round_manager)

  @classmethod
  def product_types(cls):
    return ['runtime_classpath']

  def __init__(self, *args, **kwargs):
    super(JavacCompile, self).__init__(*args, **kwargs)
    # Compilation needs a full JDK, not just a JRE.
    self.set_distribution(jdk=True)

  def select(self, target):
    """Only JVM targets with .java sources are compiled by this task."""
    if not isinstance(target, JvmTarget):
      return False
    return target.has_sources('.java')

  def select_source(self, source_file_path):
    return source_file_path.endswith('.java')

  def javac_classpath(self):
    # Note that if this classpath is empty then Javac will automatically use the javac from
    # the JDK it was invoked with.
    return Java.global_javac_classpath(self.context.products)

  def write_extra_resources(self, compile_context):
    """Override write_extra_resources to produce plugin and annotation processor files."""
    target = compile_context.target
    if isinstance(target, JavacPlugin):
      self._write_javac_plugin_info(compile_context.classes_dir, target)
    elif isinstance(target, AnnotationProcessor) and target.processors:
      processor_info_file = os.path.join(compile_context.classes_dir, _PROCESSOR_INFO_FILE)
      self._write_processor_info(processor_info_file, target.processors)

  def _write_processor_info(self, processor_info_file, processors):
    """Write the ServiceLoader metadata file listing annotation processors."""
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('{}\n'.format(processor.strip()))

  def execute(self):
    # Only run when javac is the selected global compiler; otherwise the
    # zinc-based task handles Java compilation.
    if JvmPlatform.global_instance().get_options().compiler == 'javac':
      return super(JavacCompile, self).execute()

  def compile(self, ctx, args, classpath, upstream_analysis,
              settings, fatal_warnings, zinc_file_manager,
              javac_plugin_map, scalac_plugin_map):
    """Build a javac command line for ctx and run it (locally or hermetically)."""
    try:
      distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=True)
    except DistributionLocator.Error:
      # Fall back to a non-strict match when no exact JVM version is found.
      distribution = JvmPlatform.preferred_jvm_distribution([settings], strict=False)

    javac_cmd = ['{}/bin/javac'.format(distribution.real_home)]

    javac_cmd.extend([
      '-classpath', ':'.join(classpath),
    ])

    if settings.args:
      settings_args = settings.args
      if any('$JAVA_HOME' in a for a in settings.args):
        logger.debug('Substituting "$JAVA_HOME" with "{}" in jvm-platform args.'
                     .format(distribution.home))
        settings_args = (a.replace('$JAVA_HOME', distribution.home) for a in settings.args)
      javac_cmd.extend(settings_args)

    javac_cmd.extend([
      # TODO: support -release
      '-source', str(settings.source_level),
      '-target', str(settings.target_level),
    ])

    if self.execution_strategy == self.HERMETIC:
      javac_cmd.extend([
        # We need to strip the source root from our output files. Outputting to a directory, and
        # capturing that directory, does the job.
        # Unfortunately, javac errors if the directory you pass to -d doesn't exist, and we don't
        # have a convenient way of making a directory in the output tree, so let's just use the
        # working directory as our output dir.
        # This also has the benefit of not needing to strip leading directories from the returned
        # snapshot.
        '-d', '.',
      ])
    else:
      javac_cmd.extend([
        '-d', ctx.classes_dir,
      ])

    javac_cmd.extend(self._javac_plugin_args(javac_plugin_map))

    javac_cmd.extend(args)

    if fatal_warnings:
      javac_cmd.extend(self.get_options().fatal_warnings_enabled_args)
    else:
      javac_cmd.extend(self.get_options().fatal_warnings_disabled_args)

    with argfile.safe_args(ctx.sources, self.get_options()) as batched_sources:
      javac_cmd.extend(batched_sources)

      if self.execution_strategy == self.HERMETIC:
        self._execute_hermetic_compile(javac_cmd, ctx)
      else:
        with self.context.new_workunit(name='javac',
                                       cmd=' '.join(javac_cmd),
                                       labels=[WorkUnitLabel.COMPILER]) as workunit:
          self.context.log.debug('Executing {}'.format(' '.join(javac_cmd)))
          p = subprocess.Popen(javac_cmd, stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
          return_code = p.wait()
          workunit.set_outcome(WorkUnit.FAILURE if return_code else WorkUnit.SUCCESS)
          if return_code:
            raise TaskError('javac exited with return code {rc}'.format(rc=return_code))

  @classmethod
  def _javac_plugin_args(cls, javac_plugin_map):
    """Render -Xplugin: flags for the given {plugin: [args]} mapping."""
    ret = []
    for plugin, args in javac_plugin_map.items():
      for arg in args:
        if ' ' in arg:
          # Note: Args are separated by spaces, and there is no way to escape embedded spaces, as
          # javac's Main does a simple split on these strings.
          raise TaskError('javac plugin args must not contain spaces '
                          '(arg {} for plugin {})'.format(arg, plugin))
      ret.append('-Xplugin:{} {}'.format(plugin, ' '.join(args)))
    return ret

  def _execute_hermetic_compile(self, cmd, ctx):
    # For now, executing a compile remotely only works for targets that
    # do not have any dependencies or inner classes
    input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
    output_files = tuple(
      # Assume no extra .class files to grab. We'll fix up that case soon.
      # Drop the source_root from the file path.
      # Assumes `-d .` has been put in the command.
      os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base)
      for f in input_snapshot.files if f.path.endswith('.java')
    )
    exec_process_request = ExecuteProcessRequest.create_from_snapshot(
      argv=tuple(cmd),
      snapshot=input_snapshot,
      output_files=output_files,
      description='Compiling {} with javac'.format(ctx.target.address.spec),
    )
    exec_result = self.context.execute_process_synchronously(
      exec_process_request,
      'javac',
      (WorkUnitLabel.TASK, WorkUnitLabel.JVM),
    )

    # Dump the output to the .pants.d directory where it's expected by downstream tasks.
    classes_directory = ctx.classes_dir
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(str(classes_directory), exec_result.output_directory_digest),
    ))
|
/**
 * @ngdoc controller
 * @name Umbraco.Editors.DataType.CreateController
 * @function
 *
 * @description
 * The controller for the data type creation dialog
 */
function DataTypeCreateController($scope, $location, navigationService, dataTypeResource, formHelper, appState) {

    $scope.model = {
        folderName: "",
        creatingFolder: false
    };

    // Tree node the dialog was opened from; its id is the parent for new items.
    var node = $scope.currentNode;
    var section = appState.getSectionState("currentSection");

    // Switches the dialog into folder-creation mode.
    $scope.showCreateFolder = function() {
        $scope.model.creatingFolder = true;
    }

    // Creates a data type container (folder) under the current node and
    // syncs the tree to the newly created folder.
    $scope.createContainer = function () {
        if (formHelper.submitForm({ scope: $scope, formCtrl: $scope.createFolderForm })) {
            dataTypeResource.createContainer(node.id, $scope.model.folderName).then(function (folderId) {
                navigationService.hideMenu();
                var currPath = node.path ? node.path : "-1";
                navigationService.syncTree({ tree: "datatypes", path: currPath + "," + folderId, forceReload: true, activate: true });
                formHelper.resetForm({ scope: $scope, formCtrl: $scope.createFolderForm });
            }, function(err) {
                formHelper.resetForm({ scope: $scope, formCtrl: $scope.createFolderForm, hasErrors: true });
                // TODO: Handle errors
            });
        };
    }

    // Navigates to the data type editor in "create" mode for this node.
    $scope.createDataType = function() {
        $location.search('create', null);
        $location.path("/" + section + "/datatypes/edit/" + node.id).search("create", "true");
        navigationService.hideMenu();
    };

    $scope.close = function() {
        const showMenu = true;
        navigationService.hideDialog(showMenu);
    };
}

angular.module('umbraco').controller("Umbraco.Editors.DataType.CreateController", DataTypeCreateController);
|
'''
cd zjx/DL_Traff_Graph_main/airSTTN
nohup python -u run_tstn.py > run_tstn.log 2>&1 &
'''
import sys
import os
import shutil
import math
import numpy as np
import pandas as pd
import scipy.sparse as ss
from sklearn.preprocessing import StandardScaler
from datetime import datetime
import time
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
from torchsummary import summary
import Metrics
from TSTN import *
from Param import *
# Pin every numeric backend (OpenMP, OpenBLAS, MKL, vecLib, numexpr) and
# torch itself to a single CPU thread for reproducible, contention-free runs.
cpu_num = 1
os.environ ['OMP_NUM_THREADS'] = str(cpu_num)
os.environ ['OPENBLAS_NUM_THREADS'] = str(cpu_num)
os.environ ['MKL_NUM_THREADS'] = str(cpu_num)
os.environ ['VECLIB_MAXIMUM_THREADS'] = str(cpu_num)
os.environ ['NUMEXPR_NUM_THREADS'] = str(cpu_num)
torch.set_num_threads(cpu_num)
DEVICE = 'cuda:0'  # device string used by getModel(); training uses `device` set in __main__
BATCHSIZE = 32     # mini-batch size for all DataLoaders
EPOCH = 200        # maximum number of training epochs (early stopping may end sooner)
def getXSYS(data, mode):
    """Slice a (time, nodes) array into supervised (X, Y) sliding windows.

    Args:
        data: 2-D array of shape (timesteps, num_nodes).
        mode: 'TRAIN' uses windows from the first TRAINRATIO share of the
            series; 'TEST' uses windows starting at the train/test boundary.

    Returns:
        XS of shape (samples, 1, num_nodes, TIMESTEP_IN) and
        YS of shape (samples, 1, TIMESTEP_OUT, num_nodes).

    Fixed: the TRAIN and TEST branches previously duplicated the identical
    window-slicing loop body; only the start/stop of the window range differs.
    """
    TRAIN_NUM = int(data.shape[0] * TRAINRATIO)
    XS, YS = [], []
    if mode == 'TRAIN':
        window_starts = range(TRAIN_NUM - TIMESTEP_OUT - TIMESTEP_IN + 1)
    elif mode == 'TEST':
        window_starts = range(TRAIN_NUM - TIMESTEP_IN,
                              data.shape[0] - TIMESTEP_OUT - TIMESTEP_IN + 1)
    else:
        # Unknown mode: keep the original behavior of producing empty arrays.
        window_starts = range(0)
    for i in window_starts:
        XS.append(data[i:i + TIMESTEP_IN, :])
        YS.append(data[i + TIMESTEP_IN:i + TIMESTEP_IN + TIMESTEP_OUT, :])
    XS, YS = np.array(XS), np.array(YS)
    # Add a channel axis, then reorder to (batch, channel, node, time) for X
    # and (batch, channel, time, node) for Y, as the model expects.
    XS, YS = XS[:, :, :, np.newaxis], YS[:, :, :, np.newaxis]
    XS = XS.transpose(0, 3, 2, 1)
    YS = YS.transpose(0, 3, 1, 2)
    print(XS.shape)
    print(YS.shape)
    return XS, YS
def getModel(name):
    """Construct the TSTN model on DEVICE.

    The adjacency matrix is read from ADJPATH (first 10 rows, no header row
    -- TODO confirm the 10-node sub-graph is intentional for this dataset).
    `name` is unused here; it is kept for interface parity with the callers.
    """
    adjacency = torch.Tensor(pd.read_csv(ADJPATH, nrows=10, header=None).values)

    ### Training hyper-parameters
    in_channels = 1         # channels of input
    embed_size = 64         # dimension of hidden embedding features
    time_num = 288
    num_layers = 2          # number of ST blocks
    T_dim = 12              # input length, should be the same as prepareData.py
    output_T_dim = 12       # expected output length
    heads = 4               # number of heads in MultiHeadAttention
    cheb_K = 2              # order for Chebyshev polynomials (Eq 2)
    forward_expansion = 4   # FFN width: embed_size -> embed_size * forward_expansion -> embed_size
    dropout = 0

    ### Construct the network
    return TSTN(
        adjacency,
        in_channels,
        embed_size,
        time_num,
        num_layers,
        T_dim,
        output_T_dim,
        heads,
        cheb_K,
        forward_expansion,
        dropout).to(DEVICE)
def evaluateModel(model, criterion, data_iter):
    """Return the sample-weighted mean loss of `model` over `data_iter`."""
    model.eval()
    total_loss, total_count = 0.0, 0
    with torch.no_grad():
        for inputs, targets in data_iter:
            batch = targets.shape[0]
            total_loss += criterion(model(inputs), targets).item() * batch
            total_count += batch
    return total_loss / total_count
def predictModel(model, data_iter):
    """Run `model` over every batch in `data_iter`; return predictions
    stacked along the batch axis as a single numpy array."""
    model.eval()
    batches = []
    with torch.no_grad():
        for inputs, _ in data_iter:
            batches.append(model(inputs).cpu().numpy())
    return np.vstack(batches)
def trainModel(name, mode, XS, YS):
    """Train the model with early stopping on a held-out validation split.

    Saves the best weights to PATH/<name>.pt, appends per-epoch losses to
    PATH/<name>_log.txt, and final (inverse-transformed) scores to
    PATH/<name>_prediction_scores.txt.

    Relies on module-level globals: device, PATH, LOSS, OPTIMIZER, LEARN,
    PATIENCE, TRAINVALSPLIT, BATCHSIZE, EPOCH, scaler (from Param.py / main).
    """
    print('Model Training Started ...', time.ctime())
    print('TIMESTEP_IN, TIMESTEP_OUT', TIMESTEP_IN, TIMESTEP_OUT)
    model = getModel(name)
    # summary(model, (CHANNEL,N_NODE,TIMESTEP_IN), device=device)
    XS_torch, YS_torch = torch.Tensor(XS).to(device), torch.Tensor(YS).to(device)
    trainval_data = torch.utils.data.TensorDataset(XS_torch, YS_torch)
    trainval_size = len(trainval_data)
    # Chronological split: first (1 - TRAINVALSPLIT) share trains, rest validates.
    train_size = int(trainval_size * (1-TRAINVALSPLIT))
    print('XS_torch.shape: ', XS_torch.shape)
    print('YS_torch.shape: ', YS_torch.shape)
    train_data = torch.utils.data.Subset(trainval_data, list(range(0, train_size)))
    val_data = torch.utils.data.Subset(trainval_data, list(range(train_size, trainval_size)))
    train_iter = torch.utils.data.DataLoader(train_data, BATCHSIZE, shuffle=True)
    val_iter = torch.utils.data.DataLoader(val_data, BATCHSIZE, shuffle=True)
    min_val_loss = np.inf
    wait = 0  # epochs since the last validation improvement (early-stopping counter)
    print('LOSS is :',LOSS)
    # NOTE(review): if LOSS/OPTIMIZER match none of the cases below, criterion/
    # optimizer are never bound and the loop raises NameError — confirm Param.py
    # only ever supplies the values handled here.
    if LOSS == "MaskMAE":
        # NOTE(review): Utils is not imported in this file — presumably brought
        # in via `from TSTN import *`; verify.
        criterion = Utils.masked_mae
    if LOSS == 'MSE':
        criterion = nn.MSELoss()
    if LOSS == 'MAE':
        criterion = nn.L1Loss()
    if OPTIMIZER == 'RMSprop':
        optimizer = torch.optim.RMSprop(model.parameters(), lr=LEARN)
    if OPTIMIZER == 'Adam':
        optimizer = torch.optim.Adam(model.parameters(), lr=LEARN)
    for epoch in range(EPOCH):
        starttime = datetime.now()
        loss_sum, n = 0.0, 0
        model.train()
        for x, y in train_iter:
            optimizer.zero_grad()
            # print(x.shape)
            y_pred = model(x)
            loss = criterion(y_pred, y)
            loss.backward()
            optimizer.step()
            loss_sum += loss.item() * y.shape[0]
            n += y.shape[0]
        train_loss = loss_sum / n
        val_loss = evaluateModel(model, criterion, val_iter)
        if val_loss < min_val_loss:
            # New best model: checkpoint it and reset the patience counter.
            wait = 0
            min_val_loss = val_loss
            torch.save(model.state_dict(), PATH + '/' + name + '.pt')
        else:
            wait += 1
            if wait == PATIENCE:
                print('Early stopping at epoch: %d' % epoch)
                break
        endtime = datetime.now()
        epoch_time = (endtime - starttime).seconds
        print("epoch", epoch, "time used:", epoch_time," seconds ", "train loss:", train_loss, "validation loss:", val_loss)
        with open(PATH + '/' + name + '_log.txt', 'a') as f:
            f.write("%s, %d, %s, %d, %s, %s, %.10f, %s, %.10f\n" % ("epoch", epoch, "time used", epoch_time, "seconds", "train loss", train_loss, "validation loss:", val_loss))
    # Final scoring: loss on the training iterator plus metrics on the full
    # train+val set, after undoing the StandardScaler normalization.
    torch_score = evaluateModel(model, criterion, train_iter)
    YS_pred = predictModel(model, torch.utils.data.DataLoader(trainval_data, BATCHSIZE, shuffle=False))
    print('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    YS, YS_pred = scaler.inverse_transform(np.squeeze(YS)), scaler.inverse_transform(np.squeeze(YS_pred))
    print('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS, YS_pred)
    with open(PATH + '/' + name + '_prediction_scores.txt', 'a') as f:
        f.write("%s, %s, Torch MSE, %.10e, %.10f\n" % (name, mode, torch_score, torch_score))
        f.write("%s, %s, MSE, RMSE, MAE, MAPE, %.10f, %.10f, %.10f, %.10f\n" % (name, mode, MSE, RMSE, MAE, MAPE))
    print('*' * 40)
    print("%s, %s, Torch MSE, %.10e, %.10f" % (name, mode, torch_score, torch_score))
    print("%s, %s, MSE, RMSE, MAE, MAPE, %.10f, %.10f, %.10f, %.10f" % (name, mode, MSE, RMSE, MAE, MAPE))
    print('Model Training Ended ...', time.ctime())
def testModel(name, mode, XS, YS):
    """Evaluate the best saved checkpoint on the test windows.

    Loads PATH/<name>.pt, writes predictions/ground truth as .npy files and
    appends overall and per-horizon-step metrics to
    PATH/<name>_prediction_scores.txt.  Uses module-level globals: device,
    PATH, LOSS, BATCHSIZE, MODELNAME, TIMESTEP_OUT, scaler.
    """
    # NOTE(review): an unknown LOSS value leaves `criterion` unbound (NameError
    # below) — confirm Param.py only supplies the handled values.
    if LOSS == "MaskMAE":
        criterion = Utils.masked_mae
    if LOSS == 'MSE':
        criterion = nn.MSELoss()
    if LOSS == 'MAE':
        criterion = nn.L1Loss()
    print('Model Testing Started ...', time.ctime())
    print('TIMESTEP_IN, TIMESTEP_OUT', TIMESTEP_IN, TIMESTEP_OUT)
    XS_torch, YS_torch = torch.Tensor(XS).to(device), torch.Tensor(YS).to(device)
    test_data = torch.utils.data.TensorDataset(XS_torch, YS_torch)
    test_iter = torch.utils.data.DataLoader(test_data, BATCHSIZE, shuffle=False)
    model = getModel(name)
    model.load_state_dict(torch.load(PATH+ '/' + name + '.pt'))
    torch_score = evaluateModel(model, criterion, test_iter)
    YS_pred = predictModel(model, test_iter)
    print('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    # Undo StandardScaler normalization before computing reportable metrics.
    YS, YS_pred = scaler.inverse_transform(np.squeeze(YS)), scaler.inverse_transform(np.squeeze(YS_pred))
    print('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    np.save(PATH + '/' + MODELNAME + '_prediction.npy', YS_pred)
    np.save(PATH + '/' + MODELNAME + '_groundtruth.npy', YS)
    MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS, YS_pred)
    print('*' * 40)
    print("%s, %s, Torch MSE, %.10e, %.10f" % (name, mode, torch_score, torch_score))
    f = open(PATH + '/' + name + '_prediction_scores.txt', 'a')
    f.write("%s, %s, Torch MSE, %.10e, %.10f\n" % (name, mode, torch_score, torch_score))
    print("all pred steps, %s, %s, MSE, RMSE, MAE, MAPE, %.10f, %.10f, %.10f, %.10f" % (name, mode, MSE, RMSE, MAE, MAPE))
    f.write("all pred steps, %s, %s, MSE, RMSE, MAE, MAPE, %.10f, %.10f, %.10f, %.10f\n" % (name, mode, MSE, RMSE, MAE, MAPE))
    # Per-step metrics over the prediction horizon (step 1 .. TIMESTEP_OUT).
    for i in range(TIMESTEP_OUT):
        MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS[:, i, :], YS_pred[:, i, :])
        print("%d step, %s, %s, MSE, RMSE, MAE, MAPE, %.10f, %.10f, %.10f, %.10f" % (i+1, name, mode, MSE, RMSE, MAE, MAPE))
        f.write("%d step, %s, %s, MSE, RMSE, MAE, MAPE, %.10f, %.10f, %.10f, %.10f\n" % (i+1, name, mode, MSE, RMSE, MAE, MAPE))
    f.close()
    print('Model Testing Ended ...', time.ctime())
if __name__ == "__main__":
################# Parameter Setting #######################
MODELNAME = 'TSTN'
KEYWORD = 'pred_' + DATANAME + '_' + MODELNAME + '_' + datetime.now().strftime("%y%m%d%H%M")
PATH = '../save/' + KEYWORD
torch.manual_seed(100)
torch.cuda.manual_seed(100)
np.random.seed(100)
# torch.backends.cudnn.deterministic = True
###########################################################
GPU = '0'
device = torch.device("cuda:{}".format(GPU)) if torch.cuda.is_available() else torch.device("cpu")
###########################################################
v = pd.read_csv(FLOWPATH, nrows = 35064, header= None)
v = np.array(v)
v = v.T
v = torch.tensor(v, dtype=torch.float32)
v = v.transpose(1,0)
data = v
# data = np.load(FLOWPATH,allow_pickle=True)
#data = pd.read_hdf(FLOWPATH).values
scaler = StandardScaler()
data = scaler.fit_transform(data)
print('data.shape', data.shape)
###########################################################
if not os.path.exists(PATH):
os.makedirs(PATH)
# currentPython = sys.argv[0]
# shutil.copy2(currentPython, PATH)
# shutil.copy2('STTN.py', PATH)
# shutil.copy2('Param.py', PATH)
# shutil.copy2('Param_GraphWaveNet.py', PATH)
print(KEYWORD, 'training started', time.ctime())
trainXS, trainYS = getXSYS(data, 'TRAIN')
print('TRAIN XS.shape YS,shape', trainXS.shape, trainYS.shape)
trainModel(MODELNAME, 'train', trainXS, trainYS)
print(KEYWORD, 'testing started', time.ctime())
testXS, testYS = getXSYS(data, 'TEST')
print('TEST XS.shape, YS.shape', testXS.shape, testYS.shape)
testModel(MODELNAME, 'test', testXS, testYS)
|
// Auto-generated caniuse-lite browser-support data for "Array.prototype.findIndex".
// Do not edit by hand; regenerate from the caniuse database instead.
module.exports={A:{A:{"2":"J D E F A B xB"},B:{"1":"C K L G M N O P Q R S T U V W Z a b c d e f g h i j k l H X"},C:{"1":"0 1 2 3 4 5 6 7 8 9 t u v w x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB mB RB nB SB TB Y UB VB WB XB YB ZB aB bB cB dB eB fB gB hB P Q R oB S T U V W Z a b c d e f g h i j k l H X pB","2":"yB lB I m J D E F A B C K L G M N O n o p q r s zB 0B"},D:{"1":"DB EB FB GB HB IB JB KB LB MB NB OB PB QB mB RB nB SB TB Y UB VB WB XB YB ZB aB bB cB dB eB fB gB hB P Q R S T U V W Z a b c d e f g h i j k l H X pB 1B 2B","2":"0 1 2 3 4 5 6 7 8 9 I m J D E F A B C K L G M N O n o p q r s t u v w x y z AB BB CB"},E:{"1":"E F A B C K L G 6B 7B rB iB jB 8B 9B AC sB tB uB kB BC","2":"I m J D 3B qB 4B 5B"},F:{"1":"0 1 2 3 4 5 6 7 8 9 AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB Y UB VB WB XB YB ZB aB bB cB dB eB fB gB hB P Q R oB S T U V W","2":"F B C G M N O n o p q r s t u v w x y z CC DC EC FC iB vB GC jB"},G:{"1":"E LC MC NC OC PC QC RC SC TC UC VC WC XC YC ZC aC sB tB uB kB","2":"qB HC wB IC JC KC"},H:{"2":"bC"},I:{"1":"H","2":"lB I cC dC eC fC wB gC hC"},J:{"2":"D","16":"A"},K:{"1":"Y","2":"A B C iB vB jB"},L:{"1":"X"},M:{"1":"H"},N:{"2":"A B"},O:{"1":"iC"},P:{"1":"jC kC lC mC nC rB oC pC qC rC sC kB","2":"I"},Q:{"1":"tC"},R:{"1":"uC"},S:{"1":"vC"}},B:6,C:"Array.prototype.findIndex"};
|
import numpy as np
import pandas as pd
import sklearn.preprocessing as pp
from sklearn.compose import ColumnTransformer
from sklearn.datasets import load_iris
from sklearn.model_selection import cross_val_score, train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.neural_network import MLPClassifier
class Encode:
    """Builds a ColumnTransformer for the pet-adoption feature matrix.

    Column indices below are positional into the array produced by get_data():
    they differ depending on whether the three highly-correlated health flags
    (Vaccinated/Dewormed/Sterilized) were PCA-collapsed into one
    "Health Stats Bulk" column (dim_red_health=True) or kept as-is.
    """
    def __init__(self, dim_red_health, use_onehots):
        # dim_red_health: True when the caller replaced the three health flags
        #                 with a single PCA component (shifts column indices).
        # use_onehots:    False drops every one-hot transformer (numeric-only).
        self.dim_red_health = dim_red_health
        self.use_onehots = use_onehots
        self.__create_transformer()
    def __create_transformer(self):
        # Each entry: (name, transformer, positional column indices).
        self.__transformers = [
            ("type_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [0]),
            ("age_min_max", pp.MinMaxScaler(), [1]),
            ("breed_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [2,3]),
            ("gender_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [4]),
            ("color_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [5,6,7]),
            ("maturity_min_max", pp.MinMaxScaler(), [8]),
            ("fur_min_max", pp.MinMaxScaler(), [9]),
            ("health_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [10]),
            ("quantity_std", pp.StandardScaler(), [11]),
            ("fee_min_max", pp.MinMaxScaler(), [12]),
            ("state_onehot", pp.OneHotEncoder(handle_unknown="ignore"),[13]),
            ("photo_std", pp.StandardScaler(), [14]),
            ("vaccinated_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [15]),
            ("dewormed_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [16]),
            ("sterilized_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [17])
        ] if not self.dim_red_health else \
        [
            ("type_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [0]),
            ("age_min_max", pp.MinMaxScaler(), [1]),
            ("breed_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [2,3]),
            ("gender_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [4]),
            ("color_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [5,6,7]),
            ("maturity_min_max", pp.MinMaxScaler(), [8]),
            ("fur_min_max", pp.MinMaxScaler(), [9]),
            # ("health_bulk_onehot", pp.StandardScaler(), [10]), PCA takes care of it
            # NOTE(review): column 10 ("Health Stats Bulk") is passed through
            # untransformed here — confirm that is intentional.
            ("health_onehot", pp.OneHotEncoder(handle_unknown="ignore"), [11]),
            ("quantity_std", pp.StandardScaler(), [12]),
            ("fee_min_max", pp.MinMaxScaler(), [13]),
            ("state_onehot", pp.OneHotEncoder(handle_unknown="ignore"),[14]),
            ("photo_std", pp.StandardScaler(), [15])
        ]
        if not self.use_onehots:
            # Keep only numeric transformers (names without "onehot").
            self.__transformers = list(filter(lambda transform: "onehot" not in transform[0], self.__transformers))
    def get_column_transformer(self):
        """Return a fresh ColumnTransformer built from the configured list."""
        return ColumnTransformer(self.__transformers)
def ann(train_X, train_y, test_X, test_y):
    """Train an MLP classifier, then grid-search solver/batch-size/architecture.

    Prints train/test accuracy for both the fixed and the grid-searched model
    and returns (accuracy, grid_accuracy) measured on the test set.

    Fixed: the log messages previously said "randomforest" (copy-paste from
    another experiment) although the model trained here is an MLP.
    """
    clf = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(32, 3), random_state=1)
    clf.fit(train_X, train_y)
    print("MLP train acc:", clf.score(train_X, train_y))
    print("MLP test acc:", clf.score(test_X, test_y))
    test_y_pred = clf.predict(test_X)
    accuracy = np.mean(test_y_pred.ravel() == test_y.ravel())
    print("Accuracy: " + str(accuracy))

    # Hyper-parameter grid explored with (default 5-fold) cross-validation.
    params = {
        'activation' : ['relu'],
        'solver': ['sgd', 'lbfgs', 'adam'],
        'batch_size': [200, 300, 400],
        'random_state': [1],
        'hidden_layer_sizes': [(16,3), (32, 3), (64, 3)]
    }
    clf_grid = GridSearchCV(MLPClassifier(), param_grid=params)
    clf_grid.fit(train_X, train_y)
    print("MLP with gridsearchCV train acc:", clf_grid.score(train_X, train_y))
    print("MLP with gridsearchCV test acc:", clf_grid.score(test_X, test_y))
    test_y_pred_grid = clf_grid.predict(test_X)
    grid_accuracy = np.mean(test_y_pred_grid.ravel() == test_y.ravel())
    print("Accuracy: " + str(grid_accuracy))
    print(clf_grid.best_estimator_)
    return (accuracy, grid_accuracy)
def get_data(dim_red_health=True):
    """Load data/train/train.csv and return (train_X, train_y).

    When dim_red_health is True, the three highly-correlated health flags
    (Vaccinated/Dewormed/Sterilized) are collapsed into one PCA component
    ("Health Stats Bulk") before encoding; column indices in Encode match
    that layout.  train_X is the encoded (possibly sparse) feature matrix,
    train_y the AdoptionSpeed labels as a (n, 1) array.
    """
    df = pd.read_csv("data/train/train.csv")
    reduced_df = df[[
        "Type", "Age", "Breed1", "Breed2",
        "Gender", "Color1", "Color2", "Color3",
        "MaturitySize", "FurLength", "Vaccinated",
        "Dewormed", "Sterilized", "Health",
        "Quantity", "Fee", "State",
        "PhotoAmt", "AdoptionSpeed"]]
    if dim_red_health:
        from sklearn.decomposition import PCA
        reduced_df_no_dim_red = reduced_df.copy()
        high_correlation_df = reduced_df[["Vaccinated", "Dewormed", "Sterilized"]]
        pca = PCA(n_components=1)
        pca.fit(high_correlation_df)
        # Seeing the high correlation between the 3 variables, we combine them
        del reduced_df["Vaccinated"]
        del reduced_df["Dewormed"]
        del reduced_df["Sterilized"]
        reduced_df["Health Stats Bulk"] = pd.Series(pca.transform(high_correlation_df).reshape(1,-1)[0])
        train_X = reduced_df[["Type", "Age", "Breed1", "Breed2", "Gender", "Color1", "Color2", "Color3", "MaturitySize", "FurLength", "Health Stats Bulk", "Health", "Quantity", "Fee", "State", "PhotoAmt"]].values
    else:
        train_X = reduced_df[["Type", "Age", "Breed1", "Breed2", "Gender", "Color1", "Color2", "Color3", "MaturitySize", "FurLength", "Vaccinated", "Dewormed", "Sterilized", "Health", "Quantity", "Fee", "State", "PhotoAmt"]].values
    ct = Encode(dim_red_health, True).get_column_transformer()
    train_X = ct.fit_transform(train_X)
    train_y = reduced_df[["AdoptionSpeed"]].values
    return train_X, train_y
if __name__ == "__main__":
dim_red_health = True
df = pd.read_csv("data/train/train.csv")
reduced_df = df[[
"Type", "Age", "Breed1", "Breed2",
"Gender", "Color1", "Color2", "Color3",
"MaturitySize", "FurLength", "Vaccinated",
"Dewormed", "Sterilized", "Health",
"Quantity", "Fee", "State",
"PhotoAmt", "AdoptionSpeed"]]
reduced_df[["Vaccinated", "Dewormed", "Sterilized"]].corr()
if dim_red_health:
from sklearn.decomposition import PCA
reduced_df_no_dim_red = reduced_df.copy()
high_correlation_df = reduced_df[["Vaccinated", "Dewormed", "Sterilized"]]
pca = PCA(n_components=1)
pca.fit(high_correlation_df)
# Seeing the high correlation between the 3 variables, we combine them
del reduced_df["Vaccinated"]
del reduced_df["Dewormed"]
del reduced_df["Sterilized"]
reduced_df["Health Stats Bulk"] = pd.Series(pca.transform(high_correlation_df).reshape(1,-1)[0])
train_X = reduced_df[["Type", "Age", "Breed1", "Breed2", "Gender", "Color1", "Color2", "Color3", "MaturitySize", "FurLength", "Health Stats Bulk", "Health", "Quantity", "Fee", "State", "PhotoAmt"]].values
ct = Encode(dim_red_health, True).get_column_transformer()
train_X = ct.fit_transform(train_X)
train_y = reduced_df[["AdoptionSpeed"]].values
#y_encoder = pp.OneHotEncoder(handle_unknown="ignore")
#train_y = y_encoder.fit_transform(train_y).toarray()
train_X, test_X, train_y, test_y = train_test_split(train_X, train_y, test_size=0.2)
feature_num = train_X.shape[-1]
record_num = train_X.shape[0]
n_classes = np.unique(train_y).shape[0]
ann(train_X, train_y, test_X, test_y)
|
import React from 'react';
import classnames from 'classnames';
var IonView = React.createClass({
propTypes: {
customClasses: React.PropTypes.string
},
getDefaultProps: function() {
return {
customClasses: ''
};
},
contextTypes: {
ionSetTransitionDirection: React.PropTypes.func
},
componentWillUnmount: function() {
if (this.context.ionSetTransitionDirection) {
this.context.ionSetTransitionDirection('forward');
}
},
render() {
var classes = classnames(
{'view': true,
'nav-view-stage': true},
this.props.customClasses
);
return (
<div className={ classes } >
{ this.props.children }
</div>
);
}
});
export default IonView;
|
// region import
import fs from 'fs'
// internal
import * as session from './session'
// endregion
// region routes
// Factory for the "web-api" route table: each entry is
// [method, path, request parts passed to the handler, handler].
// NOTE(review): `fs` is imported above but unused in this chunk — presumably
// used elsewhere in the file or a leftover; verify before removing.
export default ({api, config}) => ({
	name: 'web-api',
	routes: [
		['get', 'v1/session', ['headers'], session.verify(api)],
		['post', 'v1/session', ['headers', 'body'], session.signIn(api)],
		['delete', 'v1/session', ['headers'], session.remove(api)]
	]
})
// endregion
|
from twython import TwythonStreamer
from urllib import request
import base64
import os
import subprocess
import json
import sys
import datetime
import hashlib
"""
скрипт слушает ленту пользователя, переданного через credentials.py,
печатает в консоль дату и текст твита и результат обработки медиа.
Обработка медиа заключается в том, что, если медиа есть,
создаётся каталог с именем id твита, туда кладутся картинки с названиями,
равными base64 от url источника картинки(так для каждой картинки),
создаётся подкаталог с названием равным base64 от url прикреплённой к твиту ссылки,
туда выкачивается полный вариант страницы, со структурой каталогов, соответствующей пути к странице в url
(и так для каждой ссылки в твите).
"""
class SaverStreamer(TwythonStreamer):
    """Twitter streaming client that archives every incoming tweet."""
    def on_success(self, data):
        # Only full tweet payloads carry 'text'; skip keep-alives/other events.
        if 'text' in data:
            handle_new_tweet(data)
    def on_error(self, status_code, data):
        # Log stream errors; Twython decides whether to reconnect.
        print(status_code, data)
# Credentials.py is optional: without it the script can still replay a local
# archive (see the argv branch below), but cannot stream.
try:
    import Credentials
except Exception:
    Credentials = None
# Tweet fields echoed to the console for every processed tweet.
printed_keys = ['created_at', 'text']
# Base directory where per-tweet archive folders are created.
prefix = Credentials.PREFIX
def handle_entities(entities, tweet_id):
    """Download the media and linked pages referenced by a tweet's entities.

    For each expanded URL, mirrors the page via wget into
    ``<prefix><tweet_id>/<md5(url)>/``; for each photo, saves the image into
    ``<prefix><tweet_id>/`` under a filename derived from the base64 of its
    URL.  Failures are reported per item and never abort the stream.

    Fixed: the bare ``except:`` clauses also swallowed KeyboardInterrupt and
    SystemExit; narrowed to ``except Exception``.
    """
    if 'urls' in entities:
        if len(entities['urls']) > 0:
            print('Urls:')
            for url_entry in entities['urls']:
                url = url_entry['expanded_url']
                try:
                    subprocess.call(['wget', '-q', '--user-agent="User-Agent: Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2.12) Gecko/20101026 Firefox/3.6.12"', '-e', 'robots=off', '--wait=0.1', '--random-wait', '-E', '-H', '-k', '-K', '-p', '-np', '-l 1', '-P', prefix + tweet_id + '/' +
                                     hashlib.md5(url.encode()).hexdigest() + '/', url])
                    print('Url downloaded: ', url)
                except Exception:
                    print('Cannot download url: ', url)
    if 'media' in entities:
        if len(entities['media']) > 0:
            print('Media:')
            for media in entities['media']:
                if 'type' in media and media['type'] == 'photo':
                    media_url = media['media_url_https']
                    try:
                        request.urlretrieve(media_url,
                                            prefix + tweet_id + '/' +
                                            base64.b64encode(
                                                bytes(media_url, "utf-8")).decode("ascii")
                                            + os.path.splitext(media_url)[-1])
                        print('Image downloaded: ', media_url)
                    except Exception:
                        print('Cannot download: ', media_url)
def handle_new_tweet(tweet_data):
    """Archive a single tweet: print it, dump its JSON, fetch its entities.

    Creates ``<prefix><timestamp>___<id>/`` containing data.json plus any
    downloaded media/pages, for both the tweet itself and a quoted tweet.

    Fixed: the bare ``except:`` around the date parsing also swallowed
    KeyboardInterrupt/SystemExit; narrowed to ``except ValueError``, which is
    what strptime raises on a format mismatch.
    """
    # Archive exports use '2015-02-11 15:35:10 +0000'; the live stream uses
    # the classic Twitter format, so fall back on a parse failure.
    try:
        date = datetime.datetime.strptime(
            tweet_data['created_at'], '%Y-%m-%d %H:%M:%S %z')
    except ValueError:
        date = datetime.datetime.strptime(
            tweet_data['created_at'], '%a %b %d %H:%M:%S %z %Y')
    tweet_id = date.strftime('%Y-%m-%d_%H.%M.%S') + \
        "___" + str(tweet_data['id'])
    for key in printed_keys:
        print(key, ': ', tweet_data[key])
    print('')
    if not os.path.exists(prefix + tweet_id):
        os.makedirs(prefix + tweet_id)
    # Append mode keeps an existing dump intact if the tweet is seen twice.
    with open(prefix + tweet_id + '/data.json', 'a') as the_file:
        the_file.write(json.dumps(tweet_data, ensure_ascii=False))
    if 'entities' in tweet_data:
        handle_entities(tweet_data['entities'], tweet_id)
    # Quoted tweets carry their own entities; archive those too.
    if 'quoted_status' in tweet_data:
        q = tweet_data['quoted_status']
        if 'entities' in q:
            print(q['entities'])
            handle_entities(q['entities'], tweet_id)
    print('\n\n')
# trying to parse local data if present
if len(sys.argv) > 1:
    # Offline mode: replay a downloaded Twitter archive directory given as
    # argv[1].  The archive stores tweets as JS assignments, hence execjs.
    import execjs
    arch_dir_path = sys.argv[1]
    print("Processing twitter archive: ", arch_dir_path)
    with open(arch_dir_path + '/data/js/tweet_index.js', 'r') as the_file:
        tw_index_str = the_file.read()
    ctx = execjs.compile(tw_index_str)
    tweet_index = ctx.eval("tweet_index")
    for meta_data in tweet_index:
        file_name = arch_dir_path + '/' + meta_data['file_name']
        var_name = meta_data['var_name']
        with open(file_name, 'r') as m_file:
            tw_a_str = m_file.read()
        # Each monthly file assigns into Grailbird.data.<var_name>; stub the
        # Grailbird object so the assignment evaluates cleanly.
        ctx_m = execjs.compile(
            'var Grailbird = {}; Grailbird["data"] = {};' + tw_a_str)
        tw_list = ctx_m.eval("Grailbird.data." + var_name)
        for tweet in tw_list:
            handle_new_tweet(tweet)
else:
    # Live mode: stream the authenticated user's timeline via Twython.
    APP_KEY = ''
    APP_SECRET = ''
    OAUTH_TOKEN = ''
    OAUTH_TOKEN_SECRET = ''
    if Credentials is not None:
        APP_KEY = Credentials.APP_KEY
        APP_SECRET = Credentials.APP_SECRET
        OAUTH_TOKEN = Credentials.OAUTH_TOKEN
        OAUTH_TOKEN_SECRET = Credentials.OAUTH_TOKEN_SECRET
        stream = SaverStreamer(
            APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
        # 'with': 'user' restricts the stream to the user's own timeline.
        stream.user(**{'with': 'user'})
    else:
        print('No credentials – no tweets')
|
#pragma once
#include "EdgesDetector.h"
// Edge detector that works in the frequency domain: applies a DFT-based
// filter of configurable size to highlight edges.
class DFTFilter : public EdgesDetector {
public:
	DFTFilter() { filterSize_ = 32; };          // default filter size: 32
	DFTFilter(int filterSize) : filterSize_(filterSize) {};
	// EdgesDetector interface: write the filtered result into outputImage.
	virtual void detect(cv::Mat& inputImage, cv::Mat& outputImage);
private:
	int filterSize_;                            // side length of the frequency-domain filter
	// Swap quadrants so the zero-frequency component is centered.
	void dftShift(cv::Mat& i, cv::Mat& outI);
	// Compute the magnitude spectrum of a complex image.
	void calcMS(cv::Mat& i, cv::Mat& magI);
	// Build the filter mask H of the given size.
	void synthesizeFilter(cv::Mat& H, cv::Size size, int filterSize);
	// Pack a real image into the two-channel complex representation.
	void createComplexImage(const cv::Mat& i, cv::Mat& complexImage);
};
|
// Auto-generated Doxygen navigation data (member list for PuckDetectorBuilder).
// Do not edit by hand; regenerate the documentation instead.
var class_puck_detector_builder_1_1_puck_detector_builder =
[
    [ "__init__", "class_puck_detector_builder_1_1_puck_detector_builder.html#a3ad605fef8650da4d031e7ff9bf3caa8", null ],
    [ "build", "class_puck_detector_builder_1_1_puck_detector_builder.html#ab04534170dcf872cd34b6fcbcbecd876", null ],
    [ "m_broadcaster", "class_puck_detector_builder_1_1_puck_detector_builder.html#a310292569418863fb37ab05a06d51344", null ],
    [ "m_camera", "class_puck_detector_builder_1_1_puck_detector_builder.html#a264a331e37c8e5c23b47af309022c009", null ],
    [ "m_mode", "class_puck_detector_builder_1_1_puck_detector_builder.html#ae8cceaee1abb1ba8a7e2a4e2c10384dd", null ],
    [ "m_path", "class_puck_detector_builder_1_1_puck_detector_builder.html#a5b2c5b13d376b722d381f9be62c2949b", null ],
    [ "m_reconfigure", "class_puck_detector_builder_1_1_puck_detector_builder.html#aa615fc1a3e1a9211ab814ad9334e070b", null ]
];
|
/*
* Copyright (C) 2014 NVIDIA CORPORATION. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*/
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/sort.h>
#include <soc/tegra/fuse.h>
#include "mc.h"
/* Memory controller register offsets and field definitions. */
#define MC_INTSTATUS 0x000
#define MC_INTMASK 0x004
/* Error status register: fault type, permissions and high address bits. */
#define MC_ERR_STATUS 0x08
#define MC_ERR_STATUS_TYPE_SHIFT 28
#define MC_ERR_STATUS_TYPE_INVALID_SMMU_PAGE (6 << MC_ERR_STATUS_TYPE_SHIFT)
#define MC_ERR_STATUS_TYPE_MASK (0x7 << MC_ERR_STATUS_TYPE_SHIFT)
#define MC_ERR_STATUS_READABLE (1 << 27)
#define MC_ERR_STATUS_WRITABLE (1 << 26)
#define MC_ERR_STATUS_NONSECURE (1 << 25)
#define MC_ERR_STATUS_ADR_HI_SHIFT 20
#define MC_ERR_STATUS_ADR_HI_MASK 0x3
#define MC_ERR_STATUS_SECURITY (1 << 17)
#define MC_ERR_STATUS_RW (1 << 16)
/* Low 32 bits of the faulting address. */
#define MC_ERR_ADR 0x0c
/* External memory arbitration configuration. */
#define MC_EMEM_ARB_CFG 0x90
#define MC_EMEM_ARB_CFG_CYCLES_PER_UPDATE(x) (((x) & 0x1ff) << 0)
#define MC_EMEM_ARB_CFG_CYCLES_PER_UPDATE_MASK 0x1ff
#define MC_EMEM_ARB_MISC0 0xd8
/* Address configuration: bit 0 encodes the number of DRAM devices - 1. */
#define MC_EMEM_ADR_CFG 0x54
#define MC_EMEM_ADR_CFG_EMEM_NUMDEV BIT(0)
/*
 * Device-tree match table: one entry per supported Tegra generation, each
 * pointing at that SoC's memory-controller description.  Entries are only
 * compiled in when the corresponding SoC support is configured.
 */
static const struct of_device_id tegra_mc_of_match[] = {
#ifdef CONFIG_ARCH_TEGRA_3x_SOC
	{ .compatible = "nvidia,tegra30-mc", .data = &tegra30_mc_soc },
#endif
#ifdef CONFIG_ARCH_TEGRA_114_SOC
	{ .compatible = "nvidia,tegra114-mc", .data = &tegra114_mc_soc },
#endif
#ifdef CONFIG_ARCH_TEGRA_124_SOC
	{ .compatible = "nvidia,tegra124-mc", .data = &tegra124_mc_soc },
#endif
#ifdef CONFIG_ARCH_TEGRA_132_SOC
	{ .compatible = "nvidia,tegra132-mc", .data = &tegra132_mc_soc },
#endif
#ifdef CONFIG_ARCH_TEGRA_210_SOC
	{ .compatible = "nvidia,tegra210-mc", .data = &tegra210_mc_soc },
#endif
	{ }
};
MODULE_DEVICE_TABLE(of, tegra_mc_of_match);
/*
 * Program the arbitration tick length and the per-client latency allowance
 * defaults.  Called once at probe time.
 */
static int tegra_mc_setup_latency_allowance(struct tegra_mc *mc)
{
	unsigned long long tick;
	unsigned int i;
	u32 value;

	/* compute the number of MC clock cycles per tick */
	tick = mc->tick * clk_get_rate(mc->clk);
	do_div(tick, NSEC_PER_SEC);

	value = readl(mc->regs + MC_EMEM_ARB_CFG);
	value &= ~MC_EMEM_ARB_CFG_CYCLES_PER_UPDATE_MASK;
	value |= MC_EMEM_ARB_CFG_CYCLES_PER_UPDATE(tick);
	writel(value, mc->regs + MC_EMEM_ARB_CFG);

	/* write latency allowance defaults */
	for (i = 0; i < mc->soc->num_clients; i++) {
		const struct tegra_mc_la *la = &mc->soc->clients[i].la;
		u32 value;

		/* read-modify-write: only this client's LA field changes */
		value = readl(mc->regs + la->reg);
		value &= ~(la->mask << la->shift);
		value |= (la->def & la->mask) << la->shift;
		writel(value, mc->regs + la->reg);
	}

	return 0;
}
/*
 * Write the EMEM register set registered for @rate.  If no timing matches
 * the requested rate exactly, log an error and leave the registers alone.
 */
void tegra_mc_write_emem_configuration(struct tegra_mc *mc, unsigned long rate)
{
	unsigned int i;
	struct tegra_mc_timing *timing = NULL;

	for (i = 0; i < mc->num_timings; i++) {
		if (mc->timings[i].rate == rate) {
			timing = &mc->timings[i];
			break;
		}
	}

	if (!timing) {
		dev_err(mc->dev, "no memory timing registered for rate %lu\n",
			rate);
		return;
	}

	for (i = 0; i < mc->soc->num_emem_regs; ++i)
		mc_writel(mc, timing->emem_data[i], mc->soc->emem_regs[i]);
}
/*
 * Return the number of external memory devices: MC_EMEM_ADR_CFG bit 0 is
 * 0 for a single DRAM device and 1 for two.
 */
unsigned int tegra_mc_get_emem_device_count(struct tegra_mc *mc)
{
	return (mc_readl(mc, MC_EMEM_ADR_CFG) &
		MC_EMEM_ADR_CFG_EMEM_NUMDEV) + 1;
}
/*
 * Parse one timing node from the device tree: its clock-frequency becomes
 * the timing's rate and "nvidia,emem-configuration" fills emem_data with
 * one value per SoC EMEM register.  Returns 0 or a negative errno.
 */
static int load_one_timing(struct tegra_mc *mc,
			   struct tegra_mc_timing *timing,
			   struct device_node *node)
{
	int err;
	u32 tmp;

	err = of_property_read_u32(node, "clock-frequency", &tmp);
	if (err) {
		dev_err(mc->dev,
			"timing %s: failed to read rate\n", node->name);
		return err;
	}

	timing->rate = tmp;
	timing->emem_data = devm_kcalloc(mc->dev, mc->soc->num_emem_regs,
					 sizeof(u32), GFP_KERNEL);
	if (!timing->emem_data)
		return -ENOMEM;

	err = of_property_read_u32_array(node, "nvidia,emem-configuration",
					 timing->emem_data,
					 mc->soc->num_emem_regs);
	if (err) {
		dev_err(mc->dev,
			"timing %s: failed to read EMEM configuration\n",
			node->name);
		return err;
	}

	return 0;
}
/*
 * Allocate mc->timings and fill one entry per child of @node (the RAM-code
 * node matched by tegra_mc_setup_timings).  On a child parse failure the
 * child's reference is dropped and the error propagated.
 */
static int load_timings(struct tegra_mc *mc, struct device_node *node)
{
	struct device_node *child;
	struct tegra_mc_timing *timing;
	int child_count = of_get_child_count(node);
	int i = 0, err;

	mc->timings = devm_kcalloc(mc->dev, child_count, sizeof(*timing),
				   GFP_KERNEL);
	if (!mc->timings)
		return -ENOMEM;

	mc->num_timings = child_count;

	for_each_child_of_node(node, child) {
		timing = &mc->timings[i++];

		err = load_one_timing(mc, timing, child);
		if (err) {
			/* for_each_child_of_node holds a ref on early exit */
			of_node_put(child);
			return err;
		}
	}

	return 0;
}
/*
 * Find the device-tree timings node whose "nvidia,ram-code" matches the
 * board's strapped RAM code and load its timings.  Missing timings are only
 * a warning: the MC still works with whatever the bootloader programmed.
 */
static int tegra_mc_setup_timings(struct tegra_mc *mc)
{
	struct device_node *node;
	u32 ram_code, node_ram_code;
	int err;

	ram_code = tegra_read_ram_code();

	mc->num_timings = 0;

	for_each_child_of_node(mc->dev->of_node, node) {
		err = of_property_read_u32(node, "nvidia,ram-code",
					   &node_ram_code);
		if (err || (node_ram_code != ram_code))
			continue;

		err = load_timings(mc, node);
		of_node_put(node);
		if (err)
			return err;
		break;
	}

	if (mc->num_timings == 0)
		dev_warn(mc->dev,
			 "no memory timings for RAM code %u registered\n",
			 ram_code);

	return 0;
}
/* Human-readable names for MC_INTSTATUS bits; unset entries read as NULL. */
static const char *const status_names[32] = {
	[ 1] = "External interrupt",
	[ 6] = "EMEM address decode error",
	[ 8] = "Security violation",
	[ 9] = "EMEM arbitration error",
	[10] = "Page fault",
	[11] = "Invalid APB ASID update",
	[12] = "VPR violation",
	[13] = "Secure carveout violation",
	[16] = "MTS carveout violation",
};

/* Names for the MC_ERR_STATUS type field (bits 30:28). */
static const char *const error_names[8] = {
	[2] = "EMEM decode error",
	[3] = "TrustZone violation",
	[4] = "Carveout violation",
	[6] = "SMMU translation error",
};
/*
 * Memory-controller interrupt handler: decode MC_ERR_STATUS/MC_ERR_ADR for
 * every pending status bit, log a rate-limited description of the fault
 * (client, direction, address, type, SMMU page permissions), then ack the
 * handled bits.
 */
static irqreturn_t tegra_mc_irq(int irq, void *data)
{
	struct tegra_mc *mc = data;
	unsigned long status;
	unsigned int bit;

	/* mask all interrupts to avoid flooding */
	status = mc_readl(mc, MC_INTSTATUS) & mc->soc->intmask;
	if (!status)
		return IRQ_NONE;

	for_each_set_bit(bit, &status, 32) {
		const char *error = status_names[bit] ?: "unknown";
		const char *client = "unknown", *desc;
		const char *direction, *secure;
		phys_addr_t addr = 0;
		unsigned int i;
		char perm[7];
		u8 id, type;
		u32 value;

		value = mc_readl(mc, MC_ERR_STATUS);

#ifdef CONFIG_PHYS_ADDR_T_64BIT
		/* bits 21:20 of ERR_STATUS extend the 32-bit ERR_ADR address */
		if (mc->soc->num_address_bits > 32) {
			addr = ((value >> MC_ERR_STATUS_ADR_HI_SHIFT) &
				MC_ERR_STATUS_ADR_HI_MASK);
			addr <<= 32;
		}
#endif

		if (value & MC_ERR_STATUS_RW)
			direction = "write";
		else
			direction = "read";

		if (value & MC_ERR_STATUS_SECURITY)
			secure = "secure ";
		else
			secure = "";

		/* map the raw client ID to its name via the SoC client table */
		id = value & mc->soc->client_id_mask;

		for (i = 0; i < mc->soc->num_clients; i++) {
			if (mc->soc->clients[i].id == id) {
				client = mc->soc->clients[i].name;
				break;
			}
		}

		type = (value & MC_ERR_STATUS_TYPE_MASK) >>
		       MC_ERR_STATUS_TYPE_SHIFT;
		desc = error_names[type];

		/* for SMMU page faults, render the page's R/W/S permissions */
		switch (value & MC_ERR_STATUS_TYPE_MASK) {
		case MC_ERR_STATUS_TYPE_INVALID_SMMU_PAGE:
			perm[0] = ' ';
			perm[1] = '[';

			if (value & MC_ERR_STATUS_READABLE)
				perm[2] = 'R';
			else
				perm[2] = '-';

			if (value & MC_ERR_STATUS_WRITABLE)
				perm[3] = 'W';
			else
				perm[3] = '-';

			if (value & MC_ERR_STATUS_NONSECURE)
				perm[4] = '-';
			else
				perm[4] = 'S';

			perm[5] = ']';
			perm[6] = '\0';
			break;

		default:
			perm[0] = '\0';
			break;
		}

		value = mc_readl(mc, MC_ERR_ADR);
		addr |= value;

		dev_err_ratelimited(mc->dev, "%s: %s%s @%pa: %s (%s%s)\n",
				    client, secure, direction, &addr, error,
				    desc, perm);
	}

	/* clear interrupts */
	mc_writel(mc, status, MC_INTSTATUS);

	return IRQ_HANDLED;
}
/*
 * tegra_mc_probe() - bind the memory controller platform device
 *
 * Maps the register space, acquires the "mc" clock, programs latency
 * allowance and timings, installs the error IRQ handler, and finally
 * unmasks the SoC-specific interrupt set.
 */
static int tegra_mc_probe(struct platform_device *pdev)
{
	const struct of_device_id *match;
	struct resource *res;
	struct tegra_mc *mc;
	int err;

	match = of_match_node(tegra_mc_of_match, pdev->dev.of_node);
	if (!match)
		return -ENODEV;

	mc = devm_kzalloc(&pdev->dev, sizeof(*mc), GFP_KERNEL);
	if (!mc)
		return -ENOMEM;

	platform_set_drvdata(pdev, mc);
	mc->soc = match->data;
	mc->dev = &pdev->dev;

	/* length of MC tick in nanoseconds */
	mc->tick = 30;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	mc->regs = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(mc->regs))
		return PTR_ERR(mc->regs);

	mc->clk = devm_clk_get(&pdev->dev, "mc");
	if (IS_ERR(mc->clk)) {
		dev_err(&pdev->dev, "failed to get MC clock: %ld\n",
			PTR_ERR(mc->clk));
		return PTR_ERR(mc->clk);
	}

	err = tegra_mc_setup_latency_allowance(mc);
	if (err < 0) {
		dev_err(&pdev->dev, "failed to setup latency allowance: %d\n",
			err);
		return err;
	}

	err = tegra_mc_setup_timings(mc);
	if (err < 0) {
		dev_err(&pdev->dev, "failed to setup timings: %d\n", err);
		return err;
	}

	mc->irq = platform_get_irq(pdev, 0);
	if (mc->irq < 0) {
		dev_err(&pdev->dev, "interrupt not specified\n");
		return mc->irq;
	}

	err = devm_request_irq(&pdev->dev, mc->irq, tegra_mc_irq, IRQF_SHARED,
			       dev_name(&pdev->dev), mc);
	if (err < 0) {
		dev_err(&pdev->dev, "failed to request IRQ#%u: %d\n", mc->irq,
			err);
		return err;
	}

	/* without a client ID mask the IRQ handler cannot decode fault clients */
	WARN(!mc->soc->client_id_mask, "Missing client ID mask for this SoC\n");

	mc_writel(mc, mc->soc->intmask, MC_INTMASK);

	return 0;
}
/* Platform driver; sysfs bind/unbind is deliberately suppressed. */
static struct platform_driver tegra_mc_driver = {
	.driver = {
		.name = "tegra-mc",
		.of_match_table = tegra_mc_of_match,
		.suppress_bind_attrs = true,
	},
	.prevent_deferred_probe = true,
	.probe = tegra_mc_probe,
};
/* Registered at arch_initcall time, ahead of regular device initcalls. */
static int tegra_mc_init(void)
{
	return platform_driver_register(&tegra_mc_driver);
}
arch_initcall(tegra_mc_init);

MODULE_AUTHOR("Thierry Reding <treding@nvidia.com>");
MODULE_DESCRIPTION("NVIDIA Tegra Memory Controller driver");
MODULE_LICENSE("GPL v2");
|
// Doxygen-generated navigation index for benchmark/NEON/ColorConvert.cpp.
// Each entry is [display name, documentation anchor URL, child entries].
// Auto-generated — do not edit by hand; regenerate with Doxygen.
var benchmark_2_n_e_o_n_2_color_convert_8cpp =
[
    [ "NEColorConvertFixture", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#ab8fb396866bcf9936ff357f16a871325", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a4adf2c31c9668a635ba7a16620eac36e", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a177a877f56d3311b7f4bd4d7f24b19fc", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a0de424b12484ba2cad2fb4aa5e0a840a", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a8f7df98385513ab246a09f26ec53f7d8", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#abb0f8798014e59e07214939e62a88172", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a1d06c39d80a4ed2e52ce9f071760b5f0", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a0d0a8409b71d2cd895507345570ab736", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#abda62451c0731d19b121142c69d575b6", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a87a623a31a7e1be53583999dbd81b0ec", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#abc0656cf27fc23a2b77a2a37eacb2c27", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#ac9fee2e9fde80ef64eeab5295d49198f", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#abdec8ccdaf630613d37cd4923c98093f", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#a46f8d142c65bbea99e44da409807c761", null ],
    [ "REGISTER_FIXTURE_DATA_TEST_CASE", "benchmark_2_n_e_o_n_2_color_convert_8cpp.xhtml#aa642990b61d14397ac4bfa6900f98188", null ]
];
|
# -*- coding: utf-8 -*-
"""
test_ext_inheritance_diagram
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Test sphinx.ext.inheritance_diagram extension.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
import pytest
from sphinx.ext.inheritance_diagram import InheritanceException, import_classes
@pytest.mark.sphinx('html', testroot='ext-inheritance_diagram')
@pytest.mark.usefixtures('if_graphviz_found')
def test_inheritance_diagram_html(app, status, warning):
    """Build the test project as HTML and check the rendered diagram figure."""
    app.builder.build_all()

    content = (app.outdir / 'index.html').text()

    # The image filename contains a content hash, hence the \w+ wildcard.
    pattern = ('<div class="figure" id="id1">\n'
               '<img src="_images/inheritance-\\w+.png" alt="Inheritance diagram of test.Foo" '
               'class="inheritance"/>\n<p class="caption"><span class="caption-text">'
               'Test Foo!</span><a class="headerlink" href="#id1" '
               'title="Permalink to this image">\xb6</a></p>')
    assert re.search(pattern, content, re.M)
@pytest.mark.sphinx('latex', testroot='ext-inheritance_diagram')
@pytest.mark.usefixtures('if_graphviz_found')
def test_inheritance_diagram_latex(app, status, warning):
    """Build the test project as LaTeX and check the diagram figure markup."""
    app.builder.build_all()

    content = (app.outdir / 'Python.tex').text()

    # Double-escaped backslashes: this is a regex over LaTeX source.
    pattern = ('\\\\begin{figure}\\[htbp]\n\\\\centering\n\\\\capstart\n\n'
               '\\\\includegraphics{inheritance-\\w+.pdf}\n'
               '\\\\caption{Test Foo!}\\\\label{\\\\detokenize{index:id1}}\\\\end{figure}')
    assert re.search(pattern, content, re.M)
@pytest.mark.sphinx('html', testroot='ext-inheritance_diagram',
                    srcdir='ext-inheritance_diagram-alias')
@pytest.mark.usefixtures('if_graphviz_found')
def test_inheritance_diagram_latex_alias(app, status, warning):
    """Check that inheritance_alias rewrites class names in the graph.

    NOTE(review): despite "latex" in the name, this builds the HTML target
    (see the sphinx marker above) — confirm the intended test name.
    """
    app.config.inheritance_alias = {'test.Foo': 'alias.Foo'}
    app.builder.build_all()

    doc = app.env.get_and_resolve_doctree('index', app)
    aliased_graph = doc.children[0].children[3]['graph'].class_info
    assert len(aliased_graph) == 3
    assert ('test.Baz', 'test.Baz', ['test.Bar'], None) in aliased_graph
    assert ('test.Bar', 'test.Bar', ['alias.Foo'], None) in aliased_graph
    assert ('alias.Foo', 'alias.Foo', [], None) in aliased_graph

    content = (app.outdir / 'index.html').text()

    # The alias applies to the graph data above; the figure's alt text
    # still names test.Foo.
    pattern = ('<div class="figure" id="id1">\n'
               '<img src="_images/inheritance-\\w+.png" alt="Inheritance diagram of test.Foo" '
               'class="inheritance"/>\n<p class="caption"><span class="caption-text">'
               'Test Foo!</span><a class="headerlink" href="#id1" '
               'title="Permalink to this image">\xb6</a></p>')
    assert re.search(pattern, content, re.M)
def test_import_classes(rootdir):
    """Exercise import_classes() across modules, classes and error cases."""
    from sphinx.application import Sphinx, TemplateBridge
    from sphinx.util.i18n import CatalogInfo

    try:
        # Make the fixture package `example` importable for this test.
        sys.path.append(rootdir / 'test-ext-inheritance_diagram')
        from example.sphinx import DummyClass

        # got exception for unknown class or module
        with pytest.raises(InheritanceException):
            import_classes('unknown', None)
        with pytest.raises(InheritanceException):
            import_classes('unknown.Unknown', None)

        # a module having no classes
        classes = import_classes('sphinx', None)
        assert classes == []

        classes = import_classes('sphinx', 'foo')
        assert classes == []

        # all of classes in the module
        classes = import_classes('sphinx.application', None)
        assert set(classes) == set([Sphinx, TemplateBridge])

        # specified class in the module
        classes = import_classes('sphinx.application.Sphinx', None)
        assert classes == [Sphinx]

        # specified class in current module
        classes = import_classes('Sphinx', 'sphinx.application')
        assert classes == [Sphinx]

        # relative module name to current module
        classes = import_classes('i18n.CatalogInfo', 'sphinx.util')
        assert classes == [CatalogInfo]

        # got exception for functions
        with pytest.raises(InheritanceException):
            import_classes('encode_uri', 'sphinx.util')

        # import submodule on current module (refs: #3164)
        classes = import_classes('sphinx', 'example')
        assert classes == [DummyClass]
    finally:
        # Always undo the sys.path mutation, even on assertion failure.
        sys.path.pop()
|
#!/usr/bin/env python3
import os
import sys
# Minimum interpreter version required by the randomizer.
min_python_version = [3, 6, 0]

# Tuple comparison handles major/minor/micro ordering in one step and keeps
# the error message consistent with the constant above (the old code
# hardcoded "3.6" in the message and shadowed its loop variable).
if tuple(sys.version_info[:3]) < tuple(min_python_version):
    required = '.'.join(str(part) for part in min_python_version)
    current = '.'.join(str(part) for part in sys.version_info[:3])
    print("Randomizer requires at least version %s and you are using %s" % (required, current))
    exit(1)
import subprocess
import shutil
import webbrowser
from Utils import local_path, data_path, check_python_version, compare_version, VersionError
from SettingsToJson import CreateJSON
def guiMain():
    """Launch the Node/Electron GUI after verifying the toolchain.

    Checks Node and NPM versions first; on failure, reports the problem
    and opens the relevant download page instead of launching.
    """
    try:
        version_check("Node", "8.0.0", "https://nodejs.org/en/download/")
        version_check("NPM", "3.5.2", "https://nodejs.org/en/download/")
    except VersionError as error:
        # args[0] is the human-readable message, args[1] the download URL.
        print(error.args[0])
        webbrowser.open(error.args[1])
        return

    if '--skip-settingslist' not in sys.argv:
        CreateJSON(data_path('generated/settings_list.json'), '--web' in sys.argv)

    gui_command = ["node", "run.js", "release", "python", sys.executable]
    subprocess.Popen(gui_command, shell=False, cwd=local_path("GUI"))
def version_check(name, version, URL):
    """Verify that an external tool is installed and recent enough.

    Args:
        name: display name of the tool; its executable is ``name.lower()``.
        version: minimum acceptable version string.
        URL: download page to point the user at on failure.

    Raises:
        VersionError: if the tool is missing or older than ``version``.
    """
    # Check PATH explicitly instead of letting Popen fail on a None
    # executable; the old code masked this as a generic exception.
    executable = shutil.which(name.lower())
    if executable is None:
        raise VersionError('{name} is not installed. Please install {name} {version} or later'.format(name=name, version=version), URL)
    try:
        process = subprocess.Popen([executable, "--version"], stdout=subprocess.PIPE)
    except Exception as ex:
        # Chain the original failure so the traceback is not lost.
        raise VersionError('{name} is not installed. Please install {name} {version} or later'.format(name=name, version=version), URL) from ex
    while True:
        line = str(process.stdout.readline().strip(), 'UTF-8')
        if line == '':
            break
        if compare_version(line, version) < 0:
            raise VersionError('{name} {version} or later is required but you are using {line}'.format(name=name, version=version, line=line), URL)
        print('Using {name} {line}'.format(name=name, line=line))
# Script entry point: launch the GUI when run directly.
if __name__ == '__main__':
    guiMain()
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_THREAD_DEFS_H
#define ANDROID_THREAD_DEFS_H
#include "graphics.h"
#if defined(__cplusplus)
extern "C" {
#endif
enum {
    /*
     * ***********************************************
     * ** Keep in sync with android.os.Process.java **
     * ***********************************************
     *
     * This maps directly to the "nice" priorities we use in Android.
     * A thread priority should be chosen inverse-proportionally to
     * the amount of work the thread is expected to do. The more work
     * a thread will do, the less favorable priority it should get so that
     * it doesn't starve the system. Threads not behaving properly might
     * be "punished" by the kernel.
     * Use the levels below when appropriate. Intermediate values are
     * acceptable, preferably use the {MORE|LESS}_FAVORABLE constants below.
     */
    ANDROID_PRIORITY_LOWEST         =  19,

    /* use for background tasks */
    ANDROID_PRIORITY_BACKGROUND     =  10,

    /* most threads run at normal priority */
    ANDROID_PRIORITY_NORMAL         =   0,

    /* threads currently running a UI that the user is interacting with */
    ANDROID_PRIORITY_FOREGROUND     =  -2,

    /* the main UI thread has a slightly more favorable priority */
    ANDROID_PRIORITY_DISPLAY        =  -4,

    /* UI service threads might want to run at an urgent display priority (uncommon) */
    ANDROID_PRIORITY_URGENT_DISPLAY =  HAL_PRIORITY_URGENT_DISPLAY,

    /* all normal audio threads */
    ANDROID_PRIORITY_AUDIO          = -16,

    /* service audio threads (uncommon) */
    ANDROID_PRIORITY_URGENT_AUDIO   = -19,

    /* should never be used in practice. regular process might not
     * be allowed to use this level */
    ANDROID_PRIORITY_HIGHEST        = -20,

    // BEGIN Motorola, IKJBXLINE-9555, rknize2, 05/10/2013
    /* Because of the way Android munges the policy with the priority, handle
     * RT as a special case. Actual RT priority is set using a different API */
    ANDROID_PRIORITY_REALTIME       = -21,
    // END Motorola, IKJBXLINE-9555

    ANDROID_PRIORITY_DEFAULT        = ANDROID_PRIORITY_NORMAL,
    ANDROID_PRIORITY_MORE_FAVORABLE = -1,
    ANDROID_PRIORITY_LESS_FAVORABLE = +1,
};
#if defined(__cplusplus)
}
#endif
#endif /* ANDROID_THREAD_DEFS_H */
|
# -*- coding: utf-8 -*-
from datetime import timedelta
from odoo import models, fields, api, exceptions
class Course(models.Model):
    """An academy course: a title plus the sessions scheduled for it."""
    _name = 'academy.course'
    _description = 'Academy Courses'

    # Course title; must be unique (see _sql_constraints below).
    name = fields.Char(string="Title", required=True)
    description = fields.Text()
    # User responsible for the course; kept blank if the user is deleted.
    responsible_id = fields.Many2one('res.users',
        ondelete='set null', string="Responsible", index=True)
    session_ids = fields.One2many(
        'academy.session', 'course_id', string="Sessions")

    @api.multi
    def copy(self, default=None):
        """Duplicate the course under a unique "Copy of ..." title.

        The UNIQUE(name) SQL constraint would otherwise reject a plain
        copy; a running count disambiguates repeated copies.
        """
        default = dict(default or {})
        copied_count = self.search_count(
            [('name', '=like', u"Copy of {}%".format(self.name))])
        if not copied_count:
            new_name = u"Copy of {}".format(self.name)
        else:
            new_name = u"Copy of {} ({})".format(self.name, copied_count)
        default['name'] = new_name
        return super(Course, self).copy(default)

    _sql_constraints = [
        ('name_description_check',
         'CHECK(name != description)',
         "The title of the course should not be the description"),

        ('name_unique',
         'UNIQUE(name)',
         "The course title must be unique"),
    ]

    @api.depends('value')
    def _value_pc(self):
        # NOTE(review): neither `value` nor `value2` is a field declared on
        # this model — this compute method looks like leftover tutorial
        # code and cannot work as written; confirm and remove.
        self.value2 = float(self.value) / 100
class Session(models.Model):
    """A scheduled run of a course, with instructor, dates and attendees."""
    _name = 'academy.session'
    _description = "Academy Sessions"

    name = fields.Char(required=True)
    start_date = fields.Date(default=fields.Date.today)
    duration = fields.Float(digits=(6, 2), help="Duration in days")
    seats = fields.Integer(string="Number of seats")
    # Inactive sessions are hidden from default searches.
    active = fields.Boolean(default=True)
    # Only partners flagged as instructors or tagged "Teacher" qualify.
    instructor_id = fields.Many2one('res.partner', string="Instructor",
        domain=['|', ('instructor', '=', True),
                ('category_id.name', 'ilike', "Teacher")])
    # Deleting a course deletes its sessions (cascade).
    course_id = fields.Many2one('academy.course',
        ondelete='cascade', string="Course", required=True)
    attendee_ids = fields.Many2many('res.partner', string="Attendees")
    # Occupancy as a percentage of available seats (computed, not stored).
    taken_seats = fields.Float(string="Taken seats", compute='_taken_seats')
    # Stored computed field with an inverse so it is editable in the UI.
    end_date = fields.Date(string="End Date", store=True,
        compute='_get_end_date', inverse='_set_end_date')
    hours = fields.Float(string="Duration in hours",
        compute='_get_hours', inverse='_set_hours')
    attendees_count = fields.Integer(
        string="Attendees count", compute='_get_attendees_count', store=True)

    @api.depends('seats', 'attendee_ids')
    def _taken_seats(self):
        """Compute occupancy as a percentage of available seats."""
        for r in self:
            if not r.seats:
                # Avoid division by zero when no seats are configured.
                r.taken_seats = 0.0
            else:
                r.taken_seats = 100.0 * len(r.attendee_ids) / r.seats

    @api.onchange('seats', 'attendee_ids')
    def _verify_valid_seats(self):
        """Warn in the form UI about negative seats or overbooking."""
        if self.seats < 0:
            return {
                'warning': {
                    'title': "Incorrect 'seats' value",
                    'message': "The number of available seats may not be negative",
                },
            }
        if self.seats < len(self.attendee_ids):
            return {
                'warning': {
                    'title': "Too many attendees",
                    'message': "Increase seats or remove excess attendees",
                },
            }

    @api.depends('start_date', 'duration')
    def _get_end_date(self):
        """Compute end_date from start_date plus duration (in days)."""
        for r in self:
            if not (r.start_date and r.duration):
                r.end_date = r.start_date
                continue

            # Add duration to start_date, but: Monday + 5 days = Saturday, so
            # subtract one second to get on Friday instead
            duration = timedelta(days=r.duration, seconds=-1)
            r.end_date = r.start_date + duration

    def _set_end_date(self):
        """Inverse of _get_end_date: derive duration from the two dates."""
        for r in self:
            if not (r.start_date and r.end_date):
                continue

            # Compute the difference between dates, but: Friday - Monday = 4 days,
            # so add one day to get 5 days instead
            r.duration = (r.end_date - r.start_date).days + 1

    @api.depends('duration')
    def _get_hours(self):
        """Expose duration (stored in days) as hours."""
        for r in self:
            r.hours = r.duration * 24

    def _set_hours(self):
        """Inverse of _get_hours: store edited hours back as days."""
        for r in self:
            r.duration = r.hours / 24

    @api.depends('attendee_ids')
    def _get_attendees_count(self):
        """Stored attendee count, usable in list views and statistics."""
        for r in self:
            r.attendees_count = len(r.attendee_ids)

    @api.constrains('instructor_id', 'attendee_ids')
    def _check_instructor_not_in_attendees(self):
        """Reject records where the instructor is also an attendee."""
        for r in self:
            if r.instructor_id and r.instructor_id in r.attendee_ids:
                raise exceptions.ValidationError("A session's instructor can't be an attendee")
|
import React from "react"
import { useStaticQuery,graphql } from "gatsby"
import Img from "gatsby-image"
import { navigate } from '@reach/router';
import { Navbar, Nav } from 'react-bootstrap';
import "./style.scss";
export const Footer = () => {
return (
<footer className="footer bg-primary text-white py-4">
<div className="container row mx-auto">
<div className="col-4 col-md-3 text-left">
<a href="https://www.beaconcouncil.com/terms-conditions/" target="_blank" rel="noopener noreferrer">Legal</a>
</div>
<div className="col-4 col-md-6 text-center made-miami">Made in Miami 🌴</div>
<div className="col-4 col-md-3 text-right">{new Date().getFullYear()} MiamiTech.Works</div>
</div>
</footer>
)
}
|
// Per-environment configuration, selected by NODE_ENV at require time.
const ENV = {
  development: {
    router: './routes/dev', // development/test routes
    maxAge: 0, // cache lifetime (ms): disabled during development
  },
  production: {
    router: './routes/prod', // production routes
    maxAge: 1000 * 60 * 60 * 24 * 30, // cache lifetime (ms): 30 days
  }
};

module.exports = ENV[process.env.NODE_ENV];
|
import React, { useEffect, useState } from 'react'
import ReactPaginate from 'react-paginate';
import ContactItem from './Contacts.ListItem';
import { Link } from 'react-router-dom';
import { getAllCategories } from '../../services/category.service'
import { FiArrowDown, FiArrowUp, FiChevronDown, FiSearch } from 'react-icons/fi';
import { getAllContacts, fetchContactTypes } from '../../services/contacts.services';
import Spinner from '../../components/core/Spinner';
import { getCurrentUser } from '../../utils/user';
// Contacts list screen: searchable, sortable, paginated table of contacts
// scoped to the current user's store.
function EventList({match}) {
    // Default search/filter state sent to the contacts service.
    const initalSearchParams = {
        "supplierId" : "",
        "firstName" : "",
        "lastName" : "",
        "storeId" : getCurrentUser().storeId,
        "contactTypeId" : "",
        "email" : "",
        "limit" : "",
        "offset" : "",
        "sortCol" : "FirstName",
        "sortOrder" : "DESC"
    }

    const [ loading, setLoading ] = useState(true);
    const [ categoryOptions, setOptions ] = useState([]);
    const [ contacts, setContacts ] = useState([]);
    const [ searchParams, setSearchParams ] = useState(initalSearchParams);
    const [ contactTypes, setContactTypes ] = useState([]);
    const [ listAttributes, setListAttributes ] = useState({ pageNumber : 0, pageSize: 5, totalPages: 0 })

    // Toggle/assign the sort column carried on the clicked header's
    // `column` attribute, then refetch.
    const handleSort = async (e) => {
        const column = e.target.getAttribute("column");
        if(searchParams.sortCol === column) {
            setSearchParams({...searchParams, sortOrder: (searchParams.sortOrder === "ASC" ? "DESC" : "ASC")});
        }
        else setSearchParams({...searchParams, sortCol: column, sortOrder: searchParams.sortOrder });
        // NOTE(review): setSearchParams is asynchronous, so this fetch
        // likely runs with the previous sort state — confirm, and consider
        // fetching from a useEffect keyed on searchParams instead.
        fetchAllData();
    }

    const handleSearch = () => {
        fetchAllData();
    }

    const handleReset = () => {
        // NOTE(review): this calls the service directly and discards the
        // result; it neither resets searchParams nor updates the rendered
        // list — confirm the intended behavior.
        getAllContacts();
    }

    // Debug: trace contact updates.
    useEffect(() => {
        console.log("Contacts", contacts);
    }, [contacts])

    // Load contact types, categories and the contact page for the current
    // search params; updates pagination state from the response.
    const fetchAllData = async () => {
        try {
            setLoading(true);
            const contactTypes = await fetchContactTypes();
            const categories = await getAllCategories();
            const allContacts = await getAllContacts(searchParams);
            const paginationData = allContacts.pageDetails;
            console.log("contactTypes", contactTypes);
            console.log("categories", categories);
            console.log("allContacts", allContacts);
            console.log("paginationData", paginationData);
            setContactTypes(contactTypes);
            setOptions(categories.data);
            setContacts(allContacts);
            // NOTE(review): this stores `pageCount`, but the render guard
            // below reads `listAttributes.totalPages`, which is only set in
            // the initial state — the paginator may never appear; confirm.
            setListAttributes({
                pageNumber : paginationData.pageNumber,
                pageSize: paginationData.pageSize,
                pageCount: paginationData.totalPages
            })
        }
        catch (error) {
            console.log(error.message);
        }
        finally {
            setLoading(false);
        }
    }

    // Initial load on mount.
    useEffect( () => {
        fetchAllData();
        setSearchParams({ ...searchParams, storeId: getCurrentUser().storeId });
    }, [])

    return (
        <div className="settings-wrapper">
            <div className="list-controls">
                <h1 className="admin-title">Contacts</h1>
                <Link to={`${match.path}new`} style={{float:'right'}}>
                    <button className="button is-solid accent-button">New Contact</button>
                </Link>
            </div>
            <div className="list-controls justify-content-center">
                <div className="d-flex flex-row">
                    <div className="small-input">
                        <input
                            className="input is-rounded"
                            type="text"
                            placeholder="First Name"
                            name="firstName"
                            onChange={(e) => setSearchParams({...searchParams, [e.target.name]: e.target.value })}
                        />
                        <div className="search-icon">
                            <FiSearch />
                        </div>
                    </div>
                    <div className="small-input">
                        <input
                            className="input is-rounded"
                            type="text"
                            placeholder="Last Name"
                            name="lastName"
                            onChange={(e) => setSearchParams({...searchParams, [e.target.name]: e.target.value })}
                        />
                        <div className="search-icon">
                            <FiSearch />
                        </div>
                    </div>
                    <div className="small-input">
                        <select
                            className="input is-rounded"
                            type="text"
                            name="contactTypeId"
                            onChange={(e) => setSearchParams({...searchParams, [e.target.name]: e.target.value })}
                            style={{paddingLeft:'30px', textAlign: 'center'}}
                        >
                            <option disabled selected value>Select Type</option>
                            {contactTypes.map((item) => <option value={item.ContactTypeId}>{item.ContactType}</option>)}
                        </select>
                        <div className="search-icon">
                            <FiChevronDown />
                        </div>
                    </div>
                    <div className="small-input">
                        <input
                            className="input is-rounded"
                            type="text"
                            placeholder="Email"
                            name="email"
                            onChange={(e) => setSearchParams({...searchParams, [e.target.name]: e.target.value })}
                        />
                        <div className="search-icon">
                            <FiSearch />
                        </div>
                    </div>
                    <div className="small-input">
                        <button
                            className="input is-rounded admin-search-button"
                            placeholder="Type"
                            onClick={handleSearch}
                        >
                            <FiSearch className="mr-2"/>
                            Search
                        </button>
                    </div>
                    <div className="small-input">
                        <button
                            className="input is-rounded"
                            placeholder="Type"
                            onClick={handleReset}
                        >
                            Reset
                        </button>
                    </div>
                </div>
            </div>
            <div class="flex-table">
                <div class="flex-table-header">
                    <span class="w-15 sort-column" onClick={handleSort} column="FirstName">
                        First Name
                        {
                            searchParams.sortCol === "FirstName" &&
                            (searchParams.sortOrder === "ASC" ? <FiArrowUp className="ml-2"/> : <FiArrowDown className="ml-2"/>)
                        }
                    </span>
                    <span class="w-15 sort-column" onClick={handleSort} column="LastName">
                        Last Name
                        {
                            searchParams.sortCol === "LastName" &&
                            (searchParams.sortOrder === "ASC" ? <FiArrowUp className="ml-2"/> : <FiArrowDown className="ml-2"/>)
                        }
                    </span>
                    <span class="w-15 sort-column" onClick={handleSort} column="ContactType">
                        Role
                        {
                            searchParams.sortCol === "ContactType" &&
                            (searchParams.sortOrder === "ASC" ? <FiArrowUp className="ml-2"/> : <FiArrowDown className="ml-2"/>)
                        }
                    </span>
                    <span class="w-25 sort-column" onClick={handleSort} column="Email">
                        Email
                        {
                            searchParams.sortCol === "Email" &&
                            (searchParams.sortOrder === "ASC" ? <FiArrowUp className="ml-2"/> : <FiArrowDown className="ml-2"/>)
                        }
                    </span>
                    <span class="w-25 sort-column" onClick={handleSort} column="StoreName">
                        Store
                        {
                            searchParams.sortCol === "StoreName" &&
                            (searchParams.sortOrder === "ASC" ? <FiArrowUp className="ml-2"/> : <FiArrowDown className="ml-2"/>)
                        }
                    </span>
                    <span class="w-5 sort-column" column="PresenterType" >Edit</span>
                </div>
                {loading ? <Spinner /> : contacts.map((item) => <ContactItem contact={item} key={item.OrgId} match={match} />)}
            </div>
            {(listAttributes.totalPages > 1) && <ReactPaginate
                previousLabel={'Prev'}
                nextLabel={'Next'}
                breakLabel={'...'}
                breakClassName={'break-me'}
                pageCount={listAttributes.pageCount}
                marginPagesDisplayed={2}
                pageRangeDisplayed={3}
                onPageChange={(x) => setListAttributes({...listAttributes, pageNumber: x.selected})}
                containerClassName={'pagination'}
                activeClassName={'active'}
            />}
        </div>
    )
}
|
// Webfont configuration: load NPR's hosted font CSS, matching the page's
// protocol so secure pages don't trigger mixed-content warnings.
// (Previously two near-duplicate URL lists were maintained by hand.)
var fontHost = window.location.protocol === "https:"
  ? 'https://secure.npr.org'
  : 'http://s.npr.org';

var urls = [
  fontHost + '/templates/css/fonts/GothamSSm.css',
  fontHost + '/templates/css/fonts/Gotham.css',
  fontHost + '/templates/css/fonts/Knockout.css'
];

WebFont.load({
  custom: {
    families: [
      'Gotham SSm:n4,n7',
      'Gotham:n4,n7',
      'Knockout 31 4r:n4'
    ],
    urls: urls
  },
  timeout: 10000 // ms to wait before giving up on a font
});
|
// Parse an HTML string into a detached <div> container.
const contentToElement = (contentText) => {
  // Any falsy input (null, undefined, '') yields an empty container.
  const markup = (contentText || '').toString().trim();
  const container = document.createElement('div');
  container.innerHTML = markup;
  return container;
};
// Write contentElement's markup into element. When replaceParent is true
// and the content is a single node, the target element itself is replaced
// by that node (and the new node at the same position is returned);
// otherwise the content becomes element's innerHTML.
const writeElements = (element, contentElement, replaceParent) => {
  if (replaceParent && contentElement.childNodes.length === 1) {
    const parent = element.parentElement;
    // Remember the position so the replacement node can be returned.
    const elementIndex = Array.from(parent.childNodes).indexOf(element);
    parent.replaceChild(contentElement.childNodes[0], element);
    return parent.childNodes[elementIndex];
  }
  // eslint-disable-next-line no-param-reassign
  element.innerHTML = contentElement.innerHTML;
  return element;
};
export const HelperService = {
  // Fetch `<ASSETS>/<filename>.json`, resolving to defaultResponse on any
  // network or HTTP error. Fix: the URL previously interpolated the broken
  // literal `$(unknown)` instead of the `filename` argument.
  getJson: (filename, defaultResponse) => fetch(`${SERVICES.ASSETS}${filename}.json`)
    .then((response) => {
      if (response.ok) {
        return response.json();
      }
      throw new Error();
    })
    .catch(() => defaultResponse),

  // Render contentText into every element matching selector; returns the
  // affected elements. See writeElements for replaceParent semantics.
  naiveRender: (selector, contentText, replaceParent = false) => Array
    .from(document.querySelectorAll(selector))
    .map((element) => writeElements(element, contentToElement(contentText), replaceParent)),
};
|
from typing import Any, Callable, Dict, List, Union # isort:skip
from pathlib import Path
import numpy as np
from torch.utils.data import Dataset
from catalyst.utils import merge_dicts
_Path = Union[str, Path]
class ListDataset(Dataset):
    """General purpose dataset backed by a list of annotation dicts."""

    def __init__(
        self,
        list_data: List[Dict],
        open_fn: Callable,
        dict_transform: Callable = None,
    ):
        """
        Args:
            list_data (List[Dict]): list of dicts that store your data
                annotations (for example paths to images, labels, bboxes)
            open_fn (callable): turns one annotation dict into the data
                needed by your network (for example, loads an image from
                its path, or tokenizes a read string)
            dict_transform (callable): optional transform applied to the
                opened dict (for example normalization, blur, crop/resize)
        """
        self.data = list_data
        self.open_fn = open_fn
        # Fall back to identity so __getitem__ never needs a None check.
        if dict_transform is None:
            dict_transform = lambda x: x
        self.dict_transform = dict_transform

    def __getitem__(self, index: int) -> Any:
        """Open and transform the annotation at ``index``.

        Args:
            index (int): index of the element in the dataset

        Returns:
            Single element by index
        """
        return self.dict_transform(self.open_fn(self.data[index]))

    def __len__(self) -> int:
        """
        Returns:
            int: number of annotations in the dataset
        """
        return len(self.data)
class MergeDataset(Dataset):
    """Merges several equally-sized datasets into a single dataset."""

    def __init__(self, *datasets: Dataset, dict_transform: Callable = None):
        """
        Args:
            datasets (List[Dataset]): datasets to merge; all must have
                the same length
            dict_transform (callable): optional transform applied to the
                merged dict (common for all datasets)
        """
        self.len = len(datasets[0])
        # Element-wise merging is ill-defined unless lengths agree.
        assert all(len(dataset) == self.len for dataset in datasets)
        self.datasets = datasets
        self.dict_transform = dict_transform

    def __getitem__(self, index: int) -> Any:
        """Merge the ``index``-th element of every dataset into one dict.

        Args:
            index (int): index to value from all datasets

        Returns:
            dict merged from every dataset's element at ``index``
        """
        merged = merge_dicts(*(dataset[index] for dataset in self.datasets))
        if self.dict_transform is not None:
            merged = self.dict_transform(merged)
        return merged

    def __len__(self) -> int:
        """
        Returns:
            int: common length of the merged datasets
        """
        return self.len
class NumpyDataset(Dataset):
    """General purpose dataset over the rows of a numpy array."""

    def __init__(
        self,
        numpy_data: np.ndarray,
        numpy_key: str = "features",
        dict_transform: Callable = None,
    ):
        """
        Args:
            numpy_data (np.ndarray): numpy data
                (for example embeddings, features, etc.)
            numpy_key (str): key under which a row is placed in the
                output dictionary
            dict_transform (callable): optional transform applied to the
                output dict (for example vector normalization)
        """
        super().__init__()
        self.data = numpy_data
        self.key = numpy_key
        # Fall back to identity so __getitem__ never needs a None check.
        if dict_transform is None:
            dict_transform = lambda x: x
        self.dict_transform = dict_transform

    def __getitem__(self, index: int) -> Any:
        """Return ``{key: copy of row index}`` after ``dict_transform``.

        Args:
            index (int): index of the element in the dataset

        Returns:
            Single element by index
        """
        # Copy so downstream transforms cannot mutate the backing array.
        sample = {self.key: np.copy(self.data[index])}
        return self.dict_transform(sample)

    def __len__(self) -> int:
        """
        Returns:
            int: number of rows in the backing array
        """
        return len(self.data)
class PathsDataset(ListDataset):
    """Dataset deriving features and targets from samples' filesystem paths."""

    def __init__(
        self, filenames: List[_Path], open_fn: Callable[[dict], dict],
        label_fn: Callable[[_Path], Any], **list_dataset_params
    ):
        """
        Args:
            filenames (List[str]): file paths describing your dataset
                samples; images, texts or any other files in general
            open_fn (callable): turns one annotation dict into the data
                needed by your network (for example, loads an image from
                its path, or tokenizes a read string)
            label_fn (callable): extracts the target value from a sample
                path (for example, the class may be encoded in the file
                name like ``/path/to/your/image_1.png``)
            list_dataset_params (dict): base class initialization
                parameters

        Examples:
            >>> label_fn = lambda x: x.split("_")[0]
            >>> dataset = PathsDataset(
            >>>     filenames=Path("/path/to/images/").glob("*.jpg"),
            >>>     label_fn=label_fn,
            >>>     open_fn=open_fn,
            >>> )
        """
        # Precompute the (features, targets) annotation for every path.
        samples = [
            {"features": filename, "targets": label_fn(filename)}
            for filename in filenames
        ]
        super().__init__(
            list_data=samples, open_fn=open_fn, **list_dataset_params
        )
__all__ = ["ListDataset", "MergeDataset", "NumpyDataset", "PathsDataset"]
|
/*
 * EVE Swagger Interface
 * An OpenAPI for EVE Online
 *
 * OpenAPI spec version: 1.3.8
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 *
 * Swagger Codegen version: 2.4.14
 *
 * Do not edit the class manually.
 *
 */
(function(root, factory) {
  if (typeof define === 'function' && define.amd) {
    // AMD. Register as an anonymous module.
    define(['ApiClient'], factory);
  } else if (typeof module === 'object' && module.exports) {
    // CommonJS-like environments that support module.exports, like Node.
    module.exports = factory(require('../ApiClient'));
  } else {
    // Browser globals (root is window)
    if (!root.EveSwaggerInterface) {
      root.EveSwaggerInterface = {};
    }
    root.EveSwaggerInterface.GetCorporationsCorporationIdContractsContractIdItems200Ok = factory(root.EveSwaggerInterface.ApiClient);
  }
}(this, function(ApiClient) {
  'use strict';
  /**
   * The GetCorporationsCorporationIdContractsContractIdItems200Ok model module.
   * @module model/GetCorporationsCorporationIdContractsContractIdItems200Ok
   * @version 1.3.8
   */
  /**
   * Constructs a new <code>GetCorporationsCorporationIdContractsContractIdItems200Ok</code>.
   * 200 ok object
   * @alias module:model/GetCorporationsCorporationIdContractsContractIdItems200Ok
   * @class
   * @param isIncluded {Boolean} true if the contract issuer has submitted this item with the contract, false if the isser is asking for this item in the contract
   * @param isSingleton {Boolean} is_singleton boolean
   * @param quantity {Number} Number of items in the stack
   * @param recordId {Number} Unique ID for the item
   * @param typeId {Number} Type ID for item
   */
  // NOTE: rawQuantity is optional on the wire and therefore intentionally
  // not a constructor argument; it is only populated by constructFromObject.
  var exports = function(isIncluded, isSingleton, quantity, recordId, typeId) {
    this.isIncluded = isIncluded;
    this.isSingleton = isSingleton;
    this.quantity = quantity;
    this.recordId = recordId;
    this.typeId = typeId;
  };
  /**
   * Constructs a <code>GetCorporationsCorporationIdContractsContractIdItems200Ok</code> from a plain JavaScript object, optionally creating a new instance.
   * Copies all relevant properties from <code>data</code> to <code>obj</code> if supplied or a new instance if not.
   * Wire-format snake_case keys (e.g. <code>is_included</code>) are mapped to
   * camelCase model members (e.g. <code>isIncluded</code>).
   * @param {Object} data The plain JavaScript object bearing properties of interest.
   * @param {module:model/GetCorporationsCorporationIdContractsContractIdItems200Ok} obj Optional instance to populate.
   * @return {module:model/GetCorporationsCorporationIdContractsContractIdItems200Ok} The populated <code>GetCorporationsCorporationIdContractsContractIdItems200Ok</code> instance.
   */
  exports.constructFromObject = function(data, obj) {
    if (data) {
      obj = obj || new exports();
      if (data.hasOwnProperty('is_included'))
        obj.isIncluded = ApiClient.convertToType(data['is_included'], 'Boolean');
      if (data.hasOwnProperty('is_singleton'))
        obj.isSingleton = ApiClient.convertToType(data['is_singleton'], 'Boolean');
      if (data.hasOwnProperty('quantity'))
        obj.quantity = ApiClient.convertToType(data['quantity'], 'Number');
      if (data.hasOwnProperty('raw_quantity'))
        obj.rawQuantity = ApiClient.convertToType(data['raw_quantity'], 'Number');
      if (data.hasOwnProperty('record_id'))
        obj.recordId = ApiClient.convertToType(data['record_id'], 'Number');
      if (data.hasOwnProperty('type_id'))
        obj.typeId = ApiClient.convertToType(data['type_id'], 'Number');
    }
    return obj;
  }
  /**
   * true if the contract issuer has submitted this item with the contract, false if the isser is asking for this item in the contract
   * @member {Boolean} isIncluded
   */
  exports.prototype.isIncluded = undefined;
  /**
   * is_singleton boolean
   * @member {Boolean} isSingleton
   */
  exports.prototype.isSingleton = undefined;
  /**
   * Number of items in the stack
   * @member {Number} quantity
   */
  exports.prototype.quantity = undefined;
  /**
   * -1 indicates that the item is a singleton (non-stackable). If the item happens to be a Blueprint, -1 is an Original and -2 is a Blueprint Copy
   * (Optional field: remains undefined unless present in the source data.)
   * @member {Number} rawQuantity
   */
  exports.prototype.rawQuantity = undefined;
  /**
   * Unique ID for the item
   * @member {Number} recordId
   */
  exports.prototype.recordId = undefined;
  /**
   * Type ID for item
   * @member {Number} typeId
   */
  exports.prototype.typeId = undefined;
  return exports;
}));
|
from __future__ import print_function, division
from sympy.core.compatibility import range
from .vector import Vector, _check_vector
from .frame import _check_frame
__all__ = ['Point']
class Point(object):
    """This object represents a point in a dynamic system.

    It stores the: position, velocity, and acceleration of a point.
    The position is a vector defined as the vector distance from a parent
    point to this point.

    Parameters
    ==========

    name : string
        The display name of the Point

    Examples
    ========

    >>> from sympy.physics.vector import Point, ReferenceFrame, dynamicsymbols
    >>> N = ReferenceFrame('N')
    >>> O = Point('O')
    >>> P = Point('P')
    >>> u1, u2, u3 = dynamicsymbols('u1 u2 u3')
    >>> O.set_vel(N, u1 * N.x + u2 * N.y + u3 * N.z)
    >>> O.acc(N)
    u1'*N.x + u2'*N.y + u3'*N.z

    symbols() can be used to create multiple Points in a single step, for example:

    >>> from sympy.physics.vector import Point, ReferenceFrame, dynamicsymbols
    >>> from sympy import symbols
    >>> N = ReferenceFrame('N')
    >>> u1, u2 = dynamicsymbols('u1 u2')
    >>> A, B = symbols('A B', cls=Point)
    >>> type(A)
    <class 'sympy.physics.vector.point.Point'>
    >>> A.set_vel(N, u1 * N.x + u2 * N.y)
    >>> B.set_vel(N, u2 * N.x + u1 * N.y)
    >>> A.acc(N) - B.acc(N)
    (u1' - u2')*N.x + (-u1' + u2')*N.y
    """

    def __init__(self, name):
        """Initialization of a Point object. """
        self.name = name
        # Maps other Point -> position vector of self relative to that Point.
        self._pos_dict = {}
        # Maps ReferenceFrame -> velocity / acceleration Vector of self.
        self._vel_dict = {}
        self._acc_dict = {}
        # Indexable list of the three dicts, used by _pdict_list's `num` arg.
        self._pdlist = [self._pos_dict, self._vel_dict, self._acc_dict]

    def __str__(self):
        return self.name

    __repr__ = __str__

    def _check_point(self, other):
        """Raise TypeError unless ``other`` is a Point."""
        if not isinstance(other, Point):
            raise TypeError('A Point must be supplied')

    def _pdict_list(self, other, num):
        """Returns the shortest list of Points connecting self to other.

        ``num`` selects which relationship graph is searched:
        0 -> position, 1 -> velocity, 2 -> acceleration.
        Raises ValueError if no connecting path exists.
        """
        outlist = [[self]]
        oldlist = [[]]
        # Breadth-style expansion: grow every partial path by each neighbour
        # until no new paths appear.
        while outlist != oldlist:
            oldlist = outlist[:]
            for i, v in enumerate(outlist):
                templist = v[-1]._pdlist[num].keys()
                for i2, v2 in enumerate(templist):
                    if v2 not in v:  # avoid cycles
                        littletemplist = v + [v2]
                        if littletemplist not in outlist:
                            outlist.append(littletemplist)
        # Discard paths that do not terminate at the target point.
        for i, v in enumerate(oldlist):
            if v[-1] != other:
                outlist.remove(v)
        outlist.sort(key=len)
        if len(outlist) != 0:
            return outlist[0]
        raise ValueError('No Connecting Path found between ' + other.name +
                         ' and ' + self.name)

    def a1pt_theory(self, otherpoint, outframe, interframe):
        """Sets the acceleration of this point with the 1-point theory.

        The 1-point theory for point acceleration looks like this:

        ^N a^P = ^B a^P + ^N a^O + ^N alpha^B x r^OP + ^N omega^B x (^N omega^B
        x r^OP) + 2 ^N omega^B x ^B v^P

        where O is a point fixed in B, P is a point moving in B, and B is
        rotating in frame N.

        Parameters
        ==========

        otherpoint : Point
            The first point of the 1-point theory (O)
        outframe : ReferenceFrame
            The frame we want this point's acceleration defined in (N)
        interframe : ReferenceFrame
            The intermediate frame in this calculation (B)

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> from sympy.physics.vector import Vector, dynamicsymbols
        >>> q = dynamicsymbols('q')
        >>> q2 = dynamicsymbols('q2')
        >>> qd = dynamicsymbols('q', 1)
        >>> q2d = dynamicsymbols('q2', 1)
        >>> N = ReferenceFrame('N')
        >>> B = ReferenceFrame('B')
        >>> B.set_ang_vel(N, 5 * B.y)
        >>> O = Point('O')
        >>> P = O.locatenew('P', q * B.x)
        >>> P.set_vel(B, qd * B.x + q2d * B.y)
        >>> O.set_vel(N, 0)
        >>> P.a1pt_theory(O, N, B)
        (-25*q + q'')*B.x + q2''*B.y - 10*q'*B.z
        """
        _check_frame(outframe)
        _check_frame(interframe)
        self._check_point(otherpoint)
        dist = self.pos_from(otherpoint)
        v = self.vel(interframe)
        a1 = otherpoint.acc(outframe)
        a2 = self.acc(interframe)
        omega = interframe.ang_vel_in(outframe)
        alpha = interframe.ang_acc_in(outframe)
        self.set_acc(outframe, a2 + 2 * (omega ^ v) + a1 + (alpha ^ dist) +
                     (omega ^ (omega ^ dist)))
        return self.acc(outframe)

    def a2pt_theory(self, otherpoint, outframe, fixedframe):
        """Sets the acceleration of this point with the 2-point theory.

        The 2-point theory for point acceleration looks like this:

        ^N a^P = ^N a^O + ^N alpha^B x r^OP + ^N omega^B x (^N omega^B x r^OP)

        where O and P are both points fixed in frame B, which is rotating in
        frame N.

        Parameters
        ==========

        otherpoint : Point
            The first point of the 2-point theory (O)
        outframe : ReferenceFrame
            The frame we want this point's acceleration defined in (N)
        fixedframe : ReferenceFrame
            The frame in which both points are fixed (B)

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame, dynamicsymbols
        >>> q = dynamicsymbols('q')
        >>> qd = dynamicsymbols('q', 1)
        >>> N = ReferenceFrame('N')
        >>> B = N.orientnew('B', 'Axis', [q, N.z])
        >>> O = Point('O')
        >>> P = O.locatenew('P', 10 * B.x)
        >>> O.set_vel(N, 5 * N.x)
        >>> P.a2pt_theory(O, N, B)
        - 10*q'**2*B.x + 10*q''*B.y
        """
        _check_frame(outframe)
        _check_frame(fixedframe)
        self._check_point(otherpoint)
        dist = self.pos_from(otherpoint)
        a = otherpoint.acc(outframe)
        omega = fixedframe.ang_vel_in(outframe)
        alpha = fixedframe.ang_acc_in(outframe)
        self.set_acc(outframe, a + (alpha ^ dist) + (omega ^ (omega ^ dist)))
        return self.acc(outframe)

    def acc(self, frame):
        """The acceleration Vector of this Point in a ReferenceFrame.

        If no acceleration has been set explicitly, it is derived by
        differentiating the velocity in ``frame``; if no velocity has been
        set either, a zero Vector is returned.

        Parameters
        ==========

        frame : ReferenceFrame
            The frame in which the returned acceleration vector will be defined in

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> N = ReferenceFrame('N')
        >>> p1 = Point('p1')
        >>> p1.set_acc(N, 10 * N.x)
        >>> p1.acc(N)
        10*N.x
        """
        _check_frame(frame)
        if frame not in self._acc_dict:
            # BUG FIX: previously indexed self._vel_dict[frame] directly and
            # raised an opaque KeyError when no velocity was defined either.
            if frame in self._vel_dict:
                # dt() of a zero Vector is zero, so no separate != 0 branch
                # is needed.
                return (self._vel_dict[frame]).dt(frame)
            return Vector(0)
        return self._acc_dict[frame]

    def locatenew(self, name, value):
        """Creates a new point with a position defined from this point.

        Parameters
        ==========

        name : str
            The name for the new point
        value : Vector
            The position of the new point relative to this point

        Examples
        ========

        >>> from sympy.physics.vector import ReferenceFrame, Point
        >>> N = ReferenceFrame('N')
        >>> P1 = Point('P1')
        >>> P2 = P1.locatenew('P2', 10 * N.x)
        """
        if not isinstance(name, str):
            raise TypeError('Must supply a valid name')
        if value == 0:
            value = Vector(0)
        value = _check_vector(value)
        p = Point(name)
        # Register the relationship in both directions so pos_from works
        # starting from either point.
        p.set_pos(self, value)
        self.set_pos(p, -value)
        return p

    def pos_from(self, otherpoint):
        """Returns a Vector distance between this Point and the other Point.

        Parameters
        ==========

        otherpoint : Point
            The otherpoint we are locating this one relative to

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> N = ReferenceFrame('N')
        >>> p1 = Point('p1')
        >>> p2 = Point('p2')
        >>> p1.set_pos(p2, 10 * N.x)
        >>> p1.pos_from(p2)
        10*N.x
        """
        outvec = Vector(0)
        # Sum the relative positions along the shortest connecting path.
        plist = self._pdict_list(otherpoint, 0)
        for i in range(len(plist) - 1):
            outvec += plist[i]._pos_dict[plist[i + 1]]
        return outvec

    def set_acc(self, frame, value):
        """Used to set the acceleration of this Point in a ReferenceFrame.

        Parameters
        ==========

        frame : ReferenceFrame
            The frame in which this point's acceleration is defined
        value : Vector
            The vector value of this point's acceleration in the frame

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> N = ReferenceFrame('N')
        >>> p1 = Point('p1')
        >>> p1.set_acc(N, 10 * N.x)
        >>> p1.acc(N)
        10*N.x
        """
        if value == 0:
            value = Vector(0)
        value = _check_vector(value)
        _check_frame(frame)
        self._acc_dict.update({frame: value})

    def set_pos(self, otherpoint, value):
        """Used to set the position of this point w.r.t. another point.

        Parameters
        ==========

        otherpoint : Point
            The other point which this point's location is defined relative to
        value : Vector
            The vector which defines the location of this point

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> N = ReferenceFrame('N')
        >>> p1 = Point('p1')
        >>> p2 = Point('p2')
        >>> p1.set_pos(p2, 10 * N.x)
        >>> p1.pos_from(p2)
        10*N.x
        """
        if value == 0:
            value = Vector(0)
        value = _check_vector(value)
        self._check_point(otherpoint)
        # Store the relationship symmetrically so either point can be the
        # starting point of a pos_from search.
        self._pos_dict.update({otherpoint: value})
        otherpoint._pos_dict.update({self: -value})

    def set_vel(self, frame, value):
        """Sets the velocity Vector of this Point in a ReferenceFrame.

        Parameters
        ==========

        frame : ReferenceFrame
            The frame in which this point's velocity is defined
        value : Vector
            The vector value of this point's velocity in the frame

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> N = ReferenceFrame('N')
        >>> p1 = Point('p1')
        >>> p1.set_vel(N, 10 * N.x)
        >>> p1.vel(N)
        10*N.x
        """
        if value == 0:
            value = Vector(0)
        value = _check_vector(value)
        _check_frame(frame)
        self._vel_dict.update({frame: value})

    def v1pt_theory(self, otherpoint, outframe, interframe):
        """Sets the velocity of this point with the 1-point theory.

        The 1-point theory for point velocity looks like this:

        ^N v^P = ^B v^P + ^N v^O + ^N omega^B x r^OP

        where O is a point fixed in B, P is a point moving in B, and B is
        rotating in frame N.

        Parameters
        ==========

        otherpoint : Point
            The first point of the 2-point theory (O)
        outframe : ReferenceFrame
            The frame we want this point's velocity defined in (N)
        interframe : ReferenceFrame
            The intermediate frame in this calculation (B)

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> from sympy.physics.vector import Vector, dynamicsymbols
        >>> q = dynamicsymbols('q')
        >>> q2 = dynamicsymbols('q2')
        >>> qd = dynamicsymbols('q', 1)
        >>> q2d = dynamicsymbols('q2', 1)
        >>> N = ReferenceFrame('N')
        >>> B = ReferenceFrame('B')
        >>> B.set_ang_vel(N, 5 * B.y)
        >>> O = Point('O')
        >>> P = O.locatenew('P', q * B.x)
        >>> P.set_vel(B, qd * B.x + q2d * B.y)
        >>> O.set_vel(N, 0)
        >>> P.v1pt_theory(O, N, B)
        q'*B.x + q2'*B.y - 5*q*B.z
        """
        _check_frame(outframe)
        _check_frame(interframe)
        self._check_point(otherpoint)
        dist = self.pos_from(otherpoint)
        v1 = self.vel(interframe)
        v2 = otherpoint.vel(outframe)
        omega = interframe.ang_vel_in(outframe)
        self.set_vel(outframe, v1 + v2 + (omega ^ dist))
        return self.vel(outframe)

    def v2pt_theory(self, otherpoint, outframe, fixedframe):
        """Sets the velocity of this point with the 2-point theory.

        The 2-point theory for point velocity looks like this:

        ^N v^P = ^N v^O + ^N omega^B x r^OP

        where O and P are both points fixed in frame B, which is rotating in
        frame N.

        Parameters
        ==========

        otherpoint : Point
            The first point of the 2-point theory (O)
        outframe : ReferenceFrame
            The frame we want this point's velocity defined in (N)
        fixedframe : ReferenceFrame
            The frame in which both points are fixed (B)

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame, dynamicsymbols
        >>> q = dynamicsymbols('q')
        >>> qd = dynamicsymbols('q', 1)
        >>> N = ReferenceFrame('N')
        >>> B = N.orientnew('B', 'Axis', [q, N.z])
        >>> O = Point('O')
        >>> P = O.locatenew('P', 10 * B.x)
        >>> O.set_vel(N, 5 * N.x)
        >>> P.v2pt_theory(O, N, B)
        5*N.x + 10*q'*B.y
        """
        _check_frame(outframe)
        _check_frame(fixedframe)
        self._check_point(otherpoint)
        dist = self.pos_from(otherpoint)
        v = otherpoint.vel(outframe)
        omega = fixedframe.ang_vel_in(outframe)
        self.set_vel(outframe, v + (omega ^ dist))
        return self.vel(outframe)

    def vel(self, frame):
        """The velocity Vector of this Point in the ReferenceFrame.

        Raises ValueError if no velocity has been defined in ``frame``.

        Parameters
        ==========

        frame : ReferenceFrame
            The frame in which the returned velocity vector will be defined in

        Examples
        ========

        >>> from sympy.physics.vector import Point, ReferenceFrame
        >>> N = ReferenceFrame('N')
        >>> p1 = Point('p1')
        >>> p1.set_vel(N, 10 * N.x)
        >>> p1.vel(N)
        10*N.x
        """
        _check_frame(frame)
        if frame not in self._vel_dict:
            raise ValueError('Velocity of point ' + self.name + ' has not been'
                             ' defined in ReferenceFrame ' + frame.name)
        return self._vel_dict[frame]

    def partial_velocity(self, frame, *gen_speeds):
        """Returns the partial velocities of the linear velocity vector of this
        point in the given frame with respect to one or more provided
        generalized speeds.

        Parameters
        ==========

        frame : ReferenceFrame
            The frame with which the velocity is defined in.
        gen_speeds : functions of time
            The generalized speeds.

        Returns
        =======

        partial_velocities : tuple of Vector
            The partial velocity vectors corresponding to the provided
            generalized speeds.

        Examples
        ========

        >>> from sympy.physics.vector import ReferenceFrame, Point
        >>> from sympy.physics.vector import dynamicsymbols
        >>> N = ReferenceFrame('N')
        >>> A = ReferenceFrame('A')
        >>> p = Point('p')
        >>> u1, u2 = dynamicsymbols('u1, u2')
        >>> p.set_vel(N, u1 * N.x + u2 * A.y)
        >>> p.partial_velocity(N, u1)
        N.x
        >>> p.partial_velocity(N, u1, u2)
        (N.x, A.y)
        """
        partials = [self.vel(frame).diff(speed, frame, var_in_dcm=False) for
                    speed in gen_speeds]
        # A single requested speed returns a bare Vector, not a 1-tuple.
        if len(partials) == 1:
            return partials[0]
        else:
            return tuple(partials)
|
(function() {
    'use strict';

    angular
        .module('App')
        .controller('UploadFileController', UploadFileController);

    // BUG FIX: the DI annotation property is "$inject" (with a dollar sign).
    // The original "UploadFileController.inject" is silently ignored by
    // AngularJS, which breaks dependency injection under minification.
    UploadFileController.$inject = ['$scope', '$http', 'fileUpload'];

    function UploadFileController($scope, $http, fileUpload) {

        // Reset the form model and the file input to a blank state.
        function resetItem() {
            $scope.uploadfile = {
                uuid : '',
                name : '',
                fileName : '',
                s3Key : '',
                s3Bucket : '',
                s3Location : ''
            };
            $scope.myFile = '';
            $scope.displayForm = '';
        }
        resetItem();

        // Open the form with a blank record.
        $scope.addItem = function () {
            resetItem();
            $scope.displayForm = true;
        };

        // Create (empty uuid) or update (existing uuid) the record on the server.
        // NOTE(review): $scope.items is read here but never initialized in this
        // controller — presumably populated by a parent scope; confirm.
        $scope.saveItem = function () {
            var uf = $scope.uploadfile;
            if (uf.uuid.length == 0) {
                $http.post('/uploadfiles', {
                    'name': uf.name,
                    'fileName': uf.fileName,
                    's3Key': uf.s3Key,
                    's3Bucket': uf.s3Bucket,
                    's3Location': uf.s3Location
                }).success(function(data) {
                    $scope.items.push(data);
                    $scope.displayForm = '';
                    removeModal();
                }).error(function() {
                    console.log("ERROR");
                });
            } else {
                $http.put('/uploadfiles/' + uf.uuid, {
                    'name': uf.name,
                    'fileName': uf.fileName,
                    's3Key': uf.s3Key,
                    's3Bucket': uf.s3Bucket,
                    's3Location': uf.s3Location
                }).success(function(data) {
                    $scope.displayForm = '';
                    removeModal();
                }).error(function() {
                    console.log("ERROR");
                });
            }
        };

        // Load an existing record into the form for editing.
        $scope.editItem = function (data) {
            $scope.uploadfile = data;
            $scope.displayForm = true;
        };

        // Delete a record after user confirmation.
        $scope.removeItem = function (data) {
            if (confirm('Do you really want to delete?')) {
                $http.delete('/uploadfiles/' + data.uuid).success(function() {
                    $scope.items.splice($scope.items.indexOf(data), 1);
                });
            }
        };

        // Upload the selected file straight to S3 (pre-signed POST policy),
        // then persist the record via saveItem().
        // SECURITY NOTE(review): the policy/signature/credential are
        // hard-coded client-side and expired — they should be issued
        // per-request by the backend.
        $scope.submitData = function() {
            var addParams = {
                "acl": "public-read",
                "success_action_status": "200",
                "x-amz-meta-uuid": "14365123651274",
                "x-amz-server-side-encryption": "AES256",
                "Policy": 'eyAiZXhwaXJhdGlvbiI6ICIyMDE4LTA2LTAxVDEyOjAwOjAwLjAwMFoiLA0KICAiY29uZGl0aW9ucyI6IFsNCiAgICBbInN0YXJ0cy13aXRoIiwgIiRrZXkiLCAibWF0ZXJpYWxzLyJdLA0KICAgIHsiYnVja2V0IjogInNibC10ZXN0MSJ9LA0KICAgIHsiYWNsIjogInB1YmxpYy1yZWFkIn0sDQogICAgeyJzdWNjZXNzX2FjdGlvbl9zdGF0dXMiOiAiMjAwIn0sDQogICAgWyJzdGFydHMtd2l0aCIsICIkQ29udGVudC1UeXBlIiwgImFwcGxpY2F0aW9uL3BkZiJdLA0KICAgIHsieC1hbXotbWV0YS11dWlkIjogIjE0MzY1MTIzNjUxMjc0In0sDQogICAgeyJ4LWFtei1zZXJ2ZXItc2lkZS1lbmNyeXB0aW9uIjogIkFFUzI1NiJ9LA0KICAgIFsic3RhcnRzLXdpdGgiLCAiJHgtYW16LW1ldGEtdGFnIiwgIiJdLA0KDQogICAgeyJ4LWFtei1jcmVkZW50aWFsIjogIkFLSUFJSU9WRVpaNE80REsyNlBBLzIwMTcwNTMxL2V1LWNlbnRyYWwtMS9zMy9hd3M0X3JlcXVlc3QifSwNCiAgICB7IngtYW16LWFsZ29yaXRobSI6ICJBV1M0LUhNQUMtU0hBMjU2In0sDQogICAgeyJ4LWFtei1kYXRlIjogIjIwMTcwNTMxVDAwMDAwMFoiIH0NCiAgXQ0KfQ==',
                "X-Amz-Signature": "642eec7cc808bc8982da5c040017e2f1770a3faa5757d3949369c20396c0ca50",
                "X-Amz-Credential": "AKIAIIOVEZZ4O4DK26PA/20170531/eu-central-1/s3/aws4_request",
                "X-Amz-Algorithm": "AWS4-HMAC-SHA256",
                "X-Amz-Date": "20170531T000000Z",
                "Content-Type": "application/pdf",
                "key": "materials/" + $scope.uploadfile.name
            };
            var file = $scope.myFile;
            var uploadUrl = "//s3.eu-central-1.amazonaws.com/sbl-test1/";
            fileUpload.uploadFileToUrl(file, uploadUrl, addParams)
                .success(function() {
                    $scope.saveItem();
                })
                .error(function() {
                    console.log("ERROR");
                });
        };

        // Hide the Bootstrap modal dialog.
        function removeModal() {
            $('.modal').modal('hide'); /* global $ */
        }
    }
})();
|
// @flow
import React, { Component } from 'react';
import { observer } from 'mobx-react';
import { ThemeProvider } from 'react-css-themr';
import { IntlProvider } from 'react-intl';
import AboutPage from './containers/static/AboutPage';
import { luxTheme } from './themes/lux';
import translations from './i18n/translations';
import type { StoresMap } from './stores/index';
import ThemeManager from './ThemeManager';
type Props = { stores: StoresMap };
@observer
export default class About extends Component<Props> {
render() {
const { stores } = this.props;
const locale = stores.profile.currentLocale;
const currentTheme = stores.profile.currentTheme;
const theme = require(`./themes/lux/${currentTheme}.js`); // eslint-disable-line
return (
<div>
<ThemeManager variables={theme} />
<ThemeProvider theme={luxTheme}>
<IntlProvider {...{ locale, key: locale, messages: translations[locale] }}>
<AboutPage />
</IntlProvider>
</ThemeProvider>
</div>
);
}
}
|
import React, { useState } from "react";
import "./Login.css";
import { Link } from "react-router-dom";
import Footer from "../../../components/Footer/Footer";
import InputField from "./../../../components/UI/InputField/InputField";
import RoleField from "./../../../components/UI/RoleField/RoleField";
import { ToastContainer, toast } from 'react-toastify';
import { StudentLogin, TeacherLogin } from '../../../axios/instance';
// Returns true only when every entry in `field` is non-empty.
const inputValidator = (field) => field.every((item) => item.length !== 0);
const Login = () => {
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [role, setRole] = useState("student");
const onSubmitHandler = async (event) => {
event.preventDefault();
let isValid = inputValidator([email, password, role]);
if (!isValid) {
toast.error('Please fill all inputs')
}
else{
const body = {
email: email,
password: password
}
try
{
if(role === 'student')
{
const res = await StudentLogin(body);
if (!res.data.error)
{
localStorage.setItem('token', res.data.accesstoken);
// Save UserLoginData and make authenticated using redux
toast.success('You are logged in successfully');
}
}
else {
const res = await TeacherLogin(body);
if (!res.data.error)
{
localStorage.setItem('token', res.data.accesstoken);
// Save UserLoginData and make authenticated using redux
toast.success('You are logged in successfully');
}
}
clearFields();
} catch (err) {
if (err.response)
{
toast.error(`${ err.response.data.error }`);
}
}
}
};
const clearFields = () => {
setEmail("");
setPassword("");
}
return (
<div>
<section className="login__container">
<ToastContainer position="bottom-center" bodyClassName="toastBody"/>
<form className="login__form" onSubmit={onSubmitHandler}>
<div className="login__heading--container">
<h1 className="login__heading">Login</h1>
</div>
<InputField
value={email}
type="email"
placeholder="Email"
onChange={(event) => setEmail(event.target.value)}
/>
<InputField
value={password}
type="password"
placeholder="Password"
onChange={(event) => setPassword(event.target.value)}
/>
<RoleField
value={role}
onChange={(event) => setRole(event.target.value)}
/>
<p className="login__form--p">
Not a User, Don't worry you can <Link to="/signup"> SignUp </Link>{" "}
here
</p>
<p className="login__forgot--button">
<Link to="/forgot-password">Forgot Password</Link>
</p>
<button className="login__form--button" type="submit">
Login
</button>
</form>
</section>
<Footer />
</div>
);
};
export default Login;
|
# -*- coding: utf-8 -*-
"""DNA Center Get Polling Interval by Id data model.
Copyright (c) 2019 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import fastjsonschema
import json
from dnacentersdk.exceptions import MalformedRequest
from builtins import *
class JSONSchemaValidator82918A1B4D289C5C(object):
    """Get Polling Interval by Id request schema definition."""

    # JSON schema for the endpoint payload: an optional numeric "response"
    # value plus an optional "version" string.
    _SCHEMA = {
        "properties": {
            "response": {
                "type": [
                    "number",
                    "null",
                ],
            },
            "version": {
                "description": "",
                "type": [
                    "string",
                    "null",
                ],
            },
        },
        "type": "object",
    }

    def __init__(self):
        super(JSONSchemaValidator82918A1B4D289C5C, self).__init__()
        # Compile once at construction time so each validate() call is cheap.
        self._validator = fastjsonschema.compile(self._SCHEMA)

    def validate(self, request):
        """Raise MalformedRequest if ``request`` violates the schema."""
        try:
            self._validator(request)
        except fastjsonschema.exceptions.JsonSchemaException as e:
            raise MalformedRequest(
                '{} is invalid. Reason: {}'.format(request, e.message)
            )
|
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef CHEETAHCOIN_KEY_H
#define CHEETAHCOIN_KEY_H
#include <stdexcept>
#include <vector>
#include "allocators.h"
#include "serialize.h"
#include "uint256.h"
#include "hash.h"
#include <openssl/ec.h> // for EC_KEY definition
// secp160k1
// const unsigned int PRIVATE_KEY_SIZE = 192;
// const unsigned int PUBLIC_KEY_SIZE = 41;
// const unsigned int SIGNATURE_SIZE = 48;
//
// secp192k1
// const unsigned int PRIVATE_KEY_SIZE = 222;
// const unsigned int PUBLIC_KEY_SIZE = 49;
// const unsigned int SIGNATURE_SIZE = 57;
//
// secp224k1
// const unsigned int PRIVATE_KEY_SIZE = 250;
// const unsigned int PUBLIC_KEY_SIZE = 57;
// const unsigned int SIGNATURE_SIZE = 66;
//
// secp256k1:
// const unsigned int PRIVATE_KEY_SIZE = 279;
// const unsigned int PUBLIC_KEY_SIZE = 65;
// const unsigned int SIGNATURE_SIZE = 72;
//
// see www.keylength.com
// script supports up to 75 for single byte push
/** Exception type thrown for key-related failures. */
class key_error : public std::runtime_error
{
public:
    explicit key_error(const std::string& str) : std::runtime_error(str) {}
};
/** A reference to a CKey: the Hash160 of its serialized public key */
class CKeyID : public uint160
{
public:
    CKeyID() : uint160(0) { }                    // null (all-zero) key id
    CKeyID(const uint160 &in) : uint160(in) { }  // wrap an existing Hash160
};
/** A reference to a CScript: the Hash160 of its serialization (see script.h) */
class CScriptID : public uint160
{
public:
    CScriptID() : uint160(0) { }                    // null (all-zero) script id
    CScriptID(const uint160 &in) : uint160(in) { }  // wrap an existing Hash160
};
/** An encapsulated public key. */
class CPubKey {
private:
    // Serialized public key bytes: 33 bytes when compressed,
    // 65 bytes when uncompressed (see IsValid/IsCompressed below).
    std::vector<unsigned char> vchPubKey;
    friend class CKey;
public:
    CPubKey() { }  // constructs an empty (invalid) public key
    CPubKey(const std::vector<unsigned char> &vchPubKeyIn) : vchPubKey(vchPubKeyIn) { }
    friend bool operator==(const CPubKey &a, const CPubKey &b) { return a.vchPubKey == b.vchPubKey; }
    friend bool operator!=(const CPubKey &a, const CPubKey &b) { return a.vchPubKey != b.vchPubKey; }
    friend bool operator<(const CPubKey &a, const CPubKey &b) { return a.vchPubKey < b.vchPubKey; }
    IMPLEMENT_SERIALIZE(
        READWRITE(vchPubKey);
    )
    // Key identifier: Hash160 of the serialized public key.
    CKeyID GetID() const {
        return CKeyID(Hash160(vchPubKey));
    }
    // Double-SHA256 hash of the serialized public key.
    uint256 GetHash() const {
        return Hash(vchPubKey.begin(), vchPubKey.end());
    }
    // NOTE: only a size check — does not verify the point lies on the curve.
    bool IsValid() const {
        return vchPubKey.size() == 33 || vchPubKey.size() == 65;
    }
    bool IsCompressed() const {
        return vchPubKey.size() == 33;
    }
    // Raw serialized bytes (copy).
    std::vector<unsigned char> Raw() const {
        return vchPubKey;
    }
};
// secure_allocator is defined in allocators.h
// CPrivKey is a serialized private key, with all parameters included (279 bytes)
typedef std::vector<unsigned char, secure_allocator<unsigned char> > CPrivKey;
// CSecret is a serialization of just the secret parameter (32 bytes)
typedef std::vector<unsigned char, secure_allocator<unsigned char> > CSecret;
/** An encapsulated OpenSSL Elliptic Curve key (public and/or private) */
class CKey
{
protected:
    EC_KEY* pkey;            // underlying OpenSSL key object (owned; freed in ~CKey)
    bool fSet;               // presumably true once key material has been set — confirm in key.cpp
    bool fCompressedPubKey;  // whether GetPubKey() emits the 33-byte compressed form
public:
    void SetCompressedPubKey(bool fCompressed = true);
    void Reset();
    CKey();
    CKey(const CKey& b);
    CKey& operator=(const CKey& b);
    ~CKey();
    bool IsNull() const;
    bool IsCompressed() const;
    // Generate a fresh random key pair.
    void MakeNewKey(bool fCompressed);
    // Load from a full serialized private key (all EC parameters included).
    bool SetPrivKey(const CPrivKey& vchPrivKey);
    // Load from just the 32-byte secret parameter.
    bool SetSecret(const CSecret& vchSecret, bool fCompressed = false);
    CSecret GetSecret(bool &fCompressed) const;
    CPrivKey GetPrivKey() const;
    bool SetPubKey(const CPubKey& vchPubKey);
    CPubKey GetPubKey() const;
    // Produce a DER signature of the given hash.
    bool Sign(uint256 hash, std::vector<unsigned char>& vchSig);
    // create a compact signature (65 bytes), which allows reconstructing the used public key
    // The format is one header byte, followed by two times 32 bytes for the serialized r and s values.
    // The header byte: 0x1B = first key with even y, 0x1C = first key with odd y,
    //                  0x1D = second key with even y, 0x1E = second key with odd y
    bool SignCompact(uint256 hash, std::vector<unsigned char>& vchSig);
    // reconstruct public key from a compact signature
    // This is only slightly more CPU intensive than just verifying it.
    // If this function succeeds, the recovered public key is guaranteed to be valid
    // (the signature is a valid signature of the given data for that key)
    bool SetCompactSignature(uint256 hash, const std::vector<unsigned char>& vchSig);
    bool Verify(uint256 hash, const std::vector<unsigned char>& vchSig);
    // Verify a compact signature
    bool VerifyCompact(uint256 hash, const std::vector<unsigned char>& vchSig);
    bool IsValid();
};
#endif
|
const IncomingForm = require('formidable').IncomingForm
module.exports = function upload(req, res) {
var form = new IncomingForm()
form.parse(req, function (err, fields, files) {
var dir = __dirname + '/upload';
if (!path.existsSync(dir)) {
fs.mkdirSync(dir, 0744);
}
var oldpath = files.filetoupload.path;
var newpath = dir + files.filetoupload.name;
fs.rename(oldpath, newpath, function (err) {
if (err) throw err;
res.write('File uploaded and moved!');
res.end();
});
});
//form.on('file', (field, file) => {
//})
//form.on('end', () => {
// res.json()
// })
// form.parse(req)
}
|
'''
Modules for Numerical Relativity Simulation Catalog:
    * catalog: builds catalog given a configuration file, or directory containing many configuration files.
* scentry: class for simulation catalog entry (should include io)
'''
#
from nrutils.core import settings as gconfig
from nrutils.core.basics import *
from nrutils.core import M_RELATIVE_SIGN_CONVENTION
import warnings,sys
# Class representation of configuration files. The contents of these files define where the metadata for each simulation is stored, and where the related NR data is stored.
class scconfig(smart_object):
    '''
    Class representation of a simulation-catalog configuration (*.ini) file.
    The contents of such files define where the metadata for each simulation
    is stored, and where the related NR data live.
    '''

    # Create scconfig object from configuration file location
    def __init__(this,config_file_location=None,overwrite=True):
        '''Store the configuration file location, then learn and validate its contents via reconfig().'''
        # Required fields from smart_object
        this.source_file_path = []
        this.source_dir = []
        this.overwrite = overwrite
        # call wrapper for constructor
        this.config_file_location = config_file_location
        this.reconfig()

    # The actual constructor: this will be called within utility functions so that scentry objects are configured with local settings.
    def reconfig(this):
        '''(Re)load and validate the configuration file; set config_exists accordingly and return this.'''
        #
        if this.config_file_location is None:
            msg = '(!!) scconfig objects cannot be initialted/reconfigured without a defined "config_file_location" location property (i.e. string where the related config file lives)'
            raise ValueError(msg)
        # learn the contents of the configuration file
        if os.path.exists( this.config_file_location ):
            this.learn_file( this.config_file_location, comment=[';','#'] )
            # validate the information learned from the configuration file against minimal standards
            this.validate()
            this.config_exists = True
        else:
            msg = 'There is a simulation catalog entry (scentry) object which references \"%s\", however such a file cannot be found by the OS. The related scentry object will be marked as invalid.'%cyan(this.config_file_location)
            this.config_exists = False
            warning(msg,'scconfig.reconfig')
        # In some cases, it is useful to have this function return this
        return this

    # Validate the config file against a minimal set of required fields.
    def validate(this):
        '''Check required fields, normalize path-like fields, and raise ValueError on malformed input.'''
        # Import useful things
        from os.path import expanduser
        # Create a string with the current process name
        thisfun = inspect.stack()[0][3]
        # each scconfig object (and the related file) MUST have the following attributes
        required_attrs = [ 'institute',             # school or collaboration authoring run
                           'metadata_id',           # unique string that defines metadata files
                           'catalog_dir',           # local directory where all simulation folders are stored
                                                    # this directory allows catalog files to be portable
                           'data_file_name_format', # formatting string for referencing l m and extraction parameter
                           'handler_location',      # location of python script which contains validator and
                                                    # learn_metadata functions
                           'is_extrapolated',       # users should set this to true if waveform is extrapolated
                                                    # to infinity
                           'is_rscaled',            # Boolean for whether waveform data are scaled by extraction radius (ie rPsi4)
                           'default_par_list' ]     # list of default parameters for loading: default_extraction_parameter, default_level. NOTE that list must be of length 2
        # Make sure that each required attribute is a member of this objects dictionary representation. If it's not, throw an error.
        for attr in required_attrs:
            if not ( attr in this.__dict__ ):
                msg = '(!!) Error -- config file at %s does NOT contain required field %s' % ( magenta(this.config_file_location), attr )
                raise ValueError(msg)
        # Make sure that data_file_name_format is list of strings. The intention is to give the user the ability to define multiple formats for loading. For example, the GT dataset may have files that begin with Ylm_Weyl... and others that begin with mp_Weylscalar... .
        if isinstance( this.data_file_name_format, str ):
            this.data_file_name_format = [this.data_file_name_format]
        elif isinstance(this.data_file_name_format,list):
            for k in this.data_file_name_format:
                if not isinstance(k,str):
                    msg = '(!!) Error in %s: each element of data_file_name_format must be character not numeric. Found data_file_name_format = %s' % (magenta(this.config_file_location),k)
                    raise ValueError(msg)
                if False: # NOTE that this is turned off becuase it is likely not the appropriate way to check. More thought needed. Original line: len( k.split('%i') ) != 4:
                    msg = '(!!) Error in %s: All elements of data_file_name_format must have three integer formatting tags (%%i). The offending entry is %s.' % ( magenta(this.config_file_location), red(k) )
                    raise ValueError(msg)
        else:
            msg = '(!!) Error in %s: data_file_name_format must be comma separated list.' % magenta(this.config_file_location)
            # BUGFIX: this message was previously constructed but never raised, so
            # malformed data_file_name_format values were silently accepted.
            raise ValueError(msg)
        # Make sure that catalog_dir is string
        if not isinstance( this.catalog_dir, str ):
            msg = 'catalog_dir values must be string'
            error(red(msg),thisfun)
        if 2 != len(this.default_par_list):
            msg = '(!!) Error in %s: default_par_list must be list containing default extraction parameter (Numeric value) and default level (also Numeric in value). Invalide case found: %s' % (magenta(this.config_file_location),list(this.default_par_list))
            raise ValueError(msg)
        # Make sure that all directories end with a forward slash
        for attr in this.__dict__:
            if 'dir' in attr:
                if this.__dict__[attr][-1] != '/':
                    this.__dict__[attr] += '/'
        # Make sure that user symbols (~) are expanded
        for attr in this.__dict__:
            if ('dir' in attr) or ('location' in attr):
                if isinstance(this.__dict__[attr],str):
                    this.__dict__[attr] = expanduser( this.__dict__[attr] )
                elif isinstance(this.__dict__[attr],list):
                    # BUGFIX: write the expanded path back into the list by index;
                    # the previous code rebound the loop variable, which left the
                    # stored list unchanged.
                    for i,k in enumerate(this.__dict__[attr]):
                        if isinstance(k,str):
                            this.__dict__[attr][i] = expanduser(k)
# Class for simulation catalog e.
class scentry:
    '''
    Simulation catalog entry. Wraps a single NR simulation's metadata file,
    validates it via the institute-specific handler module named in the
    configuration, and (when valid) stores the standardized metadata fields
    directly as attributes of this object.
    '''

    # Create scentry object given location of metadata file
    def __init__( this, config_obj, metadata_file_location, verbose=False ):
        '''Construct an entry from an scconfig object and a metadata file path; sets this.isvalid.'''
        # Keep an internal log for each scentry created
        this.log = '[Log for %s] The file is "%s".' % (this,metadata_file_location)
        # Store primary inputs as object attributes
        this.config = config_obj
        this.metadata_file_location = metadata_file_location
        # Validate the location of the metadata file: does it contain waveform information? is the file empty? etc
        this.isvalid = this.validate()
        #
        this.verbose = verbose
        # If valid, learn metadata. Note that metadata property are defined as none otherise. Also NOTE that the standard metadata is stored directly to this object's attributes.
        this.raw_metadata = None
        if this.isvalid is True:
            #
            print '## Working: %s' % cyan(metadata_file_location)
            this.log += ' This entry\'s metadata file is valid.'
            # i.e. learn the meta_data_file
            # this.learn_metadata(); raise(TypeError,'This line should only be uncommented when debugging.')
            # this.label = sclabel( this )
            try:
                this.learn_metadata()
                this.label = sclabel( this )
            except:
                # NOTE: a bare except is used deliberately so that any malformed
                # metadata file marks the entry as invalid (isvalid = None) rather
                # than aborting the whole catalog build.
                # NOTE(review): sys.exc_info()[1].message is Python-2-only; confirm
                # before porting to Python 3 (use str(sys.exc_info()[1]) there).
                emsg = sys.exc_info()[1].message
                this.log += '%80s'%' [FATALERROR] The metadata failed to be read. There may be an external formatting inconsistency. It is being marked as invalid with None. The system says: %s'%emsg
                warning( 'The following error message will be logged: '+red(emsg),'scentry')
                this.isvalid = None # An external program may use this to do something
                this.label = 'invalid!'
        elif this.isvalid is False:
            print '## The following is '+red('invalid')+': %s' % cyan(metadata_file_location)
            this.log += ' This entry\'s metadta file is invalid.'

    # Method to load handler module
    def loadhandler(this):
        '''Load the institute-specific handler module from config.handler_location and vet its interface.'''
        # Import the module
        from imp import load_source
        handler_module = load_source( '', this.config.handler_location )
        # Validate the handler module: it has to have a few requried methods
        required_methods = [ 'learn_metadata', 'validate', 'extraction_map' ]
        for m in required_methods:
            if not ( m in handler_module.__dict__ ):
                msg = 'Handler module must contain a method of the name %s, but no such method was found'%(cyan(m))
                error(msg,'scentry.validate')
        # Return the module
        return handler_module

    # Validate the metadata file using the handler's validation function
    def validate(this):
        '''Return the handler's validation verdict for this entry's metadata file location.'''
        # import validation function given in config file
        # Name the function representation that will be used to load the metadata file, and convert it to raw and standardized metadata
        validator = this.loadhandler().validate
        # vet the directory where the metadata file lives for: waveform and additional metadata
        status = validator( this.metadata_file_location, config = this.config )
        #
        return status

    # Standardize metadata
    def learn_metadata(this):
        '''Load raw + standardized metadata via the handler, store fields on this object, and sanity-check them.'''
        #
        from numpy import allclose
        # Load the handler for this entry. It will be used multiple times below.
        handler = this.loadhandler()
        # Name the function representation that will be used to load the metadata file, and convert it to raw and standardized metadata
        learn_institute_metadata = handler.learn_metadata
        # Eval and store standard metadata
        [standard_metadata, this.raw_metadata] = learn_institute_metadata( this.metadata_file_location )
        # Validate the standard metadata
        required_attrs = [ 'date_number',  # creation date (number!) of metadata file
                           'note',         # informational note relating to metadata
                           'madm',         # initial ADM mass = m1+m2 - initial binding energy
                           'b',            # initial orbital separation (scalar: M)
                           'R1', 'R2',     # initial component position vectors (R2-R1 is used as the separation vector elsewhere, e.g. sclabel)
                           'm1', 'm2',     # initial component masses (scalars: M = m1+m2)
                           'P1', 'P2',     # initial component linear momenta (Vectors ~ M )
                           'L1', 'L2',     # initial component angular momental (Vectors ~ M)
                           'S1', 'S2',     # initial component spins (Vectors ~ M*M)
                           'mf', 'Sf',     # Final mass (~M) and final dimensionful spin (~M*M)
                           'Xf', 'xf' ]    # Final dimensionless spin: Vector,Xf, and *Magnitude*: xf = sign(Sf_z)*|Sf|/(mf*mf) (NOTE the definition)
        for attr in required_attrs:
            if attr not in standard_metadata.__dict__:
                msg = '(!!) Error -- Output of %s does NOT contain required field %s' % ( this.config.handler_location, attr )
                raise ValueError(msg)
        # Confer the required attributes to this object for ease of referencing
        for attr in standard_metadata.__dict__.keys():
            setattr( this, attr, standard_metadata.__dict__[attr] )
        # tag this entry with its inferred setname
        this.setname = this.raw_metadata.source_dir[-1].split( this.config.catalog_dir )[-1].split('/')[0]
        # tag this entry with its inferred simname
        this.simname = this.raw_metadata.source_dir[-1].split('/')[-1] if this.raw_metadata.source_dir[-1][-1]!='/' else this.raw_metadata.source_dir[-1].split('/')[-2]
        # tag this entry with the directory location of the metadata file. NOTE that the waveform data must be reference relative to this directory via config.data_file_name_format
        this.relative_simdir = this.raw_metadata.source_dir[-1].split( this.config.catalog_dir )[-1]
        # NOTE that is is here that we may infer the default extraction parameter and related extraction radius
        # Load default values for extraction_parameter and level (e.g. resolution level)
        # NOTE that the special method defined below must take in an scentry object, and output extraction_parameter and level
        special_method = 'infer_default_level_and_extraction_parameter'
        if special_method in handler.__dict__:
            # Let the people know
            if this.verbose:
                msg = 'The handler is found to have a "%s" method. Rather than the config file, this method will be used to determine the default extraction parameter and level.' % green(special_method)
                alert(msg,'scentry.learn_metadata')
            # Estimate a good extraction radius and level for an input scentry object from the BAM catalog
            this.default_extraction_par,this.default_level,this.extraction_radius_map = handler.__dict__[special_method](this)
            # NOTE that and extraction_radius_map is also defined here, which allows referencing between extraction parameter and extaction radius
        else:
            # NOTE that otherwise, values from the configuration file will be used
            this.default_extraction_par = this.config.default_par_list[0]
            this.default_level = this.config.default_par_list[1]
            this.extraction_radius_map = None
            # NOTE that and extraction_radius_map is also defined here, which allows referencing between extraction parameter and extaction radius, the dault value is currently None
        # Basic sanity check for standard attributes. NOTE this section needs to be completed and perhaps externalized to the current function.
        # Check that initial binary separation is float
        if not isinstance( this.b , float ) :
            msg = 'b = %g' % this.b
            raise ValueError(msg)
        # Check that final mass is float
        if not isinstance( this.mf , float ) :
            msg = 'final mass must be float, but %s found' % type(this.mf).__name__
            raise ValueError(msg)
        # Check that inital mass1 is float
        if not isinstance( this.m1 , float ) :
            msg = 'm1 must be float but %s found' % type(this.m1).__name__
            raise ValueError(msg)
        # Check that inital mass2 is float
        if not isinstance( this.m2 , float ) :
            msg = 'm2 must be float but %s found' % type(this.m2).__name__
            raise ValueError(msg)
        # Enfore m1>m2 convention.
        # NOTE: despite its name, this predicate is True when the m1>m2 convention
        # is VIOLATED (m1 <= m2 and the masses are not effectively equal) -- a True
        # result triggers the flip below.
        satisfies_massratio_convetion = lambda e: (not e.m1 > e.m2) and (not allclose(e.m1,e.m2,atol=1e-4))
        if satisfies_massratio_convetion(this):
            this.flip()
        if satisfies_massratio_convetion(this):
            msg = 'Mass ratio convention m1>m2 must be used. Check scentry.flip(). It should have corrected this! \n>> m1 = %g, m2 = %g' % (this.m1,this.m2)
            raise ValueError(msg)

    # Create dynamic function that references the user's current configuration to construct the simulation directory of this run.
    def simdir(this):
        '''Return the absolute simulation directory for this run under the CURRENT local configuration.'''
        ans = this.config.reconfig().catalog_dir + this.relative_simdir
        if not this.config.config_exists:
            msg = 'The current object has been marked as '+red('non-existent')+', likely by reconfig(). Please verify that the ini file for the related run exists. You may see this message for other (yet unpredicted) reasons.'
            error(msg,'scentry.simdir()')
        return ans

    # Flip 1->2 associations.
    def flip(this):
        '''Swap all body-1 and body-2 fields (positions, masses, momenta, angular momenta, spins) in place.'''
        #
        from numpy import array,double
        # Store the flippoed variables to placeholders
        R1 = array(this.R2); R2 = array(this.R1);
        m1 = double(this.m2); m2 = double(this.m1);
        P1 = array(this.P2); P2 = array(this.P1);
        L1 = array(this.L2); L2 = array(this.L1);
        S1 = array(this.S2); S2 = array(this.S1);
        # Apply the flip to the current object
        this.R1 = R1; this.R2 = R2
        this.m1 = m1; this.m2 = m2
        this.P1 = P1; this.P2 = P2
        this.L1 = L1; this.L2 = L2
        this.S1 = S1; this.S2 = S2

    # Compare this scentry object to another using initial parameter fields. Return true false statement
    def compare2( this, that, atol=1e-3 ):
        '''Return True when this and that have matching initial parameters (masses, spins, quasi-circularity) within atol.'''
        #
        from numpy import allclose,hstack,double
        # Calculate an array of initial parameter values (the first element is 0 or 1 describing quasi-circularity)
        def param_array( entry ):
            # List of fields to add to array: initial parameters that are independent of initial separation
            field_list = [ 'm1', 'm2', 'S1', 'S2' ]
            # Leading element encodes quasi-circularity, inferred from the entry's label string
            a = double( 'qc' in entry.label )
            for f in field_list:
                a = hstack( [a, entry.__dict__[f] ] )
            #
            return a
        # Perform comparison and return
        return allclose( param_array(this), param_array(that), atol=atol )
# Create the catalog database, and store it as a pickled file.
# Create the catalog database, and store it as a pickled file.
def scbuild(keyword=None,save=True):
    '''
    Build the simulation catalog: scan each configuration's catalog_dir for metadata
    files, create an scentry object per file, and (optionally) pickle the list of
    valid entries to gconfig.database_path. A per-config log file records the outcome
    for every metadata file encountered.

    keyword : if given, only configuration (*.ini) files whose path contains this
              substring are processed
    save    : if True, pickle the resulting catalog alongside the log file
    '''
    # Load useful packages. NOTE: only the names actually used below are imported;
    # previously-unused imports (including the Python-2-only "commands" module,
    # imported as "bash" but never called) have been removed.
    from os.path import splitext, basename
    import pickle
    # Create a string with the current process name
    thisfun = inspect.stack()[0][3]
    # Look for config files
    cpath_list = glob.glob( gconfig.config_path+'*.ini' )
    # If a keyword is given, filter against found config files
    if isinstance(keyword,(str,unicode)):
        msg = 'Filtering ini files for \"%s\"'%cyan(keyword)
        alert(msg,'scbuild')
        cpath_list = filter( lambda path: keyword in path, cpath_list )
    #
    if not cpath_list:
        msg = 'Cannot find configuration files (*.ini) in %s' % gconfig.config_path
        error(msg,thisfun)
    # Create config objects from list of config files
    configs = [ scconfig( config_path ) for config_path in cpath_list ]
    # For each config
    for config in configs:
        # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #
        # Create streaming log file        #
        logfstr = gconfig.database_path + '/' + splitext(basename(config.config_file_location))[0] + '.log'
        msg = 'Opening log file in: '+cyan(logfstr)
        alert(msg,thisfun)
        # Use a context manager so the log file is closed even if an error occurs below
        with open(logfstr, 'w') as logfid:
            # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #
            # Search recursively within the config's catalog_dir for files matching the config's metadata_id
            msg = 'Searching for %s in %s.' % ( cyan(config.metadata_id), cyan(config.catalog_dir) ) + yellow(' This may take a long time if the folder being searched is mounted from a remote drive.')
            alert(msg,thisfun)
            mdfile_list = rfind(config.catalog_dir,config.metadata_id,verbose=True)
            alert('done.',thisfun)
            # (try to) Create a catalog entry for each valid metadata file
            catalog = []
            h = -1
            for mdfile in mdfile_list:
                # Create temporary scentry object
                entry = scentry(config,mdfile,verbose=True)
                # Write to the master log file
                h+=1
                logfid.write( '%5i\t%s\n'% (h,entry.log) )
                # If the obj is valid, add it to the catalog list, else ignore
                if entry.isvalid:
                    catalog.append( entry )
                else:
                    del entry
            # Store the catalog to the database_path
            if save:
                db = gconfig.database_path + '/' + splitext(basename(config.config_file_location))[0] + '.' + gconfig.database_ext
                msg = 'Saving database file to %s'%cyan(db)
                alert(msg,'scbuild')
                with open(db, 'wb') as dbf:
                    pickle.dump( catalog , dbf, pickle.HIGHEST_PROTOCOL )
        # Announce completion for this config
        wave_train = ''#'~~~~<vvvvvvvvvvvvvWw>~~~~'
        hline = wave_train*3
        msg = '\n\n#%s#\n%s with \"%s\". The related log file is at \"%s\".\n#%s#'%(hline,hlblack('Done'),green(config.catalog_dir),green(logfstr),hline)
        alert(msg,'scbuild')
# Function for searching through catalog files.
# Function for searching through catalog files.
def scsearch( catalog = None,           # Manually input list of scentry objects to search through
              q = None,                 # RANGE of mass ratios (>=1) to search for
              nonspinning = None,       # Non-spinning initially
              spinaligned = None,       # spin-aligned with L AND no in-plane spin INITIALLY
              spinantialigned = None,   # spin-anti-aligned with L AND no in-plane spin INITIALLY
              precessing = None,        # not spin aligned
              nonprecessing = None,     # not precessing
              equalspin = None,         # equal spin magnitudes
              unequalspin = None,       # not equal spin magnitudes
              antialigned = None,       # spin is in opposite direction of L
              setname = None,           # name of simulation set
              notsetname = None,        # list of setnames to ignore
              institute = None,         # list of institutes to accept
              keyword = None,           # list of keywords to accept (based on metadata directory string)
              notkeyword = None,        # list of keywords to not accept (based on metadata
                                        # directory string
              unique = None,            # if true, only simulations with unique initial conditions will be used
              plot = None,              # whether or not to show a plot of results
              exists=None,              # Test whether data directory related to scentry and ini file exist (True/False)
              validate_remnant=False,   # If true, ensure that final mass adn spin are well defined
              verbose = None):          # be verbose
    '''
    Filter a catalog of scentry objects by physical and bookkeeping criteria.
    Each keyword that is not None adds one filter; filters are applied in series.
    When catalog is None, all pickled catalog databases in gconfig.database_path
    are loaded and concatenated first. Returns the filtered (and date-sorted)
    list of scentry objects.
    '''
    # Print non None inputs to screen.
    # NOTE: dir()/eval() are used here to introspect this function's own keyword
    # arguments; defining additional local names before this loop would change
    # the printout, so keep it first.
    thisfun = inspect.stack()[0][3]
    if verbose is not None:
        for k in dir():
            if (eval(k) is not None) and (k != 'thisfun'):
                print '[%s]>> Found %s (=%r) keyword.' % (thisfun,textul(k),eval(k))
    '''
    Handle individual cases in serial
    '''
    #
    from os.path import realpath, abspath, join
    from os import pardir
    from numpy.linalg import norm
    from numpy import allclose,dot
    import pickle, glob
    # absolute tolerance for num comparisons
    tol = 1e-6
    # Handle the catalog input
    if catalog is None:
        # Get a list of all catalog database files. NOTE that .cat files are either placed in database_path directly, or by scbuild()
        dblist = glob.glob( gconfig.database_path+'*.'+gconfig.database_ext )
        # Load the catalog file(s)
        catalog = []
        for db in dblist:
            with open( db , 'rb') as dbf:
                catalog = catalog + pickle.load( dbf )
    # Determine whether remnant properties are already stored
    # NOTE(review): the filters below rely on Python-2 filter() returning a list;
    # under Python 3 filter() is lazy and catalog would become an iterator.
    if validate_remnant is True:
        from numpy import isnan,sum
        test = lambda k: (sum(isnan( k.xf ))==0) and (isnan(k.mf)==0)
        catalog = filter( test, catalog )
    # mass-ratio
    qtol = 1e-3
    if q is not None:
        # handle int of float input
        if isinstance(q,(int,float)): q = [q-qtol,q+qtol]
        # NOTE: this could use error checking
        test = lambda k: k.m1/k.m2 >= min(q) and k.m1/k.m2 <= max(q)
        catalog = filter( test, catalog )
    # nonspinning
    if nonspinning is True:
        test = lambda k: norm(k.S1)+norm(k.S2) < tol
        catalog = filter( test, catalog )
    # spin aligned with orbital angular momentum
    if spinaligned is True:
        test = lambda k: allclose( dot(k.S1,k.L1+k.L2) , norm(k.S1)*norm(k.L1+k.L2) , atol=tol ) and allclose( dot(k.S2,k.L1+k.L2) , norm(k.S2)*norm(k.L1+k.L2) , atol=tol ) and not allclose( norm(k.S1)+norm(k.S2), 0.0, atol=tol )
        catalog = filter( test, catalog )
    # spin anti-aligned with orbital angular momentum
    if spinantialigned is True:
        test = lambda k: allclose( dot(k.S1,k.L1+k.L2) , -norm(k.S1)*norm(k.L1+k.L2) , atol=tol ) and allclose( dot(k.S2,k.L1+k.L2) , -norm(k.S2)*norm(k.L1+k.L2) , atol=tol ) and not allclose( norm(k.S1)+norm(k.S2), 0.0, atol=tol )
        catalog = filter( test, catalog )
    # precessing
    if precessing is True:
        test = lambda k: not allclose( abs(dot(k.S1+k.S2,k.L1+k.L2)), norm(k.L1+k.L2)*norm(k.S1+k.S2) , atol = tol )
        catalog = filter( test, catalog )
    # non-precessing, same as spinaligned & spin anti aligned
    nptol = 1e-4
    if nonprecessing is True:
        test = lambda k: allclose( abs(dot(k.S1+k.S2,k.L1+k.L2)), norm(k.L1+k.L2)*norm(k.S1+k.S2) , atol = nptol )
        catalog = filter( test, catalog )
    # spins have equal magnitude
    if equalspin is True:
        test = lambda k: allclose( norm(k.S1), norm(k.S2), atol = tol )
        catalog = filter( test, catalog )
    # spins have unequal magnitude
    if unequalspin is True:
        test = lambda k: not allclose( norm(k.S1), norm(k.S2), atol = tol )
        catalog = filter( test, catalog )
    # total spin anti-parallel to total orbital angular momentum
    if antialigned is True:
        test = lambda k: allclose( dot(k.S1+k.S2,k.L1+k.L2)/(norm(k.S1+k.S2)*norm(k.L1+k.L2)), -1.0, atol = tol )
        catalog = filter( test, catalog )
    # Compare setname strings
    if setname is not None:
        if isinstance( setname, str ):
            setname = [setname]
        setname = filter( lambda s: isinstance(s,str), setname )
        setname = [ k.lower() for k in setname ]
        if isinstance( setname, list ) and len(setname)>0:
            test = lambda k: k.setname.lower() in setname
            catalog = filter( test, catalog )
        else:
            msg = '[%s]>> setname input must be nonempty string or list.' % thisfun
            raise ValueError(msg)
    # Compare not setname strings
    if notsetname is not None:
        if isinstance( notsetname, str ):
            notsetname = [notsetname]
        notsetname = filter( lambda s: isinstance(s,str), notsetname )
        notsetname = [ k.lower() for k in notsetname ]
        if isinstance( notsetname, list ) and len(notsetname)>0:
            test = lambda k: not ( k.setname.lower() in notsetname )
            catalog = filter( test, catalog )
        else:
            msg = '[%s]>> notsetname input must be nonempty string or list.' % thisfun
            raise ValueError(msg)
    # Compare institute strings
    if institute is not None:
        if isinstance( institute, str ):
            institute = [institute]
        institute = filter( lambda s: isinstance(s,str), institute )
        institute = [ k.lower() for k in institute ]
        if isinstance( institute, list ) and len(institute)>0:
            test = lambda k: k.config.institute.lower() in institute
            catalog = filter( test, catalog )
        else:
            msg = '[%s]>> institute input must be nonempty string or list.' % thisfun
            raise ValueError(msg)
    # Compare keyword
    if keyword is not None:
        # If string, make list
        if isinstance( keyword, str ):
            keyword = [keyword]
        keyword = filter( lambda s: isinstance(s,str), keyword )
        # Determine whether to use AND or OR based on type
        if isinstance( keyword, list ):
            allkeys = True
            if verbose:
                msg = 'List of keywords or string keyword found: '+cyan('ALL scentry objects matching will be passed.')+' To pass ANY entries matching the keywords, input the keywords using an iterable of not of type list.'
                alert(msg,'scsearch')
        else:
            allkeys = False # NOTE that this means: ANY keys will be passed
            if verbose:
                msg = 'List of keywords found: '+cyan('ANY scentry objects matching will be passed.')+' To pass ALL entries matching the keywords, input the kwywords using a list object.'
                alert(msg,'scsearch')
        # Always lower
        keyword = [ k.lower() for k in keyword ]
        # Handle two cases
        if allkeys:
            # Treat different keys with AND
            for key in keyword:
                test = lambda k: key in k.metadata_file_location.lower()
                catalog = filter( test, catalog )
        else:
            # Treat different keys with OR
            temp_catalogs = [ catalog for w in keyword ]
            new_catalog = []
            for j,key in enumerate(keyword):
                test = lambda k: key in k.metadata_file_location.lower()
                new_catalog += filter( test, temp_catalogs[j] )
            catalog = list(set(new_catalog))
    # Compare not keyword
    if notkeyword is not None:
        if isinstance( notkeyword, str ):
            notkeyword = [notkeyword]
        notkeyword = filter( lambda s: isinstance(s,str), notkeyword )
        notkeyword = [ k.lower() for k in notkeyword ]
        for w in notkeyword:
            test = lambda k: not ( w in k.metadata_file_location.lower() )
            catalog = filter( test, catalog )
    # Validate the existance of the related config files and simulation directories
    # NOTE that this effectively requires two reconfigure instances and is surely suboptimal
    if not ( exists is None ):
        def isondisk(e):
            ans = (e.config).reconfig().config_exists and os.path.isdir(e.simdir())
            if not ans:
                msg = 'Ignoring entry at %s becuase its config file cannot be found and/or its simulation directory cannot be found.' % cyan(e.simdir())
                warning(msg,'scsearch')
            return ans
        if catalog is not None:
            catalog = filter( isondisk , catalog )
    # Filter out physically degenerate simuations within a default tolerance
    output_descriptor = magenta(' possibly degenerate')
    if unique:
        catalog = scunique(catalog,verbose=False)
        output_descriptor = green(' unique')
    # Sort by date
    catalog = sorted( catalog, key = lambda e: e.date_number, reverse = True )
    #
    if verbose:
        if len(catalog)>0:
            print '## Found %s%s simulations:' % ( bold(str(len(catalog))), output_descriptor )
            for k,entry in enumerate(catalog):
                # tag this entry with its inferred simname
                simname = entry.raw_metadata.source_dir[-1].split('/')[-1] if entry.raw_metadata.source_dir[-1][-1]!='/' else entry.raw_metadata.source_dir[-1].split('/')[-2]
                print '[%04i][%s] %s: %s\t(%s)' % ( k+1, green(entry.config.config_file_location.split('/')[-1].split('.')[0]), cyan(entry.setname), entry.label, cyan(simname ) )
        else:
            print red('!! Found %s simulations.' % str(len(catalog)))
        print ''
    #
    return catalog
# Given list of scentry objects, make a list unique in initial parameters
# Given list of scentry objects, make a list unique in initial parameters
def scunique( catalog = None, tol = 1e-3, verbose = False ):
    '''
    Reduce a list of scentry objects to runs with physically unique initial
    parameters. When several entries are degenerate (scentry.compare2 within
    tol), the run with the largest initial separation (entry.b) is kept.

    catalog : list of scentry-like objects (must expose compare2, b, setname, label)
    tol     : absolute tolerance forwarded to scentry.compare2
    verbose : print progress information

    Returns a new list containing the unique subset, in original catalog order.
    '''
    # import useful things
    from numpy import ones,argmax,array
    # This mask will be augmented such that only unique indices remain true
    umap = ones( len(catalog), dtype=bool )
    # Keep track of which items have been compared using another map
    tested_map = ones( len(catalog), dtype=bool )
    # For each entry in catalog
    for d,entry in enumerate(catalog):
        #
        if tested_map[d]:
            # Let the people know.
            if verbose:
                alert( '[%i] %s:%s' % (d,entry.setname,entry.label), 'scunique' )
            # 1. Collect all entries with matching initial parameters (independently of
            #    initial separation). NOTE that this subset includes the current object.
            #    BUGFIX: a list comprehension (not filter) is used so that the subset can
            #    safely be iterated more than once under lazy-filter semantics.
            subset = [ k for k in catalog if entry.compare2(k,atol=tol) ]
            # 2. Find index locations of subset members within the catalog
            subdex = [ catalog.index(k) for k in subset ]
            # 3. By default, keep the longest run: the subset member with the largest
            #    initial separation b. maxdex is an index INTO subset.
            maxdex = argmax( [ e.b for e in subset ] )
            # Mark all subset members as tested; drop all but the longest run.
            for ind,k in enumerate(subset):
                tested_map[ subdex[ind] ] = False
                if k is subset[maxdex]:
                    if verbose: print( '>> Keeping: [%i] %s:%s' % (subdex[ind],k.setname,k.label) )
                else:
                    umap[ subdex[ind] ] = False
                    if verbose: print( '## Removing:[%i] %s:%s' % (subdex[ind],k.setname,k.label) )
        else:
            if verbose: print( magenta('[%i] Skipping %s:%s. It has already been checked.' % (d,entry.setname,entry.label) ) )
    # Create the unique catalog using umap (boolean-mask indexing preserves order)
    unique_catalog = list( array(catalog)[ umap ] )
    # Let the people know.
    if verbose:
        print( green('Note that %i physically degenerate simulations were removed.' % (len(catalog)-len(unique_catalog)) ) )
        print( green( 'Now %i physically unique entries remain:' % len(unique_catalog) ) )
        for k,entry in enumerate(unique_catalog):
            print( green( '>> [%i] %s: %s' % ( k+1, entry.setname, entry.label ) ) )
        print( '' )
    # return the unique subset of runs
    return unique_catalog
# Construct string label for members of the scentry class
# Construct string label for members of the scentry class
def sclabel( entry,             # scentry object, or list of scentry objects
             use_q = True ):    # if True, mass ratio will be used in the label
    '''
    Construct a short hyphen-separated tag string describing an scentry object
    (e.g. 'qc-ns-q1.50'). For a list of entries, only the tags common to every
    member are kept, and 'vq' (variable q) is appended when no single mass-ratio
    tag is shared by all members.

    entry : scentry object, or list of scentry objects (list members must already
            carry a .label attribute)
    use_q : if True, append a mass-ratio tag 'q%1.2f' to single-entry labels

    Returns the label string. Raises ValueError for other input types.
    '''
    # Build a label for a list of entries from the tags common to all of them
    def sclabel_many( entry = None, use_q = None ):
        #
        from numpy import sign
        # Collect each member's tag list by splitting its existing label
        tag_list = []
        for e in entry:
            # _,tg = sclabel_single( entry = e, use_q = use_q )
            tg = e.label.split('-')
            tag_list.append(tg)
        # Intersect the tag sets of ALL entries.
        # BUGFIX: this loop previously started at index 2, silently ignoring the
        # second entry's tags when computing the common tag set.
        common_tag_set = set(tag_list[0])
        for k in range(1,len(tag_list)):
            common_tag_set &= set(tag_list[k])
        # Keep the common tags in the order they appear in the first entry's label
        common_tag = [ k for k in tag_list[0] if k in common_tag_set ]
        # Determine whether a single mass-ratio tag (e.g. 'q1.50') is common to all entries
        single_q = False
        for tg in common_tag:
            single_q = single_q or ( ('q' in tg) and (tg!='qc') )
        #
        tag = common_tag
        # If the mass ratio varies across the entries, tag the label accordingly
        if not single_q:
            tag.append('vq') # variable q
        # concat tags together to make label; sign(k) yields 0 for the first tag
        # (no leading hyphen) and 1 for all subsequent tags
        label = ''
        for k in range(len(tag)):
            label += sign(k)*'-' + tag[k]
        #
        return label
    # Build the label for a single entry from its initial physical parameters
    def sclabel_single( entry = None, use_q = None ):
        #
        from numpy.linalg import norm
        from numpy import allclose,dot,sign
        #
        if not isinstance( entry, scentry ):
            msg = '(!!) First input must be member of scentry class.'
            raise ValueError(msg)
        # Initiate list to hold label parts
        tag = []
        #
        tol = 1e-4
        # shorthand for entry
        e = entry
        # Calculate the entry's net spin and orbital angular momentum
        S = e.S1+e.S2; L = e.L1+e.L2
        # Run is quasi-circular if momenta are perpendicular to separation vector
        R = e.R2 - e.R1
        if allclose( dot(e.P1,R), 0.0 , atol=tol ) and allclose( dot(e.P2,R), 0.0 , atol=tol ):
            tag.append('qc')
        # Run is nonspinning if both spin magnitudes are close to zero
        if allclose( norm(e.S1) + norm(e.S2) , 0.0 , atol=tol ):
            tag.append('ns')
        # Label by spin on BH1 if spinning
        if not allclose( norm(e.S1), 0.0, atol=tol ) :
            tag.append( '1chi%1.2f' % ( norm(e.S1)/e.m1**2 ) )
        # Label by spin on BH2 if spinning
        if not allclose( norm(e.S2), 0.0, atol=tol ) :
            tag.append( '2chi%1.2f' % ( norm(e.S2)/e.m2**2 ) )
        # Run is spin aligned if net spin is parallel to net L
        if allclose( dot(e.S1,L) , norm(e.S1)*norm(L) , atol=tol ) and allclose( dot(e.S2,L) , norm(e.S2)*norm(L) , atol=tol ) and (not 'ns' in tag):
            tag.append('sa')
        # Run is spin anti-aligned if net spin is anti-parallel to net L
        if allclose( dot(e.S1,L) , -norm(e.S1)*norm(L) , atol=tol ) and allclose( dot(e.S2,L) , -norm(e.S2)*norm(L) , atol=tol ) and (not 'ns' in tag):
            tag.append('saa')
        # Run is precessing if component spins are not parallel with L
        if (not 'sa' in tag) and (not 'saa' in tag) and (not 'ns' in tag):
            tag.append('p')
        # mass ratio
        if use_q:
            tag.append( 'q%1.2f' % (e.m1/e.m2) )
        # concat tags together to make label (see note in sclabel_many about sign(k))
        label = ''
        for k in range(len(tag)):
            label += sign(k)*'-' + tag[k]
        #
        return label, tag
    # Dispatch on input type
    if isinstance( entry, list ):
        label = sclabel_many( entry = entry, use_q = use_q )
    elif isinstance( entry, scentry ):
        label,_ = sclabel_single( entry = entry, use_q = use_q )
    else:
        msg = 'input must be list scentry objects, or single scentry'
        raise ValueError(msg)
    #
    return label
# Lowest level class for gravitational waveform data
class gwf:
# Class constructor
def __init__( this, # The object to be created
wfarr=None, # umpy array of waveform data in to format [time plus imaginary]
dt = None, # If given, the waveform array will be interpolated to this
# timestep if needed
ref_scentry = None, # reference scentry object
l = None, # Optional polar index (an eigenvalue of a differential eq)
m = None, # Optional azimuthal index (an eigenvalue of a differential eq)
extraction_parameter = None, # Optional extraction parameter ( a map to an extraction radius )
kind = None, # strain or psi4
friend = None, # gwf object from which to clone fields
mf = None, # Optional remnant mass input
xf = None, # Optional remnant spin input
m1=None,m2=None, # Optional masses
label = None, # Optional label input (see gwylm)
preinspiral = None, # Holder for information about the raw waveform's turn-on
postringdown = None, # Holder for information about the raw waveform's turn-off
verbose = False ): # Verbosity toggle
#
this.dt = dt
# The kind of obejct to be created : e.g. psi4 or strain
if kind is None:
kind = r'$y$'
this.kind = kind
# Optional field to be set externally if needed
source_location = None
# Set optional fields to none as default. These will be set externally is they are of use.
this.l = l
this.m = m
this.extraction_parameter = extraction_parameter
#
this.verbose = verbose
# Fix nans, nonmonotinicities and jumps in time series waveform array
wfarr = straighten_wfarr( wfarr, verbose=this.verbose )
# use the raw waveform data to define all fields
this.wfarr = wfarr
# optional component masses
this.m1,this.m2 = m1,m2
# Optional Holders for remnant mass and spin
this.mf = mf
this.xf = xf
# Optional label input (see gwylm)
this.label = label
#
this.preinspiral = preinspiral
this.postringdown = postringdown
#
this.ref_scentry = ref_scentry
this.setfields(wfarr=wfarr,dt=dt)
# If desired, Copy fields from related gwf object.
if type(friend).__name__ == 'gwf' :
this.meet( friend )
elif friend is not None:
msg = 'value of "friend" keyword must be a member of the gwf class'
error(mgs,'gwf')
# Store wfarr in a field that will not be touched beyond this point. This is useful because
# the properties defined in "setfields" may change as the waveform is manipulated (e.g. windowed,
# scaled, phase shifted), and after any of these changes, we may want to reaccess the initial waveform
# though the "reset" method (i.e. this.reset)
this.__rawgwfarr__ = wfarr
# Tag for whether the wavform has been low pass filtered since creation
this.__lowpassfiltered__ = False
    # set fields of standard wf object
    def setfields(this,         # The current object
                  wfarr=None,   # The waveform array to apply to the current object
                  dt=None):     # The time spacing to apply to the current object
        '''(Re)compute all derived fields of this gwf object from a waveform array.

        The waveform array wfarr has three columns: (t, plus, cross), or
        (t, amp, phase) which is auto-detected and converted below. This method
        stores the array and populates every time-domain attribute (t, plus,
        cross, y, amp, phi, dphi, ...) and frequency-domain attribute (f, w,
        fd_plus, fd_cross, fd_amp, fd_phi, ...). If dt is given, the input
        array is first interpolated onto that uniform time step.

        NOTE: callers are expected to modify wfarr and re-run setfields()
        rather than mutate derived fields (e.g. amp, phi) directly.
        '''
        # If given dt, then interpolote waveform array accordingly
        if dt is not None:
            if this.verbose:
                msg = 'Interpolating data to '+cyan('dt=%f'%dt)
                alert(msg,'gwylm.setfields')
            wfarr = intrp_wfarr(wfarr,delta=dt)
        # Alert the use if improper input is given
        # Resolution order: explicit wfarr input wins; otherwise fall back to
        # the array already stored on the object; error if neither exists.
        if (wfarr is None) and (this.wfarr is None):
            msg = 'waveform array input (wfarr=) must be given'
            raise ValueError(msg)
        elif wfarr is not None:
            this.wfarr = wfarr
        elif (wfarr is None) and not (this.wfarr is None):
            wfarr = this.wfarr
        else:
            msg = 'unhandled waveform array configuration: input wfarr is %s and this.wfarr is %s'%(wfarr,this.wfarr)
            error(msg,'gwf.setfields')
        ##########################################################
        # Make sure that waveform array is in t-plus-cross format #
        ##########################################################
        # Imports
        from numpy import abs,sign,linspace,exp,arange,angle,diff,ones,isnan,pi
        from numpy import vstack,sqrt,unwrap,arctan,argmax,mod,floor,logical_not
        from scipy.interpolate import InterpolatedUnivariateSpline
        from scipy.fftpack import fft, fftfreq, fftshift, ifft
        # Time domain attributes (initialized to None; filled below)
        this.t = None                     # Time vals
        this.plus = None                  # Plus part
        this.cross = None                 # Cross part
        this.y = None                     # Complex =(def) plus + 1j*cross
        this.amp = None                   # Amplitude = abs(y)
        this.phi = None                   # Complex argument
        this.dphi = None                  # Time rate of complex argument
        this.k_amp_max = None             # Index location of amplitude max
        this.window = None                # The time domain window function applid to the original waveform. This
                                          # initiated as all ones, but changed in the taper method (if it is called)
        # Frequency domain attributes. NOTE that this will not currently be set by default.
        # Instead, the current approach will be to set these fields once gwf.fft() has been called.
        this.f = None                     # double sided frequency range
        this.w = None                     # double sided angular frequency range
        this.fd_plus = None               # fourier transform of time domain plus part
        this.fd_cross = None              # fourier transform of time domain cross part
        this.fd_y = None                  # both polarisations (i.e. plus + ij*cross)
        this.fd_wfarr = None              # frequency domain waveform array
        this.fd_amp = None                # total frequency domain amplitude: abs(right+left)
        this.fd_phi = None                # total frequency domain phase: arg(right+left)
        this.fd_dphi = None               # frequency derivative of fdphi
        this.fd_k_amp_max = None          # index location of fd amplitude max
        # Domain independent attributes
        this.n = None                     # length of arrays
        this.fs = None                    # samples per unit time
        this.df = None                    # frequnecy domain spacing
        # Validate time step. Interpolate for constant time steo if needed.
        # NOTE: this also sets this.dt as a side effect.
        this.__validatet__()
        # Determine formatting of wfarr
        t = this.wfarr[:,0]; A = this.wfarr[:,1]; B = this.wfarr[:,2];
        # if all elements of A are greater than zero
        # NOTE(review): strictly-positive column 1 is used as a heuristic for
        # amp-phase input; a plus polarisation that happens to be all-positive
        # would be misclassified -- confirm upstream inputs.
        if (A>0).all() :
            typ = 'amp-phase'
        elif ((abs(A.imag)>0).any() or (abs(B.imag)>0).any()): # else if A or B are complex
            #
            msg = 'The current code version only works with plus valued time domain inputs to gwf().'
            raise ValueError(msg)
        else:
            typ = 'plus-imag'
        # from here on, we are to work with the plus-cross format
        if typ == 'amp-phase':
            C = A*exp(1j*B)
            this.wfarr = vstack( [ t, C.real, C.imag ] ).T
            this.__validatewfarr__()
        # --------------------------------------------------- #
        # Set time domain properties
        # --------------------------------------------------- #
        # NOTE that it will always be assumed that the complex waveform is plus+j*imag
        # Here, we trust the user to know that if one of these quantities is changed, then it will affect the other, and
        # that to have all quantities consistent, then one should modify wfarr, and then perform this.setfields()
        # (and not modify e.g. amp and phase). All functions on gwf objects will respect this.
        # Time domain attributed
        this.t = this.wfarr[:,0]                    # Time
        this.plus = this.wfarr[:,1]                 # Real part
        this.cross = this.wfarr[:,2]                # Imaginary part
        this.y = this.plus + 1j*this.cross          # Complex waveform
        this.amp = abs( this.y )                    # Amplitude
        phi_ = unwrap( angle( this.y ) )            # Phase: NOTE, here we make the phase constant where the amplitude is zero
        # print find( (this.amp > 0) * (this.amp<max(this.amp)) )
        # k = find( (this.amp > 0) * (this.amp<max(this.amp)) )[0]
        # phi_[0:k] = phi_[k]
        this.phi = phi_
        this.dphi = intrp_diff( this.t, this.phi )  # Derivative of phase, last point interpolated to preserve length
        # this.dphi = diff( this.phi )/this.dt      # Derivative of phase, last point interpolated to preserve length
        this.k_amp_max = argmax(this.amp)           # index location of max ampitude
        this.intrp_t_amp_max = intrp_argmax(this.amp,domain=this.t) # Interpolated time coordinate of max
        #
        this.n = len(this.t)                        # Number of time samples
        this.window = ones( this.n )                # initial state of time domain window
        this.fs = 1.0/this.dt                       # Sampling rate
        this.df = this.fs/this.n                    # freq resolution
        # --------------------------------------------------- #
        # Always calculate frequency domain data
        # --------------------------------------------------- #
        # compute the frequency domain
        this.f = fftshift(fftfreq( this.n, this.dt ))
        this.w = 2*pi*this.f
        # compute fourier transform values
        # NOTE: the dt factor converts the discrete FFT into a continuous-FT approximation
        this.fd_plus = fftshift(fft( this.plus )) * this.dt                 # fft of plus
        this.fd_cross = fftshift(fft( this.cross )) * this.dt               # fft of cross
        this.fd_y = this.fd_plus + 1j*this.fd_cross                         # full fft
        this.fd_amp = abs( this.fd_y )                                      # amp of full fft
        this.fd_phi = unwrap( angle( this.fd_y ) )                          # phase of full fft
        # this.fd_dphi = diff( this.fd_phi )/this.df                        # phase rate: dphi/df
        this.fd_dphi = intrp_diff( this.f, this.fd_phi )                    # phase rate: dphi/df
        this.fd_k_amp_max = argmax( this.fd_amp )
        # Starting frequency in rad/sec
        this.wstart = None
# Copy attrributed from friend.
def meet(this,friend,init=False,verbose=False):
# If wrong type input, let the people know.
if not isinstance(friend,gwf):
msg = '1st input must be of type ' + bold(type(this).__name__)+'.'
error( msg, fname=inspect.stack()[0][3] )
# Copy attrributed from friend. If init, then do not check if attribute already exists in this.
for attr in friend.__dict__:
proceed = (attr in this.__dict__)
proceed = proceed and type(friend.__dict__[attr]).__name__ in ('int','int64','float','scentry', 'string')
# msg = '%s is %s and %s' % (attr,type(friend.__dict__[attr]).__name__,magenta('proceed=%r'%proceed))
# alert(msg)
if proceed or init:
if verbose: print '\t that.%s --> this.%s (%s)' % (attr,attr,type(friend.__dict__[attr]).__name__)
setattr( this, attr, friend.__dict__[attr] )
#
dir(this)
return this
# validate whether there is a constant time step
def __validatet__(this):
#
from numpy import diff,var,allclose,vstack,mean,linspace,diff,amin,allclose
from numpy import arange,array,double,isnan,nan,logical_not,hstack
from scipy.interpolate import InterpolatedUnivariateSpline
# # Look for and remove nans
# t,A,B = this.wfarr[:,0],this.wfarr[:,1],this.wfarr[:,2]
# nan_mask = logical_not( isnan(t) ) * logical_not( isnan(A) ) * logical_not( isnan(B) )
# if logical_not(nan_mask).any():
# msg = red('There are NANs in the data which mill be masked away.')
# warning(msg,'gwf.setfields')
# this.wfarr = this.wfarr[nan_mask,:]
# t = this.wfarr[:,0]; A = this.wfarr[:,1]; B = this.wfarr[:,2];
# Note the shape convention
t = this.wfarr[:,0]
# check whether t is monotonically increasing
isincreasing = allclose( t, sorted(t), 1e-6 )
if not isincreasing:
# Let the people know
msg = red('The time series has been found to be non-monotonic. We will sort the data to enforce monotinicity.')
warning(msg,'gwf.__validatet__')
# In this case, we must sort the data and time array
map_ = arange( len(t) )
map_ = sorted( map_, key = lambda x: t[x] )
this.wfarr = this.wfarr[ map_, : ]
t = this.wfarr[:,0]
# Look for duplicate time data
hasduplicates = 0 == amin( diff(t) )
if hasduplicates:
# Let the people know
msg = red('The time series has been found to have duplicate data. We will delete the corresponding rows.')
warning(msg,'gwf.__validatet__')
# delete the offending rows
dup_mask = hstack( [True, diff(t)!=0] )
this.wfarr = this.wfarr[dup_mask,:]
t = this.wfarr[:,0]
# if there is a non-uniform timestep, or if the input dt is not None and not equal to the given dt
NONUNIFORMT = not isunispaced(t)
INPUTDTNOTGIVENDT = this.dt is None
if NONUNIFORMT and (not INPUTDTNOTGIVENDT):
msg = '(**) Waveform not uniform in time-step. Interpolation will be applied.'
if verbose: print magenta(msg)
if NONUNIFORMT and INPUTDTNOTGIVENDT:
# if dt is not defined and not none, assume smallest dt
if this.dt is None:
this.dt = diff(lim(t))/len(t)
msg = '(**) Warning: No dt given to gwf(). We will assume that the input waveform array is in geometric units, and that dt = %g will more than suffice.' % this.dt
if this.verbose:
print magenta(msg)
# Interpolate waveform array
intrp_t = arange( min(t), max(t), this.dt )
intrp_R = InterpolatedUnivariateSpline( t, this.wfarr[:,1] )( intrp_t )
intrp_I = InterpolatedUnivariateSpline( t, this.wfarr[:,2] )( intrp_t )
# create final waveform array
this.wfarr = vstack([intrp_t,intrp_R,intrp_I]).T
else:
# otherwise, set dt automatically
this.dt = mean(diff(t))
# validate shape of waveform array
def __validatewfarr__(this):
# check shape width
if this.wfarr.shape[-1] != 3 :
msg = '(!!) Waveform arr should have 3 columns'
raise ValueError(msg)
# check shape depth
if len(this.wfarr.shape) != 2 :
msg = '(!!) Waveform array should have two dimensions'
raise ValueError(msg)
# General plotting
def plot( this,
show=False,
fig = None,
title = None,
ref_gwf = None,
labels = None,
domain = None):
# Handle which default domain to plot
if domain is None:
domain = 'time'
elif not ( domain in ['time','freq'] ):
msg = 'Error: domain keyword must be either "%s" or "%s".' % (cyan('time'),cyan('freq'))
error(msg,'gwylm.plot')
# Plot selected domain.
if domain == 'time':
ax = this.plottd( show=show,fig=fig,title=title, ref_gwf=ref_gwf, labels=labels )
elif domain == 'freq':
ax = this.plotfd( show=show,fig=fig,title=title, ref_gwf=ref_gwf, labels=labels )
#
from matplotlib.pyplot import gcf
#
return ax,gcf()
    # Plot frequency domain
    def plotfd( this,
                show = False,
                fig = None,
                title = None,
                ref_gwf = None,
                labels = None,
                verbose = False ):
        '''Plot frequency-domain amplitude, phase, and phase rate on three shared-x log-x panels.

        Parameters
        ----------
        show    : if True, call matplotlib show() at the end.
        fig     : existing figure to draw into; a new one is created if None.
        title   : optional title for the top panel.
        ref_gwf : optional second gwf plotted underneath as a thick translucent reference.
        labels  : optional (this_label, ref_label) pair; enables the legend.

        Returns the list of three axes.
        '''
        #
        from matplotlib.pyplot import plot,subplot,figure,tick_params,subplots_adjust
        from matplotlib.pyplot import grid,setp,tight_layout,margins,xlabel,legend
        from matplotlib.pyplot import show as shw
        from matplotlib.pyplot import ylabel as yl
        from matplotlib.pyplot import title as ttl
        from numpy import ones,sqrt,hstack,array
        # Alias the reference waveform, if given
        if ref_gwf:
            that = ref_gwf
        #
        if fig is None:
            fig = figure(figsize = 1.1*array([8,7.2]))
            fig.set_facecolor("white")
        # Label used on the y axes (e.g. 'psi4' or 'h')
        kind = this.kind
        # Styling constants
        clr = rgb(3)
        grey = 0.9*ones(3)
        lwid = 1
        txclr = 'k'
        fs = 18
        font_family = 'serif'
        gclr = '0.9'
        #
        ax = []
        # xlim = lim(this.t) # [-400,this.t[-1]]
        # Only positive frequencies are plotted (log x scale)
        pos_mask = this.f>0
        if ref_gwf:
            that_pos_mask = that.f>0
            that_lwid = 4
            that_alpha = 0.22
        # Legend is shown only when the caller supplies labels
        set_legend = False
        if not labels:
            labels = ('','')
        else:
            set_legend=True
        # ------------------------------------------------------------------- #
        # Amplitude
        # ------------------------------------------------------------------- #
        ax.append( subplot(3,1,1) );
        grid(color=gclr, linestyle='-')
        setp(ax[-1].get_xticklabels(), visible=False)
        ax[-1].set_xscale('log', nonposx='clip')
        ax[-1].set_yscale('log', nonposy='clip')
        #
        plot( this.f[pos_mask], this.fd_amp[pos_mask], color=clr[0], label=labels[0] )
        if ref_gwf:
            plot( that.f[that_pos_mask], that.fd_amp[that_pos_mask], color=clr[0], linewidth=that_lwid, alpha=that_alpha, label=labels[-1] )
        pylim( this.f[pos_mask], this.fd_amp[pos_mask], pad_y=10 )
        #
        yl('$|$'+kind+'$|(f)$',fontsize=fs,color=txclr, family=font_family )
        if set_legend: legend(frameon=False)
        # ------------------------------------------------------------------- #
        # Total Phase
        # ------------------------------------------------------------------- #
        ax.append( subplot(3,1,2, sharex=ax[0]) );
        grid(color=gclr, linestyle='-')
        setp(ax[-1].get_xticklabels(), visible=False)
        ax[-1].set_xscale('log', nonposx='clip')
        #
        plot( this.f[pos_mask], this.fd_phi[pos_mask], color=1-clr[0] )
        if ref_gwf:
            plot( that.f[that_pos_mask], that.fd_phi[that_pos_mask], color=1-clr[0], linewidth=that_lwid, alpha=that_alpha )
        pylim( this.f[pos_mask], this.fd_phi[pos_mask] )
        #
        yl(r'$\phi = \mathrm{arg}($'+kind+'$)$',fontsize=fs,color=txclr, family=font_family )
        # ------------------------------------------------------------------- #
        # Total Phase Rate
        # ------------------------------------------------------------------- #
        ax.append( subplot(3,1,3, sharex=ax[0]) );
        grid(color=gclr, linestyle='-')
        ax[-1].set_xscale('log', nonposx='clip')
        #
        plot( this.f[pos_mask], this.fd_dphi[pos_mask], color=sqrt(clr[0]) )
        if ref_gwf:
            plot( that.f[that_pos_mask], that.fd_dphi[that_pos_mask], color=sqrt(clr[0]), linewidth=that_lwid, alpha=that_alpha )
        pylim( this.f[pos_mask], this.fd_dphi[pos_mask] )
        #
        yl(r'$\mathrm{d}{\phi}/\mathrm{d}f$',fontsize=fs,color=txclr, family=font_family)
        # ------------------------------------------------------------------- #
        # Full figure settings
        # ------------------------------------------------------------------- #
        if title is not None:
            ax[0].set_title( title, family=font_family )
        # Set axis lines (e.g. grid lines) below plot lines
        for a in ax:
            a.set_axisbelow(True)
        # Ignore renderer warnings
        # NOTE(review): "warnings" is not imported locally here (unlike plottd);
        # this relies on a module-level import -- confirm it exists at file top.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            tight_layout(pad=2, w_pad=1.2)
            subplots_adjust(hspace = .001)
        #
        xlabel(r'$f$',fontsize=fs,color=txclr)
        #
        if show:
            shw()
        #
        return ax
    # Plot time domain
    def plottd( this,
                show=False,
                fig = None,
                ref_gwf = None,
                labels = None,
                title = None):
        '''Plot time-domain polarisations/amplitude, phase, and phase rate on three shared-x panels.

        Parameters
        ----------
        show    : if True, call matplotlib show() at the end.
        fig     : existing figure to draw into; a new one is created if None.
        ref_gwf : optional second gwf plotted underneath as a thick translucent reference.
        labels  : optional (this_label, ref_label) pair; enables the legend.
        title   : optional title for the top panel.

        Returns the list of three axes.
        '''
        #
        import warnings
        from numpy import array
        #
        from matplotlib.pyplot import plot,subplot,figure,tick_params,subplots_adjust
        from matplotlib.pyplot import grid,setp,tight_layout,margins,xlabel,legend
        from matplotlib.pyplot import show as shw
        from matplotlib.pyplot import ylabel as yl
        from matplotlib.pyplot import title as ttl
        from numpy import ones,sqrt,hstack
        #
        if fig is None:
            fig = figure(figsize = 1.1*array([8,7.2]))
            fig.set_facecolor("white")
        # Styling constants
        clr = rgb(3)
        grey = 0.9*ones(3)
        lwid = 1
        txclr = 'k'
        fs = 18
        font_family = 'serif'
        gclr = '0.9'
        #
        ax = []
        xlim = lim(this.t) # [-400,this.t[-1]]
        # Alias the reference waveform, if given
        if ref_gwf:
            that = ref_gwf
            that_lwid = 4
            that_alpha = 0.22
        # Legend is shown only when the caller supplies labels
        set_legend = False
        if not labels:
            labels = ('','')
        else:
            set_legend=True
        # Time domain plus and cross parts
        ax.append( subplot(3,1,1) );
        grid(color=gclr, linestyle='-')
        setp(ax[-1].get_xticklabels(), visible=False)
        # actual plotting: both polarisations in grey, +/- amplitude envelope in color
        plot( this.t, this.plus,  linewidth=lwid, color=0.8*grey )
        plot( this.t, this.cross, linewidth=lwid, color=0.5*grey )
        plot( this.t, this.amp,   linewidth=lwid, color=clr[0], label=labels[0] )
        plot( this.t,-this.amp,   linewidth=lwid, color=clr[0] )
        if ref_gwf:
            plot( that.t, that.plus,  linewidth=that_lwid, color=0.8*grey, alpha=that_alpha )
            plot( that.t, that.cross, linewidth=that_lwid, color=0.5*grey, alpha=that_alpha )
            plot( that.t, that.amp,   linewidth=that_lwid, color=clr[0], alpha=that_alpha, label=labels[-1] )
            plot( that.t,-that.amp,   linewidth=that_lwid, color=clr[0], alpha=that_alpha )
        if set_legend: legend(frameon=False)
        # Ignore renderer warnings
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            tight_layout(pad=2, w_pad=1.2)
            subplots_adjust(hspace = .001)
        #
        pylim( this.t, this.amp, domain=xlim, symmetric=True )
        kind = this.kind
        yl(kind,fontsize=fs,color=txclr, family=font_family )
        # Time domain phase
        ax.append( subplot(3,1,2, sharex=ax[0]) );
        grid(color=gclr, linestyle='-')
        setp(ax[-1].get_xticklabels(), visible=False)
        # actual plotting
        plot( this.t, this.phi, linewidth=lwid, color=1-clr[0] )
        if ref_gwf:
            plot( that.t, that.phi, linewidth=that_lwid, color=1-clr[0], alpha=that_alpha )
        pylim( this.t, this.phi, domain=xlim )
        yl( r'$\phi = \mathrm{arg}(%s)$' % kind.replace('$','') ,fontsize=fs,color=txclr, family=font_family)
        # Time domain frequency
        ax.append( subplot(3,1,3, sharex=ax[0]) );
        grid(color=gclr, linestyle='-')
        # Actual plotting
        plot( this.t, this.dphi, linewidth=lwid, color=sqrt(clr[0]) )
        if ref_gwf:
            plot( that.t, that.dphi, linewidth=that_lwid, color=sqrt(clr[0]), alpha=that_alpha )
        pylim( this.t, this.dphi, domain=xlim )
        yl(r'$\mathrm{d}{\phi}/\mathrm{d}t$',fontsize=fs,color=txclr, family=font_family)
        # Full figure settings
        ax[0].set_xlim(lim(this.t))
        if title is not None:
            ax[0].set_title( title, family=font_family )
        # Set axis lines (e.g. grid lines) below plot lines
        for a in ax:
            a.set_axisbelow(True)
        #
        xlabel(r'$t$',fontsize=fs,color=txclr)
        #
        if show:
            shw()
        #
        return ax
# Apply a time domain window to the waveform. Either the window vector OR a set of indeces to be tapered is given as input. NOTE that while this method modifies the current object, one can revert to object's original state by using the reset() method. OR one can make a backup of the current object by using the clone() method.
def apply_window( this, # gwf object to be windowed
state = None, # Index values defining region to be tapered:
# For state=[a,b], if a>b then the taper is 1 at b and 0 at a
# If a<b, then the taper is 1 at a and 0 at b.
window = None): # optional input: use known taper/window
# Store the initial state of the waveform array just in case the user wishes to undo the window
this.__prevarr__ = this.wfarr
# Use low level function
if (state is not None) and (window is None):
window = maketaper( this.t, state)
elif (state is None) and (window is None):
msg = '(!!) either "state" or "window" keyword arguments must be given and not None.'
error(msg,'gwf.taper')
# Set this object's window
this.window = this.window * window
#
wfarr = this.wfarr
wfarr[:,1] = this.window * this.wfarr[:,1]
wfarr[:,2] = this.window * this.wfarr[:,2]
# NOTE that objects cannot be redefined within their methods, but their properties can be changed. For this reason, the line below uses setfields() rather than gwf() to apply the taper.
this = this.setfields( wfarr=wfarr )
# Apply mask
def apply_mask( this, mask=None ):
#
if mask is None: error('the mask input must be given, and it must be index or boolean ')
#
this.setfields( this.wfarr[mask,:] )
# If desired, reset the waveform object to its original state (e.g. it's state just afer loading).
# Note that after this methed is called, the current object will occupy a different address in memory.
def reset(this): this.setfields( this.__rawgwfarr__ )
# return a copy of the current object
def copy(this):
#
from copy import deepcopy as copy
return copy(this)
# RETURN a clone the current waveform object. NOTE that the copy package may also be used here
def clone(this): return gwf(this.wfarr).meet(this)
# Interpolate the current object
def interpolate(this,dt=None,domain=None):
# Validate inputs
if (dt is None) and (domain is None):
msg = red('First "dt" or "domain" must be given. See traceback above.')
error(msg,'gwf.interpolate')
if (dt is not None) and (domain is not None):
msg = red('Either "dt" or "domain" must be given, not both. See traceback above.')
error(msg,'gwf.interpolate')
# Create the new wfarr by interpolating
if domain is None:
wfarr = intrp_wfarr(this.wfarr,delta=dt)
else:
wfarr = intrp_wfarr(this.wfarr,domain=domain)
# Set the current object to its new state
this.setfields(wfarr)
# Pad this waveform object in the time domain with zeros
def pad(this,new_length=None,where=None):
# Pad this waveform object to the left and right with zeros
ans = this.copy()
if new_length is not None:
# Create the new wfarr
wfarr = pad_wfarr( this.wfarr, new_length,where=where )
# Confer to the current object
ans.setfields(wfarr)
return ans
# Analog of the numpy ndarray conj()
def conj(this):
this.wfarr[:,2] *= -1
this.setfields()
return this
# Align the gwf with a reference gwf using a desired method
def align( this,
that, # The reference gwf object
method=None, # The alignment type e.g. phase
options=None, # Addtional options for subroutines
mask=None, # Boolean mask to apply for alignment (useful e.g. for average-phase alignment)
verbose=False ):
#
if not isinstance(that,gwf):
msg = 'first input must be gwf -- the gwf object to alignt the current object to'
error(msg,'gwf.align')
# Set default method
if method is None:
msg = 'No method chosen. We will proceed by aligning the waveform\'s initial phase.'
warning(msg,'gwf.align')
memthod = ['initial-phase']
# Make sure method is list or tuple
if not isinstance(method,(list,tuple)):
method = [method]
# Make sure all methods are strings
for k in method:
if not isinstance(k,str):
msg = 'non-string method type found: %s'%k
error(msg,'gwf.align')
# Check for handled methods
handled_methods = [ 'initial-phase','average-phase' ]
for k in method:
if not ( k in handled_methods ):
msg = 'non-handled method input: %s. Handled methods include %s'%(red(k),handled_methods)
error(msg,'gwf.align')
# Look for phase-alignement
if 'initial-phase' in method:
this.wfarr = align_wfarr_initial_phase( this.wfarr, that.wfarr )
this.setfields()
if 'average-phase' in method:
this.wfarr = align_wfarr_average_phase( this.wfarr, that.wfarr, mask=mask, verbose=verbose)
this.setfields()
#
return this
# Shift the waveform phase
def shift_phase(this,
dphi,
fromraw=False, # If True, rotate the wavefor relative to its default wfarr (i.e. __rawgwfarr__)
verbose=False):
#
if not isinstance(dphi,(float,int)):
error('input must of float or int real valued','gwf.shift_phase')
if not fromraw:
wfarr = this.__rawgwfarr__
else:
wfarr = this.wfarr
#
msg = 'This function could be spead up by manually aligning relevant fields, rather than regenerating all fields which includes taking an FFT.'
warning(msg,'gwf.shift_phase')
#
this.wfarr = shift_wfarr_phase( wfarr, dphi )
this.setfields()
    # frequency domain filter the waveform given a window state for the frequency domain
    def fdfilter(this,window):
        '''Apply a frequency-domain window to the waveform and rebuild time-domain data.

        window : array-like, same length as this.f; multiplied into this.fd_y.
        The filtered spectrum is inverse-FFT'd back into this.wfarr and all
        derived fields are regenerated via setfields(). The filter is applied
        at most once per object (guarded by __lowpassfiltered__).

        NOTE(review): the plot()/show() calls below look like leftover debug
        output -- they will open a blocking figure every time this method runs;
        confirm whether they should be removed or gated behind a verbosity flag.
        '''
        #
        from scipy.fftpack import fft, fftfreq, fftshift, ifft
        from numpy import floor,array,log
        from matplotlib.pyplot import plot,show
        # Refuse to filter twice
        if this.__lowpassfiltered__:
            msg = 'wavform already low pass filtered'
            warning(msg,'gwf.lowpass')
        else:
            # Window the full complex spectrum
            fd_y = this.fd_y * window
            plot( log(this.f), log( abs(this.fd_y) ) )
            plot( log(this.f), log( abs(fd_y) ) )
            show()
            # Transform back to the time domain and store the filtered polarisations
            y = ifft( fftshift( fd_y ) )
            this.wfarr[:,1],this.wfarr[:,2] = y.real,y.imag
            # Regenerate all derived fields from the filtered wfarr
            this.setfields()
            # Mark so the filter is not applied again
            this.__lowpassfiltered__ = True
# Class for waveforms: Psi4 multipoles, strain multipoles (both spin weight -2), recomposed waveforms containing h+ and hx. NOTE that detector response waveforms will be left to pycbc to handle
class gwylm:
    '''
    Class to hold spherical multipoles of gravitational wave radiation from NR simulations. A simulation catalog entry object (of the scentry class) as well as the l and m eigenvalues for the desired multipole (aka mode) are needed.
    '''
    # Class constructor
    def __init__( this,                         # reference for the object to be created
                  scentry_obj,                  # member of the scentry class
                  lm = None,                    # iterable of length 2 containing multipolr l and m
                  lmax = None,                  # if set, multipoles with all |m| up to lmax will be loaded.
                                                # This input is not compatible with the lm tag
                  dt = None,                    # if given, the waveform array will beinterpolated to
                                                # this timestep
                  load = None,                  # IF true, we will try to load data from the scentry_object
                  clean = None,                 # Toggle automatic tapering
                  extraction_parameter = None,  # Extraction parameter labeling extraction zone/radius for run
                  level = None,                 # Opional refinement level for simulation. NOTE that not all NR groups use this specifier. In such cases, this input has no effect on loading.
                  w22 = None,                   # Optional input for lowest physical frequency in waveform; by default an wstart value is calculated from the waveform itself and used in place of w22
                  lowpass=None,                 # Toggle to lowpass filter waveform data upon load using "romline" (in basics.py) routine to define window
                  calcstrain = None,            # If True, strain will be calculated upon loading
                  verbose = None ):             # be verbose
        '''Construct a gwylm: load NR multipole data for a catalog entry and derive strain.

        The scentry_obj identifies the simulation; lm/lmax select which (l,m)
        multipoles to load. Loading, strain calculation, low-pass filtering and
        junk-radiation cleaning are all optionally performed here, controlled
        by the keyword toggles above.
        '''
        # NOTE that this method is setup to print the value of each input if verbose is true.
        # NOTE that default input values are handled just below
        # Print non None inputs to screen
        thisfun = this.__class__.__name__
        if not ( verbose in (None,False) ):
            # NOTE: dir() with no arguments lists this scope's local names (the
            # constructor inputs); eval(k) then fetches each value for display.
            for k in dir():
                if (eval(k) is not None) and (eval(k) is not False) and not ('this' in k):
                    msg = 'Found %s (=%r) keyword.' % (textul(k),eval(k))
                    alert( msg, 'gwylm' )
        # Handle default values
        load = True if load is None else load
        clean = False if clean is None else clean
        calcstrain = True if calcstrain is None else calcstrain
        # Validate the lm input
        this.__valinputs__(thisfun,lm=lm,lmax=lmax,scentry_obj=scentry_obj)
        # Confer the scentry_object's attributes to this object for ease of referencing
        for attr in scentry_obj.__dict__.keys():
            setattr( this, attr, scentry_obj.__dict__[attr] )
        # NOTE that we don't want the scentry's verbose property to overwrite the input above, so we definte this.verbose at this point, not before.
        this.verbose = verbose
        # Store the scentry object to optionally access its methods
        this.__scentry__ = scentry_obj
        ''' Explicitely reconfigure the scentry object for the current user. '''
        # this.config.reconfig() # NOTE that this line is commented out because scentry_obj.simdir() below calls the reconfigure function internally.
        # Tag this object with the simulation location of the given scentry_obj. NOTE that the right hand side of this assignment depends on the user's configuration file. Also NOTE that the configuration object is reconfigured to the system's settings within simdir()
        this.simdir = scentry_obj.simdir()
        # If no extraction parameter is given, retrieve default. NOTE that this depends on the current user's configuration.
        # NOTE that the line below is commented out becuase the line above (i.e. ... simdir() ) has already reconfigured the config object
        # scentry_obj.config.reconfig() # This line ensures that values from the user's config are taken
        if extraction_parameter is None:
            extraction_parameter = scentry_obj.default_extraction_par
        if level is None:
            level = scentry_obj.default_level
        # Warn when the effective extraction parameter/level differ from the config defaults
        config_extraction_parameter = scentry_obj.config.default_par_list[0]
        config_level = scentry_obj.config.default_par_list[1]
        if (config_extraction_parameter,config_level) != (extraction_parameter,level):
            msg = 'The (%s,%s) is (%s,%s), which differs from the config values of (%s,%s). You have either manually input the non-config values, or the handler has set them by looking at the contents of the simulation directory. '%(magenta('extraction_parameter'),green('level'),magenta(str(extraction_parameter)),green(str(level)),str(config_extraction_parameter),str(config_level))
            if this.verbose: alert( msg, 'gwylm' )
        # Store the extraction parameter and level
        this.extraction_parameter = extraction_parameter
        this.level = level
        # Store the extraction radius if a map is provided in the handler file
        special_method,handler = 'extraction_map',scentry_obj.loadhandler()
        if special_method in handler.__dict__:
            this.extraction_radius = handler.__dict__[special_method]( scentry_obj, this.extraction_parameter )
        else:
            this.extraction_radius = None
        # These fields are initiated here for visiility, but they are filled as lists of gwf object in load()
        this.ylm,this.hlm,this.flm = [],[],[] # psi4 (loaded), strain(calculated by default), news(optional non-default)
        # time step
        this.dt = dt
        # Load the waveform data
        if load==True: this.__load__(lmax=lmax,lm=lm,dt=dt)
        # Characterize the waveform's start and store related information to this.preinspiral
        this.preinspiral = None # In charasterize_start(), the information about the start of the waveform is actually stored to "starting". Here this field is inintialized for visibility.
        this.characterize_start_end()
        # If w22 is input, then use the input value for strain calculation. Otherwise, use the algorithmic estimate.
        if w22 is None:
            w22 = this.wstart_pn
            if verbose:
                # msg = 'Using w22 from '+bold(magenta('algorithmic estimate'))+' to calculate strain multipoles.'
                msg = 'Storing w22 from a '+bold(magenta('PN estimate'))+'[see pnw0 in basics.py, and/or arxiv:1310.1528v4]. This will be the frequency parameter used if strain is to be calculated.'
                alert( msg, 'gwylm' )
        else:
            if verbose:
                msg = 'Storing w22 from '+bold(magenta('user input'))+'. This will be the frequency parameter used if strain is to be calculated.'
                alert( msg, 'gwylm' )
        # Low-pass filter waveform (Psi4) data using "romline" routine in basics.py to determin windowed region
        this.__lowpassfiltered__ = False
        if lowpass:
            this.lowpass()
        # Calculate strain
        if calcstrain:
            this.calchlm(w22=w22)
        # Clean the waveforms of junk radiation if desired
        this.__isclean__ = False
        if clean:
            this.clean()
        # Set some boolean tags
        this.__isringdownonly__ = False # will switch to True if, ringdown is cropped. See gwylm.ringdown().
        # Create a dictionary representation of the mutlipoles
        this.__curate__()
# Create a dictionary representation of the mutlipoles
def __curate__(this):
'''Create a dictionary representation of the mutlipoles'''
# NOTE that this method should be called every time psi4, strain and/or news is loaded.
# NOTE that the related methods are: __load__, calchlm and calcflm
# Initiate the dictionary
this.lm = {}
for l,m in this.__lmlist__:
this.lm[l,m] = {}
# Seed the dictionary with psi4 gwf objects
for y in this.ylm:
this.lm[(y.l,y.m)]['psi4'] = y
# Seed the dictionary with strain gwf objects
for h in this.hlm:
this.lm[(h.l,h.m)]['strain'] = h
# Seed the dictionary with strain gwf objects
for f in this.flm:
this.lm[(f.l,f.m)]['news'] = f
# Validate inputs to constructor
def __valinputs__(this,thisfun,lm=None,lmax=None,scentry_obj=None):
from numpy import shape
# Raise error upon nonsensical multipolar input
if (lm is not None) and (lmax is not None) and load:
msg = 'lm input is mutually exclusive with the lmax input'
raise NameError(msg)
# Default multipolar values
if (lm is None) and (lmax is None):
lm = [2,2]
# Determine whether the lm input is a songle mode (e.g. [2,2]) or a list of modes (e.g. [[2,2],[3,3]] )
if len( shape(lm) ) == 2 :
if shape(lm)[1] != 2 :
# raise error
msg = '"lm" input must be iterable of length 2 (e.g. lm=[2,2]), or iterable of shape (X,2) (e.g. [[2,2],[3,3],[4,4]])'
error(msg,thisfun)
# Raise error upon nonsensical multipolar input
if not isinstance(lmax,int) and lm is None:
msg = '(!!) lmax must be non-float integer.'
raise ValueError(msg)
# Make sure that only one scentry in instput (could be updated later)
if not isinstance(scentry_obj,scentry):
msg = 'First input must be member of scentry class (e.g. as returned from scsearch() ).'
error(msg,thisfun)
# Make a list of lm values related to this gwylm object
def __make_lmlist__( this, lm, lmax ):
#
from numpy import shape
#
this.__lmlist__ = []
# If if an lmax value is given.
if lmax is not None:
# Then load all multipoles within lmax
for l in range(2,lmax+1):
#
for m in range(-l,l+1):
#
this.__lmlist__.append( (l,m) )
else: # Else, load the given lis of lm values
# If lm is a list of specific multipole indeces
if isinstance(lm[0],(list,tuple)):
#
for k in lm:
if len(k)==2:
l,m = k
this.__lmlist__.append( (l,m) )
else:
msg = '(__make_lmlist__) Found list of multipole indeces (e.g. [[2,2],[3,3]]), but length of one of the index values is not two. Please check your lm input.'
error(msg)
else: # Else, if lm is a single mode index
#
l,m = lm
this.__lmlist__.append( (l,m) )
# Store the input lm list
this.__input_lmlist__ = list(this.__lmlist__)
# Always load the m=l=2 waveform
if not ( (2,2) in this.__lmlist__ ):
msg = 'The l=m=2 multipole will be loaded in order to determine important characteristice of all modes such as noise floor and junk radiation location.'
warning(msg,'gwylm')
this.__lmlist__.append( (2,2) )
# Let the people know
if this.verbose:
alert('The following spherical multipoles will be loaded:%s'%cyan(str(this.__lmlist__)))
# Wrapper for core load function. NOTE that the extraction parameter input is independent of the usage in the class constructor.
def __load__( this, # The current object
lmax=None, # max l to use
lm=None, # (l,m) pair or list of pairs to use
extraction_parameter=None, # the label for different extraction zones/radii
level = None, # Simulation resolution level (Optional and not supported for all groups )
dt=None,
verbose=None ):
#
from numpy import shape
# Make a list of l,m values and store it to the current object as __lmlist__
this.__make_lmlist__( lm, lmax )
# Load all values in __lmlist__
for lm in this.__lmlist__:
this.load(lm=lm,dt=dt,extraction_parameter=extraction_parameter,level=level,verbose=verbose)
# Ensuer that all modes are the same length
this.__valpsi4multipoles__()
# Create a dictionary representation of the mutlipoles
this.__curate__()
# Validate individual multipole against the l=m=2 multipole: e.g. test lengths are same
def __valpsi4multipoles__(this):
#
this.__curate__()
#
t22 = this.lm[2,2]['psi4'].t
n22 = len(t22)
#
for lm in this.lm:
if lm != (2,2):
ylm = this.lm[lm]['psi4']
if len(ylm.t) != n22:
#
if True: #this.verbose:
warning('[valpsi4multipoles] The (l,m)=(%i,%i) multipole was found to not have the same length as its (2,2) counterpart. The offending waveform will be interpolated on the l=m=2 time series.'%lm,'gwylm')
# Interpolate the mode at t22, and reset fields
wfarr = intrp_wfarr(ylm.wfarr,domain=t22)
# Reset the fields
ylm.setfields(wfarr=wfarr)
# Given an extraction parameter, use the handler's extraction_map to determine the extraction radius
def __r__(this,extraction_parameter):
    '''Map an extraction parameter (zone label) to an extraction radius (units M, per the handler) via this simulation's load handler.'''
    return this.__scentry__.loadhandler().extraction_map(this,extraction_parameter)
# load the waveform data
def load(this,                        # The current object
         lm=None,                     # the l and m values of the multipole to load
         file_location=None,          # (Optional) if given, this file string will be used to load the file,
                                      # otherwise the function determines the file string automatically.
         dt = None,                   # Time step to enforce for data
         extraction_parameter=None,   # Extraction zone/radius label; defaults to the object's stored value
         level=None,                  # (Optional) Level specifier for simulation. Not all simulation groups use this!
         output=False,                # Toggle whether to store data to the current object, or output it
         verbose=None):               # Toggle for letting the people know
    '''Load a single psi4 multipole from disk into a gwf object.

    The file location is constructed from the catalog configuration unless
    given explicitly. After loading, the data is scaled by the extraction
    radius if needed, and the phase orientation is made consistent with the
    internal sign convention M_RELATIVE_SIGN_CONVENTION.

    Raises ValueError for nonsensical inputs and NameError if no data file
    can be found.
    '''
    # Import useful things
    from os.path import isfile,basename
    from numpy import sign,diff,unwrap,angle,amax,isnan,amin
    from scipy.stats.mstats import mode
    from scipy.version import version as scipy_version
    thisfun=inspect.stack()[0][3]
    # Default multipolar values
    if lm is None:
        lm = [2,2]
    # Raise error upon nonsensical multipolar input
    if lm is not None:
        if len(lm) != 2 :
            msg = '(!!) lm input must contain iterable of length two containing multipolar indeces'
            raise ValueError(msg)
        if abs(lm[1]) > lm[0]:
            msg = '(!!) Note that m=lm[1], and it must be maintained that abs(m) <= lm[0]=l. Instead (l,m)=(%i,%i).' % (lm[0],lm[1])
            raise ValueError(msg)
    # If file_location is not a string, then let the people know.
    # FIX: the % operator previously bound only to the trailing '.' literal,
    # raising a TypeError instead of the intended ValueError message; the %s
    # placeholder is now bound to its argument.
    if not isinstance( file_location, (str,type(None)) ):
        msg = '(!!) '+yellow('Error. ')+('Input file location is type %s, but must instead be ' % magenta(type(file_location).__name__))+green('str')+'.'
        raise ValueError(msg)
    # NOTE that l,m and extraction_parameter MUST be defined for the correct file location string to be created.
    l = lm[0]; m = lm[1]
    # Load default file name parameters: extraction_parameter,l,m,level
    if extraction_parameter is None:
        # Use the default value
        extraction_parameter = this.extraction_parameter
        if verbose: alert('Using the '+cyan('default')+' extraction_parameter of %g' % extraction_parameter)
    else:
        # Use the input value
        this.extraction_parameter = extraction_parameter
        if verbose: alert('Using the '+cyan('input')+' extraction_parameter of '+cyan('%g' % extraction_parameter))
    if level is None:
        # Use the default value
        level = this.level
        if verbose: alert('Using the '+cyan('default')+' level of %g' % level)
    else:
        # Use the input value
        this.level = level
        if verbose: alert('Using the '+cyan('input')+' level of '+cyan('%g' % level))
    # This boolean will be set to true if the file location to load is found to exist
    proceed = False
    # Construct the string location of the waveform data. NOTE that config is inherited indirectly from the scentry_obj. See notes in the constructor.
    if file_location is None: # Find file_location automatically. Else, it must be input
        # For all formatting possibilities in the configuration file
        # NOTE standard parameter order for every simulation catalog:
        #   extraction_parameter l m level
        for fmt in this.config.data_file_name_format :
            # NOTE the ordering here, and that the filename format in the config file has to be consistent with: extraction_parameter, l, m, level
            file_location = (this.simdir + fmt).format( extraction_parameter, l, m, level )
            # test whether the file exists
            if isfile( file_location ):
                break
    # If the file location exists, then proceed. If not, then this error is handled below.
    if isfile( file_location ):
        proceed = True
    # If the file to be loaded exists, then load it. Otherwise raise error.
    if proceed:
        # load array data from file
        if this.verbose: alert('Loading: %s' % cyan(basename(file_location)) )
        wfarr,_ = smart_load( file_location, verbose=this.verbose )
        # Handle extraction radius scaling
        if not this.config.is_rscaled:
            # If the data is not in the format r*Psi4, then multiply by r (units M) to make it so
            extraction_radius = this.__r__(extraction_parameter)
            wfarr[:,1:3] *= extraction_radius
        # Fix nans, nonmonotinicities and jumps in time series waveform array
        # NOTE that the line below is applied within the gwf constructor
        # wfarr = straighten_wfarr( wfarr )
        # Initiate waveform object and check that sign convention is in accordance with core settings
        def mkgwf(wfarr_):
            # Wrap the raw array in a gwf carrying this simulation's metadata
            return gwf( wfarr_,
                        l=l,
                        m=m,
                        extraction_parameter=extraction_parameter,
                        dt=dt,
                        verbose=this.verbose,
                        mf = this.mf,
                        m1 = this.m1, m2 = this.m2,
                        xf = this.xf,
                        label = this.label,
                        ref_scentry = this.__scentry__,
                        kind='$rM\psi_{%i%i}$'%(l,m) )
        #
        y_ = mkgwf(wfarr)
        # ---------------------------------------------------- #
        # Enforce internal sign convention for Psi4 multipoles
        # ---------------------------------------------------- #
        # Vote on the phase direction using only samples well above the noise floor
        msk_ = y_.amp > 0.01*amax(y_.amp)
        # NOTE(review): this version test inspects only the minor version number and
        # predates scipy 1.x -- confirm the branch taken against the installed scipy.
        if int(scipy_version.split('.')[1])<16:
            # Account for old scipy functionality
            external_sign_convention = sign(m) * mode( sign( y_.dphi[msk_] ) )[0][0]
        else:
            # Account for modern scipy functionality
            external_sign_convention = sign(m) * mode( sign( y_.dphi[msk_] ) ).mode[0]
        if M_RELATIVE_SIGN_CONVENTION != external_sign_convention:
            # Flip the imaginary part to reverse the phase direction, then rebuild the gwf
            wfarr[:,2] = -wfarr[:,2]
            y_ = mkgwf(wfarr)
            # Let the people know what is happening.
            msg = yellow('Re-orienting waveform phase')+' to be consistent with internal sign convention for Psi4, where sign(dPhi/dt)=%i*sign(m).' % M_RELATIVE_SIGN_CONVENTION + ' Note that the internal sign convention is defined in ... nrutils/core/__init__.py as "M_RELATIVE_SIGN_CONVENTION". This message has appeared becuase the waveform is determioned to obey and sign convention: sign(dPhi/dt)=%i*sign(m).'%(external_sign_convention)
            if verbose: alert( msg )
        # use array data to construct gwf object with multipolar fields
        if not output:
            this.ylm.append( y_ )
        else:
            return y_
    else:
        # There has been an error. Let the people know.
        msg = '(!!) Cannot find "%s". Please check that catalog_dir and data_file_name_format in %s are as desired. Also be sure that input l and m are within ranges that are actually present on disk.' % ( red(file_location), magenta(this.config.config_file_location) )
        raise NameError(msg)
# Plotting function for class: plot plus cross amp phi of waveforms USING the plot function of gwf()
def plot(this,show=False,fig=None,kind=None,verbose=False,domain=None):
    '''Plot psi4 and/or strain multipoles in the time or frequency domain.

    kind   : 'psi4' (or alias), 'strain' (or alias), or 'both' (default)
    domain : 'time' (default) or 'freq'
    show   : call matplotlib show() when True
    Returns the axes of the last figure plotted.
    '''
    #
    from matplotlib.pyplot import show as shw
    from matplotlib.pyplot import figure
    from numpy import array,diff,pi
    # Handle default kind of waveform to plot
    if kind is None:
        kind = 'both'
    # Handle which default domain to plot
    if domain is None:
        domain = 'time'
    elif not ( domain in ['time','freq'] ):
        msg = 'Error: domain keyword must be either "%s" or "%s".' % (cyan('time'),cyan('freq'))
        error(msg,'gwylm.plot')
    # If the plotting of only psi4 or only strain is desired.
    if kind != 'both':
        # Handle kind options
        if kind in ['psi4','y4','psilm','ylm','psi4lm','y4lm']:
            wflm = this.ylm
        elif kind in ['hlm','h','strain']:
            # Determine whether to calc strain here. If so, then let the people know.
            if len(this.hlm) == 0:
                msg = '(**) You have requested that strain be plotted before having explicitelly called MMRDNSlm.calchlm(). I will now call calchlm() for you.'
                # NOTE: print is called as a function for Python 2/3 compatibility
                print( magenta(msg) )
                this.calchlm()
            # Assign strain to the general placeholder.
            wflm = this.hlm
        else:
            # FIX: previously an unrecognized kind fell through with wflm undefined,
            # raising a NameError below; fail loudly instead.
            msg = 'Unknown kind "%s". Must be "both", or an alias of psi4 or strain.' % kind
            error(msg,'gwylm.plot')
        # Plot waveform data
        for y in wflm:
            # One figure per multipole
            fig = figure( figsize = 1.1*array([8,7.2]) )
            fig.set_facecolor("white")
            ax,_ = y.plot(fig=fig,title='%s: %s, (l,m)=(%i,%i)' % (this.setname,this.label,y.l,y.m),domain=domain)
            # If there is start characterization, plot some of it
            if 'starting' in this.__dict__:
                clr = 0.4*array([1./0.6,1./0.6,1])
                dy = 100*diff( ax[0].get_ylim() )
                for a in ax:
                    dy = 100*diff( a.get_ylim() )
                    if domain == 'time':
                        a.plot( wflm[0].t[this.startindex]*array([1,1]) , [-dy,dy], ':', color=clr )
                    if domain == 'freq':
                        a.plot( this.wstart*array([1,1])/(2*pi) , [-dy,dy], ':', color=clr )
        #
        if show:
            # Let the people know what is being plotted.
            if verbose: print( cyan('>>')+' Plotting '+darkcyan('%s'%kind) )
            shw()
    else: # Else, if both are desired
        # Plot both psi4 and strain
        for kind in ['psi4lm','hlm']:
            ax = this.plot(show=show,kind=kind,domain=domain)
    #
    return ax
# Strain via FFI (fixed frequency integration) method
def calchlm(this,w22=None):
    '''Calculate strain multipoles from psi4 by double time integration.

    Uses the fixed frequency integration (FFI) method of
    http://arxiv.org/pdf/1006.1632v3 with a per-mode lower cutoff frequency
    w0 = w22*m/2 (w0 = w22 for m=0). Results are stored as gwf objects in
    this.hlm and the multipole dictionary is refreshed.
    '''
    #
    from numpy import array,double
    # If there is no w22 given, then use the internally defined value of wstart
    if w22 is None:
        # w22 = this.wstart
        # NOTE: here we choose to use the ORBITAL FREQUENCY as a lower bound for the l=m=2 mode.
        w22 = this.wstart_pn
    # Reset the strain list before recomputing
    this.hlm = []
    for y in this.ylm:
        # Calculate the strain for each part of psi4. NOTE that there is currently NO special sign convention imposed beyond that used for psi4.
        w0 = w22 * double(y.m)/2.0 # NOTE that wstart is defined in characterize_start_end() using the l=m=2 Psi4 multipole.
        # Here, m=0 is a special case
        if 0==y.m: w0 = w22
        # Let the people know
        if this.verbose:
            alert( magenta('w0(w22) = %f' % w0)+yellow(' (this is the lower frequency used for FFI method [arxiv:1006.1632v3])') )
        # Create the core waveform information (double time integral of psi4 -> strain)
        t = y.t
        h_plus = ffintegrate( y.t, y.plus, w0, 2 )
        h_cross = ffintegrate( y.t, y.cross, w0, 2 )
        #%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%#
        # NOTE that there is NOT a minus sign above which is INconsistent with equation 3.4 of
        # arxiv:0707.4654v3. Here we choose to be consistent with eq 4 of arxiv:1006.1632 and not add a
        # minus sign.
        if this.verbose:
            msg = yellow('The user should note that there is no minus sign used in front of the double time integral for strain (i.e. Eq 4 of arxiv:1006.1632). This differs from Eq 3.4 of arxiv:0707.4654v3. The net effect is a rotation of the overall polarization of pi degrees. The user should also note that there is no minus sign applied to h_cross meaning that the user must be mindful to write h_pluss-1j*h_cross when appropriate.')
            alert(msg,'gwylm.calchlm')
        #%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%.%%#
        # Construct the waveform array for the new strain object
        wfarr = array( [ t, h_plus, h_cross ] ).T
        # Add the new strain multipole to this object's list of multipoles
        this.hlm.append( gwf( wfarr, l=y.l, m=y.m, mf=this.mf, xf=this.xf, kind='$rh_{%i%i}/M$'%(y.l,y.m) ) )
    # Create a dictionary representation of the multipoles
    this.__curate__()
    # NOTE that this is the end of the calchlm method
# Characterise the start and end of the waveform using the l=m=2 psi4 multipole
def characterize_start_end(this):
    '''Characterize waveform start (end of junk radiation) and end (noise floor).

    Stores to the current object:
      preinspiral  : gwfcharstart instance for the (2,2) mode
      wstart       : lower-bound estimate of the start frequency
      startindex   : index where junk radiation is thought to end
      wstart_pn    : PN-based (orbital) lower-bound frequency estimate
      postringdown : gwfcharend instance for the (2,2) mode
      noiseindex   : index after which data is noise dominated
      endindex     : last usable index
    '''
    # Look for the l=m=2 psi4 multipole.
    # FIX: list() makes this robust under Python 3, where filter() returns an iterator with no len().
    y22_list = list(filter( lambda y: y.l==y.m==2, this.ylm ))
    # If it doesnt exist in this.ylm, then load it
    if 0==len(y22_list):
        y22 = this.load(lm=[2,2],output=True,dt=this.dt)
    else:
        y22 = y22_list[0]
    #%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&#
    # Characterize the START of the waveform (pre-inspiral)       #
    #%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&#
    # Use the l=m=2 psi4 multipole to determine the waveform start
    # store information about the start of the waveform to the current object
    this.preinspiral = gwfcharstart( y22 )
    # store the expected min frequency in the waveform to this object as:
    this.wstart = this.preinspiral.left_dphi
    this.startindex = this.preinspiral.left_index
    # Estimate the smallest orbital frequency relevant for this waveform using a PN formula.
    safety_factor = 0.90
    this.wstart_pn = safety_factor*2.0*pnw0(this.m1,this.m2,this.b)
    #%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&#
    # Characterize the END of the waveform (post-ringdown)        #
    #%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&%%&#
    this.postringdown = gwfcharend( y22 )
    # After endindex, the data is dominated by noise
    this.noiseindex = this.postringdown.left_index
    this.endindex = this.postringdown.right_index
# Clean the time domain waveform by removing junk radiation.
def clean( this, method=None, crop_time=None ):
    '''Remove junk radiation (start) and numerical noise (end) from all modes.

    method    : 'window' (default) applies the smooth combined taper from the
                start/end characterization; 'crop' masks away early samples.
    crop_time : for method='crop' only; starting time to keep. If None, the
                start index from characterize_start_end is used instead.

    Does nothing if the object has already been cleaned.
    '''
    # Default cleaning method will be smooth windowing
    if method is None:
        method = 'window'
    # ---------------------------------------------------------------------- #
    # A. Clean the start and end of the waveform using information from the
    #    characterize_start_end method
    # ---------------------------------------------------------------------- #
    if not this.__isclean__ :
        if method.lower() == 'window':
            # Look for the l=m=2 psi4 multipole.
            # FIX: list() keeps this robust under Python 3 (filter -> iterator, no len()).
            y22_list = list(filter( lambda y: y.l==y.m==2, this.ylm ))
            # If it doesnt exist in this.ylm, then load it
            if 0==len(y22_list):
                y22 = this.load(lm=[2,2],output=True,dt=this.dt)
            else:
                y22 = y22_list[0]
            # The windows are calculated in the gwfcharstart/gwfcharend classes; the same
            # combined window is applied equally to all multipole moments. NOTE: language
            # disambiguation -- a taper is the part of a window that varies from zero to 1
            # (or 1 to zero); a window may contain many tapers.
            # Extract the pre-inspiral window
            preinspiral_window = this.preinspiral.window
            # Extract the post-ringdown window (calculated in the gwfcharend class)
            postringdown_window = this.postringdown.window
            # Construct the combined window
            window = preinspiral_window * postringdown_window
            # Apply this window to both the psi4 and strain multipole moments. apply_window() is a method of the gwf class.
            for y in this.ylm:
                y.apply_window( window=window )
            for h in this.hlm:
                h.apply_window( window=window )
        elif method.lower() == 'crop':
            # Crop such that the waveform data starts abruptly
            from numpy import arange,double
            # FIX: the None-test below was previously inverted ("if not (crop_time is None)"),
            # which used the characterization index when a crop_time WAS given, and left
            # `mask` undefined (NameError) when it was not.
            if crop_time is None:
                # If there is no crop time given, then use the low frequency value given by the nrutils start characterization time
                mask = arange( this.startindex, this.ylm[0].n )
            elif isinstance(crop_time,(double,int,float)):
                # Otherwise, use an input starting time
                mask = this.ylm[0].raw[:,0] > crop_time
            else:
                msg = 'crop_time input must be None or a number, but %s was given.' % type(crop_time).__name__
                error( msg, 'gwylm.clean' )
            for y in this.ylm:
                y.apply_mask( mask )
            for h in this.hlm:
                h.apply_mask( mask )
        #
        this.__isclean__ = True
# Restore every multipole object to its originally loaded state
def reset(this):
    '''Reset all psi4 and strain multipoles to their original (unwindowed, unmasked) state.'''
    # Delegate to gwf.reset on the psi4 modes first, then the strain modes
    for multipole in this.ylm + this.hlm:
        multipole.reset()
# return a copy of the current object
def copy(this):
    '''Return a deep copy of the current object (all multipoles included).'''
    from copy import deepcopy
    return deepcopy(this)
#--------------------------------------------------------------------------------#
# Calculate the luminosity if needed (NOTE that this could be calculated by default during calcstrain but isnt)
#--------------------------------------------------------------------------------#
def calcflm(this,           # The current object
            w22=None,       # Lower cutoff frequency used for FFI integration
            force=False,    # Force the calculation if it has already been performed
            verbose=False): # Let the people know
    '''Calculate the news (first time integral of psi4) for all loaded multipoles.

    Results are stored to this.flm as gwf objects. The calculation is skipped
    if this.flm is already populated, unless force=True.

    FIX: previously this method raised a NameError when called a second time,
    because the storage line referenced a name defined only inside the
    calculation branch; storage now happens only when the calculation runs.
    '''
    # Make sure that the l=m=2 multipole exists
    if not ( (2,2) in this.lm.keys() ):
        msg = 'There must be a l=m=2 multipole present to calculate the news.'
        error(msg,'gwylm.calcflm')
    # Only proceed if the calculation has not been performed before, or if it is forced
    proceed = (not this.flm) or force
    if proceed:
        # Import useful things
        from numpy import array,double
        # If there is no w22 given, then use the internally defined value of wstart
        if w22 is None:
            # w22 = this.wstart
            # NOTE: here we choose to use the ORBITAL FREQUENCY as a lower bound for the l=m=2 mode.
            w22 = this.wstart_pn
        # Calculate the luminosity for all multipoles
        flm = []
        for y in this.ylm:
            # Scale the lower cutoff frequency by m/2 (see characterize_start_end for w22's origin)
            w0 = w22 * double(y.m)/2.0
            # Here, m=0 is a special case
            if 0==y.m: w0 = w22
            # Let the people know
            if this.verbose:
                alert( magenta('w0(w22) = %f' % w0)+yellow(' (this is the lower frequency used for FFI method [arxiv:1006.1632v3])') )
            # Create the core waveform information (single time integral of psi4 -> news)
            t = y.t
            l_plus = ffintegrate( y.t, y.plus, w0, 1 )
            l_cross = ffintegrate( y.t, y.cross, w0, 1 )
            # Construct the waveform array for the news object
            wfarr = array( [ t, l_plus, l_cross ] ).T
            # Add the news multipole to this object's list of multipoles
            flm.append( gwf( wfarr, l=y.l, m=y.m, kind='$r\dot{h}_{%i%i}$'%(y.l,y.m) ) )
        # Store the flm list to the current object (only when freshly calculated)
        this.flm = flm
    else:
        msg = 'flm, the first integral of Psi4, will not be calculated because it has already been calculated for the current object'
        if verbose: warning(msg,'gwylm.calcflm')
    # Create a dictionary representation of the multipoles
    this.__curate__()
    # NOTE that this is the end of the calcflm method
#--------------------------------------------------------------------------------#
# Get a gwylm object that only contains ringdown
#--------------------------------------------------------------------------------#
def ringdown(this,                     # The current object
             T0 = 10,                  # Starting time relative to peak luminosity of the l=m=2 multipole
             T1 = None,                # Maximum time
             df = None,                # Optional df in frequency domain (determines time domain padding)
             use_peak_strain = False,  # Toggle to use peak of strain rather than the peak of the luminosity
             verbose = None):
    '''Return a COPY of this object cropped to its ringdown portion only.

    All multipoles (psi4, news, strain) are spline-interpolated onto a uniform
    time grid spanning [peak+T0, peak+T1], with times listed relative to the
    peak of the (2,2) luminosity (or strain if use_peak_strain).
    '''
    #
    from numpy import linspace,array
    from scipy.interpolate import InterpolatedUnivariateSpline as spline
    # Make sure that the l=m=2 multipole exists
    if not ( (2,2) in this.lm.keys() ):
        msg = 'There must be a l=m=2 multipole prewsent to estimate the waveform\'s ringdown part.'
        error(msg,'gwylm.ringdown')
    # Let the people know (about which peak will be used)
    if this.verbose or verbose:
        alert('Time will be listed relative to the peak of %s.'%cyan('strain' if use_peak_strain else 'luminosity'))
    # Use the l=m=2 multipole to estimate the peak location
    if use_peak_strain:
        # Only calculate strain if its not there already
        if (not this.hlm) : this.calchlm()
    else:
        # Redundancy checking (see above for strain) is handled within calcflm
        this.calcflm()
    # Retrieve the l=m=2 component
    ref_gwf = this.lm[2,2][ 'strain' if use_peak_strain else 'news' ]
    # ref_gwf = [ a for a in (this.hlm if use_peak_strain else this.flm) if a.l==a.m==2 ][0]
    # Time of the reference amplitude peak
    peak_time = ref_gwf.t[ ref_gwf.k_amp_max ]
    # peak_time = ref_gwf.intrp_t_amp_max
    # Handle T1 Input
    if T1 is None:
        # NOTE that we will set T1 to be *just before* the noise floor estimate
        T_noise_floor = ref_gwf.t[this.postringdown.left_index] - peak_time
        # "Just before" means a conservative fraction (safety_factor) of the way between T0 and T_noise_floor
        safety_factor = 0.45 # NOTE that this is quite a low safetey factor -- we wish to definitely avoid noise if possible. T1_min is implemented below just in case this is too strong of a safetey factor.
        T1 = T0 + safety_factor * ( T_noise_floor - T0 )
        # Make sure that T1 is at least T1_min
        T1_min = 60
        T1 = max(T1,T1_min)
        # NOTE that there is a chance that T1 chould be "too close" to T0
    # Validate T1 Value
    if T1<T0:
        msg = 'T1=%f which is less than T0=%f. This doesnt make sense: the fitting region cannot end before it begins under the working perspective.'%(T1,T0)
        error(msg,'gwylm.ringdown')
    if T1 > (ref_gwf.t[-1] - peak_time) :
        msg = 'Input value of T1=%i extends beyond the end of the waveform. We will stop at the last value of the waveform, not at the requested T1.'%T1
        warning(msg,'gwylm.ringdown')
        T1 = ref_gwf.t[-1] - peak_time
    # Use its time series to define a uniform ringdown time grid
    a = peak_time + T0
    b = peak_time + T1
    # NOTE(review): n is a float here; newer numpy versions require an integer
    # sample count for linspace -- confirm before porting.
    n = abs(float(b-a))/ref_gwf.dt
    t = linspace(a,b,n)
    # Copy the current object; the copy will hold only the ringdown data
    that = this.copy()
    that.__isringdownonly__ = True
    that.T0 = T0
    that.T1 = T1
    # Helper: crop-and-interpolate each gwf in a list onto the ringdown grid
    def __ringdown__(wlm):
        #
        xlm = []
        for k,y in enumerate(wlm):
            # Create interpolated plus and cross parts
            plus = spline(y.t,y.plus)(t)
            cross = spline(y.t,y.cross)(t)
            # Create waveform array (time listed relative to the peak)
            wfarr = array( [t-peak_time,plus,cross] ).T
            # Create gwf object
            xlm.append( gwf(wfarr,l=y.l,m=y.m,mf=this.mf,xf=this.xf,kind=y.kind,label=this.label,m1=this.m1,m2=this.m2,ref_scentry = this.__scentry__) )
        #
        return xlm
    # Crop psi4, news and strain multipoles alike
    that.ylm = __ringdown__( this.ylm )
    that.flm = __ringdown__( this.flm )
    that.hlm = __ringdown__( this.hlm )
    # Create a dictionary representation of the multipoles
    that.__curate__()
    #
    return that
# pad each mode to a new_length
def pad(this,new_length=None):
    '''Pad every psi4 and strain multipole to new_length samples (delegates to gwf.pad).'''
    # Apply the same padding to the psi4 modes and then the strain modes
    for mode in this.ylm + this.hlm:
        mode.pad( new_length=new_length )
# Recompose the waveforms at a sky position about the source
# NOTE that this function returns a gwf object
def recompose( this,          # The current object
               theta,         # The polar angle
               phi,           # The azimuthal angle
               kind=None,     # 'psi4', 'strain' or 'news'
               verbose=False ):
    '''Sum the stored multipoles against spin -2 spherical harmonics evaluated
    at (theta,phi) and return a single gwf object of the requested kind.
    Only modes listed in this.__input_lmlist__ contribute.
    '''
    #
    from numpy import dot,array,zeros
    # Default to strain while letting the people know
    if kind is None:
        msg = 'no kind specified for recompose calculation. We will proceed assuming that you desire recomposed strain. Please specify the desired kind (e.g. strain, psi4 or news) you wishe to be output as a keyword (e.g. kind=news)'
        warning( msg, 'gwylm.recompose' )
        kind = 'strain'
    # Create Matrix of Multipole time series
    def __recomp__(alm,kind=None):
        # One column per requested (input) mode; one harmonic weight per mode
        M = zeros( [ alm[0].n, len(this.__input_lmlist__) ], dtype=complex )
        Y = zeros( [ len(this.__input_lmlist__), 1 ], dtype=complex )
        # Seed the matrix as well as the vector of spherical harmonic values.
        # FIX: index columns by the mode's position in __input_lmlist__ rather than its
        # position in alm -- alm may hold extra modes (e.g. the auto-loaded (2,2)),
        # which previously misaligned columns or indexed out of bounds.
        for a in alm:
            if (a.l,a.m) in this.__input_lmlist__:
                k = this.__input_lmlist__.index( (a.l,a.m) )
                M[:,k] = a.y
                Y[k] = sYlm(-2,a.l,a.m,theta,phi)
        # Perform the matrix multiplication and create the output gwf object
        Z = dot( M,Y )[:,0]
        wfarr = array( [ alm[0].t, Z.real, Z.imag ] ).T
        # return the output
        return gwf( wfarr, kind=kind, ref_scentry = this.__scentry__ )
    #
    if kind=='psi4':
        y = __recomp__( this.ylm, kind=r'$rM\,\psi_4(t,\theta,\phi)$' )
    elif kind=='strain':
        y = __recomp__( this.hlm, kind=r'$r\,h(t,\theta,\phi)/M$' )
    elif kind=='news':
        y = __recomp__( this.flm, kind=r'$r\,\dot{h}(t,\theta,\phi)/M$' )
    else:
        # FIX: previously an unrecognized kind fell through with y undefined (NameError)
        msg = 'Unknown kind "%s". Must be one of psi4, strain or news.' % kind
        error( msg, 'gwylm.recompose' )
    #
    return y
# Extrapolate to infinite radius: http://arxiv.org/pdf/1503.00718.pdf
def extrapolate(this,method=None):
    '''Extrapolate waveform data to infinite extraction radius.

    NOT IMPLEMENTED: this stub raises immediately via error(); the branches
    below are unreachable placeholders for future work.
    '''
    msg = 'This method is under development and cannot currently be used.'
    error(msg)
    # If the simulation is already extrapolated, then do nothing
    if this.__isextrapolated__:
        # Do nothing (placeholder)
        print
    else: # Else, extrapolate
        # Use radius only scaling (placeholder)
        print
    return None
# Estimate Remnant BH mass and spin from gwylm object. This is done by "brute" force here (i.e. an actual calculation), but NOTE that values for final mass and spin are Automatically loaded within each scentry; However!, some of these values may be incorrect -- especially for BAM sumulations. Here we make a rough estimate of the remnant mass and spin based on a ringdown fit.
def brute_masspin( this,                  # IMR gwylm object
                   T0 = 10,               # Time relative to peak luminosity to start ringdown
                   T1 = None,             # Time relative to peak lum where ringdown ends (if None, gwylm.ringdown sets its value to the end of the waveform approx at noise floor)
                   apply_result = False,  # If true, apply result to input this object
                   verbose = False ):     # Let the people know
    '''Estimate Remnant BH mass and spin from gwylm object. This is done by "brute"
    force here (i.e. an actual calculation), but NOTE that values for final mass
    and spin are Automatically loaded within each scentry; However!, some of
    these values may be incorrect -- especially for BAM sumulations. Here we make
    a rough estimate of the remnant mass and spin based on a ringdown fit.

    Returns (mf, xf, Q) where Q is the scipy.optimize.minimize result.
    '''
    # Import useful things
    thisfun='gwylm.brute_masspin'
    from scipy.optimize import minimize
    from nrutils import FinalSpin0815,EradRational0815
    from kerr import qnmfit
    # Validate first input type
    is_number = isinstance(this,(float,int))
    is_gwylm = False if is_number else 'gwylm'==this.__class__.__name__
    if not is_gwylm:
        msg = 'First input must be member of gwylm class from nrutils.'
        error(msg)
    # Get the ringdown part starting from T0 after the peak luminosity
    g = this.ringdown(T0=T0,T1=T1)
    # Define a work function: residual of a QNM fit at trial remnant (Mf,xf)
    def action( Mfxf ):
        # NOTE that the (2,2) psi4 multipole of the ringdown object is fit below.
        f = qnmfit(g.lm[2,2]['psi4'],Mfxf=Mfxf)
        # f = qnmfit(g.ylm[0],Mfxf=Mfxf)
        return f.frmse
    # Use PhenomD fit for guess
    eta = this.m1*this.m2/((this.m1+this.m2)**2)
    chi1, chi2 = this.S1[-1]/(this.m1**2), this.S2[-1]/(this.m2**2)
    guess_xf = FinalSpin0815( eta, chi2, chi1 )
    guess_Mf = 1-EradRational0815(eta, chi2, chi1 )
    guess = (guess_Mf,guess_xf)
    # perform the minization
    # NOTE that mass is bound on (0,1) and spin on (-1,1)
    Q = minimize( action,guess, bounds=[(1-0.999,1),(-0.999,0.999)] )
    # Extract the solution
    mf,xf = Q.x
    # Apply to the input gwylm object if requested
    if apply_result:
        this.mf = mf
        this.xf = xf
        this.Xf = this.Sf / (mf*mf)
        # Propagate the remnant values to every stored multipole object
        attr = [ 'ylm', 'hlm', 'flm' ]
        for atr in attr:
            for y in this.__dict__[atr]:
                y.mf, y.xf = mf, xf
                if ('Sf' in y.__dict__) and ('Xf' in y.__dict__):
                    y.Xf = y.Sf / (mf*mf)
    # Return stuff, including the fit object
    return mf,xf,Q
# Low pass filter using romline in basics.py to determine window region
def lowpass(this):
    '''EXPERIMENTAL low-pass filter of all multipoles in the frequency domain.

    NOT FOR USE: this method raises immediately via error(); the body below
    is exploratory code that fits the log spectrum with a ROM line to choose
    a taper, plots diagnostics, and applies gwf.fdfilter per mode.
    '''
    #
    msg = 'Howdy, partner! This function is experimental and should NOT be used.'
    error(msg,'lowpass')
    #
    from numpy import log,ones
    from matplotlib.pyplot import plot,show,axvline
    #
    for y in this.ylm:
        # Number of knots for the ROM line fit of the log spectrum
        N = 8
        if y.m>=0:
            # Positive-m modes: fit the positive-frequency side
            mask = y.f>0
            lf = log( y.f[ mask ] )
            lamp = log( y.fd_amp[ mask ] )
            knots,_ = romline(lf,lamp,N,positive=True,verbose=True)
            a,b = 0,1
            state = knots[[a,b]]
            window = ones( y.f.shape )
            window[ mask ] = maketaper( lf, state )
        elif y.m<0:
            # Negative-m modes: fit the non-positive-frequency side
            mask = y.f<=0
            lf = log( y.f[ mask ] )
            lamp = log( y.fd_amp[ mask ] )
            knots,_ = romline(lf,lamp,N,positive=True,verbose=True)
            a,b = -1,-2
            state = knots[[a,b]]
            window = ones( y.f.shape )
            window[ mask ] = maketaper( lf, state )
        # Diagnostic plots of the spectrum, window and knots
        plot( lf, lamp )
        plot( lf, log(window[mask])+lamp, 'k', alpha=0.5 )
        plot( lf[knots], lamp[knots], 'o', mfc='none', ms=12 )
        axvline(x=lf[knots[a]],color='r')
        axvline(x=lf[knots[b]],color='r')
        show()
        # plot(y.f,y.fd_amp)
        # show()
        plot( window )
        axvline(x=knots[a],color='r')
        axvline(x=knots[b],color='r')
        show()
        # Apply the frequency domain window to this mode
        y.fdfilter( window )
    # Flag that the filter has been applied
    this.__lowpassfiltered__ = True
# Time Domain LALSimulation Waveform Approximant h_plus and cross, but using nrutils data conventions
# NOTE(review): this def is written without a self/this first argument; it appears
# intended as a static helper -- confirm how callers invoke it (an instance call
# would bind the object to apx).
def lswfa( apx ='IMRPhenomPv2',  # Approximant name; must be compatible with lal conventions
           eta = None,           # symmetric mass ratio
           chi1 = None,          # spin1 iterable (Dimensionless)
           chi2 = None,          # spin2 iterable (Dimensionless)
           fmin_hz = 30.0,       # phys starting freq in Hz
           verbose = False ):    # boolean toggle for verbosity
    '''Generate a time-domain LALSimulation waveform and return it as a gwf
    object in code units (Mtotal=1), via lalsim2gwf.
    '''
    #
    from numpy import array,linspace,double
    import lalsimulation as lalsim
    from nrutils import eta2q
    import lal
    # Standardize input mass ratio and convert to component masses
    M = 70.0
    q = eta2q(eta)
    q = double(q)
    q = max( [q,1.0/q] )
    m2 = M * 1.0 / (1.0+q)
    m1 = float(q) * m2
    # NOTE(review): chi vectors are passed through unchanged as dimensionless spins -- confirm convention.
    S1 = array(chi1)
    S2 = array(chi2)
    # Physical starting frequency and total mass in SI units
    fmin_phys = fmin_hz
    M_total_phys = (m1+m2) * lal.MSUN_SI
    # Arguments for lalsimulation.SimInspiralTD
    TD_arguments = {'phiRef': 0.0,
                    'deltaT': 1.0 * M_total_phys * lal.MTSUN_SI / lal.MSUN_SI,
                    'f_min': fmin_phys,
                    'm1': m1 * lal.MSUN_SI,
                    'm2' : m2 * lal.MSUN_SI,
                    'S1x' : S1[0],
                    'S1y' : S1[1],
                    'S1z' : S1[2],
                    'S2x' : S2[0],
                    'S2y' : S2[1],
                    'S2z' : S2[2],
                    'f_ref': 100.0,
                    'r': lal.PC_SI,
                    'z': 0,
                    'i': 0,
                    'lambda1': 0,
                    'lambda2': 0,
                    'waveFlags': None,
                    'nonGRparams': None,
                    'amplitudeO': -1,
                    'phaseO': -1,
                    'approximant': lalsim.SimInspiralGetApproximantFromString(apx)}
    #
    # Use lalsimulation to calculate plus and cross in lalsim data format
    hp, hc = lalsim.SimInspiralTD(**TD_arguments)
    # Convert the lal datatype to a gwf object
    D = 1e-6 * TD_arguments['r']/lal.PC_SI
    y = lalsim2gwf( hp,hc,m1+m2, D )
    #
    return y
# Characterize END of time domain waveform (POST RINGDOWN)
class gwfcharend:
    '''Characterize the post-ringdown (noise floor) region of a gwf object.

    The post-peak log-amplitude is fit with a two-segment ROM line; the last
    knot marks where numerical noise is taken to begin. Stores the indices of
    the region to be windowed away and the window itself.
    '''
    def __init__(this,ylm):
        # Import useful things
        from numpy import log
        # ROM (Reduced order model) the post-peak log-amplitude as two lines
        la = log( ylm.amp[ ylm.k_amp_max: ])
        tt = ylm.t[ ylm.k_amp_max: ]
        knots,rl = romline(tt,la,2)
        # Check for lack of noise floor (in the case of sims stopped before noise floor reached)
        # NOTE that in this case no effective windowing is applied
        this.nonoisefloor = knots[-1]+1 == len(tt)
        if this.nonoisefloor:
            msg = 'No noise floor found. This simulation may have been stopped before the numerical noise floor was reached.'
            warning(msg,'gwfcharend')
        # Define the start and end of the region to be windowed
        this.left_index = ylm.k_amp_max + knots[-1]
        # NOTE(review): "*6/10" relies on Python 2 integer division; under
        # Python 3 this yields a float index -- confirm before porting.
        this.right_index = ylm.k_amp_max + knots[-1]+(len(tt)-knots[-1])*6/10
        # Calculate the window (a falling taper over [left,right]) and store to the current object
        this.window_state = [ this.right_index, this.left_index ]
        this.window = maketaper( ylm.t, this.window_state )
# Characterize the START of a time domain waveform (PRE INSPIRAL)
class gwfcharstart:
    '''Characterize the start (junk radiation region) of a time domain gwf.

    Attributes set on construction:
      left_index   : index where initial junk radiation is thought to end
      right_index  : end of the turn-on (taper) region
      left_dphi, right_dphi, center_dphi : lower/upper/mean estimates of the
                     minimum instantaneous frequency within the waveform
      peak_mask    : indices of the amplitude peaks used
      window_state : [left_index, right_index]
      window       : rising taper array over y.t
    '''
    #
    def __init__( this,          # the object to be created
                  y,             # input gwf object whose start behavior will be characterised
                  shift = 2,     # The size of the turn on region in units of waveform cycles.
                  verbose = False ):
        #
        from numpy import arange,diff,where,array,ceil,mean
        from numpy import histogram as hist
        thisfun=this.__class__.__name__
        # Take notes on what happens
        notes = []
        # This algorithm estimates the start of the gravitational waveform -- after the initial junk radiation that is present within most raw NR output. The algorithm proceeds in the manner consistent with a time domain waveform.
        # Validate inputs
        if not isinstance(y,gwf):
            msg = 'First imput must be a '+cyan('gwf')+' object. Type %s found instead.' % type(y).__name__
            error(msg,thisfun)
        # 1. Find the pre-peak portion of the waveform.
        val_mask = arange( y.k_amp_max )
        # 2. Find the peak locations of the cross part, discarding peaks below a small amplitude threshold.
        pks,pk_mask = findpeaks( y.cross[ val_mask ] )
        pk_mask = pk_mask[ pks > y.amp[y.k_amp_max]*5e-4 ]
        # 3. Find the difference between the peaks
        D = diff(pk_mask)
        # If the waveform starts at its peak (e.g. in the case of ringdown)
        if len(D)==0:
            # Degenerate case: no pre-peak structure to characterize
            this.left_index = 0
            this.right_index = 0
            this.left_dphi=this.center_dphi=this.right_dphi = y.dphi[this.right_index]
            this.peak_mask = [0]
        else:
            # 4. Find location of the first peak that is separated from its adjacent by greater than the largest value. This location is stored to start_map.
            start_map = find( D >= max(D) )[0]
            # 5. Determine the width of waveform turn on in indices based on the results above. NOTE that the width is bound below by half the difference between the wf start and the wf peak locations.
            index_width = min( [ 1+pk_mask[start_map+shift]-pk_mask[start_map], 0.5*(1+y.k_amp_max-pk_mask[ start_map ]) ] )
            # 6. Estimate where the waveform begins to turn on. This is approximately where the junk radiation ends. Note that this area will be very depressed upon windowing.
            j_id = pk_mask[ start_map ]
            # 7. Use all results thus far to construct this object
            this.left_index = int(j_id)                        # Where the initial junk radiation is thought to end
            this.right_index = int(j_id + index_width - 1)     # If tapering is desired, then this index will be
                                                               # the end of the tapered region.
            this.left_dphi = y.dphi[ this.left_index ]         # A lowerbound estimate for the min frequency within
                                                               # the waveform.
            this.right_dphi = y.dphi[ this.right_index ]       # An upperbound estimate for the min frequency within
                                                               # the waveform
            this.center_dphi = mean(y.dphi[ this.left_index:this.right_index ]) # A moderate estimate for the min frequency within the
                                                               # waveform
            this.peak_mask = pk_mask
        # Construct related window (rising taper over [left_index, right_index])
        this.window_state = [this.left_index,this.right_index]
        this.window = maketaper( y.t, this.window_state )
# Characterize the END of a time domain waveform: Where is the noise floor?
def gwfend():
    '''Placeholder: end-of-waveform characterization is not yet implemented.'''
    return None
# Function which converts lalsim waveform to gwf object
def lalsim2gwf( hp,hc,M,D ):
    '''Convert LAL time-domain plus/cross series into a gwf object in code units.

    hp, hc : LAL time series objects for h_plus and h_cross (with .data.data and .deltaT)
    M      : total mass used for the unit conversion via codeh
    D      : distance used for the unit conversion via codeh
    '''
    #
    from numpy import linspace,array,double,sqrt,hstack,zeros
    from nrutils.tools.unit.conversion import codeh
    # Extract plus and cross data. Divide out contribution from spherical harmonic towards NR scaling
    x = sYlm(-2,2,2,0,0)
    h_plus  = hp.data.data/x
    h_cross = hc.data.data/x
    # Create time series data (uniform grid with the LAL sample spacing)
    t = linspace( 0.0, (h_plus.size-1.0)*hp.deltaT, int(h_plus.size) )
    # Create waveform array
    harr = array( [t,h_plus,h_cross] ).T
    # Convert to code units, where Mtotal=1
    harr = codeh( harr,M,D )
    # Create gwf object
    h = gwf( harr, kind=r'$h^{\mathrm{lal}}_{22}$' )
    #
    return h
# Taper a waveform object
def gwftaper( y,                        # gwf object to be windowed
              state,                    # Index values defining region to be tapered:
                                        # For state=[a,b], if a>b then the taper is 1 at b and 0 at a
                                        # if a<b, then the taper is 1 at a and 0 at b.
              plot     = False,
              verbose  = False):
    '''Apply a one-sided Hann taper to a gwf object over the index range in state.

    The returned window is 0 below min(state), ramps across
    [min(state), max(state)) using half of a Hann window (rising when
    state[0] < state[-1], falling otherwise), and is 1 above max(state).
    The gwf's plus/cross columns (wfarr[:,1] and wfarr[:,2]) are multiplied
    by the window in place and y.setfields() is called; the window array is
    returned. If state[0] == state[-1] the data is left untouched.
    '''
    # Import useful things
    from numpy import ones
    from numpy import hanning as hann
    # Parse taper state
    a = state[0]
    b = state[-1]
    # Only proceed if a valid window is given
    proceed = True
    true_width = abs(b-a)
    twice_hann = hann( 2*true_width )
    if b>a:
        # Ascending state: rising half of the Hann window (0 at a, ~1 at b)
        true_hann = twice_hann[ :true_width ]
    elif a>b:
        # BUGFIX: this branch previously tested `a<b`, which duplicates the
        # `b>a` case above and was therefore unreachable — a descending state
        # silently produced no taper at all.
        # Descending state: falling half of the Hann window (~1 at b, 0 at a)
        true_hann = twice_hann[ true_width: ]
    else:
        # Zero-width taper region: nothing to do
        proceed = False
    # Proceed (or not) with windowing
    window = ones( y.n )
    if proceed:
        # Make the window
        window[ :min(state) ] = 0
        window[ min(state) : max(state) ] = true_hann
        # Apply the window to the data and reset fields
        y.wfarr[:,1] *= window
        y.wfarr[:,2] *= window
        y.setfields()
    #
    return window
|
# -*- encoding: utf-8
import socket
from suplemon.suplemon_module import Module
class Hostname(Module):
    """Shows the machine hostname in the bottom status bar."""

    def init(self):
        """Resolve and cache this machine's hostname once at module load."""
        self.hostname = ""
        hostinfo = None
        try:
            hostinfo = socket.gethostbyaddr(socket.gethostname())
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallows SystemExit and
            # KeyboardInterrupt. Keep the best-effort behavior (no hostname is
            # fine) but only catch ordinary exceptions, and log the failure.
            self.logger.debug("Failed to get hostname.")
        if hostinfo:
            self.hostname = hostinfo[0]
            # Use shorter hostname (alias) if available
            if hostinfo[1]:
                self.hostname = hostinfo[1][0]

    def get_status(self):
        """Return the status-bar text, or "" when no hostname is known."""
        if self.hostname:
            return "host:{0}".format(self.hostname)
        return ""
# Suplemon module registration table: exposes the Hostname class under the
# module name "hostname" and places its status text in the bottom status bar.
module = {
    "class": Hostname,
    "name": "hostname",
    "status": "bottom",
}
|
from time import time
import logging
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.decomposition import PCA
from sklearn.svm import SVC
from sklearn.externals import joblib
from numpy.random import RandomState
import numpy as np
import glob
import cv2
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
def get_images_path(img_list='image_list.txt'):
    '''
    input:
        img_list => str
            => default value: 'image_list.txt'
    description:
        loading image from given image path list, each path followed by ',' then image label (0|1)
    return:
        data => list( (image_path, label) )
    '''
    with open(img_list, 'r') as fin:
        lines = fin.read().split('\n')
    # Keep only well-formed "path,label" records; malformed lines (including
    # trailing empty lines) are silently skipped.
    return [tuple(fields) for fields in (line.split(',') for line in lines)
            if len(fields) == 2]
def init_training_data(path_list):
    '''
    input:
        path_list => list( (image_path, label) )
    return:
        X => np.array([flatten_image])
        Y => np.array([label_image])
        img_w => int
        img_h => int
    '''
    # All images are assumed to share the dimensions of the first one.
    h, w = cv2.imread(path_list[0][0], 0).shape
    # Read each image in grayscale mode (flag 0) and flatten to a 1-D row.
    X = np.asarray([cv2.imread(path, 0).flatten() for path, _ in path_list])
    Y = np.asarray([label for _, label in path_list])
    print("loaded {} images, {} labels".format(len(X), len(Y)))
    return X, Y, w, h
def train_model(n_components = 96, gamma = None, C = None):
    '''
    input:
        n_components => int *clamped to at most H, where H is the number of training rows*
        gamma => <float|int>
        C => <float|int>
    description:
        Train a new PCA + SVM gender classifier. If gamma or C is None, a grid
        search is used to find the best hyperparameters; otherwise an SVC is
        trained directly with the given values. The dataset is split 80/20 into
        train/test; after training, the model is evaluated on the held-out 20%
        and the fitted PCA matrix and SVM model are saved to disk with joblib.
    output:
        Nothing
    '''
    data_lpath = get_images_path()
    print('data to read: ',len(data_lpath))
    X, Y, w, h = init_training_data(data_lpath)
    # split 20% of data for testing and the rest for training, chosen randomly
    X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.2)
    print("data training/test => {}/{}".format(X_train.shape[0], X_test.shape[0]))
    # PCA part.
    # BUGFIX: the previous clamp computed
    #   X_train.shape[0] - abs(X_train.shape[0] - n_components)   (= 2*rows - n_components)
    # which shrinks too far and even goes negative when n_components > 2*rows.
    # PCA only requires n_components <= number of training rows.
    n_components = min(n_components, X_train.shape[0])
    solver = 'randomized'
    print("Extracting the top {} eigenfaces from {} faces".format(n_components, X_train.shape[0]))
    t0 = time()
    pca = PCA(n_components=n_components, svd_solver=solver,whiten=True).fit(X_train)
    print("done in %0.3fs" % (time() - t0))
    # eigenfaces acquired (reshaped for optional visualisation; not used below)
    eigenfaces = pca.components_.reshape((n_components, h, w))
    print("Projecting the input data on the eigenfaces")
    t0 = time()
    X_train_pca = pca.transform(X_train)
    X_test_pca = pca.transform(X_test)
    print("done in %0.3fs" % (time() - t0))
    # build classifier part, this case using svc(Support Vector Classification)
    # http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
    # The classifier is trained on the PCA-projected (flattened) images.
    clf = None
    t0 = time()
    # BUGFIX: use identity comparison with None instead of `== None`.
    if gamma is None and C is None:
        # long process, lower or change the variation of hyperparam
        # to test for faster process
        param_grid = [
            {'C': np.linspace(1, 1e10, num=120),
             'kernel': ['rbf'],
             'gamma': np.linspace(1e-5, 0.9, num=120), },
            {'C': np.linspace(1, 1e10, num=120),
             'kernel': ['linear'],
             'gamma': np.linspace(1e-5, 0.9, num=120), },
            {'C': np.linspace(1, 1e10, num=120),
             'kernel': ['poly'],
             'gamma': np.linspace(1e-5, 0.9, num=120), }
        ]
        # Some sklearn versions reject verbose=-1; fall back to verbose=False.
        try:
            clf = GridSearchCV(estimator=SVC(verbose=-1, class_weight='balanced', cache_size=500,decision_function_shape='ovr'), param_grid=param_grid, n_jobs=-1)# multi thread
        except Exception as _e:
            clf = GridSearchCV(estimator=SVC(verbose=False, class_weight='balanced', cache_size=500,decision_function_shape='ovr'), param_grid=param_grid, n_jobs=-1)# multi thread
        clf = clf.fit(X_train_pca, y_train)
        print("\ndone fit data in %0.3fs" % (time() - t0))
        print("Best estimator found by grid search:")
        print(clf.best_estimator_)
    else:
        # kernel [rbf, linear, poly, sigmoid]
        # decision_function_shape [ovr, ovo]
        clf = SVC(gamma=gamma, C=C, decision_function_shape='ovr', class_weight='balanced', verbose=True, kernel='rbf') # not using multi thread
        clf = clf.fit(X_train_pca, y_train)
        print("done fit data in %0.3fs" % (time() - t0))
    # post training: evaluate on the held-out 20%
    print('='*60)
    print("Eval:")
    print("Predicting gender")
    n_classes = 2
    print("{} classes".format(n_classes))
    t0 = time()
    # get predicted result with input projected test image dataset
    y_pred = clf.predict(X_test_pca)
    print("done in %0.3fs" % (time() - t0))
    print(classification_report(y_test, y_pred))
    # Confusion matrix layout (rows = expected, columns = predicted):
    #              predicted
    #               0    1
    # expected 0 |  a    b |
    #          1 |  c    d |
    print(confusion_matrix(y_test, y_pred, labels=["0","1"]))
    pca_model_name = "pca_model_component_{}.jlib".format(n_components)
    svm_model_name = "svm_model_component_{}.jlib".format(n_components)
    joblib.dump(pca, pca_model_name)
    joblib.dump(clf, svm_model_name)
    print("done creating new model, saved with name:")
    print("PCA matrix: {}".format(pca_model_name))
    print("SVM model: {}".format(svm_model_name))
    # Keep the console open so the training summary can be read.
    input()
if __name__ == '__main__':
    # Ask for confirmation before starting a (potentially long) training run.
    answer = input("TRAIN NEW MODEL [ Y | enter to cancel ]>> ")
    if str(answer).lower() == 'y':
        train_model()
|
// David Eberly, Geometric Tools, Redmond WA 98052
// Copyright (c) 1998-2019
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt
// https://www.geometrictools.com/License/Boost/LICENSE_1_0.txt
// Version: 4.0.2019.08.13
#pragma once
#include <Mathematics/TIQuery.h>
#include <Mathematics/DistPoint3Plane3.h>
#include <Mathematics/OrientedBox.h>
namespace gte
{
template <typename Real>
class TIQuery<Real, Plane3<Real>, OrientedBox3<Real>>
{
public:
struct Result
{
bool intersect;
};
Result operator()(Plane3<Real> const& plane, OrientedBox3<Real> const& box)
{
Result result;
Real radius =
std::fabs(box.extent[0] * Dot(plane.normal, box.axis[0])) +
std::fabs(box.extent[1] * Dot(plane.normal, box.axis[1])) +
std::fabs(box.extent[2] * Dot(plane.normal, box.axis[2]));
DCPQuery<Real, Vector3<Real>, Plane3<Real>> ppQuery;
auto ppResult = ppQuery(box.center, plane);
result.intersect = (ppResult.distance <= radius);
return result;
}
};
}
|
// Copyright 2016 The TensorFlow Authors. All Rights Reserved.
// Modifications copyright (C) 2019 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
#ifndef HOROVOD_COLLECTIVE_OPERATIONS_H
#define HOROVOD_COLLECTIVE_OPERATIONS_H
#include <iostream>
#include "../common.h"
#include "../controller.h"
#include "../global_state.h"
#include "../operations.h"
#include "../parameter_manager.h"
namespace horovod {
namespace common {
// Abstract base class for Horovod collective operations. Subclasses
// implement Execute() to carry out the collective described by `response`
// over the given tensor-table entries.
class HorovodOp {
public:
  HorovodOp(HorovodGlobalState* global_state);
  // Run the operation on `entries` as instructed by `response`.
  virtual Status Execute(std::vector<TensorTableEntry>& entries,
                         const Response& response) = 0;

protected:
  // NOTE(review): presumably the total element count across all entries --
  // confirm against the implementation file.
  int64_t NumElements(std::vector<TensorTableEntry>& entries);
  // Shared framework state; not owned by this object.
  HorovodGlobalState* global_state_;
};
// Base class for allreduce implementations: element-wise reduce tensors
// across all ranks so every rank ends up with the same reduced result.
class AllreduceOp : public HorovodOp {
public:
  AllreduceOp(HorovodGlobalState* global_state);
  virtual ~AllreduceOp() = default;
  virtual Status Execute(std::vector<TensorTableEntry>& entries,
                         const Response& response) = 0;
  // Whether this implementation is applicable to `entries` given the
  // currently tuned parameters.
  virtual bool Enabled(const ParameterManager& param_manager,
                       const std::vector<TensorTableEntry>& entries,
                       const Response& response) const = 0;

protected:
  // Pack all entries contiguously into the fusion buffer.
  virtual void
  MemcpyInFusionBuffer(const std::vector<TensorTableEntry>& entries,
                       const void*& fused_input_data, void*& buffer_data,
                       size_t& buffer_len);
  // Unpack results from the fusion buffer back into the entries' outputs.
  virtual void MemcpyOutFusionBuffer(const void* buffer_data,
                                     std::vector<TensorTableEntry>& entries);
  // Copy a single entry into its offset within the fusion buffer.
  virtual void
  MemcpyEntryInFusionBuffer(const std::vector<TensorTableEntry>& entries,
                            const TensorTableEntry& e,
                            void* buffer_data_at_offset);
  // Copy a single entry's result out of the fusion buffer.
  virtual void
  MemcpyEntryOutFusionBuffer(const std::vector<TensorTableEntry>& entries,
                             const void* buffer_data_at_offset,
                             TensorTableEntry& e);
};
// Base class for allgather implementations: every rank contributes its
// tensor and receives the concatenation of all ranks' tensors.
class AllgatherOp : public HorovodOp {
public:
  AllgatherOp(HorovodGlobalState* global_state);
  virtual ~AllgatherOp() = default;
  virtual Status Execute(std::vector<TensorTableEntry>& entries,
                         const Response& response) = 0;
  // Whether this implementation is applicable to `entries` given the
  // currently tuned parameters.
  virtual bool Enabled(const ParameterManager& param_manager,
                       const std::vector<TensorTableEntry>& entries,
                       const Response& response) const = 0;

protected:
  // Allocate output tensors and fill the per-(entry, rank) component sizes
  // plus the per-rank receive counts used by the gather.
  virtual Status AllocateOutput(std::vector<TensorTableEntry>& entries,
                                const Response& response,
                                int64_t**& entry_component_sizes,
                                int*& recvcounts);
  // Derive per-rank displacements from the receive counts.
  virtual void SetDisplacements(const int* recvcounts, int*& displcmnts);
  // Compute each entry's offset within every rank's received component.
  virtual void
  SetEntryComponentOffsets(const std::vector<TensorTableEntry>& entries,
                           const int64_t* const* entry_component_sizes,
                           const int* recvcounts,
                           int64_t**& entry_component_offsets);
  // Pack all entries into the fusion buffer using the rank displacements.
  virtual void
  MemcpyInFusionBuffer(const std::vector<TensorTableEntry>& entries,
                       const int* displcmnts, int element_size,
                       void*& buffer_data);
  // Unpack gathered components from the fusion buffer into entry outputs.
  virtual void
  MemcpyOutFusionBuffer(const int64_t* const* entry_component_offsets,
                        const int64_t* const* entry_component_sizes,
                        const void* buffer_data, int element_size,
                        std::vector<TensorTableEntry>& entries);
  // Copy one entry into its slot in the fusion buffer.
  virtual void
  MemcpyEntryInFusionBuffer(const std::vector<TensorTableEntry>& entries,
                            const TensorTableEntry& e,
                            void* buffer_data_at_offset);
  // Copy one entry's gathered data out of the fusion buffer.
  virtual void
  MemcpyEntryOutFusionBuffer(const std::vector<TensorTableEntry>& entries,
                             const void* buffer_data_at_offset,
                             TensorTableEntry& e,
                             int64_t entry_offset,
                             size_t entry_size);
};
// Base class for broadcast implementations: replicate the root rank's
// tensor data to all other ranks.
class BroadcastOp : public HorovodOp {
public:
  BroadcastOp(HorovodGlobalState* global_state);
  virtual ~BroadcastOp() = default;
  virtual Status Execute(std::vector<TensorTableEntry>& entries,
                         const Response& response) = 0;
  // Whether this implementation is applicable to `entries` given the
  // currently tuned parameters.
  virtual bool Enabled(const ParameterManager& param_manager,
                       const std::vector<TensorTableEntry>& entries,
                       const Response& response) const = 0;
};
// Join operation. Unlike the other op bases, Execute is not pure virtual,
// so a default implementation exists. NOTE(review): presumably this lets a
// rank signal it has no further tensors to contribute -- confirm against
// the implementation file.
class JoinOp : public HorovodOp {
public:
  JoinOp(HorovodGlobalState* global_state);
  virtual ~JoinOp() = default;
  virtual Status Execute(std::vector<TensorTableEntry>& entries,
                         const Response& response);
};
// Error operation with a default Execute implementation.
// NOTE(review): presumably reports an error status back for the given
// entries -- confirm against the implementation file.
class ErrorOp : public HorovodOp {
public:
  ErrorOp(HorovodGlobalState* global_state);
  virtual ~ErrorOp() = default;
  virtual Status Execute(std::vector<TensorTableEntry>& entries, const Response& response);
};
} // namespace common
} // namespace horovod
#endif // HOROVOD_COLLECTIVE_OPERATIONS_H
|
"""Create collage from images in a folder."""
from argparse import ArgumentParser
from pathlib import Path
from collamake.make import CollaMake
if __name__ == "__main__":
    # Parse the single required option and run the collage generator.
    cli = ArgumentParser(description="A simple collage maker")
    cli.add_argument("-f", "--file", type=Path, required=True)
    opts = cli.parse_args()
    CollaMake(opts.file).generate()
|
"""Definition of boards and/or ids"""
# Allow for aligned constant definitions:
BEAGLEBONE = "BEAGLEBONE"
BEAGLEBONE_BLACK = "BEAGLEBONE_BLACK"
BEAGLEBONE_BLUE = "BEAGLEBONE_BLUE"
BEAGLEBONE_BLACK_WIRELESS = "BEAGLEBONE_BLACK_WIRELESS"
BEAGLEBONE_POCKETBEAGLE = "BEAGLEBONE_POCKETBEAGLE"
BEAGLEBONE_GREEN = "BEAGLEBONE_GREEN"
BEAGLEBONE_GREEN_WIRELESS = "BEAGLEBONE_GREEN_WIRELESS"
BEAGLEBONE_GREEN_GATEWAY = "BEAGLEBONE_GREEN_GATEWAY"
BEAGLEBONE_BLACK_INDUSTRIAL = "BEAGLEBONE_BLACK_INDUSTRIAL"
BEAGLEBONE_ENHANCED = "BEAGLEBONE_ENHANCED"
BEAGLEBONE_USOMIQ = "BEAGLEBONE_USOMIQ"
BEAGLEBONE_AIR = "BEAGLEBONE_AIR"
BEAGLEBONE_AI = "BEAGLEBONE_AI"
BEAGLEBONE_POCKETBONE = "BEAGLEBONE_POCKETBONE"
BEAGLEV_STARLIGHT = "BEAGLEV_STARLIGHT"
BEAGLELOGIC_STANDALONE = "BEAGLELOGIC_STANDALONE"
OSD3358_DEV_BOARD = "OSD3358_DEV_BOARD"
OSD3358_SM_RED = "OSD3358_SM_RED"
FEATHER_HUZZAH = "FEATHER_HUZZAH"
FEATHER_M0_EXPRESS = "FEATHER_M0_EXPRESS"
GENERIC_LINUX_PC = "GENERIC_LINUX_PC"
PYBOARD = "PYBOARD"
NODEMCU = "NODEMCU"
RASPBERRY_PI_PICO = "RASPBERRY_PI_PICO"
GIANT_BOARD = "GIANT_BOARD"
# ASUS Tinker Boards
ASUS_TINKER_BOARD = "ASUS_TINKER_BOARD"
# Clockwork Pi boards
CLOCKWORK_CPI3 = "CLOCKWORK_CPI3"
# Orange Pi boards
ORANGE_PI_PC = "ORANGE_PI_PC"
ORANGE_PI_PC_2 = "ORANGE_PI_PC_2"
ORANGE_PI_R1 = "ORANGE_PI_R1"
ORANGE_PI_ZERO = "ORANGE_PI_ZERO"
ORANGE_PI_ONE = "ORANGE_PI_ONE"
ORANGE_PI_LITE = "ORANGE_PI_LITE"
ORANGE_PI_PC_PLUS = "ORANGE_PI_PC_PLUS"
ORANGE_PI_PLUS_2E = "ORANGE_PI_PLUS_2E"
ORANGE_PI_2 = "ORANGE_PI_2"
ORANGE_PI_ZERO_PLUS_2H5 = "ORANGE_PI_ZERO_PLUS_2H5"
ORANGE_PI_ZERO_PLUS = "ORANGE_PI_ZERO_PLUS"
ORANGE_PI_ZERO_2 = "ORANGE_PI_ZERO_2"
# Nano Pi boards
NANOPI_NEO_AIR = "NANOPI_NEO_AIR"
NANOPI_DUO2 = "NANOPI_DUO2"
# Banana Pi boards
BANANA_PI_M2_ZERO = "BANANA_PI_M2_ZERO"
# NVIDIA Jetson boards
JETSON_TX1 = "JETSON_TX1"
JETSON_TX2 = "JETSON_TX2"
JETSON_TX2_NX = "JETSON_TX2_NX"
CLARA_AGX_XAVIER = "CLARA_AGX_XAVIER"
JETSON_XAVIER = "JETSON_XAVIER"
JETSON_NANO = "JETSON_NANO"
JETSON_NX = "JETSON_NX"
# Google Coral dev board
CORAL_EDGE_TPU_DEV = "CORAL_EDGE_TPU_DEV"
CORAL_EDGE_TPU_DEV_MINI = "CORAL_EDGE_TPU_DEV_MINI"
# Xilinx PYNQ FPGA dev boards
PYNQ_Z1 = "PYNQ_Z1"
PYNQ_Z2 = "PYNQ_Z2"
# STM32 MPU boards
STM32MP157C_DK2 = "STM32MP157C_DK2"
OSD32MP1_BRK = "OSD32MP1_BRK"
OSD32MP1_RED = "OSD32MP1_RED"
# Embedfire LubanCat board
LUBANCAT_IMX6ULL = "LUBANCAT_IMX6ULL"
LUBANCAT_STM32MP157 = "LUBANCAT_STM32MP157"
# Various Raspberry Pi models
RASPBERRY_PI_B_REV1 = "RASPBERRY_PI_B_REV1"
RASPBERRY_PI_B_REV2 = "RASPBERRY_PI_B_REV2"
RASPBERRY_PI_B_PLUS = "RASPBERRY_PI_B_PLUS"
RASPBERRY_PI_A = "RASPBERRY_PI_A"
RASPBERRY_PI_A_PLUS = "RASPBERRY_PI_A_PLUS"
RASPBERRY_PI_CM1 = "RASPBERRY_PI_CM1"
RASPBERRY_PI_ZERO = "RASPBERRY_PI_ZERO"
RASPBERRY_PI_ZERO_W = "RASPBERRY_PI_ZERO_W"
RASPBERRY_PI_ZERO_2_W = "RASPBERRY_PI_ZERO_2_W"
RASPBERRY_PI_2B = "RASPBERRY_PI_2B"
RASPBERRY_PI_3B = "RASPBERRY_PI_3B"
RASPBERRY_PI_3B_PLUS = "RASPBERRY_PI_3B_PLUS"
RASPBERRY_PI_CM3 = "RASPBERRY_PI_CM3"
RASPBERRY_PI_3A_PLUS = "RASPBERRY_PI_3A_PLUS"
RASPBERRY_PI_CM3_PLUS = "RASPBERRY_PI_CM3_PLUS"
RASPBERRY_PI_4B = "RASPBERRY_PI_4B"
# NOTE(review): the value spells "RASPBERY" (single R) -- likely a frozen
# identifier matched elsewhere; confirm before correcting the typo.
RASPBERRY_PI_AVNET_IIOT_GW = "RASPBERY_PI_AVNET_IIOT_GW"
RASPBERRY_PI_400 = "RASPBERRY_PI_400"
RASPBERRY_PI_CM4 = "RASPBERRY_PI_CM4"
ODROID_C1 = "ODROID_C1"
ODROID_C1_PLUS = "ODROID_C1_PLUS"
ODROID_C2 = "ODROID_C2"
ODROID_C4 = "ODROID_C4"
ODROID_N2 = "ODROID_N2"
ODROID_XU4 = "ODROID_XU4"
FTDI_FT232H = "FTDI_FT232H"
FTDI_FT2232H = "FTDI_FT2232H"
DRAGONBOARD_410C = "DRAGONBOARD_410C"
SIFIVE_UNLEASHED = "SIFIVE_UNLEASHED"
ALLWINER_D1 = "ALLWINER_D1"
MICROCHIP_MCP2221 = "MICROCHIP_MCP2221"
# Boards with u2if firmware
# https://github.com/execuc/u2if
PICO_U2IF = "PICO_U2IF"
FEATHER_U2IF = "FEATHER_U2IF"
ITSYBITSY_U2IF = "ITSYBITSY_U2IF"
MACROPAD_U2IF = "MACROPAD_U2IF"
QTPY_U2IF = "QTPY_U2IF"
QT2040_TRINKEY_U2IF = "QT2040_TRINKEY_U2IF"
BINHO_NOVA = "BINHO_NOVA"
ONION_OMEGA = "ONION_OMEGA"
ONION_OMEGA2 = "ONION_OMEGA2"
PINE64 = "PINE64"
PINEH64 = "PINEH64"
PINEBOOK = "PINEBOOK"
PINEPHONE = "PINEPHONE"
SOPINE = "SOPINE"
ROCK_PI_S = "ROCK_PI_S"
ROCK_PI_4 = "ROCK_PI_4"
ROCK_PI_X = "ROCK_PI_X"
ROCK_PI_E = "ROCK_PI_E"
GREATFET_ONE = "GREATFET_ONE"
# Udoo boards
UDOO_BOLT_V3 = "UDOO_BOLT_V3"
UDOO_BOLT_V8 = "UDOO_BOLT_V8"
UDOO_X86 = "UDOO_X86"
# MaaXBoard
MAAXBOARD = "MAAXBOARD"
MAAXBOARD_MINI = "MAAXBOARD_MINI"
# Asus Tinkerboard
_ASUS_TINKER_BOARD_IDS = (ASUS_TINKER_BOARD,)
# STM32MP1
_STM32MP1_IDS = (STM32MP157C_DK2, LUBANCAT_STM32MP157, OSD32MP1_BRK, OSD32MP1_RED)
# OrangePI
# NOTE: ORANGE_PI_PC_2 is declared above but was missing from this tuple,
# so detection code iterating _ORANGE_PI_IDS never matched that board.
_ORANGE_PI_IDS = (
    ORANGE_PI_PC,
    ORANGE_PI_PC_2,
    ORANGE_PI_R1,
    ORANGE_PI_ZERO,
    ORANGE_PI_ONE,
    ORANGE_PI_LITE,
    ORANGE_PI_PC_PLUS,
    ORANGE_PI_PLUS_2E,
    ORANGE_PI_2,
    ORANGE_PI_ZERO_PLUS_2H5,
    ORANGE_PI_ZERO_PLUS,
    ORANGE_PI_ZERO_2,
)
# NanoPi
_NANOPI_IDS = (NANOPI_NEO_AIR, NANOPI_DUO2)
# BananaPI
_BANANA_PI_IDS = (BANANA_PI_M2_ZERO,)
# LubanCat
_LUBANCAT_IDS = (LUBANCAT_IMX6ULL, LUBANCAT_STM32MP157)
# Coral boards
_CORAL_IDS = (CORAL_EDGE_TPU_DEV, CORAL_EDGE_TPU_DEV_MINI)
_PYNQ_IDS = (PYNQ_Z1, PYNQ_Z2)
_JETSON_IDS = (
(JETSON_TX1, ("nvidia,p2371-2180", "nvidia,jetson-cv")),
(
JETSON_TX2,
(
"nvidia,p2771-0000",
"nvidia,p2771-0888",
"nvidia,p3489-0000",
"nvidia,lightning",
"nvidia,quill",
"nvidia,storm",
),
),
(JETSON_TX2_NX, ("nvidia,p3509-0000+p3636-0001",)),
(CLARA_AGX_XAVIER, ("nvidia,e3900-0000+p2888-0004",)),
(
JETSON_XAVIER,
(
"nvidia,p2972-0000",
"nvidia,p2972-0006",
"nvidia,jetson-xavier",
"nvidia,jetson-xavier-industrial",
"nvidia,galen-industrial",
),
),
(JETSON_NANO, ("nvidia,p3450-0000", "nvidia,p3450-0002", "nvidia,jetson-nano")),
(
JETSON_NX,
(
"nvidia,p3509-0000+p3668-0000",
"nvidia,p3509-0000+p3668-0001",
"nvidia,p3509-0000-a00+p3668-0000-a01",
"nvidia,p3509-0000-a00+p3668-0001-a01",
"nvidia,p3449-0000+p3668-0000",
"nvidia,p3449-0000+p3668-0001",
),
),
)
_RASPBERRY_PI_40_PIN_IDS = (
RASPBERRY_PI_B_PLUS,
RASPBERRY_PI_A_PLUS,
RASPBERRY_PI_ZERO,
RASPBERRY_PI_ZERO_W,
RASPBERRY_PI_ZERO_2_W,
RASPBERRY_PI_2B,
RASPBERRY_PI_3B,
RASPBERRY_PI_3B_PLUS,
RASPBERRY_PI_3A_PLUS,
RASPBERRY_PI_4B,
RASPBERRY_PI_AVNET_IIOT_GW,
RASPBERRY_PI_400,
)
_RASPBERRY_PI_CM_IDS = (
RASPBERRY_PI_CM1,
RASPBERRY_PI_CM3,
RASPBERRY_PI_CM3_PLUS,
RASPBERRY_PI_CM4,
)
_ODROID_40_PIN_IDS = (
ODROID_C1,
ODROID_C1_PLUS,
ODROID_C2,
ODROID_C4,
ODROID_N2,
ODROID_XU4,
)
_BEAGLEBONE_IDS = (
BEAGLEBONE,
BEAGLEBONE_BLACK,
BEAGLEBONE_BLUE,
BEAGLEBONE_BLACK_WIRELESS,
BEAGLEBONE_POCKETBEAGLE,
BEAGLEBONE_GREEN,
BEAGLEBONE_GREEN_WIRELESS,
BEAGLEBONE_GREEN_GATEWAY,
BEAGLEBONE_BLACK_INDUSTRIAL,
BEAGLEBONE_ENHANCED,
BEAGLEBONE_USOMIQ,
BEAGLEBONE_AIR,
BEAGLEBONE_AI,
BEAGLEBONE_POCKETBONE,
BEAGLELOGIC_STANDALONE,
BEAGLEV_STARLIGHT,
OSD3358_DEV_BOARD,
OSD3358_SM_RED,
)
_LINARO_96BOARDS_IDS = (DRAGONBOARD_410C,)
_SIFIVE_IDS = (SIFIVE_UNLEASHED,)
# BeagleBone eeprom board ids from:
# https://github.com/beagleboard/image-builder
# Thanks to zmatt on freenode #beagle for pointers.
_BEAGLEBONE_BOARD_IDS = {
# Original bone/white:
BEAGLEBONE: (
("A4", "A335BONE00A4"),
("A5", "A335BONE00A5"),
("A6", "A335BONE00A6"),
("A6A", "A335BONE0A6A"),
("A6B", "A335BONE0A6B"),
("B", "A335BONE000B"),
),
BEAGLEBONE_BLACK: (
("A5", "A335BNLT00A5"),
("A5A", "A335BNLT0A5A"),
("A5B", "A335BNLT0A5B"),
("A5C", "A335BNLT0A5C"),
("A6", "A335BNLT00A6"),
("B", "A335BNLT000B"),
("C", "A335BNLT000C"),
("C", "A335BNLT00C0"),
),
BEAGLEBONE_BLUE: (("A2", "A335BNLTBLA2"),),
BEAGLEBONE_BLACK_WIRELESS: (("A5", "A335BNLTBWA5"),),
BEAGLEBONE_POCKETBEAGLE: (("A2", "A335PBGL00A2"),),
BEAGLEBONE_GREEN: (("1A", "A335BNLT...."), ("UNKNOWN", "A335BNLTBBG1")),
BEAGLEBONE_GREEN_WIRELESS: (("W1A", "A335BNLTGW1A"),),
BEAGLEBONE_GREEN_GATEWAY: (("GA1", "A335BNLTGG1A"),),
BEAGLEBONE_BLACK_INDUSTRIAL: (
("A0", "A335BNLTAIA0"), # Arrow
("A0", "A335BNLTEIA0"), # Element14
),
BEAGLEBONE_ENHANCED: (("A", "A335BNLTSE0A"),),
BEAGLEBONE_USOMIQ: (("6", "A335BNLTME06"),),
BEAGLEBONE_AIR: (("A0", "A335BNLTNAD0"),),
BEAGLEBONE_POCKETBONE: (("0", "A335BNLTBP00"),),
OSD3358_DEV_BOARD: (("0.1", "A335BNLTGH01"),),
OSD3358_SM_RED: (("0", "A335BNLTOS00"),),
BEAGLELOGIC_STANDALONE: (("A", "A335BLGC000A"),),
}
# Pi revision codes from:
# https://www.raspberrypi.org/documentation/hardware/raspberrypi/revision-codes/README.md
# Each tuple here contains both the base codes, and the versions that indicate
# the Pi is overvolted / overclocked - for 4-digit codes, this will be prefixed
# with 1000, and for 6-digit codes it'll be prefixed with 1. These are placed
# on separate lines.
_PI_REV_CODES = {
RASPBERRY_PI_B_REV1: (
# Regular codes:
"0002",
"0003",
# Overvolted/clocked versions:
"1000002",
"1000003",
),
RASPBERRY_PI_B_REV2: (
"0004",
"0005",
"0006",
"000d",
"000e",
"000f",
"1000005",
"1000006",
"100000d",
"100000e",
"100000f",
),
RASPBERRY_PI_B_PLUS: ("0010", "0013", "900032", "1000010", "1000013", "1900032"),
RASPBERRY_PI_A: ("0007", "0008", "0009", "1000007", "1000008", "1000009"),
RASPBERRY_PI_A_PLUS: ("0012", "0015", "900021", "1000012", "1000015", "1900021"),
RASPBERRY_PI_CM1: ("0011", "0014", "10000011", "10000014"),
RASPBERRY_PI_ZERO: (
"900092",
"920092",
"900093",
"920093",
"1900092",
"1920092",
"1900093",
"1920093", # warranty bit 24
"2900092",
"2920092",
"2900093",
"2920093", # warranty bit 25
),
RASPBERRY_PI_ZERO_W: ("9000c1", "19000c1", "29000c1"), # warranty bits
RASPBERRY_PI_2B: (
"a01040",
"a01041",
"a02042",
"a21041",
"a22042",
"1a01040",
"1a01041",
"1a02042",
"1a21041",
"1a22042", # warranty bit 24
"2a01040",
"2a01041",
"2a02042",
"2a21041",
"2a22042", # warranty bit 25
"3a01040",
"3a01041",
"3a02042",
"3a21041",
"3a22042",
),
RASPBERRY_PI_3B: (
"a02082",
"a22082",
"a32082",
"a52082",
"1a02082",
"1a22082",
"1a32082",
"1a52082", # warranty bit 24
"2a02082",
"2a22082",
"2a32082",
"2a52082", # warranty bit 25
),
RASPBERRY_PI_3B_PLUS: ("a020d3", "1a020d3", "2a020d3"), # warranty bits
RASPBERRY_PI_AVNET_IIOT_GW: ("60a220b0",),
RASPBERRY_PI_CM3: (
"a020a0",
"a220a0",
"1a020a0",
"2a020a0", # warranty bits
"1a220a0",
"2a220a0",
),
RASPBERRY_PI_3A_PLUS: ("9020e0", "19020e0", "29020e0"), # warranty bits
RASPBERRY_PI_CM3_PLUS: ("a02100", "1a02100", "2a02100"), # warranty bits
RASPBERRY_PI_4B: (
"a03111",
"b03111",
"c03111",
"a03112",
"b03112",
"c03112",
"b03114",
"c03114",
"d03114",
"1a03111",
"2a03111",
"1b03111",
"2b03111", # warranty bits
"1c03111",
"2c03111",
"1a03112",
"2a03112",
"1b03112",
"2b03112",
"1c03112",
"2c03112",
),
RASPBERRY_PI_400: ("c03130", "c03131"),
RASPBERRY_PI_CM4: ("a03140", "b03140", "c03140", "d03140"),
RASPBERRY_PI_ZERO_2_W: ("902120",),
}
# Onion omega boards
_ONION_OMEGA_BOARD_IDS = (ONION_OMEGA, ONION_OMEGA2)
# Pine64 boards and devices
_PINE64_DEV_IDS = (PINE64, PINEH64, PINEBOOK, PINEPHONE, SOPINE)
# RockPi boards and devices
_ROCK_PI_IDS = (ROCK_PI_S, ROCK_PI_4, ROCK_PI_X, ROCK_PI_E)
# UDOO
_UDOO_BOARD_IDS = {UDOO_BOLT_V8: ("SC40-2000-0000-C0|C",), UDOO_X86: ("dummy",)}
# MaaXBoard boards
# Use the constant names (identical string values) for consistency with
# every other *_IDS tuple in this module.
_MAAXBOARD_DEV_IDS = (MAAXBOARD, MAAXBOARD_MINI)
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trainer to automate the training."""
import logging
import warnings
from itertools import count
from pathlib import Path
from traceback import print_exc
from typing import Any, Dict, Iterable, List, Optional, Union
import torch
from torch.utils.data import DataLoader
from pytorch_lightning.accelerators import Accelerator
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.core.datamodule import LightningDataModule
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.core.memory import ModelSummary
from pytorch_lightning.core.step_result import Result
from pytorch_lightning.loggers import LightningLoggerBase
from pytorch_lightning.plugins import Plugin
from pytorch_lightning.profiler import BaseProfiler
from pytorch_lightning.trainer.callback_hook import TrainerCallbackHookMixin
from pytorch_lightning.trainer.configuration_validator import ConfigValidator
from pytorch_lightning.trainer.connectors.accelerator_connector import AcceleratorConnector
from pytorch_lightning.trainer.connectors.callback_connector import CallbackConnector
from pytorch_lightning.trainer.connectors.checkpoint_connector import CheckpointConnector
from pytorch_lightning.trainer.connectors.data_connector import DataConnector
from pytorch_lightning.trainer.connectors.debugging_connector import DebuggingConnector
from pytorch_lightning.trainer.connectors.env_vars_connector import _defaults_from_env_vars
from pytorch_lightning.trainer.connectors.logger_connector import LoggerConnector
from pytorch_lightning.trainer.connectors.model_connector import ModelConnector
from pytorch_lightning.trainer.connectors.optimizer_connector import OptimizerConnector
from pytorch_lightning.trainer.connectors.profiler_connector import ProfilerConnector
from pytorch_lightning.trainer.connectors.slurm_connector import SLURMConnector
from pytorch_lightning.trainer.connectors.training_trick_connector import TrainingTricksConnector
from pytorch_lightning.trainer.data_loading import TrainerDataLoadingMixin
from pytorch_lightning.trainer.deprecated_api import DeprecatedDistDeviceAttributes, DeprecatedTrainerAttributes
from pytorch_lightning.trainer.evaluation_loop import EvaluationLoop
from pytorch_lightning.trainer.logging import TrainerLoggingMixin
from pytorch_lightning.trainer.model_hooks import TrainerModelHooksMixin
from pytorch_lightning.trainer.optimizers import TrainerOptimizersMixin
from pytorch_lightning.trainer.predict_loop import PredictLoop
from pytorch_lightning.trainer.properties import TrainerProperties
from pytorch_lightning.trainer.states import TrainerState
from pytorch_lightning.trainer.training_loop import TrainLoop
from pytorch_lightning.trainer.training_tricks import TrainerTrainingTricksMixin
from pytorch_lightning.tuner.tuning import Tuner
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.cloud_io import load as pl_load
from pytorch_lightning.utilities.debugging import InternalDebugger
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.memory import recursive_detach
from pytorch_lightning.utilities.model_helpers import is_overridden
# Module-level logger for the trainer.
log = logging.getLogger(__name__)
# Warnings to ignore in trainer: suppress torch's `reduce_op` deprecation
# warning (matched by the `message` regex below) so it does not spam users.
warnings.filterwarnings(
    'ignore', message='torch.distributed.reduce_op is deprecated, '
    'please use torch.distributed.ReduceOp instead'
)
class Trainer(
TrainerProperties,
TrainerCallbackHookMixin,
TrainerModelHooksMixin,
TrainerOptimizersMixin,
TrainerLoggingMixin,
TrainerTrainingTricksMixin,
TrainerDataLoadingMixin,
DeprecatedDistDeviceAttributes,
DeprecatedTrainerAttributes,
):
@_defaults_from_env_vars
def __init__(
self,
logger: Union[LightningLoggerBase, Iterable[LightningLoggerBase], bool] = True,
checkpoint_callback: bool = True,
callbacks: Optional[Union[List[Callback], Callback]] = None,
default_root_dir: Optional[str] = None,
gradient_clip_val: float = 0,
process_position: int = 0,
num_nodes: int = 1,
num_processes: int = 1,
gpus: Optional[Union[List[int], str, int]] = None,
auto_select_gpus: bool = False,
tpu_cores: Optional[Union[List[int], str, int]] = None,
log_gpu_memory: Optional[str] = None,
progress_bar_refresh_rate: Optional[int] = None,
overfit_batches: Union[int, float] = 0.0,
track_grad_norm: Union[int, float, str] = -1,
check_val_every_n_epoch: int = 1,
fast_dev_run: Union[int, bool] = False,
accumulate_grad_batches: Union[int, Dict[int, int], List[list]] = 1,
max_epochs: Optional[int] = None,
min_epochs: Optional[int] = None,
max_steps: Optional[int] = None,
min_steps: Optional[int] = None,
limit_train_batches: Union[int, float] = 1.0,
limit_val_batches: Union[int, float] = 1.0,
limit_test_batches: Union[int, float] = 1.0,
limit_predict_batches: Union[int, float] = 1.0,
val_check_interval: Union[int, float] = 1.0,
flush_logs_every_n_steps: int = 100,
log_every_n_steps: int = 50,
accelerator: Optional[Union[str, Accelerator]] = None,
sync_batchnorm: bool = False,
precision: int = 32,
weights_summary: Optional[str] = 'top',
weights_save_path: Optional[str] = None,
num_sanity_val_steps: int = 2,
truncated_bptt_steps: Optional[int] = None,
resume_from_checkpoint: Optional[Union[Path, str]] = None,
profiler: Optional[Union[BaseProfiler, str]] = None,
benchmark: bool = False,
deterministic: bool = False,
reload_dataloaders_every_epoch: bool = False,
auto_lr_find: Union[bool, str] = False,
replace_sampler_ddp: bool = True,
terminate_on_nan: bool = False,
auto_scale_batch_size: Union[str, bool] = False,
prepare_data_per_node: bool = True,
plugins: Optional[Union[Plugin, str, list]] = None,
amp_backend: str = 'native',
amp_level: str = 'O2',
distributed_backend: Optional[str] = None,
move_metrics_to_cpu: bool = False,
multiple_trainloader_mode: str = 'max_size_cycle',
stochastic_weight_avg: bool = False
):
r"""
Customize every aspect of training via flags
Args:
accelerator: Previously known as distributed_backend (dp, ddp, ddp2, etc...).
Can also take in an accelerator object for custom hardware.
accumulate_grad_batches: Accumulates grads every k batches or as set up in the dict.
amp_backend: The mixed precision backend to use ("native" or "apex")
amp_level: The optimization level to use (O1, O2, etc...).
auto_lr_find: If set to True, will make trainer.tune() run a learning rate finder,
trying to optimize initial learning for faster convergence. trainer.tune() method will
set the suggested learning rate in self.lr or self.learning_rate in the LightningModule.
To use a different key set a string instead of True with the key name.
auto_scale_batch_size: If set to True, will `initially` run a batch size
finder trying to find the largest batch size that fits into memory.
The result will be stored in self.batch_size in the LightningModule.
Additionally, can be set to either `power` that estimates the batch size through
a power search or `binsearch` that estimates the batch size through a binary search.
auto_select_gpus: If enabled and `gpus` is an integer, pick available
gpus automatically. This is especially useful when
GPUs are configured to be in "exclusive mode", such
that only one process at a time can access them.
benchmark: If true enables cudnn.benchmark.
callbacks: Add a callback or list of callbacks.
checkpoint_callback: If ``True``, enable checkpointing.
It will configure a default ModelCheckpoint callback if there is no user-defined ModelCheckpoint in
:paramref:`~pytorch_lightning.trainer.trainer.Trainer.callbacks`.
check_val_every_n_epoch: Check val every n train epochs.
default_root_dir: Default path for logs and weights when no logger/ckpt_callback passed.
Default: ``os.getcwd()``.
Can be remote file paths such as `s3://mybucket/path` or 'hdfs://path/'
deterministic: If true enables cudnn.deterministic.
distributed_backend: deprecated. Please use 'accelerator'
fast_dev_run: runs n if set to ``n`` (int) else 1 if set to ``True`` batch(es)
of train, val and test to find any bugs (ie: a sort of unit test).
flush_logs_every_n_steps: How often to flush logs to disk (defaults to every 100 steps).
gpus: number of gpus to train on (int) or which GPUs to train on (list or str) applied per node
gradient_clip_val: 0 means don't clip.
limit_train_batches: How much of training dataset to check (float = fraction, int = num_batches)
limit_val_batches: How much of validation dataset to check (float = fraction, int = num_batches)
limit_test_batches: How much of test dataset to check (float = fraction, int = num_batches)
limit_predict_batches: How much of prediction dataset to check (float = fraction, int = num_batches)
logger: Logger (or iterable collection of loggers) for experiment tracking.
log_gpu_memory: None, 'min_max', 'all'. Might slow performance
log_every_n_steps: How often to log within steps (defaults to every 50 steps).
prepare_data_per_node: If True, each LOCAL_RANK=0 will call prepare data.
Otherwise only NODE_RANK=0, LOCAL_RANK=0 will prepare data
process_position: orders the progress bar when running multiple models on same machine.
progress_bar_refresh_rate: How often to refresh progress bar (in steps). Value ``0`` disables progress bar.
Ignored when a custom progress bar is passed to :paramref:`~Trainer.callbacks`. Default: None, means
a suitable value will be chosen based on the environment (terminal, Google COLAB, etc.).
profiler: To profile individual steps during training and assist in identifying bottlenecks.
overfit_batches: Overfit a fraction of training data (float) or a set number of batches (int).
plugins: Plugins allow modification of core behavior like ddp and amp, and enable custom lightning plugins.
precision: Full precision (32), half precision (16). Can be used on CPU, GPU or TPUs.
max_epochs: Stop training once this number of epochs is reached. Disabled by default (None).
If both max_epochs and max_steps are not specified, defaults to ``max_epochs`` = 1000.
min_epochs: Force training for at least these many epochs. Disabled by default (None).
If both min_epochs and min_steps are not specified, defaults to ``min_epochs`` = 1.
max_steps: Stop training after this number of steps. Disabled by default (None).
min_steps: Force training for at least these number of steps. Disabled by default (None).
num_nodes: number of GPU nodes for distributed training.
num_processes: number of processes for distributed training with distributed_backend="ddp_cpu"
num_sanity_val_steps: Sanity check runs n validation batches before starting the training routine.
Set it to `-1` to run all batches in all validation dataloaders.
reload_dataloaders_every_epoch: Set to True to reload dataloaders every epoch.
replace_sampler_ddp: Explicitly enables or disables sampler replacement. If not specified this
will toggled automatically when DDP is used. By default it will add ``shuffle=True`` for
train sampler and ``shuffle=False`` for val/test sampler. If you want to customize it,
you can set ``replace_sampler_ddp=False`` and add your own distributed sampler.
resume_from_checkpoint: Path/URL of the checkpoint from which training is resumed. If there is
no checkpoint file at the path, start from scratch. If resuming from mid-epoch checkpoint,
training will start from the beginning of the next epoch.
sync_batchnorm: Synchronize batch norm layers between process groups/whole world.
terminate_on_nan: If set to True, will terminate training (by raising a `ValueError`) at the
end of each training batch, if any of the parameters or the loss are NaN or +/-inf.
tpu_cores: How many TPU cores to train on (1 or 8) / Single TPU to train on [1]
track_grad_norm: -1 no tracking. Otherwise tracks that p-norm. May be set to 'inf' infinity-norm.
truncated_bptt_steps: Truncated back prop breaks performs backprop every k steps of much longer
sequence.
val_check_interval: How often to check the validation set. Use float to check within a training epoch,
use int to check every n steps (batches).
weights_summary: Prints a summary of the weights when training begins.
weights_save_path: Where to save weights if specified. Will override default_root_dir
for checkpoints only. Use this if for whatever reason you need the checkpoints
stored in a different place than the logs written in `default_root_dir`.
Can be remote file paths such as `s3://mybucket/path` or 'hdfs://path/'
Defaults to `default_root_dir`.
move_metrics_to_cpu: Whether to force internal logged metrics to be moved to cpu.
This can save some gpu memory, but can make training slower. Use with attention.
multiple_trainloader_mode: How to loop over the datasets when there are multiple train loaders.
In 'max_size_cycle' mode, the trainer ends one epoch when the largest dataset is traversed,
and smaller datasets reload when running out of their data. In 'min_size' mode, all the datasets
reload when reaching the minimum length of datasets.
stochastic_weight_avg: Whether to use `Stochastic Weight Averaging (SWA)
<https://pytorch.org/blog/pytorch-1.6-now-includes-stochastic-weight-averaging/>_`
"""
super().__init__()
distributed_backend = distributed_backend or accelerator
# init connectors
self.dev_debugger = InternalDebugger(self)
self.config_validator = ConfigValidator(self)
self.data_connector = DataConnector(self)
self.optimizer_connector = OptimizerConnector(self)
self.accelerator_connector = AcceleratorConnector(
num_processes, tpu_cores, distributed_backend, auto_select_gpus, gpus, num_nodes, sync_batchnorm, benchmark,
replace_sampler_ddp, deterministic, precision, amp_backend, amp_level, plugins
)
self.logger_connector = LoggerConnector(self, log_gpu_memory)
self.model_connector = ModelConnector(self)
self.callback_connector = CallbackConnector(self)
self.debugging_connector = DebuggingConnector(self)
self.training_tricks_connector = TrainingTricksConnector(self)
self.profile_connector = ProfilerConnector(self)
self.checkpoint_connector = CheckpointConnector(self)
self.slurm_connector = SLURMConnector(self)
self.tuner = Tuner(self)
self.train_loop = TrainLoop(self, multiple_trainloader_mode)
self.evaluation_loop = EvaluationLoop(self)
self.predict_loop = PredictLoop(self)
# training state
self.weights_summary = weights_summary
self.shown_warnings = set()
# init callbacks
# Declare attributes to be set in callback_connector on_trainer_init
self.callback_connector.on_trainer_init(
callbacks, checkpoint_callback, progress_bar_refresh_rate, process_position, default_root_dir,
weights_save_path, resume_from_checkpoint, stochastic_weight_avg
)
# hook
self.on_init_start()
# init optimizer + lr scheduler related flags
self.optimizer_connector.on_trainer_init()
# init data flags
self.data_connector.on_trainer_init(
check_val_every_n_epoch, reload_dataloaders_every_epoch, prepare_data_per_node
)
# init training tricks
self.training_tricks_connector.on_trainer_init(
gradient_clip_val, track_grad_norm, accumulate_grad_batches, truncated_bptt_steps, terminate_on_nan
)
self.train_loop.on_trainer_init(
max_epochs,
min_epochs,
max_steps,
min_steps,
num_sanity_val_steps,
weights_summary,
)
self.evaluation_loop.on_trainer_init()
# configure tuner
self.tuner.on_trainer_init(auto_lr_find, auto_scale_batch_size)
# configure profiler
self.profile_connector.on_trainer_init(profiler)
# init logger flags
self.logger_connector.on_trainer_init(
logger,
flush_logs_every_n_steps,
log_every_n_steps,
move_metrics_to_cpu,
)
# init debugging flags
self.debugging_connector.on_init_start(
limit_train_batches,
limit_val_batches,
limit_test_batches,
limit_predict_batches,
val_check_interval,
overfit_batches,
fast_dev_run,
)
# Callback system
self.on_init_end()
def fit(
    self,
    model: LightningModule,
    train_dataloader: Any = None,
    val_dataloaders: Optional[Union[DataLoader, List[DataLoader]]] = None,
    datamodule: Optional[LightningDataModule] = None,
):
    r"""
    Runs the full optimization routine.

    Args:
        datamodule: A instance of :class:`LightningDataModule`.
        model: Model to fit.
        train_dataloader: Either a single PyTorch DataLoader or a collection of these
            (list, dict, nested lists and dicts). In the case of multiple dataloaders, please
            see this :ref:`page <multiple-training-dataloaders>`
        val_dataloaders: Either a single Pytorch Dataloader or a list of them, specifying validation samples.
            If the model has a predefined val_dataloaders method this will be skipped

    Returns:
        The accelerator results when available, otherwise ``1`` — callers such as
        ``validate``/``test``/``predict`` reuse this entry point and only need a
        success signal.
    """
    # we reuse fit for other functions. When already set, it shouldn't be modified.
    if not self.state.running:
        self.state = TrainerState.FITTING
    if self._running_stage is None:
        self.training = True
    # set local properties on the model
    self.model_connector.copy_trainer_model_properties(model)
    # ----------------------------
    # LINK DATA
    # ----------------------------
    # setup data, etc...
    self.train_loop.setup_fit(model, train_dataloader, val_dataloaders, datamodule)
    # hook
    self.data_connector.prepare_data(model)
    self.callback_connector._attach_model_callbacks(model, self)
    # ----------------------------
    # SET UP TRAINING
    # ----------------------------
    self.call_hook("on_before_accelerator_backend_setup", model)
    self.accelerator.connect(model)
    self.accelerator.setup_environment()
    self.call_setup_hook(model)  # allow user to setup lightning_module in accelerator environment
    self.accelerator.setup(self, model)  # note: this sets up self.lightning_module
    # ----------------------------
    # INSPECT THE CORE LOOPS
    # ----------------------------
    # NOTE: this f-string is evaluated and immediately discarded; it exists only as
    # in-editor documentation whose {references} support "Go to Definition".
    f"""
    Lightning internal flow looks like this:
    {Trainer.fit} or {Trainer.test} or {Trainer.predict}  ||
                            |                             ||
                    create accelerator                    ||
                            |                             ||
                    {self.dispatch}                       ||
                            |                             ||  LIGHTNING
            {self.accelerator.start_training}             ||
        or {self.accelerator.start_evaluating}            ||
        or {self.accelerator.start_predicting}            ||  FLOW
                            |                             ||
                    {self.run_stage}                      ||
                            |                             ||  DIRECTION
                    {self.run_train}                      ||
            or {self.run_evaluation}                      ||
            or {self.run_predict}                         ||
                            |                             ||
                        results                           \/
    This is used to guide readers to the core loops: train, test, predict.
    {self.run_predict} is the simplest to understand, use `Go to Definition` to read it :)
    Search for `start_training` or `start_evaluating` or `start_predicting` in
    `pytorch_lightning/plugins/training_type_plugin` to find accelerator dispatch functions.
    """  # noqa: W605
    # ----------------------------
    # TRAIN
    # ----------------------------
    # hook
    if self.state == TrainerState.FITTING:
        self.call_hook("on_fit_start")
    # plugin will setup fitting (e.g. ddp will launch child processes)
    self.pre_dispatch()
    # dispatch `start_training` or `start_evaluating` or `start_predicting`
    self.dispatch()
    # plugin will finalized fitting (e.g. ddp_spawn will load trained model)
    self.post_dispatch()
    # ----------------------------
    # POST-Training CLEAN UP
    # ----------------------------
    # hook
    if self.state == TrainerState.FITTING:
        self.call_hook('on_fit_end')
    # teardown
    self.call_teardown_hook(model)
    if self.state != TrainerState.INTERRUPTED:
        self.state = TrainerState.FINISHED
    self._running_stage = None
    # return 1 when finished
    # used for testing or when we need to know that training succeeded
    return self.accelerator.results or 1
def pre_dispatch(self):
    """Run accelerator pre-dispatch setup, then log hyperparameters and the model
    graph once (only when a logger is configured)."""
    self.accelerator.pre_dispatch(self)
    # log hyper-parameters
    if self.logger is not None:
        # save exp to get started (this is where the first experiment logs are written)
        self.logger.log_hyperparams(self.lightning_module.hparams_initial)
        self.logger.log_graph(self.lightning_module)
        self.logger.save()
def post_dispatch(self):
    """Let the accelerator finalize after dispatch, then tear it down."""
    self.accelerator.post_dispatch(self)
    self.accelerator.teardown()
def dispatch(self):
    """Hand control to the accelerator entry point matching the current stage:
    evaluating -> start_evaluating, predicting -> start_predicting,
    otherwise -> start_training."""
    if self.evaluating:
        self.accelerator.start_evaluating(self)
    elif self.predicting:
        self.accelerator.start_predicting(self)
    else:
        self.accelerator.start_training(self)
def run_stage(self):
    """Set up profiling and run the loop for the current stage.

    Returns:
        The evaluation or prediction results for those stages; ``None`` for
        training (``run_train`` itself returns nothing).
    """
    self.profile_connector.setup()
    if self.evaluating:
        return self.run_evaluate()
    if self.predicting:
        return self.run_predict()
    self.run_train()
    return None
def _pre_training_routine(self):
    """One-time setup right before training: distributed barrier, SLURM signal
    handlers, pretrain hooks, optional weights summary, and weight restore."""
    # wait for all to join if on distributed
    self.accelerator.barrier("setup_training")
    # register auto-resubmit when on SLURM
    self.slurm_connector.register_slurm_signal_handlers()
    # --------------------------
    # Pre-train
    # --------------------------
    # on pretrain routine start
    ref_model = self.lightning_module
    self.on_pretrain_routine_start()
    ref_model.on_pretrain_routine_start()
    # print model summary (global rank 0 only, never when testing)
    if self.is_global_zero and self.weights_summary is not None and not self.testing:
        if self.weights_summary in ModelSummary.MODES:
            ref_model.summarize(mode=self.weights_summary)
        else:
            raise MisconfigurationException("weights_summary can be None, " + ", ".join(ModelSummary.MODES))
    # restore training and model before hpc is called
    self.checkpoint_connector.restore_weights()
    # on pretrain routine end
    self.on_pretrain_routine_end()
    ref_model.on_pretrain_routine_end()
def run_train(self) -> None:
    """Main training loop: sanity check, then iterate epochs until `max_epochs`
    / `max_steps` is reached or early stopping fires; `on_train_end` always runs."""
    self._pre_training_routine()
    if not self.is_global_zero and self.progress_bar_callback is not None:
        self.progress_bar_callback.disable()
    self.run_sanity_check(self.lightning_module)
    # nothing has trained yet in this run
    self.checkpoint_connector.has_trained = False
    # enable train mode
    model = self.lightning_module
    model.train()
    torch.set_grad_enabled(True)
    # reload data when needed
    self.train_loop.reset_train_val_dataloaders(model)
    # hook
    self.train_loop.on_train_start()
    try:
        if self.train_loop.should_skip_training():
            return
        # run all epochs: bounded range when max_epochs is set, endless `count` otherwise
        epochs = range(self.current_epoch, self.max_epochs) if self.max_epochs else count(self.current_epoch)
        for epoch in epochs:
            # hook
            self.train_loop.on_train_epoch_start(epoch)
            with self.profiler.profile("run_training_epoch"):
                # run train epoch
                self.train_loop.run_training_epoch()
            if self.max_steps and self.max_steps <= self.global_step:
                return
            # early stopping
            met_min_epochs = (epoch >= self.min_epochs - 1) if self.min_epochs else True
            met_min_steps = self.global_step >= self.min_steps if self.min_steps else True
            if self.should_stop:
                if met_min_epochs and met_min_steps:
                    return
                else:
                    log.info(
                        'Trainer was signaled to stop but required minimum epochs'
                        f' ({self.min_epochs}) or minimum steps ({self.min_steps}) has'
                        ' not been met. Training will continue...'
                    )
        # hook
        # NOTE(review): `on_train_end` also runs in the `finally` below, so a normal
        # loop exit appears to fire it twice — confirm this is intended.
        self.train_loop.on_train_end()
    except KeyboardInterrupt:
        rank_zero_warn('Detected KeyboardInterrupt, attempting graceful shutdown...')
        # user could press Ctrl+c many times... only shutdown once
        if not self.interrupted:
            self.state = TrainerState.INTERRUPTED
            self.on_keyboard_interrupt()
    except (RuntimeError, AssertionError):
        # if an exception is raised, the finally block is executed and can hide the actual exception
        # that was initially raised if `on_train_end` also raises an exception. we want to avoid that
        # for assertions and other runtime errors so we aren't misled while debugging
        print_exc()
    finally:
        # hook
        self.train_loop.on_train_end()
def run_evaluation(self, on_epoch=False):
    """Run one full validation/test epoch over every evaluation dataloader.

    Args:
        on_epoch: When True, epoch-interval learning-rate schedulers are stepped
            after the evaluation epoch.

    Returns:
        Tuple ``(eval_loop_results, deprecated_eval_results)`` — the logged epoch
        metrics and the raw ``evaluation_epoch_end`` output — or ``([], [])``
        when evaluation is skipped (no batches to run).
    """
    if not (self.evaluating or self.sanity_checking):
        rank_zero_warn(
            f"`trainer.run_evaluation()` was called but the running stage is set to {self._running_stage}."
            " This should not happen normally. Setting it to `RunningStage.VALIDATING`", RuntimeWarning
        )
        self.validating = True
    # reset cached results
    self.logger_connector.reset()
    # prepare dataloaders
    dataloaders, max_batches = self.evaluation_loop.get_evaluation_dataloaders()
    # check if we want to skip this evaluation
    if self.evaluation_loop.should_skip_evaluation(max_batches):
        return [], []
    # enable eval mode + no grads
    self.evaluation_loop.on_evaluation_model_eval()
    # ref model
    model = self.lightning_module
    model.zero_grad()
    torch.set_grad_enabled(False)
    # hook
    self.evaluation_loop.on_evaluation_start()
    # set up the eval loop
    self.evaluation_loop.setup(model, max_batches, dataloaders)
    # hook
    self.evaluation_loop.on_evaluation_epoch_start()
    # run validation/testing
    for dataloader_idx, dataloader in enumerate(dataloaders):
        # bookkeeping: per-dataloader step outputs
        dl_outputs = []
        dataloader = self.accelerator.process_dataloader(dataloader)
        dl_max_batches = self.evaluation_loop.max_batches[dataloader_idx]
        for batch_idx, batch in enumerate(dataloader):
            if batch is None:
                continue
            # stop short when running on limited batches
            if batch_idx >= dl_max_batches:
                break
            # hook
            self.evaluation_loop.on_evaluation_batch_start(batch, batch_idx, dataloader_idx)
            # lightning module methods
            with self.profiler.profile("evaluation_step_and_end"):
                output = self.evaluation_loop.evaluation_step(batch, batch_idx, dataloader_idx)
                output = self.evaluation_loop.evaluation_step_end(output)
            # hook + store predictions
            self.evaluation_loop.on_evaluation_batch_end(output, batch, batch_idx, dataloader_idx)
            # log batch metrics
            self.evaluation_loop.log_evaluation_step_metrics(output, batch_idx)
            # track epoch level outputs (detached / optionally moved to cpu)
            dl_outputs = self.track_output_for_epoch_end(dl_outputs, output)
        # store batch level output per dataloader
        self.evaluation_loop.outputs.append(dl_outputs)
    # lightning module method
    deprecated_eval_results = self.evaluation_loop.evaluation_epoch_end()
    # hook
    self.evaluation_loop.on_evaluation_epoch_end()
    # update epoch-level lr_schedulers
    if on_epoch:
        self.optimizer_connector.update_learning_rates(interval='epoch')
    # hook
    self.evaluation_loop.on_evaluation_end()
    # log epoch metrics
    eval_loop_results = self.evaluation_loop.log_epoch_metrics_on_evaluation_end()
    # save predictions to disk
    self.evaluation_loop.predictions.to_disk()
    # enable train mode again
    self.evaluation_loop.on_evaluation_model_train()
    torch.set_grad_enabled(True)
    return eval_loop_results, deprecated_eval_results
def track_output_for_epoch_end(self, outputs, output):
    """Detach a step output (and optionally move it to CPU per
    ``move_metrics_to_cpu``), append it to ``outputs``, and return ``outputs``.

    ``None`` outputs are ignored; ``outputs`` is returned unchanged.
    """
    if output is None:
        return outputs
    if isinstance(output, Result):
        output = output.detach()
        if self.move_metrics_to_cpu:
            output = output.cpu()
    elif isinstance(output, dict):
        output = recursive_detach(output, to_cpu=self.move_metrics_to_cpu)
    elif isinstance(output, torch.Tensor) and output.is_cuda and self.move_metrics_to_cpu:
        output = output.cpu()
    outputs.append(output)
    return outputs
def run_evaluate(self):
    """Run an evaluation epoch and post-process results for the user.

    Returns:
        The logged epoch metrics with tensors converted to Python numbers, or
        ``1`` when evaluation produced no results.
    """
    if not self.is_global_zero and self.progress_bar_callback is not None:
        self.progress_bar_callback.disable()
    assert self.evaluating
    with self.profiler.profile(f"run_{self._running_stage}_evaluation"):
        eval_loop_results, _ = self.run_evaluation()
    if len(eval_loop_results) == 0:
        return 1
    # remove the tensors from the eval results
    for i, result in enumerate(eval_loop_results):
        if isinstance(result, dict):
            for k, v in result.items():
                if isinstance(v, torch.Tensor):
                    # .item() assumes logged metric tensors are scalars
                    result[k] = v.cpu().item()
    return eval_loop_results
def run_predict(self):
    """Run inference over every predict dataloader in eval mode with grads disabled.

    Returns:
        Per-dataloader results collected by ``on_predict_epoch_end``, or ``[]``
        when prediction is skipped (no batches to run).
    """
    self.predict_loop.on_predict_start()
    # prepare dataloaders
    dataloaders, max_batches = self.predict_loop.get_predict_dataloaders()
    # check if we want to skip this evaluation
    if self.predict_loop.should_skip_predict(max_batches):
        return []
    # ref model
    model = self.lightning_module
    # enable eval mode + no grads
    self.predict_loop.on_predict_model_eval()
    model.zero_grad()
    torch.set_grad_enabled(False)
    # set up the eval loop
    self.predict_loop.setup(model, max_batches, dataloaders)
    # run validation/testing
    for dataloader_idx, dataloader in enumerate(dataloaders):
        dataloader = self.accelerator.process_dataloader(dataloader)
        dl_max_batches = self.predict_loop.max_batches[dataloader_idx]
        for batch_idx, batch in enumerate(dataloader):
            if batch is None:
                continue
            # stop short when running on limited batches
            if batch_idx >= dl_max_batches:
                break
            # lightning module methods
            with self.profiler.profile("predict_step"):
                self.predict_loop.predict_step(batch, batch_idx, dataloader_idx)
    results = self.predict_loop.on_predict_epoch_end()
    self.predict_loop.on_predict_end()
    # NOTE(review): unlike run_evaluation, there is no matching
    # `torch.set_grad_enabled(True)` here — confirm callers re-enable grads.
    return results
def run_sanity_check(self, ref_model):
    """Run `num_sanity_val_steps` validation batches before training so that
    validation bugs surface immediately; restores the previous running stage.

    Args:
        ref_model: module whose ``val_dataloader``/``validation_step`` determine
            whether sanity checking applies.
    """
    using_val_step = ref_model.val_dataloader is not None and is_overridden('validation_step', ref_model)
    should_sanity_check = using_val_step and self.num_sanity_val_steps > 0 and self.limit_val_batches > 0
    # run tiny validation (if validation defined)
    # to make sure program won't crash during val
    if should_sanity_check:
        stage = self._running_stage
        self.sanity_checking = True
        # hook and callback
        self.on_sanity_check_start()
        # run eval step
        _, eval_results = self.run_evaluation()
        # allow no returns from eval
        if eval_results is not None and len(eval_results) > 0:
            # when we get a list back, used only the last item
            if isinstance(eval_results, list):
                eval_results = eval_results[-1]
            _, _, _, callback_metrics, _ = self.process_dict_result(eval_results)
            self.logger_connector.callback_metrics = callback_metrics
        self.on_sanity_check_end()
        # restore whatever stage was active before the sanity check
        self._running_stage = stage
def validate(
    self,
    model: Optional[LightningModule] = None,
    val_dataloaders: Optional[Union[DataLoader, List[DataLoader]]] = None,
    ckpt_path: Optional[str] = 'best',
    verbose: bool = True,
    datamodule: Optional[LightningDataModule] = None,
):
    r"""
    Perform one evaluation epoch over the validation set.

    Args:
        model: The model to validate.
        val_dataloaders: Either a single PyTorch DataLoader or a list of them,
            specifying validation samples.
        ckpt_path: Either ``best`` or path to the checkpoint you wish to validate.
            If ``None``, use the current weights of the model.
            When the model is given as argument, this parameter will not apply.
        verbose: If True, prints the validation results.
        datamodule: A instance of :class:`LightningDataModule`.

    Returns:
        The dictionary with final validation results returned by validation_epoch_end.
        If validation_epoch_end is not defined, the output is a list of the dictionaries
        returned by validation_step.
    """
    # --------------------
    # SETUP HOOK
    # --------------------
    self.verbose_evaluate = verbose
    self.state = TrainerState.VALIDATING
    self.validating = True
    # If you supply a datamodule you can't supply val_dataloaders
    if val_dataloaders and datamodule:
        raise MisconfigurationException(
            'You cannot pass both `trainer.validate(val_dataloaders=..., datamodule=...)`'
        )
    model_provided = model is not None
    model = model or self.lightning_module
    # Attach datamodule to get setup/prepare_data added to model before the call to it below
    self.data_connector.attach_datamodule(model, datamodule)
    # Attach dataloaders (if given)
    self.data_connector.attach_dataloaders(model, val_dataloaders=val_dataloaders)
    if not model_provided:
        # only restore checkpoint weights when validating the trainer's own module
        self.validated_ckpt_path = self.__load_ckpt_weights(model, ckpt_path=ckpt_path)
    # run validate: fit() drives the evaluation loop while state is VALIDATING
    results = self.fit(model)
    assert self.state.stopped
    self.validating = False
    return results
def test(
    self,
    model: Optional[LightningModule] = None,
    test_dataloaders: Optional[Union[DataLoader, List[DataLoader]]] = None,
    ckpt_path: Optional[str] = 'best',
    verbose: bool = True,
    datamodule: Optional[LightningDataModule] = None,
):
    r"""
    Perform one evaluation epoch over the test set. It's separated from
    fit to make sure you never run on your test set until you want to.

    Args:
        model: The model to test.
        test_dataloaders: Either a single PyTorch DataLoader or a list of them,
            specifying test samples.
        ckpt_path: Either ``best`` or path to the checkpoint you wish to test.
            If ``None``, use the current weights of the model.
            When the model is given as argument, this parameter will not apply.
        verbose: If True, prints the test results.
        datamodule: A instance of :class:`LightningDataModule`.

    Returns:
        Returns a list of dictionaries, one for each test dataloader containing their respective metrics.
    """
    # --------------------
    # SETUP HOOK
    # --------------------
    self.verbose_evaluate = verbose
    self.state = TrainerState.TESTING
    self.testing = True
    # If you supply a datamodule you can't supply test_dataloaders
    if test_dataloaders and datamodule:
        raise MisconfigurationException('You cannot pass both `trainer.test(test_dataloaders=..., datamodule=...)`')
    model_provided = model is not None
    model = model or self.lightning_module
    # Attach datamodule to get setup/prepare_data added to model before the call to it below
    self.data_connector.attach_datamodule(model, datamodule)
    # Attach dataloaders (if given)
    self.data_connector.attach_dataloaders(model, test_dataloaders=test_dataloaders)
    if not model_provided:
        # only restore checkpoint weights when testing the trainer's own module
        self.tested_ckpt_path = self.__load_ckpt_weights(model, ckpt_path=ckpt_path)
    # run test: fit() drives the evaluation loop while state is TESTING
    results = self.fit(model)
    assert self.state.stopped
    self.testing = False
    return results
def __load_ckpt_weights(
    self,
    model,
    ckpt_path: Optional[str] = None,
) -> Optional[str]:
    """Resolve ``ckpt_path`` (handling the ``'best'`` alias) and load its
    ``state_dict`` into ``model``.

    Args:
        model: module whose weights are replaced via ``load_state_dict``.
        ckpt_path: ``'best'``, an explicit checkpoint path, or ``None`` (no-op).

    Returns:
        The resolved checkpoint path, or ``None`` when nothing was loaded.

    Raises:
        MisconfigurationException: if ``'best'`` is requested but the checkpoint
            callback has no best model path recorded.
    """
    # if user requests the best checkpoint but we don't have it, error
    if ckpt_path == 'best' and not self.checkpoint_callback.best_model_path:
        raise MisconfigurationException(
            'ckpt_path is "best", but `ModelCheckpoint` is not configured to save the best model.'
        )
    # load best weights
    if ckpt_path is not None:
        # ckpt_path is 'best' so load the best model
        if ckpt_path == 'best':
            ckpt_path = self.checkpoint_callback.best_model_path
        if not ckpt_path:
            fn = self.state.value
            raise MisconfigurationException(
                f'`.{fn}()` found no path for the best weights: "{ckpt_path}". Please'
                # BUG FIX: this fragment was a plain string, so the literal text
                # "{fn}" appeared in the user-facing error; it is now an f-string.
                f' specify a path for a checkpoint `.{fn}(ckpt_path=PATH)`'
            )
        # all processes synchronize before reading the checkpoint file
        self.training_type_plugin.barrier()
        ckpt = pl_load(ckpt_path, map_location=lambda storage, loc: storage)
        model.load_state_dict(ckpt['state_dict'])
    return ckpt_path
def predict(
    self,
    model: Optional[LightningModule] = None,
    dataloaders: Optional[Union[DataLoader, List[DataLoader]]] = None,
    datamodule: Optional[LightningDataModule] = None,
):
    r"""
    Separates from fit to make sure you never run on your predictions set until you want to.
    This will call the model forward function to compute predictions.

    Args:
        model: The model to predict on.
        dataloaders: Either a single
            Pytorch Dataloader or a list of them, specifying inference samples.
        datamodule: A instance of :class:`LightningDataModule`.

    Returns:
        Returns a list of dictionaries, one for each provided dataloader containing their respective predictions.
    """
    # --------------------
    # SETUP HOOK
    # --------------------
    # If you supply a datamodule you can't supply dataloaders
    model = model or self.lightning_module
    self.state = TrainerState.PREDICTING
    self.predicting = True
    if dataloaders and datamodule:
        raise MisconfigurationException(
            'You cannot pass dataloaders to trainer.predict if you supply a datamodule.'
        )
    # Attach datamodule to get setup/prepare_data added to model before the call to it below
    self.data_connector.attach_datamodule(model, datamodule)
    # Attach dataloaders (if given)
    self.data_connector.attach_dataloaders(model, predict_dataloaders=dataloaders)
    # fit() drives the predict loop while state is PREDICTING
    results = self.fit(model)
    assert self.state.stopped
    self.predicting = False
    return results
def tune(
    self,
    model: LightningModule,
    train_dataloader: Optional[DataLoader] = None,
    val_dataloaders: Optional[Union[DataLoader, List[DataLoader]]] = None,
    datamodule: Optional[LightningDataModule] = None,
):
    r"""
    Runs routines to tune hyperparameters before training.

    Args:
        datamodule: A instance of :class:`LightningDataModule`.
        model: Model to tune.
        train_dataloader: A Pytorch DataLoader with training samples. If the model has
            a predefined train_dataloader method this will be skipped.
        val_dataloaders: Either a single Pytorch Dataloader or a list of them, specifying validation samples.
            If the model has a predefined val_dataloaders method this will be skipped
    """
    self.state = TrainerState.TUNING
    self.tuning = True
    # the tuner performs the actual lr-find / batch-size-scaling routines
    self.tuner.tune(model, train_dataloader, val_dataloaders, datamodule)
    assert self.state.stopped
    self.tuning = False
def call_setup_hook(self, model: LightningModule) -> None:
    """Call ``setup(stage=...)`` on the datamodule (at most once per stage),
    the trainer, and the model — in that order."""
    assert self.state.running, f"TrainerState: {self.state}"
    state = self._setup_state
    if self.datamodule is not None:
        # the `has_setup_{state}` flag guards against running datamodule setup twice
        called = getattr(self.datamodule, f'has_setup_{state}')
        if not called:
            self.datamodule.setup(stage=state)
    self.setup(model, stage=state)
    model.setup(stage=state)
def call_teardown_hook(self, model: LightningModule) -> None:
    """Run teardown for the finished stage on the profiler, the trainer itself,
    and the model, in that order."""
    stage = self._teardown_state
    for target in (self.profiler, self, model):
        target.teardown(stage=stage)
def _reset_result_and_set_hook_fx_name(self, hook_name):
    """Attach a fresh Result object and the current hook name to the module.

    Returns:
        True when metric caching should be skipped for this hook, False otherwise.
    """
    # on_before_zero_grad is called within training_step
    if any(token in hook_name for token in ("batch_start", "on_before_zero_grad")):
        return True
    model_ref = self.lightning_module
    if model_ref is not None:
        # used to track current hook name called
        model_ref._results = Result()
        model_ref._current_hook_fx_name = hook_name
    return False
def _cache_logged_metrics(self):
    """Flush metrics logged during the current hook into the logger connector cache."""
    if self.lightning_module is not None:
        # capture logging for this hook
        self.logger_connector.cache_logged_metrics()
def call_hook(self, hook_name, *args, **kwargs):
    """Invoke ``hook_name`` on the trainer, then the LightningModule (or, if the
    module doesn't override it, the accelerator), profiling the call and caching
    any metrics it logged.

    Returns:
        The output of the module/accelerator hook, or None when only the trainer
        implements it.
    """
    # set hook_name to model + reset Result obj
    skip = self._reset_result_and_set_hook_fx_name(hook_name)
    # always profile hooks
    with self.profiler.profile(hook_name):
        # first call trainer hook
        if hasattr(self, hook_name):
            trainer_hook = getattr(self, hook_name)
            trainer_hook(*args, **kwargs)
        # next call hook in lightningModule
        output = None
        model_ref = self.lightning_module
        if is_overridden(hook_name, model_ref):
            hook_fx = getattr(model_ref, hook_name)
            output = hook_fx(*args, **kwargs)
        # if the PL module doesn't have the hook then call the accelerator
        # used to auto-reduce things for the user with Results obj
        elif hasattr(self.accelerator, hook_name):
            accelerator_hook = getattr(self.accelerator, hook_name)
            output = accelerator_hook(*args, **kwargs)
    if not skip:
        self._cache_logged_metrics()
    return output
|
// Write a generic function chainer that takes a starting value and an array of
// functions to execute on it (an array of symbols in Ruby). Return the final
// value after execution is complete.
//
// Increment `num` by one.
function add(num) {
  return 1 + num;
}
// Scale `num` by a factor of 30.
function mult(num) {
  return 30 * num;
}
// chain(2, [add, mult]) returns 90: (2 + 1) * 30
// Thread `num` through each function in `funcArr`, feeding each
// function's return value into the next call.
function chain(num, funcArr) {
  for (const fn of funcArr) {
    num = fn(num);
  }
  return num;
}
chain(2, [add, mult]);
//best solution -- uses reduce, as do most top answers.
//time to look up reduce!
// Reduce-based chainer: the accumulator starts at `v` and each
// function in `fns` transforms it in turn.
function chain(v, fns) {
  return fns.reduce((acc, fn) => fn(acc), v);
}
|
/**
 * Base class for grid data stores. Holds paging/sorting/filtering settings
 * and tracks the order in which column sorts are applied.
 */
export class BaseStore {
  sortProcessingOrder = []; // Represents which order to apply sorts to each column

  /**
   * @param {Object}  settings                   Grid configuration.
   * @param {boolean} settings.sortable          Whether sorting is enabled.
   * @param {boolean} settings.filterable        Whether filtering is enabled.
   * @param {Array}   settings.columnDefinitions Column metadata (required).
   * @param {boolean} settings.pageable          Whether paging is enabled.
   * @param {number}  settings.pageSize          Items per page.
   * @param {number}  settings.page              Current (1-based) page.
   * @throws {Error} when `columnDefinitions` is missing.
   */
  constructor(settings) {
    this.sortable = settings.sortable;
    this.filterable = settings.filterable;
    this.columnDefinitions = settings.columnDefinitions;
    this.pageable = settings.pageable;
    this.pageSize = settings.pageSize;
    this.page = settings.page;
    this.firstVisibleItem = 0;
    this.lastVisibleItem = 0;
    // todo: make sure that it is array and has proper base class
    if (this.columnDefinitions === undefined) {
      // BUG FIX: the error message previously read "deffined"
      throw new Error('Argument Exception: "columnDefinitions" setting must be defined!');
    }
  }

  /** Reload data from the backing source. Must be overridden by subclasses. */
  refresh() {
    throw new Error('Not implemented method!');
  }

  /** Return the current data page. Must be overridden by subclasses. */
  getData() {
    throw new Error('Not implemented method!');
  }

  isFilterable() {
    return this.filterable;
  }

  isSortable() {
    return this.sortable;
  }

  isPageable() {
    return this.pageable;
  }

  setPage(page) {
    this.page = page;
  }

  setPageSize(pageSize) {
    this.pageSize = pageSize;
  }

  /**
   * Push the current paging state into the attached pager (if any) and
   * recompute the 1-based indices of the first/last visible items.
   */
  updatePager() {
    if (this.pager) {
      this.pager.update(this.page, Number(this.pageSize), this.count);
    }
    this.firstVisibleItem = (this.page - 1) * Number(this.pageSize) + 1;
    this.lastVisibleItem = Math.min(this.page * Number(this.pageSize), this.count);
  }

  setPager(pager) {
    this.pager = pager;
    this.updatePager();
  }

  /**
   * Update the sort order: any existing sort on the same column is removed,
   * then the new sort is appended unless its value is undefined (which just
   * clears that column's sort).
   * @returns {Array} the updated processing order.
   */
  changeSortProcessingOrder(sort) {
    const index = this.sortProcessingOrder.findIndex(el => el.column === sort.column);
    if (index > -1) {
      this.sortProcessingOrder.splice(index, 1);
    }
    if (sort.value !== undefined) {
      this.sortProcessingOrder.push(sort);
    }
    return this.sortProcessingOrder;
  }

  /** Replace the whole sort configuration with `sorts`, applied in order. */
  applySortOptions(sorts) {
    this.sortProcessingOrder = [];
    sorts.forEach(sort => this.changeSortProcessingOrder(sort));
  }
}
|
# -*- coding: utf-8 -*-
# Copyright © 2015 The Coil Contributors
# Copyright © 2014 Roberto Alsina
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function, unicode_literals
from nikola.plugin_categories import Command
import webbrowser
class WebAppCoilAdvertisement(Command):
    """Deprecation stub for the removed ``nikola webapp`` command.

    The old Nikola WebApp was replaced by Coil CMS; executing this command
    only prints a notice pointing users at the Coil setup guide and, with
    ``-b``, opens that guide in a web browser.
    """
    name = "webapp"
    doc_usage = ""
    doc_purpose = "deprecated, use Coil CMS instead"
    cmd_options = [
        {
            'name': 'browser',
            'short': 'b',
            'type': bool,
            'help': 'Start a web browser.',
            'default': False,
        },
        {
            'name': 'port',
            'short': 'p',
            'long': 'port',
            'default': 8001,
            'type': int,
            # Fixed typo in the user-visible help text: "nummber" -> "number".
            'help': 'Port number (default: 8001)',
        },
    ]

    def _execute(self, options, args):
        """Print the Coil CMS notice; open the setup guide if -b was given."""
        for line in (
            "Nikola WebApp is not available anymore. It has been replaced by Coil CMS.",
            "Coil CMS is a full-featured CMS, ready to be used everywhere from small single-user sites environments to big corporate blogs.",
            "The most basic setup does not require a database and can be done in 2 minutes.",
            "Coil setup guide: http://coil.readthedocs.org/admin/setup/",
        ):
            print(line)
        if options and options.get('browser'):
            webbrowser.open('http://coil.readthedocs.org/admin/setup')
|
#ifndef KALMAN_FILTER_H_
#define KALMAN_FILTER_H_
#include "Eigen/Dense"
// Plain linear / extended Kalman filter: state x_, covariance P_, and the
// model matrices are public so the owning fusion code can set them directly.
class KalmanFilter {
 public:
  /**
   * Constructor
   */
  KalmanFilter();
  /**
   * Destructor
   */
  virtual ~KalmanFilter();
  /**
   * Init Initializes Kalman filter
   * @param x_in Initial state
   * @param P_in Initial state covariance
   * @param F_in Transition matrix
   * @param H_in Measurement matrix
   * @param R_in Measurement covariance matrix
   * @param Q_in Process covariance matrix
   */
  void Init(Eigen::VectorXd &x_in, Eigen::MatrixXd &P_in, Eigen::MatrixXd &F_in,
      Eigen::MatrixXd &H_in, Eigen::MatrixXd &R_in, Eigen::MatrixXd &Q_in);
  /**
   * Prediction Predicts the state and the state covariance
   * using the process model.
   * NOTE(review): takes no arguments — the elapsed time between steps must
   * already be folded into F_ and Q_ by the caller before invoking this.
   */
  void Predict();
  /**
   * Updates the state by using standard Kalman Filter equations
   * @param z The measurement at k+1
   */
  void Update(const Eigen::VectorXd &z);
  /**
   * Updates the state by using Extended Kalman Filter equations
   * @param z The measurement at k+1
   */
  void UpdateEKF(const Eigen::VectorXd &z);
  /**
   * Updates the state by using rest of the Kalman Filter equations
   * (shared tail of Update/UpdateEKF once the innovation is known)
   * @param y The difference in measurement and prediction at k+1
   */
  void UpdateAny(const Eigen::VectorXd &y);
  // state vector
  Eigen::VectorXd x_;
  // state covariance matrix
  Eigen::MatrixXd P_;
  // state transition matrix
  Eigen::MatrixXd F_;
  // process covariance matrix
  Eigen::MatrixXd Q_;
  // measurement matrix
  Eigen::MatrixXd H_;
  // measurement covariance matrix
  Eigen::MatrixXd R_;
};
#endif // KALMAN_FILTER_H_
|
/* generated by Svelte vX.Y.Z */
import { create_ssr_component } from "svelte/internal";
import { onDestroy, onMount } from 'svelte';
// NOTE(review): `output` is not defined anywhere in this file, so calling this
// throws a ReferenceError. This looks like a generated test fixture/stub —
// confirm intended behavior before using it.
function preload(input) {
    return output;
}
// Debug helper: logs a fixed marker string to the console.
function foo() {
    const marker = 'foo';
    console.log(marker);
}
// Svelte action stub for swipe gestures. Intentionally a no-op for now:
// `callback` is never invoked and no listeners are attached to `node`.
function swipe(node, callback) {
} // TODO implement
// Server-side-rendered Svelte component: registers lifecycle logging via
// onMount/onDestroy and renders an empty string (no markup).
const Component = create_ssr_component(($$result, $$props, $$bindings, slots) => {
    onMount(() => {
        console.log('onMount');
    });
    onDestroy(() => {
        console.log('onDestroy');
    });
    return ``;
});
export default Component;
export { preload };
|
#!/usr/bin/env python
def auto_run(root='j023507-040202'):
    """Run the grizli preprocessing pipeline for a single HST field.

    Expects ``{root}_footprint.fits`` in the current working directory.
    Performs preprocessing only (no extractions or fits), saves an RGB
    field image, and attempts a best-effort eazy photo-z run.
    """
    import os
    import matplotlib.pyplot as plt
    from grizli import utils
    from grizli.pipeline import auto_script, photoz
    utils.set_warnings()
    # Footprint table; also carries proposal metadata (proposal_pi) used below.
    tab = utils.GTable.gread('{0}_footprint.fits'.format(root))
    HOME_PATH = os.getcwd()
    # Whitelist of HST filters the pipeline is allowed to process.
    auto_script.VALID_FILTERS = ['F098M', 'F105W', 'F110W', 'F125W', 'F127M', 'F139M', 'F140W', 'F153M', 'F160W', 'F410M', 'F435W', 'F438W', 'F439W', 'F450W', 'F467M', 'F475W', 'F475X', 'F547M', 'F550M', 'F555W', 'F569W', 'F600LP', 'F606W', 'F621M', 'F622W', 'F625W', 'F675W', 'F689M', 'F702W', 'F763M', 'F775W', 'F791W', 'F814W', 'F845M', 'F850LP', 'F350LP']
    # Flag parallel fields by PI-name substring 'alkan' (presumably matches
    # the Malkan parallel programs — TODO confirm against the footprint data).
    IS_PARALLEL = utils.column_string_operation(tab['proposal_pi'], 'alkan', method='count', logical='or').sum() > 0
    # Preprocess only: no ramp inspection, no manual alignment, no extractions/fits.
    auto_script.go(root=root, maglim=[19, 23], HOME_PATH=HOME_PATH, inspect_ramps=False, manual_alignment=False, is_parallel_field=IS_PARALLEL, reprocess_parallel=False, only_preprocess=True, run_extractions=False, run_fit=False, s3_sync='cp', fine_radec=None, combine_all_filters=False, gaia_by_date=True, align_simple=False, align_clip=100, master_radec=None, is_dash=False, run_parse_visits=True, reference_wcs_filters=['F160W','F140W','F125W','F105W','F110W','F098M','F814W','F850LP', 'F606W','F435W'])
    # Save the RGB preview without opening interactive windows.
    plt.ioff()
    fig = auto_script.field_rgb(root=root, HOME_PATH=HOME_PATH, xsize=18)
    plt.close(fig)
    # Photo-z
    # Best-effort: photo-z failures should not abort the preprocessing run.
    try:
        out = photoz.eazy_photoz(root, object_only=False, force=True,
                                 aper_ix=1, sys_err=0.05, apply_prior=False,
                                 beta_prior=True,
                                 external_limits=3, external_sys_err=0.3)
    except:
        pass
if __name__ == "__main__":
    # CLI entry point: aws.py {field} {init/run/summary} [min_mag,max_mag]
    import sys
    from grizli import utils
    utils.set_warnings()
    if len(sys.argv) < 3:
        print('Usage: aws.py {field} {init/run/summary} [min_mag,max_mag]')
        # Bug fix: the original used a bare `exit` (a name reference, not a
        # call), which does nothing — the script fell through with missing
        # arguments. Exit explicitly with a non-zero status instead.
        sys.exit(1)
    root = sys.argv[1]
    auto_run(root=root)
|
# Copyright (c) 2022 Tulir Asokan
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import annotations
from typing import Type
import logging
from mautrix.api import Method, Path, PathBuilder
from mautrix.appservice import IntentAPI
from mautrix.types import RoomID, UserID
from mautrix.util.logging import TraceLogger
from .puppet import BasePuppet
from .user import BaseUser
class NotificationDisabler:
    """Async context manager that mutes push notifications for one user in one
    room while history is being backfilled.

    On enter it installs a room-scoped ``dont_notify`` push rule through the
    user's double puppet; on exit it removes that rule again. If the user has
    no real double puppet, or the feature is disabled in config, it is a no-op.
    """
    # Set by the concrete bridge: puppet class used to look up double puppets.
    puppet_cls: Type[BasePuppet]
    # Global switch, normally populated from bridge config.
    config_enabled: bool = False
    log: TraceLogger = logging.getLogger("mau.notification_disabler")
    user_id: UserID
    room_id: RoomID
    intent: IntentAPI | None
    enabled: bool
    def __init__(self, room_id: RoomID, user: BaseUser) -> None:
        self.user_id = user.mxid
        self.room_id = room_id
        self.enabled = False
    @property
    def _path(self) -> PathBuilder:
        # Push-rule endpoint for a per-room override rule with a
        # bridge-specific rule ID (unique per room).
        return Path.v3.pushrules["global"].override[
            f"net.maunium.silence_while_backfilling:{self.room_id}"
        ]
    @property
    def _rule(self) -> dict:
        # Rule body: suppress notifications for all events in this room.
        return {
            "actions": ["dont_notify"],
            "conditions": [
                {
                    "kind": "event_match",
                    "key": "room_id",
                    "pattern": self.room_id,
                }
            ],
        }
    async def __aenter__(self) -> None:
        puppet = await self.puppet_cls.get_by_custom_mxid(self.user_id)
        # Only possible through a real logged-in double puppet; otherwise no-op.
        self.intent = puppet.intent if puppet and puppet.is_real_user else None
        if not self.intent or not self.config_enabled:
            return
        self.enabled = True
        try:
            self.log.debug(f"Disabling notifications in {self.room_id} for {self.intent.mxid}")
            await self.intent.api.request(Method.PUT, self._path, content=self._rule)
        except Exception:
            self.log.warning(
                f"Failed to disable notifications in {self.room_id} "
                f"for {self.intent.mxid} while backfilling",
                exc_info=True,
            )
            raise
    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        if not self.enabled:
            return
        try:
            self.log.debug(f"Re-enabling notifications in {self.room_id} for {self.intent.mxid}")
            await self.intent.api.request(Method.DELETE, self._path)
        except Exception:
            # Best-effort: failing to remove the rule is logged but never
            # masks an exception raised inside the with-block.
            self.log.warning(
                f"Failed to re-enable notifications in {self.room_id} "
                f"for {self.intent.mxid} after backfilling",
                exc_info=True,
            )
|
# -*- coding: utf-8 -*-
# Copyright 2015-2020 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Wen Guan <wen.guan@cern.ch>, 2015-2016
# - Mario Lassnig <mario.lassnig@cern.ch>, 2015
# - Martin Barisits <martin.barisits@cern.ch>, 2015-2018
# - Vincent Garonne <vincent.garonne@cern.ch>, 2015-2018
# - Cedric Serfon <cedric.serfon@cern.ch>, 2018
# - Robert Illingworth <illingwo@fnal.gov>, 2018
# - Hannes Hansen <hannes.jakob.hansen@cern.ch>, 2018
# - Andrew Lister <andrew.lister@stfc.ac.uk>, 2019
# - Thomas Beermann <thomas.beermann@cern.ch>, 2020-2021
# - Benedikt Ziemons <benedikt.ziemons@cern.ch>, 2020
"""
Conveyor is a daemon to manage file transfers.
"""
from __future__ import division
import datetime
import json
import logging
import os
import socket
import threading
import time
import traceback
import stomp
import rucio.db.sqla.util
from rucio.common import exception
from rucio.common.config import config_get, config_get_bool, config_get_int
from rucio.common.constants import FTS_COMPLETE_STATE
from rucio.common.logging import setup_logging
from rucio.common.policy import get_policy
from rucio.core import heartbeat, request
from rucio.core.monitor import record_counter
from rucio.core.transfer import set_transfer_update_time
from rucio.db.sqla.constants import RequestState
logging.getLogger("stomp").setLevel(logging.CRITICAL)
graceful_stop = threading.Event()
class Receiver(object):
    """stomp.py listener that consumes FTS3 transfer-completion messages and
    feeds terminal transfer states back into Rucio's request machinery.

    In full mode it updates the request state directly; otherwise it only
    touches the transfer's update time so the poller picks it up.
    """
    def __init__(self, broker, id, total_threads, full_mode=False):
        self.__broker = broker
        self.__id = id
        self.__total_threads = total_threads
        self.__full_mode = full_mode
    def on_error(self, headers, message):
        # stomp.py error-frame callback: count and log, nothing to recover.
        record_counter('daemons.conveyor.receiver.error')
        logging.error('[%s] %s' % (self.__broker, message))
    def on_message(self, headers, message):
        record_counter('daemons.conveyor.receiver.message_all')
        try:
            msg = json.loads(message)
        except Exception:
            msg = json.loads(message[:-1])  # Note: I am not sure if this is needed anymore, this was due to an unparsable EOT character
        # Ignore messages for other virtual organisations / policies.
        if 'vo' not in msg or msg['vo'] != get_policy():
            return
        # Only process jobs that Rucio itself submitted.
        if 'job_metadata' in msg.keys() \
           and isinstance(msg['job_metadata'], dict) \
           and 'issuer' in msg['job_metadata'].keys() \
           and str(msg['job_metadata']['issuer']) == str('rucio'):
            # Accept single-replica jobs, or multi-replica jobs once no longer ACTIVE.
            if 'job_m_replica' in msg.keys() and 'job_state' in msg.keys() \
               and (str(msg['job_m_replica']).lower() == str('false') or (str(msg['job_m_replica']).lower() == str('true') and str(msg['job_state']) != str('ACTIVE'))):
                if 'request_id' in msg['job_metadata']:
                    # submitted by old submitter
                    response = {'new_state': None,
                                'transfer_id': msg.get('tr_id').split("__")[-1],
                                'job_state': msg.get('t_final_transfer_state', None),
                                'src_url': msg.get('src_url', None),
                                'dst_url': msg.get('dst_url', None),
                                'transferred_at': datetime.datetime.utcfromtimestamp(float(msg.get('tr_timestamp_complete', 0)) / 1000),
                                'duration': (float(msg.get('tr_timestamp_complete', 0)) - float(msg.get('tr_timestamp_start', 0))) / 1000,
                                'reason': msg.get('t__error_message', None),
                                'scope': msg['job_metadata'].get('scope', None),
                                'name': msg['job_metadata'].get('name', None),
                                'src_rse': msg['job_metadata'].get('src_rse', None),
                                'dst_rse': msg['job_metadata'].get('dst_rse', None),
                                'request_id': msg['job_metadata'].get('request_id', None),
                                'activity': msg['job_metadata'].get('activity', None),
                                'src_rse_id': msg['job_metadata'].get('src_rse_id', None),
                                'dest_rse_id': msg['job_metadata'].get('dest_rse_id', None),
                                'previous_attempt_id': msg['job_metadata'].get('previous_attempt_id', None),
                                'adler32': msg['job_metadata'].get('adler32', None),
                                'md5': msg['job_metadata'].get('md5', None),
                                'filesize': msg['job_metadata'].get('filesize', None),
                                'external_host': msg.get('endpnt', None),
                                'job_m_replica': msg.get('job_m_replica', None),
                                'details': {'files': msg['job_metadata']}}
                else:
                    # for new submitter, file_metadata replace the job_metadata
                    response = {'new_state': None,
                                'transfer_id': msg.get('tr_id').split("__")[-1],
                                'job_state': msg.get('t_final_transfer_state', None),
                                'src_url': msg.get('src_url', None),
                                'dst_url': msg.get('dst_url', None),
                                'started_at': datetime.datetime.utcfromtimestamp(float(msg.get('tr_timestamp_start', 0)) / 1000),
                                'transferred_at': datetime.datetime.utcfromtimestamp(float(msg.get('tr_timestamp_complete', 0)) / 1000),
                                'duration': (float(msg.get('tr_timestamp_complete', 0)) - float(msg.get('tr_timestamp_start', 0))) / 1000,
                                'reason': msg.get('t__error_message', None),
                                'scope': msg['file_metadata'].get('scope', None),
                                'name': msg['file_metadata'].get('name', None),
                                'src_type': msg['file_metadata'].get('src_type', None),
                                'dst_type': msg['file_metadata'].get('dst_type', None),
                                'src_rse': msg['file_metadata'].get('src_rse', None),
                                'dst_rse': msg['file_metadata'].get('dst_rse', None),
                                'request_id': msg['file_metadata'].get('request_id', None),
                                'activity': msg['file_metadata'].get('activity', None),
                                'src_rse_id': msg['file_metadata'].get('src_rse_id', None),
                                'dest_rse_id': msg['file_metadata'].get('dest_rse_id', None),
                                'previous_attempt_id': msg['file_metadata'].get('previous_attempt_id', None),
                                'adler32': msg['file_metadata'].get('adler32', None),
                                'md5': msg['file_metadata'].get('md5', None),
                                'filesize': msg['file_metadata'].get('filesize', None),
                                'external_host': msg.get('endpnt', None),
                                'job_m_replica': msg.get('job_m_replica', None),
                                'details': {'files': msg['file_metadata']}}
                record_counter('daemons.conveyor.receiver.message_rucio')
                # Map the FTS terminal state onto a Rucio request state.
                if str(msg['t_final_transfer_state']) == FTS_COMPLETE_STATE.OK:
                    response['new_state'] = RequestState.DONE
                elif str(msg['t_final_transfer_state']) == FTS_COMPLETE_STATE.ERROR:
                    response['new_state'] = RequestState.FAILED
                try:
                    if response['new_state']:
                        logging.info('RECEIVED DID %s:%s FROM %s TO %s REQUEST %s TRANSFER_ID %s STATE %s' % (response['scope'],
                                                                                                              response['name'],
                                                                                                              response['src_rse'],
                                                                                                              response['dst_rse'],
                                                                                                              response['request_id'],
                                                                                                              response['transfer_id'],
                                                                                                              response['new_state']))
                        if self.__full_mode:
                            # Full mode: apply the state change directly.
                            ret = request.update_request_state(response)
                            record_counter('daemons.conveyor.receiver.update_request_state.%s' % ret)
                        else:
                            # Light mode: backdate the transfer's update time so
                            # the poller re-examines it immediately.
                            try:
                                logging.debug("Update request %s update time" % response['request_id'])
                                set_transfer_update_time(response['external_host'], response['transfer_id'], datetime.datetime.utcnow() - datetime.timedelta(hours=24))
                                record_counter('daemons.conveyor.receiver.set_transfer_update_time')
                            except Exception as error:
                                logging.debug("Failed to update transfer's update time: %s" % str(error))
                except Exception:
                    # Never let a bad message kill the listener thread.
                    logging.critical(traceback.format_exc())
def receiver(id, total_threads=1, full_mode=False):
    """
    Main loop to consume messages from the FTS3 producer.

    Resolves the configured broker aliases, opens one STOMP connection per
    resolved broker (SSL or username/password), subscribes a Receiver
    listener on each, and keeps the connections alive while emitting
    heartbeats until graceful_stop is set.
    """
    logging.info('receiver starting in full mode: %s' % full_mode)
    executable = 'conveyor-receiver'
    hostname = socket.getfqdn()
    pid = os.getpid()
    hb_thread = threading.current_thread()
    heartbeat.sanity_check(executable=executable, hostname=hostname)
    # Make an initial heartbeat so that all finishers have the correct worker number on the next try
    heartbeat.live(executable, hostname, pid, hb_thread)
    brokers_alias = []
    brokers_resolved = []
    try:
        brokers_alias = [b.strip() for b in config_get('messaging-fts3', 'brokers').split(',')]
    except Exception:
        raise Exception('Could not load brokers from configuration')
    logging.info('resolving broker dns alias: %s' % brokers_alias)
    # One alias may resolve to several broker IPs; connect to each.
    brokers_resolved = []
    for broker in brokers_alias:
        addrinfos = socket.getaddrinfo(broker, 0, socket.AF_INET, 0, socket.IPPROTO_TCP)
        brokers_resolved.extend(ai[4][0] for ai in addrinfos)
    logging.info('brokers resolved to %s', brokers_resolved)
    logging.info('checking authentication method')
    # Default to SSL certificate auth unless the config says otherwise.
    use_ssl = True
    try:
        use_ssl = config_get_bool('messaging-fts3', 'use_ssl')
    except:
        logging.info('could not find use_ssl in configuration -- please update your rucio.cfg')
    port = config_get_int('messaging-fts3', 'port')
    vhost = config_get('messaging-fts3', 'broker_virtual_host', raise_exception=False)
    if not use_ssl:
        username = config_get('messaging-fts3', 'username')
        password = config_get('messaging-fts3', 'password')
        port = config_get_int('messaging-fts3', 'nonssl_port')
    conns = []
    for broker in brokers_resolved:
        if not use_ssl:
            logging.info('setting up username/password authentication: %s' % broker)
            con = stomp.Connection12(host_and_ports=[(broker, port)],
                                     use_ssl=False,
                                     vhost=vhost,
                                     reconnect_attempts_max=999)
        else:
            logging.info('setting up ssl cert/key authentication: %s' % broker)
            con = stomp.Connection12(host_and_ports=[(broker, port)],
                                     use_ssl=True,
                                     ssl_key_file=config_get('messaging-fts3', 'ssl_key_file'),
                                     ssl_cert_file=config_get('messaging-fts3', 'ssl_cert_file'),
                                     vhost=vhost,
                                     reconnect_attempts_max=999)
        conns.append(con)
    logging.info('receiver started')
    # Keep-alive loop: heartbeat every second and (re)connect dropped brokers.
    while not graceful_stop.is_set():
        heartbeat.live(executable, hostname, pid, hb_thread)
        for conn in conns:
            if not conn.is_connected():
                logging.info('connecting to %s' % conn.transport._Transport__host_and_ports[0][0])
                record_counter('daemons.messaging.fts3.reconnect.%s' % conn.transport._Transport__host_and_ports[0][0].split('.')[0])
                conn.set_listener('rucio-messaging-fts3', Receiver(broker=conn.transport._Transport__host_and_ports[0], id=id, total_threads=total_threads, full_mode=full_mode))
                conn.start()
                if not use_ssl:
                    conn.connect(username, password, wait=True)
                else:
                    conn.connect(wait=True)
                conn.subscribe(destination=config_get('messaging-fts3', 'destination'),
                               id='rucio-messaging-fts3',
                               ack='auto')
        time.sleep(1)
    logging.info('receiver graceful stop requested')
    # Best-effort disconnect of every broker connection.
    for conn in conns:
        try:
            conn.disconnect()
        except Exception:
            pass
    heartbeat.die(executable, hostname, pid, hb_thread)
    logging.info('receiver graceful stop done')
def stop(signum=None, frame=None):
    """Signal handler: request a graceful shutdown of all receiver threads.

    Matches the ``signal.signal`` handler signature; both arguments are
    ignored.
    """
    graceful_stop.set()
def run(once=False, total_threads=1, full_mode=False):
    """
    Starts up the receiver thread(s).

    :param once: unused; kept for interface compatibility with the other daemons.
    :param total_threads: number of receiver threads to spawn.
    :param full_mode: passed through to each receiver thread.
    """
    setup_logging()
    if rucio.db.sqla.util.is_old_db():
        raise exception.DatabaseException('Database was not updated, daemon won\'t start')
    logging.info('starting receiver thread')
    threads = [threading.Thread(target=receiver, kwargs={'id': i,
                                                         'full_mode': full_mode,
                                                         'total_threads': total_threads}) for i in range(0, total_threads)]
    for thread in threads:
        thread.start()
    logging.info('waiting for interrupts')
    # Interruptible joins require a timeout.
    # Bug fix: Thread.isAlive() was removed in Python 3.9 -- use is_alive().
    # Also join() returns None, so rebuild the list from still-living threads
    # instead of collecting join()'s return values in a comprehension.
    while threads:
        for thread in threads:
            thread.join(timeout=3.14)
        threads = [thread for thread in threads if thread.is_alive()]
|
'''
RSA_OOP_test
@author: Sollazzo Nicholas
@date: March 20th 2017
@version: 1.0
@status: GREEN
'''
from RSA import RSA
# Smoke test: build a keypair, encrypt a fixed plaintext, and verify that
# decryption round-trips by printing both forms.
rsa = RSA()
print(rsa.dict())
#en = rsa.encrypt(input('>'))
en = rsa.encrypt('das')
print('encrypted:', en)
print('decrypted:', rsa.decrypt(en))
|
"""
UhbdInterface.py
Module to interface with uhbd binaries.
"""
__author__ = "Michael J. Harms"
# ---------- Initialize module --------------------
import __init__, UhbdFullFunctions, UhbdSingleFunctions, UhbdErrorCheck
import os, shutil
from common import SystemOps, Error
# Set up uhbd binary
# Locate the uhbd executable under the $UHBD directory and fail fast if it
# is missing. NOTE(review): `global` at module level is a no-op; the name is
# module-global regardless — kept only as a marker that functions reassign it.
global uhbd
bin_path = SystemOps.checkEnvironVariable('UHBD')
uhbd = os.path.join(bin_path,'uhbd')
if not os.path.isfile(uhbd):
    raise OSError("uhbd binary not found in $UHBD (%s)" % bin_path)
# ---------- Function definitions --------------------
def runUHBD(inputfile,outputfile):
    """
    Runs UHBD from an inputfile, putting standard out to outputfile.

    Raises IOError if the uhbd binary cannot be executed, and Error.UhbdError
    if UhbdErrorCheck flags the run as failed. (Python 2 module: uses the
    print statement and os.popen2.)
    """
    global uhbd
    # Echo the shell command this call emulates.
    print "uhbd < %s > %s" % (inputfile,outputfile)
    f = open(inputfile,'r')
    inp = f.read()
    f.close()
    try:
        # Pipe the input deck to uhbd's stdin and capture its stdout.
        cin, cout = os.popen2(uhbd)
        cin.write(inp)
        cin.close()
        out = cout.read()
        cout.close()
    except IOError:
        err = "uhbd binary (%s) not executable" % uhbd
        raise IOError(err)
    # Validate the captured output, then persist it regardless of status.
    status = UhbdErrorCheck.checkOut(out,outputfile)
    g = open(outputfile,'w')
    g.write(out)
    g.close()
    if status[0] == 1:
        raise Error.UhbdError(status[1])
def runSingleCalculation(calc_param):
    """
    Peform pH titration on filename.

    "Single-site" variant: uses the *s-suffixed helper binaries
    (getgrids/doinps/getpots/hybrids) and the pkaS-* file naming scheme.
    """
    # Set up aliases for binaries
    getgrid = os.path.join(bin_path,'getgrids')
    doinp = os.path.join(bin_path,'doinps')
    getpot = os.path.join(bin_path,'getpots')
    hybrid = os.path.join(bin_path,'hybrids')
    # Make sure that all of the executables exist:
    to_check = [getgrid, doinp, getpot, hybrid]
    checksum = sum([os.path.isfile(f) for f in to_check])
    if checksum != len(to_check):
        raise OSError("Not all required binaries in $UHBD (%s)" % bin_path)
    print 'prepares'
    UhbdSingleFunctions.runPrepares(calc_param)
    runUHBD('pkaS-uhbdini.inp','pkaS-uhbdini.out')
    print 'getgrids'
    SystemOps.runBin(getgrid)
    # Iterate doinp/uhbd/getpot until the UHBD tool chain drops a 'stopnow'
    # sentinel file in the working directory.
    print 'Running stopnow loop.'
    while os.path.isfile('stopnow') == False:
        SystemOps.runBin(doinp)
        runUHBD('uhbdpr.inp','uhbdpr.out')
        runUHBD('uhbdaa.inp','uhbdaa.out')
        SystemOps.runBin(getpot)
    # Keep single-site copies of the final potentials/sites files.
    shutil.copy('potentials','pkaS-potentials')
    shutil.copy('sitesinpr.pdb','pkaS-sitesinpr.pdb')
    # Run hybrid
    # Feed the three pH parameters to the hybrid binary on stdin.
    hybrid_run = os.popen(hybrid,'w')
    hybrid_run.write("%s\n%s\n%s\n" % calc_param.ph_param)
    hybrid_run.close()
def runFullCalculation(calc_param):
    """Peform pH titration on filename.

    "Full" variant: uses the unsuffixed helper binaries
    (getgrid/doinp/getpot/hybrid), runs the pr/aa input decks in two passes
    per iteration, and uses the pkaF-* file naming scheme.
    """
    # Set up aliases for binaries
    getgrid = os.path.join(bin_path,'getgrid')
    doinp = os.path.join(bin_path,'doinp')
    getpot = os.path.join(bin_path,'getpot')
    hybrid = os.path.join(bin_path,'hybrid')
    # Make sure that all of the executables exist:
    to_check = [getgrid, doinp, getpot, hybrid]
    checksum = sum([os.path.isfile(f) for f in to_check])
    if checksum != len(to_check):
        raise OSError("Not all required binaries in $UHBD (%s)" % bin_path)
    print 'Prepare'
    UhbdFullFunctions.runPrepare(calc_param)
    runUHBD('uhbdini.inp','uhbdini.out')
    print 'Getgrid'
    SystemOps.runBin(getgrid)
    # Iterate until the tool chain drops a 'stopnow' sentinel file.
    print 'Running stopnow loop.'
    while os.path.isfile('stopnow') == False:
        SystemOps.runBin(doinp)
        runUHBD('uhbdpr.inp1','uhbdpr.out1')
        runUHBD('uhbdpr.inp2','uhbdpr.out2')
        runUHBD('uhbdaa.inp1','uhbdaa.out1')
        runUHBD('uhbdaa.inp2','uhbdaa.out2')
        SystemOps.runBin(getpot)
    # Rename temporary outputs to their final names and keep a pkaF copy.
    shutil.move('tempallG.pdb','allgroups.pdb')
    shutil.move('tempallR.pdb','allresidues.pdb')
    shutil.move('tmp_for_pot.dat','for_pot.dat')
    shutil.copy('potentials','pkaF-potentials')
    # Run hybrid
    # Feed the three pH parameters to the hybrid binary on stdin.
    hybrid_run = os.popen(hybrid,'w')
    hybrid_run.write("%s\n%s\n%s\n" % calc_param.ph_param)
    hybrid_run.close()
|
// Presumably tests whether the nField bit(s) are set in nFlagType — confirm at definition.
BOOL FlagGET(int& nFlagType, int nField);
// Presumably sets or clears the nField bit(s) in nFlagType according to bSet — confirm at definition.
void FlagSET(int& nFlagType, int nField, BOOL bSet);
// Image-list icon index for the given path; caller supplies the directory flag.
int GetFileImageIndex(CString strPath, BOOL bIsDirectory);
// Comparison function for two file names; ordering rules defined elsewhere — confirm at definition.
int CompareFileName(TCHAR* name1, TCHAR* name2);
// Extracts one strToken-delimited line from strText starting at nPos into strLine;
// NOTE(review): return value semantics (next position vs. length) not visible here.
int GetLine(CString& strText, int nPos, CString& strLine, CString strToken);
// Writes strContent to the file strFile; returns success.
BOOL WriteCStringToFile(CString strFile, CString& strContent);
// Reads the entire file strFile into strData; returns success.
BOOL ReadFileToCString(CString strFile, CString& strData);
// Loads the string resource with identifier nID.
CString IDSTR(int nID);
|
!function(a,b){"function"==typeof define&&define.amd?define(["../numeral"],b):b("object"==typeof module&&module.exports?require("../numeral"):a.numeral)}(this,function(a){a.register("locale","ja",{delimiters:{thousands:",",decimal:"."},abbreviations:{thousand:"千",million:"百万",billion:"十億",trillion:"兆"},ordinal:function(a){return"."},currency:{symbol:"¥"}})});
|
########################################################################################################
# #
# The module provides one solution to the "horse race problem" #
# #
# We invert the relationship between win probabilities and performance densities in a competition #
# where all contestants' scoring distributions share the same density up to translation. #
# #
# See the upcoming book "Risk Neutral Probability in Sport" by yours truly #
# #
########################################################################################################
# There are five horses in a race (one dividend per runner below).
# The standard deviation of their times is, we shall imagine, one second.
# The market prices, expressed as Australian style dividends, are
from pysport.horseracing.lattice_calibration import normalize_dividends
tote_dividends = [ 17.0, 3.4, 17.0, 4.5, 8.0 ]
# Convert dividends into risk-neutral win probabilities summing to one.
normalized_dividends = normalize_dividends( tote_dividends )
# These risk-neutral probabilities are consistent with ability offsets computed as follows
from pysport.horseracing.lattice_calibration import dividend_implied_racing_ability
ability = dividend_implied_racing_ability( dividends = tote_dividends)
# Python 2 script: print statements below.
print "Relative abilities are " + str( ability )
# We can check that this inversion is indeed accurate to around three or four significant figures
from pysport.horseracing.lattice_calibration import racing_ability_implied_dividends
impl_dividends = racing_ability_implied_dividends( ability=ability )
print zip(normalized_dividends, impl_dividends)
|
/*!
* The MIT License (MIT)
*
* Copyright (c) 2016 Mykhailo Stadnyk <mikhus@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* @version 2.1.4
*/
!function(e){"use strict";function t(e,t){if(!e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!t||"object"!=typeof t&&"function"!=typeof t?e:t}function i(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function, not "+typeof t);e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}),t&&(Object.setPrototypeOf?Object.setPrototypeOf(e,t):e.__proto__=t)}function r(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function o(e,t){if(t||(t="undefined"==typeof window?global:window),void 0!==t[e])return t[e];for(var i=["webkit","moz","ms","o"],r=0,o=i.length,n=e.charAt(0).toUpperCase()+e.substr(1);r<o;r++){var a=t[i[r]+n];if(void 0!==a)return a}return null}function n(e,t,i,r,o,a,l){if("function"!=typeof r)throw new TypeError("Invalid animation rule:",r);var s=e-i,d=s/o,u=0;d>1&&(d=1),1!==d&&(u=r(d),isFinite(u)&&!isNaN(u)&&(d=u)),t&&t(d),s<o?l.frame=G(function(e){return n(e,t,i,r,o,a,l)}):(a&&a(),l.inProgress=!1)}function a(){Array.prototype.constructor.apply(this,arguments)}function l(e){if(!(e instanceof DOMException&&2152923147===e.result))throw e}function s(e){return e.majorTicks instanceof Array||(e.majorTicks=e.majorTicks?[e.majorTicks]:[]),e.majorTicks.length||(e.majorTicks.push(K.formatMajorTickNumber(e.minValue,e)),e.majorTicks.push(K.formatMajorTickNumber(e.maxValue,e))),["right"!==e.tickSide,"left"!==e.tickSide]}function d(e,t,i,r,o,n){e.beginPath(),e.moveTo(t+n,i),e.lineTo(t+r-n,i),e.quadraticCurveTo(t+r,i,t+r,i+n),e.lineTo(t+r,i+o-n),e.quadraticCurveTo(t+r,i+o,t+r-n,i+o),e.lineTo(t+n,i+o),e.quadraticCurveTo(t,i+o,t,i+o-n),e.lineTo(t,i+n),e.quadraticCurveTo(t,i,t+n,i),e.closePath()}function u(e,t){var i=t.valueDec,r=t.valueInt,o=0,n=void 0,a=void 0,l=void 
0;if(e=parseFloat(e),l=e<0,e=Math.abs(e),i>0){for(a=e.toFixed(i).toString().split("."),n=r-a[0].length;o<n;++o)a[0]="0"+a[0];a=(l?"-":"")+a[0]+"."+a[1]}else{for(a=Math.round(e).toString(),n=r-a.length;o<n;++o)a="0"+a;a=(l?"-":"")+a}return a}function c(e,t){var i=void 0,r=!1;return i=0===t.majorTicksDec?Math.round(e).toString():e.toFixed(t.majorTicksDec),t.majorTicksInt>1?(r=~i.indexOf("."),~i.indexOf("-")?"-"+[t.majorTicksInt+t.majorTicksDec+2+(r?1:0)-i.length].join("0")+i.replace("-",""):[t.majorTicksInt+t.majorTicksDec+1+(r?1:0)-i.length].join("0")+i):i}function h(e){return e*Math.PI/180}function f(e,t){return{x:-e*Math.sin(t),y:e*Math.cos(t)}}function v(e,t,i,r){var o=!(arguments.length>4&&void 0!==arguments[4])||arguments[4],n=arguments.length>5&&void 0!==arguments[5]?arguments[5]:0,a=e.createLinearGradient(o?0:n,o?n:0,o?0:r,o?r:0);return a.addColorStop(0,t),a.addColorStop(1,i),a}function m(e,t){if(arguments.length>2&&void 0!==arguments[2]&&arguments[2])return e.restore(),!0;e.save();var i=t.borderShadowWidth;return i&&(e.shadowBlur=i,e.shadowColor=t.colorBorderShadow),!0}function p(e,t){t.needleShadow&&(e.shadowOffsetX=2,e.shadowOffsetY=2,e.shadowBlur=10,e.shadowColor=t.colorNeedleShadowDown)}function g(e,t,i){return e["font"+t+"Style"]+" "+e["font"+t+"Weight"]+" "+e["font"+t+"Size"]*i+"px "+e["font"+t]}function w(e){e.shadowOffsetX=null,e.shadowOffsetY=null,e.shadowBlur=null,e.shadowColor="",e.strokeStyle=null,e.lineWidth=0,e.save()}function b(e,t,i,r){t.valueTextShadow&&(e.shadowOffsetX=i,e.shadowOffsetY=i,e.shadowBlur=r,e.shadowColor=t.colorValueTextShadow)}function y(e,t,i,r,o,n){if(t.valueBox){w(e);var a=t.valueDec?1+t.valueDec:0,l="9".repeat(Math.max.apply(null,[String(parseInt(i)).length+a].concat(t.majorTicks.map(function(e){return String(parseInt(e,10)).length+a})))),s=t.valueText||u(i,t),c=n/200,h=n/100,f=.4*h,v=1.2*h;e.font=g(t,"Value",c),b(e,t,f,v);var m=e.measureText(t.valueText?s:"-"+u(Number(l),t)).width;w(e);var 
p=parseFloat(t.fontValueSize)*c+f+v,y=h*parseFloat(t.valueBoxStroke),x=2*n-2*y,k=m+10*h,T=1.1*p+f+v,S=h*t.valueBoxBorderRadius,O=(parseFloat(t.valueBoxWidth)||0)/100*x;O>k&&(k=O),k>x&&(k=x);var A=r-k/2,C=o-T/2,V=o-5.75*h;if(e.beginPath(),S?d(e,A,C,k,T,S):e.rect(A,C,k,T),y){var N=e.createRadialGradient(r,V,10*h,r,V,20*h);N.addColorStop(0,t.colorValueBoxRect),N.addColorStop(1,t.colorValueBoxRectEnd),e.strokeStyle=N,e.lineWidth=y,e.stroke()}t.colorValueBoxShadow&&(e.shadowBlur=1.2*h,e.shadowColor=t.colorValueBoxShadow),t.colorValueBoxBackground&&(e.fillStyle=t.colorValueBoxBackground,e.fill()),e.closePath(),e.restore(),b(e,t,f,v),e.fillStyle=t.colorValueText,e.textAlign="center",e.textBaseline="alphabetic",e.fillText(s,A+k/2,o+T/2-p/3),e.restore()}}function x(e){var t=e.value,i=e.minValue,r=e.maxValue,o=.01*(r-i);return{normal:t<i?i:t>r?r:t,indented:t<i?i-o:t>r?r+o:t}}function k(e,t,i,r,o){i.beginPath(),i.arc(0,0,J(e),0,2*Q,!0),i.lineWidth=t,i.strokeStyle=o?K.linearGradient(i,r,o,e):r,i.stroke(),i.closePath()}function T(e,t){var i=X.pixelRatio;return e.maxRadius||(e.maxRadius=e.max-t.borderShadowWidth-t.borderOuterWidth*i-t.borderMiddleWidth*i-t.borderInnerWidth*i+(t.borderOuterWidth?.5:0)+(t.borderMiddleWidth?.5:0)+(t.borderInnerWidth?.5:0)),e.maxRadius}function S(e,t){var i=X.pixelRatio,r=t.borderShadowWidth*i,o=e.max-r-t.borderOuterWidth*i/2,n=o-t.borderOuterWidth*i/2-t.borderMiddleWidth*i/2+.5,a=n-t.borderMiddleWidth*i/2-t.borderInnerWidth*i/2+.5,l=T(e,t),s=void 
0,d=!1;e.save(),t.borderOuterWidth&&(d=K.drawShadow(e,t,d),k(o,t.borderOuterWidth*i,e,t.colorBorderOuter,t.colorBorderOuterEnd)),t.borderMiddleWidth&&(d=K.drawShadow(e,t,d),k(n,t.borderMiddleWidth*i,e,t.colorBorderMiddle,t.colorBorderMiddleEnd)),t.borderInnerWidth&&(d=K.drawShadow(e,t,d),k(a,t.borderInnerWidth*i,e,t.colorBorderInner,t.colorBorderInnerEnd)),K.drawShadow(e,t,d),e.beginPath(),e.arc(0,0,J(l),0,2*Q,!0),t.colorPlateEnd?(s=e.createRadialGradient(0,0,l/2,0,0,l),s.addColorStop(0,t.colorPlate),s.addColorStop(1,t.colorPlateEnd)):s=t.colorPlate,e.fillStyle=s,e.fill(),e.closePath(),e.restore()}function O(e,t){var i=e.max*(parseFloat(t.highlightsWidth)||0)/100;if(i){var r=J(C(e,t)-i/2),o=0,n=t.highlights.length,a=(t.maxValue-t.minValue)/t.ticksAngle;for(e.save();o<n;o++){var l=t.highlights[o];e.beginPath(),e.rotate(ee),e.arc(0,0,r,K.radians(t.startAngle+(l.from-t.minValue)/a),K.radians(t.startAngle+(l.to-t.minValue)/a),!1),e.strokeStyle=l.color,e.lineWidth=i,e.stroke(),e.closePath(),e.restore(),e.save()}}}function A(e,t){var i=C(e,t),r=void 0,o=void 0,n=void 0,a=0,l=0,s=Math.abs(t.minorTicks)||0,d=t.ticksAngle/(t.maxValue-t.minValue);for(e.lineWidth=X.pixelRatio,e.strokeStyle=t.colorMinorTicks||t.colorStrokeTicks,e.save(),t.exactTicks?(o=t.maxValue-t.minValue,r=s?o/s:0,l=(t.majorTicks[0]%s||0)*d):r=s*(t.majorTicks.length-1);a<r;++a)n=t.startAngle+l+a*(t.ticksAngle/r),e.rotate(K.radians(n)),e.beginPath(),e.moveTo(0,i),e.lineTo(0,i-.075*e.max),P(e)}function C(e,t){var i=e.max/100;return T(e,t)-5*i-(t.barWidth?2*(parseFloat(t.barStrokeWidth)||0)+((parseFloat(t.barWidth)||0)+5)*i:0)}function V(e,t){K.prepareTicks(t);var i=J(C(e,t)),r=void 0,o=void 0,n=t.majorTicks.length,a=X.pixelRatio;for(e.lineWidth=2*a,e.save(),o=t.colorMajorTicks instanceof Array?t.colorMajorTicks:new 
Array(n).fill(t.colorStrokeTicks||t.colorMajorTicks),r=0;r<n;++r)e.strokeStyle=o[r],e.rotate(K.radians(N(t,t.exactTicks?t.majorTicks[r]:r,n))),e.beginPath(),e.moveTo(0,i),e.lineTo(0,i-.15*e.max),P(e);t.strokeTicks&&(e.strokeStyle=t.colorStrokeTicks||o[0],e.rotate(ee),e.beginPath(),e.arc(0,0,i,K.radians(t.startAngle),K.radians(t.startAngle+t.ticksAngle),!1),P(e))}function N(e,t,i){if(e.exactTicks){var r=e.ticksAngle/(e.maxValue-e.minValue);return e.startAngle+r*(t-e.minValue)}return e.startAngle+t*(e.ticksAngle/(i-1))}function P(e){e.stroke(),e.restore(),e.closePath(),e.save()}function j(e,t){var i=C(e,t)-.15*e.max,r={},o=0,n=t.majorTicks.length,a="needle"!==t.animationTarget,l=t.colorNumbers instanceof Array?t.colorNumbers:new Array(n).fill(t.colorNumbers),s=a?-(t.value-t.minValue)/(t.maxValue-t.minValue)*t.ticksAngle:0;for(a&&(e.save(),e.rotate(-K.radians(s))),e.font=K.font(t,"Numbers",e.max/200),e.lineWidth=0,e.textAlign="center",e.textBaseline="middle";o<n;++o){var d=s+N(t,t.exactTicks?t.majorTicks[o]:o,n),u=e.measureText(t.majorTicks[o]).width,c=t.fontNumbersSize,h=Math.sqrt(u*u+c*c)/2,f=K.radialPoint(i-h-t.numbersMargin/100*e.max,K.radians(d));360===d&&(d=0),r[d]||(r[d]=!0,e.fillStyle=l[o],e.fillText(t.majorTicks[o],f.x,f.y))}a&&e.restore()}function M(e,t){t.title&&(e.save(),e.font=K.font(t,"Title",e.max/200),e.fillStyle=t.colorTitle,e.textAlign="center",e.fillText(t.title,0,-e.max/4.25,.8*e.max),e.restore())}function B(e,t){t.units&&(e.save(),e.font=K.font(t,"Units",e.max/200),e.fillStyle=t.colorUnits,e.textAlign="center",e.fillText(t.units,0,e.max/3.25,.8*e.max),e.restore())}function W(e,t){if(t.needle){var 
i=t.ticksAngle<360?K.normalizedValue(t).indented:t.value,r=T(e,t),o=J(r/100*t.needleCircleSize),n=J(r/100*t.needleCircleSize*.75),a=J(r/100*t.needleEnd),l=J(t.needleStart?r/100*t.needleStart:0),s=r/100*t.needleWidth,d=r/100*t.needleWidth/2,u=X.pixelRatio,c="needle"!==t.animationTarget;e.save(),K.drawNeedleShadow(e,t),e.rotate(K.radians(c?t.startAngle:t.startAngle+(i-t.minValue)/(t.maxValue-t.minValue)*t.ticksAngle)),e.fillStyle=K.linearGradient(e,t.colorNeedle,t.colorNeedleEnd,a-l),"arrow"===t.needleType?(e.beginPath(),e.moveTo(-d,-l),e.lineTo(-s,0),e.lineTo(-1*u,a),e.lineTo(u,a),e.lineTo(s,0),e.lineTo(d,-l),e.closePath(),e.fill(),e.beginPath(),e.lineTo(-.5*u,a),e.lineTo(-1*u,a),e.lineTo(-s,0),e.lineTo(-d,-l),e.lineTo(d/2*u-2*u,-l),e.closePath(),e.fillStyle=t.colorNeedleShadowUp,e.fill()):(e.beginPath(),e.moveTo(-d,a),e.lineTo(-d,l),e.lineTo(d,l),e.lineTo(d,a),e.closePath(),e.fill()),t.needleCircleSize&&(e.restore(),K.drawNeedleShadow(e,t),t.needleCircleOuter&&(e.beginPath(),e.arc(0,0,o,0,2*Q,!0),e.fillStyle=K.linearGradient(e,t.colorNeedleCircleOuter,t.colorNeedleCircleOuterEnd,o),e.fill(),e.closePath()),t.needleCircleInner&&(e.beginPath(),e.arc(0,0,n,0,2*Q,!0),e.fillStyle=K.linearGradient(e,t.colorNeedleCircleInner,t.colorNeedleCircleInnerEnd,n),e.fill(),e.closePath()),e.restore())}}function _(e,t,i){K.drawValueBox(e,t,i,0,e.max-.33*e.max,e.max)}function E(e,t){var 
i=e.max/100,r=T(e,t)-5*i,o=parseFloat(t.barStrokeWidth)||0,n=(parseFloat(t.barWidth)||0)*i,a=r-2*o-n,l=(r-a)/2,s=a+l,d=o/s,u=t.startAngle,c=t.startAngle+t.ticksAngle;e.save(),e.rotate(ee),o&&(e.beginPath(),e.arc(0,0,s,K.radians(u)-d,K.radians(c)+d,!1),e.strokeStyle=t.colorBarStroke,e.lineWidth=2*l,e.stroke(),e.closePath()),n&&(e.beginPath(),e.arc(0,0,s,K.radians(u),K.radians(c),!1),e.strokeStyle=t.colorBar,e.lineWidth=n,e.stroke(),e.closePath(),t.barShadow&&(e.beginPath(),e.arc(0,0,r,K.radians(u),K.radians(c),!1),e.clip(),e.beginPath(),e.strokeStyle=t.colorBar,e.lineWidth=1,e.shadowBlur=t.barShadow,e.shadowColor=t.colorBarShadow,e.shadowOffsetX=0,e.shadowOffsetY=0,e.arc(0,0,r,K.radians(t.startAngle),K.radians(t.startAngle+t.ticksAngle),!1),e.stroke(),e.closePath(),e.restore(),e.rotate(ee)),t.barProgress&&(e.beginPath(),e.arc(0,0,s,K.radians(u),K.radians(u+(K.normalizedValue(t).normal-t.minValue)/(t.maxValue-t.minValue)*t.ticksAngle),!1),e.strokeStyle=t.colorBarProgress,e.lineWidth=n,e.stroke(),e.closePath())),e.restore()}function R(e){return e.options.animatedValue?e.options.value:e.value}var I=function e(t,i,r){null===t&&(t=Function.prototype);var o=Object.getOwnPropertyDescriptor(t,i);if(void 0===o){var n=Object.getPrototypeOf(t);return null===n?void 0:e(n,i,r)}if("value"in o)return o.value;var a=o.get;if(void 0!==a)return a.call(r)},D=function e(t,i,r,o){var n=Object.getOwnPropertyDescriptor(t,i);if(void 0===n){var a=Object.getPrototypeOf(t);null!==a&&e(a,i,r,o)}else if("value"in n&&n.writable)n.value=r;else{var l=n.set;void 0!==l&&l.call(o,r)}return r},z=function(){function e(e,t){for(var i=0;i<t.length;i++){var r=t[i];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(e,r.key,r)}}return function(t,i,r){return i&&e(t.prototype,i),r&&e(t,r),t}}();Object.assign||Object.defineProperty(Object,"assign",{enumerable:!1,configurable:!0,writable:!0,value:function(e,t){if(void 0===e||null===e)throw new TypeError("Cannot 
convert first argument to object");for(var i=Object(e),r=1;r<arguments.length;r++){var o=arguments[r];if(void 0!==o&&null!==o)for(var n=Object.keys(Object(o)),a=0,l=n.length;a<l;a++){var s=n[a],d=Object.getOwnPropertyDescriptor(o,s);void 0!==d&&d.enumerable&&(i[s]=o[s])}}return i}}),Array.prototype.indexOf||(Array.prototype.indexOf=function(e,t){var i;if(null===this)throw new TypeError('"this" is null or not defined');var r=Object(this),o=r.length>>>0;if(0===o)return-1;var n=+t||0;if(Math.abs(n)===1/0&&(n=0),n>=o)return-1;for(i=Math.max(n>=0?n:o-Math.abs(n),0);i<o;){if(i in r&&r[i]===e)return i;i++}return-1}),Array.prototype.fill||(Array.prototype.fill=function(e){if(null===this)throw new TypeError("this is null or not defined");for(var t=Object(this),i=t.length>>>0,r=arguments[1],o=r>>0,n=o<0?Math.max(i+o,0):Math.min(o,i),a=arguments[2],l=void 0===a?i:a>>0,s=l<0?Math.max(i+l,0):Math.min(l,i);n<s;)t[n]=e,n++;return t}),"undefined"==typeof window&&(window="undefined"==typeof global?{}:global);var F=function(){function e(){r(this,e),this._events={},this.addListener=this.on,this.removeListener=this.off}return z(e,[{key:"emit",value:function(e){if(this._events[e]){for(var t=0,i=this._events[e].length,r=arguments.length,o=Array(r>1?r-1:0),n=1;n<r;n++)o[n-1]=arguments[n];for(;t<i;t++)this._events[e][t]&&this._events[e][t].apply(this,o)}}},{key:"once",value:function(e){for(var t=arguments.length,i=Array(t>1?t-1:0),r=1;r<t;r++)i[r-1]=arguments[r];for(var o=0,n=i.length,a=this;o<n;o++)!function(){var t=i[o],r=function i(){a.off(e,i),t.apply(a,arguments)};i[o]=r}();this.on.apply(this,[e].concat(i))}},{key:"on",value:function(e){this._events[e]||(this._events[e]=[]);for(var t=0,i=arguments.length<=1?0:arguments.length-1;t<i;t++)this._events[e].push(arguments.length<=t+1?void 0:arguments[t+1])}},{key:"off",value:function(e){if(this._events[e])for(var t=0,i=arguments.length<=1?0:arguments.length-1;t<i;t++)for(var r=arguments.length<=t+1?void 0:arguments[t+1],o=void 
0;~(o=this._events[e].indexOf(r));)this._events[e].splice(o,1)}},{key:"removeAllListeners",value:function(e){delete this._events[e]}},{key:"listeners",get:function(){return this._events}}]),e}(),G=o("requestAnimationFrame")||function(e){return setTimeout(function(){return e((new Date).getTime())},1e3/60)},L={linear:function(e){return e},quad:function(e){return Math.pow(e,2)},dequad:function(e){return 1-L.quad(1-e)},quint:function(e){return Math.pow(e,5)},dequint:function(e){return 1-Math.pow(1-e,5)},cycle:function(e){return 1-Math.sin(Math.acos(e))},decycle:function(e){return Math.sin(Math.acos(1-e))},bounce:function(e){return 1-L.debounce(1-e)},debounce:function(e){for(var t=0,i=1;1;t+=i,i/=2)if(e>=(7-4*t)/11)return-Math.pow((11-6*t-11*e)/4,2)+Math.pow(i,2)},elastic:function(e){return 1-L.delastic(1-e)},delastic:function(e){return Math.pow(2,10*(e-1))*Math.cos(20*Math.PI*1.5/3*e)}},U=function(){function e(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"linear",i=arguments.length>1&&void 0!==arguments[1]?arguments[1]:250,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:function(){},n=arguments.length>3&&void 0!==arguments[3]?arguments[3]:function(){};if(r(this,e),this.duration=i,this.rule=t,this.draw=o,this.end=n,"function"!=typeof this.draw)throw new TypeError("Invalid animation draw callback:",o);if("function"!=typeof this.end)throw new TypeError("Invalid animation end callback:",n)}return z(e,[{key:"animate",value:function(e,t){var i=this;this.frame&&this.cancel();var r=window.performance&&window.performance.now?window.performance.now():o("animationStartTime")||Date.now();e=e||this.draw,t=t||this.end,this.draw=e,this.end=t,this.frame=G(function(o){return n(o,e,r,L[i.rule]||i.rule,i.duration,t,i)})}},{key:"cancel",value:function(){if(this.frame){(o("cancelAnimationFrame")||function(e){})(this.frame),this.frame=null}}},{key:"destroy",value:function(){this.cancel(),this.draw=null,this.end=null}}]),e}();U.rules=L;var q=function(){function 
t(i,o,n){r(this,t),this.options=i,this.element=o.toLowerCase(),this.type=t.toDashed(n),this.Type=e[n],this.mutationsObserved=!1,this.isObservable=!!window.MutationObserver,window.GAUGES_NO_AUTO_INIT||t.domReady(this.traverse.bind(this))}return z(t,[{key:"isValidNode",value:function(e){return!(!e.tagName||e.tagName.toLowerCase()!==this.element||e.getAttribute("data-type")!==this.type)}},{key:"traverse",value:function(){for(var e=document.getElementsByTagName(this.element),t=0,i=e.length;t<i;t++)this.process(e[t]);this.isObservable&&!this.mutationsObserved&&(new MutationObserver(this.observe.bind(this)).observe(document.body,{childList:!0,subtree:!0,attributes:!0,characterData:!0,attributeOldValue:!0,characterDataOldValue:!0}),this.mutationsObserved=!0)}},{key:"observe",value:function(e){for(var t=0,i=e.length;t<i;t++){var r=e[t];if("attributes"===r.type&&"data-type"===r.attributeName&&this.isValidNode(r.target)&&r.oldValue!==this.type)setTimeout(this.process.bind(this,r.target));else if(r.addedNodes&&r.addedNodes.length)for(var o=0,n=r.addedNodes.length;o<n;o++)setTimeout(this.process.bind(this,r.addedNodes[o]))}}},{key:"process",value:function(e){var i=this;if(!this.isValidNode(e))return null;var r=void 0,o=JSON.parse(JSON.stringify(this.options)),n=null;for(r in o)if(o.hasOwnProperty(r)){var a=t.toAttributeName(r),l=t.parse(e.getAttribute(a));null!==l&&void 0!==l&&(o[r]=l)}return o.renderTo=e,n=new this.Type(o),n.draw&&n.draw(),this.isObservable?(n.observer=new MutationObserver(function(r){r.forEach(function(r){if("attributes"===r.type){var o=r.attributeName.toLowerCase(),a=e.getAttribute(o).toLowerCase();if("data-type"===o&&a&&a!==i.type)n.observer.disconnect(),delete n.observer,n.destroy&&n.destroy();else if("data-"===o.substr(0,5)){var l=o.substr(5).split("-").map(function(e,t){return 
t?e.charAt(0).toUpperCase()+e.substr(1):e}).join(""),s={};s[l]=t.parse(e.getAttribute(r.attributeName)),"value"===l?n&&(n.value=s[l]):n.update&&n.update(s)}}})}),n.observer.observe(e,{attributes:!0}),n):n}}],[{key:"parse",value:function(e){if("true"===e)return!0;if("false"===e)return!1;if("undefined"!==e){if("null"===e)return null;if(/^[-+#.\w\d\s]+(?:,[-+#.\w\d\s]*)+$/.test(e))return e.split(",");try{return JSON.parse(e)}catch(e){}return e}}},{key:"toDashed",value:function(e){for(var t=e.split(/(?=[A-Z])/),i=1,r=t.length,o=t[0].toLowerCase();i<r;i++)o+="-"+t[i].toLowerCase();return o}},{key:"toCamelCase",value:function(e){for(var t=!(arguments.length>1&&void 0!==arguments[1])||arguments[1],i=e.split(/-/),r=0,o=i.length,n="";r<o;r++)n+=r||t?i[r][0].toUpperCase()+i[r].substr(1).toLowerCase():i[r].toLowerCase();return n}},{key:"toAttributeName",value:function(e){return"data-"+t.toDashed(e)}},{key:"domReady",value:function(e){if(/comp|inter|loaded/.test((window.document||{}).readyState+""))return e();window.addEventListener?window.addEventListener("DOMContentLoaded",e,!1):window.attachEvent&&window.attachEvent("onload",e)}}]),t}(),X=function(){function e(t,i,o){r(this,e),e.collection.push(this),this.width=i||0,this.height=o||0,this.element=t,this.init()}return z(e,[{key:"init",value:function(){var 
t=e.pixelRatio;this.element.width=this.width*t,this.element.height=this.height*t,this.element.style.width=this.width+"px",this.element.style.height=this.height+"px",this.elementClone=this.element.cloneNode(!0),this.context=this.element.getContext("2d"),this.contextClone=this.elementClone.getContext("2d"),this.drawWidth=this.element.width,this.drawHeight=this.element.height,this.drawX=this.drawWidth/2,this.drawY=this.drawHeight/2,this.minSide=this.drawX<this.drawY?this.drawX:this.drawY,this.elementClone.initialized=!1,this.contextClone.translate(this.drawX,this.drawY),this.contextClone.save(),this.context.translate(this.drawX,this.drawY),this.context.save(),this.context.max=this.contextClone.max=this.minSide,this.context.maxRadius=this.contextClone.maxRadius=null}},{key:"destroy",value:function(){var t=e.collection.indexOf(this);~t&&e.collection.splice(t,1),this.context.clearRect(-this.drawX,-this.drawY,this.drawWidth,this.drawHeight),this.context.max=null,delete this.context.max,this.context.maxRadius=null,delete this.context.maxRadius,this.context=null,this.contextClone=null,this.elementClone=null,this.element=null,this.onRedraw=null}},{key:"commit",value:function(){var t=e.pixelRatio;return 1!==t&&(this.contextClone.scale(t,t),this.contextClone.save()),this}},{key:"redraw",value:function(){return this.init(),this.onRedraw&&this.onRedraw(),this}}],[{key:"redraw",value:function(){for(var t=0,i=e.collection.length;t<i;t++)e.collection[t].redraw()}},{key:"pixelRatio",get:function(){return window.devicePixelRatio||1}}]),e}();X.collection=[],window.matchMedia&&window.matchMedia("screen and (min-resolution: 2dppx)").addListener(X.redraw);var 
Y={renderTo:null,width:0,height:0,minValue:0,maxValue:100,value:0,units:!1,exactTicks:!1,majorTicks:[0,20,40,60,80,100],minorTicks:10,strokeTicks:!0,animatedValue:!1,animateOnInit:!1,title:!1,borders:!0,numbersMargin:1,listeners:null,valueInt:3,valueDec:2,majorTicksInt:1,majorTicksDec:0,animation:!0,animationDuration:500,animationRule:"cycle",colorPlate:"#fff",colorPlateEnd:"",colorMajorTicks:"#444",colorMinorTicks:"#666",colorStrokeTicks:"",colorTitle:"#888",colorUnits:"#888",colorNumbers:"#444",colorNeedle:"rgba(240,128,128,1)",colorNeedleEnd:"rgba(255,160,122,.9)",colorValueText:"#444",colorValueTextShadow:"rgba(0,0,0,0.3)",colorBorderShadow:"rgba(0,0,0,0.5)",colorBorderOuter:"#ddd",colorBorderOuterEnd:"#aaa",colorBorderMiddle:"#eee",colorBorderMiddleEnd:"#f0f0f0",colorBorderInner:"#fafafa",colorBorderInnerEnd:"#ccc",colorValueBoxRect:"#888",colorValueBoxRectEnd:"#666",colorValueBoxBackground:"#babab2",colorValueBoxShadow:"rgba(0,0,0,1)",colorNeedleShadowUp:"rgba(2,255,255,0.2)",colorNeedleShadowDown:"rgba(188,143,143,0.45)",colorBarStroke:"#222",colorBar:"#ccc",colorBarProgress:"#888",colorBarShadow:"#000",fontNumbers:"Arial",fontTitle:"Arial",fontUnits:"Arial",fontValue:"Arial",fontNumbersSize:20,fontTitleSize:24,fontUnitsSize:22,fontValueSize:26,fontNumbersStyle:"normal",fontTitleStyle:"normal",fontUnitsStyle:"normal",fontValueStyle:"normal",fontNumbersWeight:"normal",fontTitleWeight:"normal",fontUnitsWeight:"normal",fontValueWeight:"normal",needle:!0,needleShadow:!0,needleType:"arrow",needleStart:5,needleEnd:85,needleWidth:4,borderOuterWidth:3,borderMiddleWidth:3,borderInnerWidth:3,borderShadowWidth:3,valueBox:!0,valueBoxStroke:5,valueBoxWidth:0,valueText:"",valueTextShadow:!0,valueBoxBorderRadius:2.5,highlights:[{from:20,to:60,color:"#eee"},{from:60,to:80,color:"#ccc"},{from:80,to:100,color:"#999"}],highlightsWidth:15,barWidth:20,barStrokeWidth:0,barProgress:!0,barShadow:0};a.prototype=Object.create(Array.prototype),a.prototype.constructor=a,a.prototype.get=
function(e){if("string"==typeof e)for(var t=0,i=this.length;t<i;t++){var r=this[t].options.renderTo.tagName?this[t].options.renderTo:document.getElementById(this[t].options.renderTo||"");if(r.getAttribute("id")===e)return this[t]}else if("number"==typeof e)return this[e];return null};var H="2.1.4",J=(Math.round,Math.abs),$=new a;$.version=H;var Z=function(o){function n(i){r(this,n);var o=t(this,(n.__proto__||Object.getPrototypeOf(n)).call(this)),a=o.constructor.name;if("BaseGauge"===a)throw new TypeError("Attempt to instantiate abstract class!");if($.push(o),i.listeners&&Object.keys(i.listeners).forEach(function(e){(i.listeners[e]instanceof Array?i.listeners[e]:[i.listeners[e]]).forEach(function(t){o.on(e,t)})}),o.version=H,o.type=e[a]||n,o.initialized=!1,i.minValue=parseFloat(i.minValue),i.maxValue=parseFloat(i.maxValue),i.value=parseFloat(i.value)||0,i.borders||(i.borderInnerWidth=i.borderMiddleWidth=i.borderOuterWidth=0),!i.renderTo)throw TypeError("Canvas element was not specified when creating the Gauge object!");var l=i.renderTo.tagName?i.renderTo:document.getElementById(i.renderTo);if(!(l instanceof HTMLCanvasElement))throw TypeError("Given gauge canvas element is invalid!");return i.width=parseFloat(i.width)||0,i.height=parseFloat(i.height)||0,i.width&&i.height||(i.width||(i.width=l.parentNode?l.parentNode.offsetWidth:l.offsetWidth),i.height||(i.height=l.parentNode?l.parentNode.offsetHeight:l.offsetHeight)),o.options=i||{},o.options.animateOnInit&&(o._value=o.options.value,o.options.value=o.options.minValue),o.canvas=new X(l,i.width,i.height),o.canvas.onRedraw=o.draw.bind(o),o.animation=new U(i.animationRule,i.animationDuration),o}return i(n,o),z(n,[{key:"update",value:function(e){return 
Object.assign(this.options,this.type.configure(e||{})),this.canvas.width=this.options.width,this.canvas.height=this.options.height,this.animation.rule=this.options.animationRule,this.animation.duration=this.options.animationDuration,this.canvas.redraw(),this}},{key:"destroy",value:function(){var e=$.indexOf(this);~e&&$.splice(e,1),this.canvas.destroy(),this.canvas=null,this.animation.destroy(),this.animation=null,this.emit("destroy")}},{key:"draw",value:function(){return this.options.animateOnInit&&!this.initialized&&(this.value=this._value,this.initialized=!0,this.emit("init")),this.emit("render"),this}},{key:"value",set:function(e){var t=this;e=n.ensureValue(e,this.options.minValue);var i=this.options.value;if(e!==i)if(this.options.animation){if(this.animation.frame&&(this.options.value=this._value,this._value===e))return this.animation.cancel(),void delete this._value;void 0===this._value&&(this._value=e),this.emit("animationStart"),this.animation.animate(function(r){var o=i+(e-i)*r;t.options.animatedValue&&t.emit("value",o,t.value),t.options.value=o,t.draw(),t.emit("animate",r,t.options.value)},function(){void 0!==t._value&&(t.emit("value",t._value,t.value),t.options.value=t._value,delete t._value),t.draw(),t.emit("animationEnd")})}else this.emit("value",e,this.value),this.options.value=e,this.draw()},get:function(){return void 0===this._value?this.options.value:this._value}}],[{key:"configure",value:function(e){return e}},{key:"initialize",value:function(e,t){return new q(t,"canvas",e)}},{key:"fromElement",value:function(e){var t=q.toCamelCase(e.getAttribute("data-type")),i=e.attributes,r=0,o=i.length,n={};if(t){for(/Gauge$/.test(t)||(t+="Gauge");r<o;r++)n[q.toCamelCase(i[r].name.replace(/^data-/,""),!1)]=q.parse(i[r].value);new q(n,e.tagName,t).process(e)}}},{key:"ensureValue",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0;return 
e=parseFloat(e),!isNaN(e)&&isFinite(e)||(e=parseFloat(t)||0),e}},{key:"version",get:function(){return H}}]),n}(F);void 0!==e&&(e.BaseGauge=Z,e.gauges=(window.document||{}).gauges=$);var K={roundRect:d,padValue:u,formatMajorTickNumber:c,radians:h,radialPoint:f,linearGradient:v,drawNeedleShadow:p,drawValueBox:y,verifyError:l,prepareTicks:s,drawShadow:m,font:g,normalizedValue:x},Q=Math.PI,ee=Q/2,te=Object.assign({},Y,{ticksAngle:270,startAngle:45,colorNeedleCircleOuter:"#f0f0f0",colorNeedleCircleOuterEnd:"#ccc",colorNeedleCircleInner:"#e8e8e8",colorNeedleCircleInnerEnd:"#f5f5f5",needleCircleSize:10,needleCircleInner:!0,needleCircleOuter:!0,needleStart:20,animationTarget:"needle",useMinPath:!1,barWidth:0}),ie=function(e){function o(e){return r(this,o),e=Object.assign({},te,e||{}),t(this,(o.__proto__||Object.getPrototypeOf(o)).call(this,o.configure(e)))}return i(o,e),z(o,[{key:"draw",value:function(){try{var e=this.canvas,t=[-e.drawX,-e.drawY,e.drawWidth,e.drawHeight],i=t[0],r=t[1],n=t[2],a=t[3],l=this.options;if("needle"===l.animationTarget){if(!e.elementClone.initialized){var s=e.contextClone;s.clearRect(i,r,n,a),s.save(),this.emit("beforePlate"),S(s,l),this.emit("beforeHighlights"),O(s,l),this.emit("beforeMinorTicks"),A(s,l),this.emit("beforeMajorTicks"),V(s,l),this.emit("beforeNumbers"),j(s,l),this.emit("beforeTitle"),M(s,l),this.emit("beforeUnits"),B(s,l),e.elementClone.initialized=!0}this.canvas.commit(),e.context.clearRect(i,r,n,a),e.context.save(),e.context.drawImage(e.elementClone,i,r,n,a),e.context.save(),this.emit("beforeProgressBar"),E(e.context,l),this.emit("beforeValueBox"),_(e.context,l,R(this)),this.emit("beforeNeedle"),W(e.context,l)}else{var 
d=-K.radians((l.value-l.minValue)/(l.maxValue-l.minValue)*l.ticksAngle);if(e.context.clearRect(i,r,n,a),e.context.save(),this.emit("beforePlate"),S(e.context,l),e.context.rotate(d),this.emit("beforeHighlights"),O(e.context,l),this.emit("beforeMinorTicks"),A(e.context,l),this.emit("beforeMajorTicks"),V(e.context,l),this.emit("beforeNumbers"),j(e.context,l),this.emit("beforeProgressBar"),E(e.context,l),e.context.rotate(-d),e.context.save(),!e.elementClone.initialized){var u=e.contextClone;u.clearRect(i,r,n,a),u.save(),this.emit("beforeTitle"),M(u,l),this.emit("beforeUnits"),B(u,l),this.emit("beforeNeedle"),W(u,l),e.elementClone.initialized=!0}e.context.drawImage(e.elementClone,i,r,n,a)}this.emit("beforeValueBox"),_(e.context,l,R(this)),I(o.prototype.__proto__||Object.getPrototypeOf(o.prototype),"draw",this).call(this)}catch(e){K.verifyError(e)}return this}},{key:"value",set:function(e){e=Z.ensureValue(e,this.options.minValue),this.options.animation&&360===this.options.ticksAngle&&this.options.useMinPath&&(this._value=e,e=this.options.value+((e-this.options.value)%360+540)%360-180),D(o.prototype.__proto__||Object.getPrototypeOf(o.prototype),"value",e,this)},get:function(){return I(o.prototype.__proto__||Object.getPrototypeOf(o.prototype),"value",this)}}],[{key:"configure",value:function(e){return e.barWidth>50&&(e.barWidth=50),isNaN(e.startAngle)&&(e.startAngle=45),isNaN(e.ticksAngle)&&(e.ticksAngle=270),e.ticksAngle>360&&(e.ticksAngle=360),e.ticksAngle<0&&(e.ticksAngle=0),e.startAngle<0&&(e.startAngle=0),e.startAngle>360&&(e.startAngle=360),e}}]),o}(Z);void 0!==e&&(e.RadialGauge=ie),Z.initialize("RadialGauge",te),"undefined"!=typeof module&&Object.assign(e,{Collection:a,GenericOptions:Y,Animation:U,BaseGauge:Z,drawings:K,SmartCanvas:X,DomObserver:q,vendorize:o})}("undefined"!=typeof module?module.exports:window);
|
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#define ll long long
#define f(i,a,b) for(i=a;i<b;i++)
#define fd(i,b,a) for(i=b;i>=a;i--)
#define nl '\n'
/* Return the smaller of two integers. */
int min(int a, int b)
{
    if (a < b)
        return a;
    return b;
}
/* Return the larger of two integers. */
int max(int a, int b) {
    return (b > a) ? b : a;
}
/*
 * Partition the first n elements of A into at most m contiguous segments
 * so that the largest segment sum is minimized; return that minimal
 * largest sum (the classic painter's-partition DP).
 *
 * A: element values (n entries); requires n >= 1 and m >= 1.
 * NOTE(review): despite the historical name, this MINIMIZES the maximum
 * segment sum.
 */
int findMax(int A[], int n, int m)
{
    /* DP tables sized from the inputs. The original fixed 16x16 buffers
     * silently overflowed whenever n > 15 or m > 15. */
    int M[n + 1][m + 1];   /* M[j][i]: best result for first j items, i segments */
    int cum[n + 1];        /* prefix sums: cum[j] = A[0] + ... + A[j-1] */

    cum[0] = 0;
    for (int i = 1; i <= n; i++)
        cum[i] = cum[i - 1] + A[i - 1];

    /* Base case: a single segment must take everything seen so far. */
    for (int i = 1; i <= n; i++)
        M[i][1] = cum[i];
    /* Base case: with one element, extra segments change nothing. */
    for (int i = 1; i <= m; i++)
        M[1][i] = A[0];

    for (int i = 2; i <= m; i++) {
        for (int j = 2; j <= n; j++) {
            /* INT_MAX replaces the old 999999 sentinel, which was too
             * small once any segment sum exceeded it. */
            int best = INT_MAX;
            for (int p = 1; p <= j; p++) {
                int tail = cum[j] - cum[p];           /* sum of A[p..j-1] */
                int worst = (M[p][i - 1] > tail) ? M[p][i - 1] : tail;
                if (worst < best)
                    best = worst;
            }
            M[j][i] = best;
        }
    }
    return M[n][m];
}
/* Read n and m, then n values, and print the minimal largest segment
 * sum for an m-way contiguous partition of the values. */
int main()
{
    int n, m;
    scanf("%d%d", &n, &m);
    int A[n];
    for (int idx = 0; idx < n; idx++)
        scanf("%d", &A[idx]);
    printf("%d\n", findMax(A, n, m));
    return 0;
}
|
"""
Definition of all exceptions.
"""
class ArtifactoryException(Exception):
    """Base exception for all Artifactory client errors."""
class UserAlreadyExistsException(ArtifactoryException):
    """Raised when attempting to create a user that already exists."""
class GroupAlreadyExistsException(ArtifactoryException):
    """Raised when attempting to create a group that already exists."""
class RepositoryAlreadyExistsException(ArtifactoryException):
    """Raised when attempting to create a repository that already exists."""
class PermissionAlreadyExistsException(ArtifactoryException):
    """Raised when attempting to create a permission that already exists."""
class UserNotFoundException(ArtifactoryException):
    """Raised when the requested user was not found."""
class GroupNotFoundException(ArtifactoryException):
    """Raised when the requested group was not found."""
class RepositoryNotFoundException(ArtifactoryException):
    """Raised when the requested repository was not found."""
class PermissionNotFoundException(ArtifactoryException):
    """Raised when the requested permission object was not found."""
class ArtifactNotFoundException(ArtifactoryException):
    """Raised when the requested artifact was not found."""
class PropertyNotFoundException(ArtifactoryException):
    """Raised when the requested properties were not found."""
class InvalidTokenDataException(ArtifactoryException):
    """Raised when a token contains invalid data."""
|
/** vim: et:ts=4:sw=4:sts=4
* @license RequireJS 2.3.6 Copyright jQuery Foundation and other contributors.
* Released under MIT license, https://github.com/requirejs/requirejs/blob/master/LICENSE
*/
//Not using strict: uneven strict support in browsers, #392, and causes
//problems with requirejs.exec()/transpiler plugins that may not be strict.
/*jslint regexp: true, nomen: true, sloppy: true */
/*global window, navigator, document, importScripts, setTimeout, opera */
var requirejs, require, define;
(function (global, setTimeout) {
var req, s, head, baseElement, dataMain, src,
interactiveScript, currentlyAddingScript, mainScript, subPath,
version = '2.3.6',
commentRegExp = /\/\*[\s\S]*?\*\/|([^:"'=]|^)\/\/.*$/mg,
cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,
jsSuffixRegExp = /\.js$/,
currDirRegExp = /^\.\//,
op = Object.prototype,
ostring = op.toString,
hasOwn = op.hasOwnProperty,
isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document),
isWebWorker = !isBrowser && typeof importScripts !== 'undefined',
//PS3 indicates loaded and complete, but need to wait for complete
//specifically. Sequence is 'loading', 'loaded', execution,
// then 'complete'. The UA check is unfortunate, but not sure how
//to feature test w/o causing perf issues.
readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ?
/^complete$/ : /^(complete|loaded)$/,
defContextName = '_',
//Oh the tragedy, detecting opera. See the usage of isOpera for reason.
isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]',
contexts = {},
cfg = {},
globalDefQueue = [],
useInteractive = false;
//Could match something like ')//comment', do not lose the prefix to comment.
//Replacement callback for commentRegExp: keep any captured non-comment
//prefix character (e.g. the ')' in ')//comment'), drop the comment itself.
function commentReplace(match, singlePrefix) {
    return singlePrefix ? singlePrefix : '';
}
//Type check via Object.prototype.toString so it works across frames.
function isFunction(it) {
    return ostring.call(it) === '[object Function]';
}
//Array check via Object.prototype.toString (pre-Array.isArray idiom).
function isArray(it) {
    return ostring.call(it) === '[object Array]';
}
/**
* Helper function for iterating over an array. If the func returns
* a true value, it will break out of the loop.
*/
/**
 * Iterates over ary, calling func(item, index, array) for each truthy
 * entry. Iteration stops as soon as func returns a truthy value.
 * Falsy entries are skipped; a falsy ary is a no-op.
 */
function each(ary, func) {
    if (!ary) {
        return;
    }
    var idx;
    for (idx = 0; idx < ary.length; idx += 1) {
        if (ary[idx] && func(ary[idx], idx, ary)) {
            break;
        }
    }
}
/**
* Helper function for iterating over an array backwards. If the func
* returns a true value, it will break out of the loop.
*/
/**
 * Like each(), but walks ary from the last element down to the first.
 * Stops early when func returns a truthy value; skips falsy entries.
 */
function eachReverse(ary, func) {
    if (!ary) {
        return;
    }
    var idx;
    for (idx = ary.length - 1; idx > -1; idx -= 1) {
        if (ary[idx] && func(ary[idx], idx, ary)) {
            break;
        }
    }
}
//Own-property check that stays safe even if obj shadows `hasOwnProperty`.
function hasProp(obj, prop) {
    return hasOwn.call(obj, prop);
}
//Returns obj[prop] only when it is an own property; otherwise falsy.
function getOwn(obj, prop) {
    return hasProp(obj, prop) && obj[prop];
}
/**
* Cycles over properties in an object and calls a function for each
* property value. If the function returns a truthy value, then the
* iteration is stopped.
*/
/**
 * Visits every own property of obj, calling func(value, propName).
 * Iteration stops as soon as func returns a truthy value.
 */
function eachProp(obj, func) {
    var key;
    for (key in obj) {
        if (!hasProp(obj, key)) {
            continue;
        }
        if (func(obj[key], key)) {
            break;
        }
    }
}
/**
* Simple function to mix in properties from source into target,
* but only if target does not already have a property of the same name.
*/
/**
 * Copies own properties from source onto target and returns target.
 * Existing target properties are preserved unless force is truthy.
 * With deepStringMixin, plain-object values (not arrays, functions, or
 * RegExps) are merged recursively instead of replaced wholesale.
 */
function mixin(target, source, force, deepStringMixin) {
    if (!source) {
        return target;
    }
    eachProp(source, function (value, prop) {
        if (!force && hasProp(target, prop)) {
            return;
        }
        var mergeable = deepStringMixin &&
            value && typeof value === 'object' &&
            !isArray(value) && !isFunction(value) &&
            !(value instanceof RegExp);
        if (mergeable) {
            if (!target[prop]) {
                target[prop] = {};
            }
            mixin(target[prop], value, force, deepStringMixin);
        } else {
            target[prop] = value;
        }
    });
    return target;
}
//Similar to Function.prototype.bind, but the 'this' object is specified
//first, since it is easier to read/figure out what 'this' will be.
//Like Function.prototype.bind, but with the `this` object first for
//readability at call sites.
function bind(obj, fn) {
    var bound = function () {
        return fn.apply(obj, arguments);
    };
    return bound;
}
//Live HTMLCollection of every <script> element currently in the document.
function scripts() {
    return document.getElementsByTagName('script');
}
//Default error handler: rethrow so failures are loud unless overridden.
function defaultOnError(err) {
    throw err;
}
//Allow getting a global that is expressed in
//dot notation, like 'a.b.c'.
//Resolves a dot-notation path like 'a.b.c' against the global object.
//Falsy input is returned unchanged.
function getGlobal(value) {
    if (!value) {
        return value;
    }
    var cursor = global;
    each(value.split('.'), function (segment) {
        cursor = cursor[segment];
    });
    return cursor;
}
/**
* Constructs an error with a pointer to an URL with more information.
* @param {String} id the error ID that maps to an ID on a web page.
* @param {String} message human readable error.
* @param {Error} [err] the original error, if there is one.
*
* @returns {Error}
*/
//Builds a RequireJS error carrying the error id, the affected module
//ids, a docs URL, and (optionally) the original underlying error.
function makeError(id, msg, err, requireModules) {
    var detailsUrl = 'https://requirejs.org/docs/errors.html#' + id,
        e = new Error(msg + '\n' + detailsUrl);
    e.requireType = id;
    e.requireModules = requireModules;
    if (err) {
        e.originalError = err;
    }
    return e;
}
//Guard clauses: cooperate with any AMD loader or config already present.
if (typeof define !== 'undefined') {
    //If a define is already in play via another AMD loader,
    //do not overwrite.
    return;
}
if (typeof requirejs !== 'undefined') {
    if (isFunction(requirejs)) {
        //Do not overwrite an existing requirejs instance.
        return;
    }
    //A non-function `requirejs` is treated as a configuration object.
    cfg = requirejs;
    requirejs = undefined;
}
//Allow for a require config object
if (typeof require !== 'undefined' && !isFunction(require)) {
    //assume it is a config object.
    cfg = require;
    require = undefined;
}
function newContext(contextName) {
var inCheckLoaded, Module, context, handlers,
checkLoadedTimeoutId,
config = {
//Defaults. Do not set a default for map
//config to speed up normalize(), which
//will run faster if there is no default.
waitSeconds: 7,
baseUrl: './',
paths: {},
bundles: {},
pkgs: {},
shim: {},
config: {}
},
registry = {},
//registry of just enabled modules, to speed
//cycle breaking code when lots of modules
//are registered, but not activated.
enabledRegistry = {},
undefEvents = {},
defQueue = [],
defined = {},
urlFetched = {},
bundlesMap = {},
requireCounter = 1,
unnormalizedCounter = 1;
/**
* Trims the . and .. from an array of path segments.
* It will keep a leading path segment if a .. will become
* the first path segment, to help with module name lookups,
* which act like paths, but can be remapped. But the end result,
* all paths that use this function should look normalized.
* NOTE: this method MODIFIES the input array.
* @param {Array} ary the array of path segments.
*/
/**
 * Trims '.' and '..' from an array of path segments, in place.
 * Leading '..' segments (and '..' runs at the front) are kept so
 * relative ids can still be remapped later. MODIFIES the input array.
 */
function trimDots(ary) {
    var idx = 0,
        segment,
        keepDotDot;
    while (idx < ary.length) {
        segment = ary[idx];
        if (segment === '.') {
            //'.' is a no-op segment: drop it and re-examine this index.
            ary.splice(idx, 1);
            continue;
        }
        if (segment === '..') {
            //Keep '..' at the start, or when it cannot be collapsed
            //because the previous segment is itself a '..'.
            keepDotDot = idx === 0 ||
                (idx === 1 && ary[2] === '..') ||
                ary[idx - 1] === '..';
            if (!keepDotDot && idx > 0) {
                //Collapse "segment/.." into nothing and step back.
                ary.splice(idx - 1, 2);
                idx -= 1;
                continue;
            }
        }
        idx += 1;
    }
}
/**
* Given a relative module name, like ./something, normalize it to
* a real name that can be mapped to a path.
* @param {String} name the relative name
* @param {String} baseName a real name that the name arg is relative
* to.
* @param {Boolean} applyMap apply the map config to the value. Should
* only be done if this normalization is for a dependency ID.
* @returns {String} normalized name
*/
function normalize(name, baseName, applyMap) {
    var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex,
        foundMap, foundI, foundStarMap, starI, normalizedBaseParts,
        baseParts = (baseName && baseName.split('/')),
        map = config.map,
        starMap = map && map['*'];
    //Adjust any relative paths.
    if (name) {
        name = name.split('/');
        lastIndex = name.length - 1;
        // If wanting node ID compatibility, strip .js from end
        // of IDs. Have to do this here, and not in nameToUrl
        // because node allows either .js or non .js to map
        // to same file.
        if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) {
            name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, '');
        }
        // Starts with a '.' so need the baseName
        if (name[0].charAt(0) === '.' && baseParts) {
            //Convert baseName to array, and lop off the last part,
            //so that . matches that 'directory' and not name of the baseName's
            //module. For instance, baseName of 'one/two/three', maps to
            //'one/two/three.js', but we want the directory, 'one/two' for
            //this normalization.
            normalizedBaseParts = baseParts.slice(0, baseParts.length - 1);
            name = normalizedBaseParts.concat(name);
        }
        trimDots(name);
        name = name.join('/');
    }
    //Apply map config if available.
    if (applyMap && map && (baseParts || starMap)) {
        nameParts = name.split('/');
        //Walk from the longest prefix of the name down to the
        //shortest, looking for a map config entry that applies.
        outerLoop: for (i = nameParts.length; i > 0; i -= 1) {
            nameSegment = nameParts.slice(0, i).join('/');
            if (baseParts) {
                //Find the longest baseName segment match in the config.
                //So, do joins on the biggest to smallest lengths of baseParts.
                for (j = baseParts.length; j > 0; j -= 1) {
                    mapValue = getOwn(map, baseParts.slice(0, j).join('/'));
                    //baseName segment has config, find if it has one for
                    //this name.
                    if (mapValue) {
                        mapValue = getOwn(mapValue, nameSegment);
                        if (mapValue) {
                            //Match, update name to the new value.
                            foundMap = mapValue;
                            foundI = i;
                            break outerLoop;
                        }
                    }
                }
            }
            //Check for a star map match, but just hold on to it,
            //if there is a shorter segment match later in a matching
            //config, then favor over this star map.
            if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) {
                foundStarMap = getOwn(starMap, nameSegment);
                starI = i;
            }
        }
        //No scoped map match found; fall back to the '*' map, if any.
        if (!foundMap && foundStarMap) {
            foundMap = foundStarMap;
            foundI = starI;
        }
        if (foundMap) {
            //Replace the matched prefix with the mapped value.
            nameParts.splice(0, foundI, foundMap);
            name = nameParts.join('/');
        }
    }
    // If the name points to a package's name, use
    // the package main instead.
    pkgMain = getOwn(config.pkgs, name);
    return pkgMain ? pkgMain : name;
}
//Removes the browser script node that was created to load the named
//module in this context. No-op outside a browser environment.
function removeScript(name) {
    if (!isBrowser) {
        return;
    }
    each(scripts(), function (scriptNode) {
        var isMatch = scriptNode.getAttribute('data-requiremodule') === name &&
            scriptNode.getAttribute('data-requirecontext') === context.contextName;
        if (isMatch) {
            scriptNode.parentNode.removeChild(scriptNode);
            //Returning true stops the each() iteration early.
            return true;
        }
    });
}
//If the paths config for this module ID is an array with entries left
//to try, discards the URL that just failed and re-requests the module
//with the next candidate. Returns true when a retry was started.
function hasPathFallback(id) {
    var pathConfig = getOwn(config.paths, id);
    if (!pathConfig || !isArray(pathConfig) || pathConfig.length <= 1) {
        return;
    }
    //Pop off the first array value, since it failed, and retry.
    pathConfig.shift();
    context.require.undef(id);
    //Custom require that does not do map translation, since
    //ID is "absolute", already mapped/resolved.
    context.makeRequire(null, {
        skipMap: true
    })([id]);
    return true;
}
//Turns a plugin!resource to [plugin, resource]
//with the plugin being undefined if the name
//did not have a plugin prefix.
function splitPrefix(name) {
    //Split 'plugin!resource' into [plugin, resource]; the plugin slot
    //is undefined when there is no '!' separator. Only the first '!'
    //is significant.
    var bangAt = name ? name.indexOf('!') : -1;
    if (bangAt === -1) {
        return [undefined, name];
    }
    return [name.substring(0, bangAt), name.substring(bangAt + 1, name.length)];
}
/**
* Creates a module mapping that includes plugin prefix, module
* name, and path. If parentModuleMap is provided it will
* also normalize the name via require.normalize()
*
* @param {String} name the module name
* @param {String} [parentModuleMap] parent module map
* for the module name, used to resolve relative names.
* @param {Boolean} isNormalized: is the ID already normalized.
* This is true if this call is done for a define() module ID.
* @param {Boolean} applyMap: apply the map config to the ID.
* Should only be true if this map is for a dependency.
*
* @returns {Object}
*/
function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
    var url, pluginModule, suffix, nameParts,
        prefix = null,
        parentName = parentModuleMap ? parentModuleMap.name : null,
        originalName = name,
        isDefine = true,
        normalizedName = '';
    //If no name, then it means it is a require call, generate an
    //internal name.
    if (!name) {
        isDefine = false;
        name = '_@r' + (requireCounter += 1);
    }
    //Separate an optional 'plugin!' prefix from the resource name.
    nameParts = splitPrefix(name);
    prefix = nameParts[0];
    name = nameParts[1];
    if (prefix) {
        prefix = normalize(prefix, parentName, applyMap);
        //pluginModule is only set when the plugin has already loaded.
        pluginModule = getOwn(defined, prefix);
    }
    //Account for relative paths if there is a base name.
    if (name) {
        if (prefix) {
            if (isNormalized) {
                normalizedName = name;
            } else if (pluginModule && pluginModule.normalize) {
                //Plugin is loaded, use its normalize method.
                normalizedName = pluginModule.normalize(name, function (name) {
                    return normalize(name, parentName, applyMap);
                });
            } else {
                // If nested plugin references, then do not try to
                // normalize, as it will not normalize correctly. This
                // places a restriction on resourceIds, and the longer
                // term solution is not to normalize until plugins are
                // loaded and all normalizations to allow for async
                // loading of a loader plugin. But for now, fixes the
                // common uses. Details in #1131
                normalizedName = name.indexOf('!') === -1 ?
                    normalize(name, parentName, applyMap) :
                    name;
            }
        } else {
            //A regular module.
            normalizedName = normalize(name, parentName, applyMap);
            //Normalized name may be a plugin ID due to map config
            //application in normalize. The map config values must
            //already be normalized, so do not need to redo that part.
            nameParts = splitPrefix(normalizedName);
            prefix = nameParts[0];
            normalizedName = nameParts[1];
            isNormalized = true;
            url = context.nameToUrl(normalizedName);
        }
    }
    //If the id is a plugin id that cannot be determined if it needs
    //normalization, stamp it with a unique ID so two matching relative
    //ids that may conflict can be separate.
    suffix = prefix && !pluginModule && !isNormalized ?
        '_unnormalized' + (unnormalizedCounter += 1) :
        '';
    //The id is 'prefix!name' (plus any unnormalized suffix) for plugin
    //resources, or just the normalized name for regular modules.
    return {
        prefix: prefix,
        name: normalizedName,
        parentMap: parentModuleMap,
        unnormalized: !!suffix,
        url: url,
        originalName: originalName,
        isDefine: isDefine,
        id: (prefix ?
            prefix + '!' + normalizedName :
            normalizedName) + suffix
    };
}
//Returns the Module instance registered for this module map, creating
//and registering a new one on first request.
function getModule(depMap) {
    var existing = getOwn(registry, depMap.id);
    return existing || (registry[depMap.id] = new context.Module(depMap));
}
//Attaches a listener for a module event. If the module already
//resolved (and finished emitting), a 'defined' listener fires
//immediately; an already-recorded error also fires immediately.
function on(depMap, name, fn) {
    var id = depMap.id,
        mod = getOwn(registry, id);
    if (hasProp(defined, id) && (!mod || mod.defineEmitComplete)) {
        if (name === 'defined') {
            fn(defined[id]);
        }
        return;
    }
    mod = getModule(depMap);
    if (name === 'error' && mod.error) {
        //Deliver an error that already happened right away.
        fn(mod.error);
    } else {
        mod.on(name, fn);
    }
}
//Routes a loader error: to the provided errback if any, otherwise to
//each affected module's 'error' listeners, and finally to the global
//req.onError handler if nothing else consumed it.
function onError(err, errback) {
    var ids = err.requireModules,
        notified = false;
    if (errback) {
        errback(err);
        return;
    }
    each(ids, function (id) {
        var mod = getOwn(registry, id);
        if (!mod) {
            return;
        }
        //Set error on module, so it skips timeout checks.
        mod.error = err;
        if (mod.events.error) {
            notified = true;
            mod.emit('error', err);
        }
    });
    if (!notified) {
        //No listener took the error; escalate to the global handler.
        req.onError(err);
    }
}
/**
* Internal method to transfer globalQueue items to this context's
* defQueue.
*/
/**
 * Internal method to transfer globalQueue items to this context's
 * defQueue, recording named defines in defQueueMap so check() can
 * avoid refetching them.
 */
function takeGlobalQueue() {
    if (!globalDefQueue.length) {
        return;
    }
    each(globalDefQueue, function (queueItem) {
        var id = queueItem[0];
        if (typeof id === 'string') {
            context.defQueueMap[id] = true;
        }
        defQueue.push(queueItem);
    });
    globalDefQueue = [];
}
//Special handlers for the CommonJS-style pseudo dependencies
//'require', 'exports' and 'module'. Each returns the value to inject
//for that slot of a module's dependency list.
handlers = {
    'require': function (mod) {
        if (mod.require) {
            return mod.require;
        } else {
            //Lazily create a require bound to this module's map so
            //relative IDs resolve against it.
            return (mod.require = context.makeRequire(mod.map));
        }
    },
    'exports': function (mod) {
        mod.usingExports = true;
        if (mod.map.isDefine) {
            if (mod.exports) {
                //Already created; also publish it as the defined value.
                return (defined[mod.map.id] = mod.exports);
            } else {
                return (mod.exports = defined[mod.map.id] = {});
            }
        }
    },
    'module': function (mod) {
        if (mod.module) {
            return mod.module;
        } else {
            return (mod.module = {
                id: mod.map.id,
                uri: mod.map.url,
                config: function () {
                    //Module-specific config from the 'config' option.
                    return getOwn(config.config, mod.map.id) || {};
                },
                exports: mod.exports || (mod.exports = {})
            });
        }
    }
};
//Drops all waiting-module bookkeeping for an ID once it has resolved
//(or been undefined).
function cleanRegistry(id) {
    delete enabledRegistry[id];
    delete registry[id];
}
/**
 * Walks a module's dependency graph and forces resolution of
 * dependency cycles: when a dependency is found that was already
 * traced on this walk, its current defined value is used to satisfy
 * the slot instead of waiting forever.
 * @param {Module} mod the module to start from.
 * @param {Object} traced ids visited on the current walk.
 * @param {Object} processed ids already fully handled.
 */
function breakCycle(mod, traced, processed) {
    var id = mod.map.id;
    if (mod.error) {
        mod.emit('error', mod.error);
    } else {
        traced[id] = true;
        each(mod.depMaps, function (depMap, i) {
            var depId = depMap.id,
                dep = getOwn(registry, depId);
            //Only force things that have not completed
            //being defined, so still in the registry,
            //and only if it has not been matched up
            //in the module already.
            if (dep && !mod.depMatched[i] && !processed[depId]) {
                if (getOwn(traced, depId)) {
                    //Cycle detected: satisfy the slot with whatever
                    //is currently defined for the dependency.
                    mod.defineDep(i, defined[depId]);
                    mod.check(); //pass false?
                } else {
                    breakCycle(dep, traced, processed);
                }
            }
        });
        processed[id] = true;
    }
}
/**
 * Sanity-checks the state of all enabled modules: raises a 'timeout'
 * error for modules that never loaded within waitSeconds, breaks
 * dependency cycles found via require() calls, and reschedules itself
 * while loads are still outstanding.
 */
function checkLoaded() {
    var err, usingPathFallback,
        waitInterval = config.waitSeconds * 1000,
        //It is possible to disable the wait interval by using waitSeconds of 0.
        expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(),
        noLoads = [],
        reqCalls = [],
        stillLoading = false,
        needCycleCheck = true;
    //Do not bother if this call was a result of a cycle break.
    if (inCheckLoaded) {
        return;
    }
    inCheckLoaded = true;
    //Figure out the state of all the modules.
    eachProp(enabledRegistry, function (mod) {
        var map = mod.map,
            modId = map.id;
        //Skip things that are not enabled or in error state.
        if (!mod.enabled) {
            return;
        }
        //Collect require() calls (non-define maps) for cycle breaking.
        if (!map.isDefine) {
            reqCalls.push(mod);
        }
        if (!mod.error) {
            //If the module should be executed, and it has not
            //been inited and time is up, remember it.
            if (!mod.inited && expired) {
                if (hasPathFallback(modId)) {
                    usingPathFallback = true;
                    stillLoading = true;
                } else {
                    noLoads.push(modId);
                    removeScript(modId);
                }
            } else if (!mod.inited && mod.fetched && map.isDefine) {
                stillLoading = true;
                if (!map.prefix) {
                    //No reason to keep looking for unfinished
                    //loading. If the only stillLoading is a
                    //plugin resource though, keep going,
                    //because it may be that a plugin resource
                    //is waiting on a non-plugin cycle.
                    return (needCycleCheck = false);
                }
            }
        }
    });
    if (expired && noLoads.length) {
        //If wait time expired, throw error of unloaded modules.
        err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads);
        err.contextName = context.contextName;
        return onError(err);
    }
    //Not expired, check for a cycle.
    if (needCycleCheck) {
        each(reqCalls, function (mod) {
            breakCycle(mod, {}, {});
        });
    }
    //If still waiting on loads, and the waiting load is something
    //other than a plugin resource, or there are still outstanding
    //scripts, then just try back later.
    if ((!expired || usingPathFallback) && stillLoading) {
        //Something is still waiting to load. Wait for it, but only
        //if a timeout is not already in effect.
        if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) {
            checkLoadedTimeoutId = setTimeout(function () {
                checkLoadedTimeoutId = 0;
                checkLoaded();
            }, 50);
        }
    }
    inCheckLoaded = false;
}
//Constructor for a module tracked by this context. Holds dependency
//bookkeeping and event listeners while the module loads and defines.
Module = function (map) {
    //Listeners possibly restored from a prior undef of the same ID.
    this.events = getOwn(undefEvents, map.id) || {};
    this.map = map;
    this.shim = getOwn(config.shim, map.id);
    //Resolved dependency values, by dependency position.
    this.depExports = [];
    this.depMaps = [];
    //Which dependency slots have been satisfied already.
    this.depMatched = [];
    this.pluginMaps = {};
    //Count of dependencies still unresolved.
    this.depCount = 0;
    /* this.exports this.factory
       this.depMaps = [],
       this.enabled, this.fetched
    */
};
Module.prototype = {
//Wires the module to its dependency list and factory. options.enabled
//forces immediate enabling; options.ignore keeps the result out of
//the defined cache (used for unnormalized plugin resources).
init: function (depMaps, factory, errback, options) {
    options = options || {};
    //Do not do more inits if already done. Can happen if there
    //are multiple define calls for the same module. That is not
    //a normal, common case, but it is also not unexpected.
    if (this.inited) {
        return;
    }
    this.factory = factory;
    if (errback) {
        //Register for errors on this module.
        this.on('error', errback);
    } else if (this.events.error) {
        //If no errback already, but there are error listeners
        //on this module, set up an errback to pass to the deps.
        errback = bind(this, function (err) {
            this.emit('error', err);
        });
    }
    //Do a copy of the dependency array, so that
    //source inputs are not modified. For example
    //"shim" deps are passed in here directly, and
    //doing a direct modification of the depMaps array
    //would affect that config.
    this.depMaps = depMaps && depMaps.slice(0);
    this.errback = errback;
    //Indicate this module has be initialized
    this.inited = true;
    this.ignore = options.ignore;
    //Could have option to init this module in enabled mode,
    //or could have been previously marked as enabled. However,
    //the dependencies are not known until init is called. So
    //if enabled previously, now trigger dependencies as enabled.
    if (options.enabled || this.enabled) {
        //Enable this module and dependencies.
        //Will call this.check()
        this.enable();
    } else {
        this.check();
    }
},
defineDep: function (i, depExports) {
//Because of cycles, defined callback for a given
//export can be called more than once.
if (!this.depMatched[i]) {
this.depMatched[i] = true;
this.depCount -= 1;
this.depExports[i] = depExports;
}
},
fetch: function () {
if (this.fetched) {
return;
}
this.fetched = true;
context.startTime = (new Date()).getTime();
var map = this.map;
//If the manager is for a plugin managed resource,
//ask the plugin to load it now.
if (this.shim) {
context.makeRequire(this.map, {
enableBuildCallback: true
})(this.shim.deps || [], bind(this, function () {
return map.prefix ? this.callPlugin() : this.load();
}));
} else {
//Regular dependency.
return map.prefix ? this.callPlugin() : this.load();
}
},
load: function () {
var url = this.map.url;
//Regular dependency.
if (!urlFetched[url]) {
urlFetched[url] = true;
context.load(this.map.id, url);
}
},
/**
 * Checks if the module is ready to define itself, and if so,
 * define it: runs the factory, resolves the final exports value
 * (return value, module.exports, or the exports object), publishes
 * it, and emits 'defined'. Re-entrant calls during definition are
 * short-circuited via the defining flag.
 */
check: function () {
    if (!this.enabled || this.enabling) {
        return;
    }
    var err, cjsModule,
        id = this.map.id,
        depExports = this.depExports,
        exports = this.exports,
        factory = this.factory;
    if (!this.inited) {
        // Only fetch if not already in the defQueue.
        if (!hasProp(context.defQueueMap, id)) {
            this.fetch();
        }
    } else if (this.error) {
        this.emit('error', this.error);
    } else if (!this.defining) {
        //The factory could trigger another require call
        //that would result in checking this module to
        //define itself again. If already in the process
        //of doing that, skip this work.
        this.defining = true;
        //Only define once all dependencies are satisfied.
        if (this.depCount < 1 && !this.defined) {
            if (isFunction(factory)) {
                //If there is an error listener, favor passing
                //to that instead of throwing an error. However,
                //only do it for define()'d modules. require
                //errbacks should not be called for failures in
                //their callbacks (#699). However if a global
                //onError is set, use that.
                if ((this.events.error && this.map.isDefine) ||
                        req.onError !== defaultOnError) {
                    try {
                        exports = context.execCb(id, factory, depExports, exports);
                    } catch (e) {
                        err = e;
                    }
                } else {
                    exports = context.execCb(id, factory, depExports, exports);
                }
                // Favor return value over exports. If node/cjs in play,
                // then will not have a return value anyway. Favor
                // module.exports assignment over exports object.
                if (this.map.isDefine && exports === undefined) {
                    cjsModule = this.module;
                    if (cjsModule) {
                        exports = cjsModule.exports;
                    } else if (this.usingExports) {
                        //exports already set the defined value.
                        exports = this.exports;
                    }
                }
                if (err) {
                    //Stamp the error with require metadata before
                    //routing it to onError.
                    err.requireMap = this.map;
                    err.requireModules = this.map.isDefine ? [this.map.id] : null;
                    err.requireType = this.map.isDefine ? 'define' : 'require';
                    return onError((this.error = err));
                }
            } else {
                //Just a literal value
                exports = factory;
            }
            this.exports = exports;
            if (this.map.isDefine && !this.ignore) {
                defined[id] = exports;
                if (req.onResourceLoad) {
                    var resLoadMaps = [];
                    each(this.depMaps, function (depMap) {
                        resLoadMaps.push(depMap.normalizedMap || depMap);
                    });
                    req.onResourceLoad(context, this.map, resLoadMaps);
                }
            }
            //Clean up
            cleanRegistry(id);
            this.defined = true;
        }
        //Finished the define stage. Allow calling check again
        //to allow define notifications below in the case of a
        //cycle.
        this.defining = false;
        if (this.defined && !this.defineEmitted) {
            this.defineEmitted = true;
            this.emit('defined', this.exports);
            this.defineEmitComplete = true;
        }
    }
},
//Resolves this module via its loader plugin: waits for the plugin to
//be defined, normalizes the resource ID if needed, then hands the
//plugin a load() callback (with error and fromText support) to
//deliver the resource value.
callPlugin: function () {
    var map = this.map,
        id = map.id,
        //Map already normalized the prefix.
        pluginMap = makeModuleMap(map.prefix);
    //Mark this as a dependency for this plugin, so it
    //can be traced for cycles.
    this.depMaps.push(pluginMap);
    on(pluginMap, 'defined', bind(this, function (plugin) {
        var load, normalizedMap, normalizedMod,
            bundleId = getOwn(bundlesMap, this.map.id),
            name = this.map.name,
            parentName = this.map.parentMap ? this.map.parentMap.name : null,
            localRequire = context.makeRequire(map.parentMap, {
                enableBuildCallback: true
            });
        //If current map is not normalized, wait for that
        //normalized name to load instead of continuing.
        if (this.map.unnormalized) {
            //Normalize the ID if the plugin allows it.
            if (plugin.normalize) {
                name = plugin.normalize(name, function (name) {
                    return normalize(name, parentName, true);
                }) || '';
            }
            //prefix and name should already be normalized, no need
            //for applying map config again either.
            normalizedMap = makeModuleMap(map.prefix + '!' + name,
                this.map.parentMap,
                true);
            on(normalizedMap,
                'defined', bind(this, function (value) {
                    //Forward the normalized module's value to this
                    //unnormalized placeholder (kept out of defined[]).
                    this.map.normalizedMap = normalizedMap;
                    this.init([], function () { return value; }, null, {
                        enabled: true,
                        ignore: true
                    });
                }));
            normalizedMod = getOwn(registry, normalizedMap.id);
            if (normalizedMod) {
                //Mark this as a dependency for this plugin, so it
                //can be traced for cycles.
                this.depMaps.push(normalizedMap);
                if (this.events.error) {
                    normalizedMod.on('error', bind(this, function (err) {
                        this.emit('error', err);
                    }));
                }
                normalizedMod.enable();
            }
            return;
        }
        //If a paths config, then just load that file instead to
        //resolve the plugin, as it is built into that paths layer.
        if (bundleId) {
            this.map.url = context.nameToUrl(bundleId);
            this.load();
            return;
        }
        //Callback the plugin uses to deliver the resource value.
        load = bind(this, function (value) {
            this.init([], function () { return value; }, null, {
                enabled: true
            });
        });
        load.error = bind(this, function (err) {
            this.inited = true;
            this.error = err;
            err.requireModules = [id];
            //Remove temp unnormalized modules for this module,
            //since they will never be resolved otherwise now.
            eachProp(registry, function (mod) {
                if (mod.map.id.indexOf(id + '_unnormalized') === 0) {
                    cleanRegistry(mod.map.id);
                }
            });
            onError(err);
        });
        //Allow plugins to load other code without having to know the
        //context or how to 'complete' the load.
        load.fromText = bind(this, function (text, textAlt) {
            /*jslint evil: true */
            var moduleName = map.name,
                moduleMap = makeModuleMap(moduleName),
                hasInteractive = useInteractive;
            //As of 2.1.0, support just passing the text, to reinforce
            //fromText only being called once per resource. Still
            //support old style of passing moduleName but discard
            //that moduleName in favor of the internal ref.
            if (textAlt) {
                text = textAlt;
            }
            //Turn off interactive script matching for IE for any define
            //calls in the text, then turn it back on at the end.
            if (hasInteractive) {
                useInteractive = false;
            }
            //Prime the system by creating a module instance for
            //it.
            getModule(moduleMap);
            //Transfer any config to this other module.
            if (hasProp(config.config, id)) {
                config.config[moduleName] = config.config[id];
            }
            try {
                req.exec(text);
            } catch (e) {
                return onError(makeError('fromtexteval',
                    'fromText eval for ' + id +
                    ' failed: ' + e,
                    e,
                    [id]));
            }
            if (hasInteractive) {
                useInteractive = true;
            }
            //Mark this as a dependency for the plugin
            //resource
            this.depMaps.push(moduleMap);
            //Support anonymous modules.
            context.completeLoad(moduleName);
            //Bind the value of that module to the value for this
            //resource ID.
            localRequire([moduleName], load);
        });
        //Use parentName here since the plugin's name is not reliable,
        //could be some weird string with no path that actually wants to
        //reference the parentName's path.
        plugin.load(map.name, localRequire, load, config);
    }));
    context.enable(pluginMap, this);
    this.pluginMaps[pluginMap.id] = pluginMap;
},
//Activates this module: converts string dependencies to module maps,
//resolves the special 'require'/'exports'/'module' handlers, wires
//'defined'/'error' listeners to each dependency, and enables any
//dependencies and plugins that are still waiting.
enable: function () {
    enabledRegistry[this.map.id] = this;
    this.enabled = true;
    //Set flag mentioning that the module is enabling,
    //so that immediate calls to the defined callbacks
    //for dependencies do not trigger inadvertent load
    //with the depCount still being zero.
    this.enabling = true;
    //Enable each dependency
    each(this.depMaps, bind(this, function (depMap, i) {
        var id, mod, handler;
        if (typeof depMap === 'string') {
            //Dependency needs to be converted to a depMap
            //and wired up to this module.
            depMap = makeModuleMap(depMap,
                (this.map.isDefine ? this.map : this.map.parentMap),
                false,
                !this.skipMap);
            this.depMaps[i] = depMap;
            handler = getOwn(handlers, depMap.id);
            if (handler) {
                //require/exports/module resolve immediately.
                this.depExports[i] = handler(this);
                return;
            }
            this.depCount += 1;
            on(depMap, 'defined', bind(this, function (depExports) {
                if (this.undefed) {
                    return;
                }
                this.defineDep(i, depExports);
                this.check();
            }));
            if (this.errback) {
                on(depMap, 'error', bind(this, this.errback));
            } else if (this.events.error) {
                // No direct errback on this module, but something
                // else is listening for errors, so be sure to
                // propagate the error correctly.
                on(depMap, 'error', bind(this, function(err) {
                    this.emit('error', err);
                }));
            }
        }
        id = depMap.id;
        mod = registry[id];
        //Skip special modules like 'require', 'exports', 'module'
        //Also, don't call enable if it is already enabled,
        //important in circular dependency cases.
        if (!hasProp(handlers, id) && mod && !mod.enabled) {
            context.enable(depMap, this);
        }
    }));
    //Enable each plugin that is used in
    //a dependency
    eachProp(this.pluginMaps, bind(this, function (pluginMap) {
        var mod = getOwn(registry, pluginMap.id);
        if (mod && !mod.enabled) {
            context.enable(pluginMap, this);
        }
    }));
    this.enabling = false;
    this.check();
},
on: function (name, cb) {
var cbs = this.events[name];
if (!cbs) {
cbs = this.events[name] = [];
}
cbs.push(cb);
},
emit: function (name, evt) {
each(this.events[name], function (cb) {
cb(evt);
});
if (name === 'error') {
//Now that the error handler was triggered, remove
//the listeners, since this broken Module instance
//can stay around for a while in the registry.
delete this.events[name];
}
}
};
//Instantiates and inits a module from a queued define() entry of the
//form [id, deps, factory]. IDs that already resolved are skipped.
function callGetModule(args) {
    if (hasProp(defined, args[0])) {
        return;
    }
    getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]);
}
//Detaches a script event listener, favoring detachEvent because of an
//IE9 issue (see the attachEvent/addEventListener comment elsewhere in
//this file); Opera is excluded since it reports detachEvent support.
function removeListener(node, func, name, ieName) {
    var useDetach = node.detachEvent && !isOpera;
    if (!useDetach) {
        node.removeEventListener(name, func, false);
        return;
    }
    //Probably IE. If not it will throw an error, which will be
    //useful to know.
    if (ieName) {
        node.detachEvent(ieName, func);
    }
}
/**
* Given an event from a script node, get the requirejs info from it,
* and then removes the event listeners on the node.
* @param {Event} evt
* @returns {Object}
*/
/**
 * Given an event from a script node, get the requirejs info from it,
 * and then removes the event listeners on the node.
 * @param {Event} evt
 * @returns {Object} { node, id } where id is the data-requiremodule
 * attribute value.
 */
function getScriptData(evt) {
    //Using currentTarget instead of target for Firefox 2.0's sake. Not
    //all old browsers will be supported, but this one was easy enough
    //to support and still makes sense.
    var node = evt.currentTarget || evt.srcElement,
        moduleId = node && node.getAttribute('data-requiremodule');
    //The node only needs its listeners once; strip them now.
    removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange');
    removeListener(node, context.onScriptError, 'error');
    return { node: node, id: moduleId };
}
//Drains pending define() calls: first pulls any parked on the global
//queue into this context, then instantiates each queued module. An
//anonymous define with no owning script load is a 'mismatch' error.
function intakeDefines() {
    var args;
    takeGlobalQueue();
    while (defQueue.length) {
        args = defQueue.shift();
        if (args[0] === null) {
            return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' +
                args[args.length - 1]));
        }
        //args are id, deps, factory. Should be normalized by the
        //define() function.
        callGetModule(args);
    }
    context.defQueueMap = {};
}
context = {
config: config,
contextName: contextName,
registry: registry,
defined: defined,
urlFetched: urlFetched,
defQueue: defQueue,
defQueueMap: {},
Module: Module,
makeModuleMap: makeModuleMap,
nextTick: req.nextTick,
onError: onError,
/**
 * Set a configuration for the context. Merges the incoming config
 * into the existing one: paths/bundles/config/map are merged deeply,
 * shim and packages entries are normalized, and any registered but
 * uninitialized modules get their maps recomputed.
 * @param {Object} cfg config object to integrate.
 */
configure: function (cfg) {
    //Make sure the baseUrl ends in a slash.
    if (cfg.baseUrl) {
        if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') {
            cfg.baseUrl += '/';
        }
    }
    // Convert old style urlArgs string to a function.
    if (typeof cfg.urlArgs === 'string') {
        var urlArgs = cfg.urlArgs;
        cfg.urlArgs = function(id, url) {
            return (url.indexOf('?') === -1 ? '?' : '&') + urlArgs;
        };
    }
    //Save off the paths since they require special processing,
    //they are additive.
    var shim = config.shim,
        objs = {
            paths: true,
            bundles: true,
            config: true,
            map: true
        };
    //Deep-merge the object-valued options; everything else replaces.
    eachProp(cfg, function (value, prop) {
        if (objs[prop]) {
            if (!config[prop]) {
                config[prop] = {};
            }
            mixin(config[prop], value, true, true);
        } else {
            config[prop] = value;
        }
    });
    //Reverse map the bundles
    if (cfg.bundles) {
        eachProp(cfg.bundles, function (value, prop) {
            each(value, function (v) {
                if (v !== prop) {
                    //bundlesMap: module id -> bundle that contains it.
                    bundlesMap[v] = prop;
                }
            });
        });
    }
    //Merge shim
    if (cfg.shim) {
        eachProp(cfg.shim, function (value, id) {
            //Normalize the structure
            if (isArray(value)) {
                value = {
                    deps: value
                };
            }
            if ((value.exports || value.init) && !value.exportsFn) {
                value.exportsFn = context.makeShimExports(value);
            }
            shim[id] = value;
        });
        config.shim = shim;
    }
    //Adjust packages if necessary.
    if (cfg.packages) {
        each(cfg.packages, function (pkgObj) {
            var location, name;
            //A bare string is shorthand for {name: string}.
            pkgObj = typeof pkgObj === 'string' ? {name: pkgObj} : pkgObj;
            name = pkgObj.name;
            location = pkgObj.location;
            if (location) {
                config.paths[name] = pkgObj.location;
            }
            //Save pointer to main module ID for pkg name.
            //Remove leading dot in main, so main paths are normalized,
            //and remove any trailing .js, since different package
            //envs have different conventions: some use a module name,
            //some use a file name.
            config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main')
                .replace(currDirRegExp, '')
                .replace(jsSuffixRegExp, '');
        });
    }
    //If there are any "waiting to execute" modules in the registry,
    //update the maps for them, since their info, like URLs to load,
    //may have changed.
    eachProp(registry, function (mod, id) {
        //If module already has init called, since it is too
        //late to modify them, and ignore unnormalized ones
        //since they are transient.
        if (!mod.inited && !mod.map.unnormalized) {
            mod.map = makeModuleMap(id, null, true);
        }
    });
    //If a deps array or a config callback is specified, then call
    //require with those args. This is useful when require is defined as a
    //config object before require.js is loaded.
    if (cfg.deps || cfg.callback) {
        context.require(cfg.deps || [], cfg.callback);
    }
},
makeShimExports: function (value) {
function fn() {
var ret;
if (value.init) {
ret = value.init.apply(global, arguments);
}
return ret || (value.exports && getGlobal(value.exports));
}
return fn;
},
//Creates a require function bound to relMap (for resolving relative
//IDs). Supports the synchronous string form, the async array form,
//and attaches toUrl/defined/specified helpers plus undef on the
//top-level require.
makeRequire: function (relMap, options) {
    options = options || {};
    function localRequire(deps, callback, errback) {
        var id, map, requireMod;
        if (options.enableBuildCallback && callback && isFunction(callback)) {
            callback.__requireJsBuild = true;
        }
        //Synchronous form: require('some/id').
        if (typeof deps === 'string') {
            if (isFunction(callback)) {
                //Invalid call
                return onError(makeError('requireargs', 'Invalid require call'), errback);
            }
            //If require|exports|module are requested, get the
            //value for them from the special handlers. Caveat:
            //this only works while module is being defined.
            if (relMap && hasProp(handlers, deps)) {
                return handlers[deps](registry[relMap.id]);
            }
            //Synchronous access to one module. If require.get is
            //available (as in the Node adapter), prefer that.
            if (req.get) {
                return req.get(context, deps, relMap, localRequire);
            }
            //Normalize module name, if it contains . or ..
            map = makeModuleMap(deps, relMap, false, true);
            id = map.id;
            if (!hasProp(defined, id)) {
                return onError(makeError('notloaded', 'Module name "' +
                    id +
                    '" has not been loaded yet for context: ' +
                    contextName +
                    (relMap ? '' : '. Use require([])')));
            }
            return defined[id];
        }
        //Grab defines waiting in the global queue.
        intakeDefines();
        //Mark all the dependencies as needing to be loaded.
        context.nextTick(function () {
            //Some defines could have been added since the
            //require call, collect them.
            intakeDefines();
            //Anonymous internal module to host this require call.
            requireMod = getModule(makeModuleMap(null, relMap));
            //Store if map config should be applied to this require
            //call for dependencies.
            requireMod.skipMap = options.skipMap;
            requireMod.init(deps, callback, errback, {
                enabled: true
            });
            checkLoaded();
        });
        return localRequire;
    }
    mixin(localRequire, {
        isBrowser: isBrowser,
        /**
         * Converts a module name + .extension into an URL path.
         * *Requires* the use of a module name. It does not support using
         * plain URLs like nameToUrl.
         */
        toUrl: function (moduleNamePlusExt) {
            var ext,
                index = moduleNamePlusExt.lastIndexOf('.'),
                segment = moduleNamePlusExt.split('/')[0],
                isRelative = segment === '.' || segment === '..';
            //Have a file extension alias, and it is not the
            //dots from a relative path.
            if (index !== -1 && (!isRelative || index > 1)) {
                ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
                moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
            }
            return context.nameToUrl(normalize(moduleNamePlusExt,
                relMap && relMap.id, true), ext, true);
        },
        //True when the module id has a defined value.
        defined: function (id) {
            return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
        },
        //True when the module id is defined or at least registered.
        specified: function (id) {
            id = makeModuleMap(id, relMap, false, true).id;
            return hasProp(defined, id) || hasProp(registry, id);
        }
    });
    //Only allow undef on top level require calls
    if (!relMap) {
        localRequire.undef = function (id) {
            //Bind any waiting define() calls to this context,
            //fix for #408
            takeGlobalQueue();
            var map = makeModuleMap(id, relMap, true),
                mod = getOwn(registry, id);
            mod.undefed = true;
            removeScript(id);
            //Forget every trace of the module so it can be reloaded.
            delete defined[id];
            delete urlFetched[map.url];
            delete undefEvents[id];
            //Clean queued defines too. Go backwards
            //in array so that the splices do not
            //mess up the iteration.
            eachReverse(defQueue, function(args, i) {
                if (args[0] === id) {
                    defQueue.splice(i, 1);
                }
            });
            delete context.defQueueMap[id];
            if (mod) {
                //Hold on to listeners in case the
                //module will be attempted to be reloaded
                //using a different config.
                if (mod.events.defined) {
                    undefEvents[id] = mod.events;
                }
                cleanRegistry(id);
            }
        };
    }
    return localRequire;
},
/**
* Called to enable a module if it is still in the registry
* awaiting enablement. A second arg, parent, the parent module,
* is passed in for context, when this method is overridden by
* the optimizer. Not shown here to keep code compact.
*/
enable: function (depMap) {
var mod = getOwn(registry, depMap.id);
if (mod) {
getModule(depMap).enable();
}
},
/**
* Internal method used by environment adapters to complete a load event.
* A load event could be a script load or just a load pass from a synchronous
* load call.
* @param {String} moduleName the name of the module to potentially complete.
*/
//Completes a load event for moduleName: drains queued define() calls,
//binding the first anonymous one to this name; if no define showed up
//for a non-shimmed module, either reports 'nodefine' (when
//enforceDefine is on and no shim exports resolved) or simulates the
//define via the shim config.
completeLoad: function (moduleName) {
    var found, args, mod,
        shim = getOwn(config.shim, moduleName) || {},
        shExports = shim.exports;
    takeGlobalQueue();
    while (defQueue.length) {
        args = defQueue.shift();
        if (args[0] === null) {
            args[0] = moduleName;
            //If already found an anonymous module and bound it
            //to this name, then this is some other anon module
            //waiting for its completeLoad to fire.
            if (found) {
                break;
            }
            found = true;
        } else if (args[0] === moduleName) {
            //Found matching define call for this script!
            found = true;
        }
        callGetModule(args);
    }
    context.defQueueMap = {};
    //Do this after the cycle of callGetModule in case the result
    //of those calls/init calls changes the registry.
    mod = getOwn(registry, moduleName);
    if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
        if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
            if (hasPathFallback(moduleName)) {
                return;
            } else {
                return onError(makeError('nodefine',
                    'No define call for ' + moduleName,
                    null,
                    [moduleName]));
            }
        } else {
            //A script that does not call define(), so just simulate
            //the call for it.
            callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
        }
    }
    checkLoaded();
},
/**
 * Converts a module name to a file path. Supports cases where
 * moduleName may actually be just an URL.
 * Note that it **does not** call normalize on the moduleName,
 * it is assumed to have already been normalized. This is an
 * internal API, not a public one. Use toUrl for the public API.
 */
nameToUrl: function (moduleName, ext, skipExt) {
    var paths, syms, i, parentModule, url,
        parentPath, bundleId,
        pkgMain = getOwn(config.pkgs, moduleName);

    // A package name resolves to its declared main module.
    if (pkgMain) {
        moduleName = pkgMain;
    }

    // A module that lives in a bundle resolves to the bundle's URL.
    bundleId = getOwn(bundlesMap, moduleName);
    if (bundleId) {
        return context.nameToUrl(bundleId, ext, skipExt);
    }

    //If a colon is in the URL, it indicates a protocol is used and it is just
    //an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
    //or ends with .js, then assume the user meant to use an url and not a module id.
    //The slash is important for protocol-less URLs as well as full paths.
    if (req.jsExtRegExp.test(moduleName)) {
        //Just a plain path, not module name lookup, so just return it.
        //Add extension if it is included. This is a bit wonky, only non-.js things pass
        //an extension, this method probably needs to be reworked.
        url = moduleName + (ext || '');
    } else {
        //A module that needs to be converted to a path.
        paths = config.paths;

        syms = moduleName.split('/');
        //For each module name segment, see if there is a path
        //registered for it. Start with most specific name
        //and work up from it.
        for (i = syms.length; i > 0; i -= 1) {
            parentModule = syms.slice(0, i).join('/');

            parentPath = getOwn(paths, parentModule);
            if (parentPath) {
                //If an array, it means there are a few choices,
                //Choose the one that is desired
                if (isArray(parentPath)) {
                    parentPath = parentPath[0];
                }
                syms.splice(0, i, parentPath);
                break;
            }
        }

        //Join the path parts together, then figure out if baseUrl is needed.
        url = syms.join('/');
        // Only append '.js' to plain module ids: data:/blob: URLs, URLs
        // with a query string, and skipExt callers keep the name as-is.
        url += (ext || (/^data\:|^blob\:|\?/.test(url) || skipExt ? '' : '.js'));
        // Absolute paths and protocol-prefixed URLs bypass baseUrl.
        url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
    }

    // urlArgs (cache-busting hook) is skipped for blob: URLs.
    return config.urlArgs && !/^blob\:/.test(url) ?
           url + config.urlArgs(moduleName, url) : url;
},
//Delegates to req.load. Broken out as a separate function to
//allow overriding in the optimizer.
load: function (id, url) {
req.load(context, id, url);
},
/**
* Executes a module callback function. Broken out as a separate function
* solely to allow the build system to sequence the files in the built
* layer in the right sequence.
*
* @private
*/
execCb: function (name, callback, args, exports) {
return callback.apply(exports, args);
},
/**
* callback for script loads, used to check status of loading.
*
* @param {Event} evt the event from the browser for the script
* that was loaded.
*/
onScriptLoad: function (evt) {
//Using currentTarget instead of target for Firefox 2.0's sake. Not
//all old browsers will be supported, but this one was easy enough
//to support and still makes sense.
if (evt.type === 'load' ||
(readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
//Reset interactive script so a script node is not held onto for
//to long.
interactiveScript = null;
//Pull out the name of the module and the context.
var data = getScriptData(evt);
context.completeLoad(data.id);
}
},
/**
 * Callback for script errors. Unless a paths fallback is available to
 * retry the load, raises a 'scripterror' listing which registered
 * modules depend on the failed script.
 */
onScriptError: function (evt) {
    var data = getScriptData(evt);
    if (!hasPathFallback(data.id)) {
        var parents = [];
        // Collect ids of registered modules that depend on the failed
        // one, skipping internal '_@r' require entries.
        eachProp(registry, function(value, key) {
            if (key.indexOf('_@r') !== 0) {
                each(value.depMaps, function(depMap) {
                    if (depMap.id === data.id) {
                        parents.push(key);
                        // Truthy return stops the inner each() scan.
                        return true;
                    }
                });
            }
        });
        return onError(makeError('scripterror', 'Script error for "' + data.id +
            (parents.length ?
                '", needed by: ' + parents.join(', ') :
                '"'), evt, [data.id]));
    }
}
};
context.require = context.makeRequire();
return context;
}
/**
 * Main entry point.
 *
 * If the only argument to require is a string, then the module that
 * is represented by that string is fetched for the appropriate context.
 *
 * If the first argument is an array, then it will be treated as an array
 * of dependency string names to fetch. An optional function callback can
 * be specified to execute when all of those dependencies are available.
 *
 * Make a local req variable to help Caja compliance (it assumes things
 * on a require that are not standardized), and to give a short
 * name for minification/local scope use.
 */
req = requirejs = function (deps, callback, errback, optional) {
    var ctx, cfgObj,
        ctxName = defContextName;

    // A first argument that is neither an array nor a string is a
    // config object.
    if (!isArray(deps) && typeof deps !== 'string') {
        cfgObj = deps;
        if (isArray(callback)) {
            // require(config, deps, callback, errback): shift args left.
            deps = callback;
            callback = errback;
            errback = optional;
        } else {
            deps = [];
        }
    }

    // The config may name a non-default context.
    if (cfgObj && cfgObj.context) {
        ctxName = cfgObj.context;
    }

    // Look up the named context, lazily creating it on first use.
    ctx = getOwn(contexts, ctxName);
    if (!ctx) {
        ctx = contexts[ctxName] = req.s.newContext(ctxName);
    }

    if (cfgObj) {
        ctx.configure(cfgObj);
    }

    return ctx.require(deps, callback, errback);
};
/**
 * Support require.config() to make it easier to cooperate with other
 * AMD loaders on globally agreed names. Simply forwards the config
 * object to require().
 */
req.config = function (cfg) {
    return req(cfg);
};
/**
 * Execute something after the current tick of the event loop.
 * Override for environments that have a better primitive than
 * setTimeout.
 * @param {Function} fn function to execute later.
 */
if (typeof setTimeout !== 'undefined') {
    req.nextTick = function (fn) {
        setTimeout(fn, 4);
    };
} else {
    // No timer available: run synchronously.
    req.nextTick = function (fn) {
        fn();
    };
}
/**
 * Export require as a global, but only if it does not already exist.
 */
if (!require) {
    require = req;
}

req.version = version;

//Used to filter out dependencies that are already paths.
req.jsExtRegExp = /^\/|:|\?|\.js$/;
req.isBrowser = isBrowser;
// Loader state shared with tools/optimizer: the context map plus the
// context factory.
s = req.s = {
    contexts: contexts,
    newContext: newContext
};

//Create default context.
req({});

//Exports some context-sensitive methods on global require.
each([
    'toUrl',
    'undef',
    'defined',
    'specified'
], function (prop) {
    //Reference from contexts instead of early binding to default context,
    //so that during builds, the latest instance of the default context
    //with its config gets used.
    req[prop] = function () {
        var ctx = contexts[defContextName];
        return ctx.require[prop].apply(ctx, arguments);
    };
});

if (isBrowser) {
    head = s.head = document.getElementsByTagName('head')[0];
    //If BASE tag is in play, using appendChild is a problem for IE6.
    //When that browser dies, this can be removed. Details in this jQuery bug:
    //http://dev.jquery.com/ticket/2709
    baseElement = document.getElementsByTagName('base')[0];
    if (baseElement) {
        head = s.head = baseElement.parentNode;
    }
}
/**
 * Any errors that require explicitly generates will be passed to this
 * function. Intercept/override it if you want custom error handling.
 * @param {Error} err the error object.
 */
req.onError = defaultOnError;
/**
 * Creates the script node for the load command. Only used in browser
 * environments. The node is configured (type, charset, async) but not
 * yet given a src or attached to the DOM — req.load does that.
 */
req.createNode = function (config, moduleName, url) {
    var node;
    if (config.xhtml) {
        // XHTML documents need a namespaced element.
        node = document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script');
    } else {
        node = document.createElement('script');
    }
    node.type = config.scriptType || 'text/javascript';
    node.charset = 'utf-8';
    node.async = true;
    return node;
};
/**
 * Does the request to load a module for the browser case.
 * Make this a separate function to allow other environments
 * to override it.
 *
 * @param {Object} context the require context to find state.
 * @param {String} moduleName the name of the module.
 * @param {Object} url the URL to the module.
 */
req.load = function (context, moduleName, url) {
    var config = (context && context.config) || {},
        node;
    if (isBrowser) {
        //In the browser so use a script tag
        node = req.createNode(config, moduleName, url);

        // Tag the node so the load/error handlers can recover the module
        // id and owning context later.
        node.setAttribute('data-requirecontext', context.contextName);
        node.setAttribute('data-requiremodule', moduleName);

        //Set up load listener. Test attachEvent first because IE9 has
        //a subtle issue in its addEventListener and script onload firings
        //that do not match the behavior of all other browsers with
        //addEventListener support, which fire the onload event for a
        //script right after the script execution. See:
        //https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
        //UNFORTUNATELY Opera implements attachEvent but does not follow the script
        //script execution mode.
        if (node.attachEvent &&
                //Check if node.attachEvent is artificially added by custom script or
                //natively supported by browser
                //read https://github.com/requirejs/requirejs/issues/187
                //if we can NOT find [native code] then it must NOT natively supported.
                //in IE8, node.attachEvent does not have toString()
                //Note the test for "[native code" with no closing brace, see:
                //https://github.com/requirejs/requirejs/issues/273
                !(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
                !isOpera) {
            //Probably IE. IE (at least 6-8) do not fire
            //script onload right after executing the script, so
            //we cannot tie the anonymous define call to a name.
            //However, IE reports the script as being in 'interactive'
            //readyState at the time of the define call.
            useInteractive = true;

            node.attachEvent('onreadystatechange', context.onScriptLoad);
            //It would be great to add an error handler here to catch
            //404s in IE9+. However, onreadystatechange will fire before
            //the error handler, so that does not help. If addEventListener
            //is used, then IE will fire error before load, but we cannot
            //use that pathway given the connect.microsoft.com issue
            //mentioned above about not doing the 'script execute,
            //then fire the script load event listener before execute
            //next script' that other browsers do.
            //Best hope: IE10 fixes the issues,
            //and then destroys all installs of IE 6-9.
            //node.attachEvent('onerror', context.onScriptError);
        } else {
            node.addEventListener('load', context.onScriptLoad, false);
            node.addEventListener('error', context.onScriptError, false);
        }
        node.src = url;

        //Calling onNodeCreated after all properties on the node have been
        //set, but before it is placed in the DOM.
        if (config.onNodeCreated) {
            config.onNodeCreated(node, config, moduleName, url);
        }

        //For some cache cases in IE 6-8, the script executes before the end
        //of the appendChild execution, so to tie an anonymous define
        //call to the module name (which is stored on the node), hold on
        //to a reference to this node, but clear after the DOM insertion.
        currentlyAddingScript = node;
        if (baseElement) {
            head.insertBefore(node, baseElement);
        } else {
            head.appendChild(node);
        }
        currentlyAddingScript = null;

        return node;
    } else if (isWebWorker) {
        try {
            //In a web worker, use importScripts. This is not a very
            //efficient use of importScripts, importScripts will block until
            //its script is downloaded and evaluated. However, if web workers
            //are in play, the expectation is that a build has been done so
            //that only one script needs to be loaded anyway. This may need
            //to be reevaluated if other use cases become common.

            // Post a task to the event loop to work around a bug in WebKit
            // where the worker gets garbage-collected after calling
            // importScripts(): https://webkit.org/b/153317
            setTimeout(function() {}, 0);
            importScripts(url);

            //Account for anonymous modules
            context.completeLoad(moduleName);
        } catch (e) {
            context.onError(makeError('importscripts',
                            'importScripts failed for ' +
                                moduleName + ' at ' + url,
                            e,
                            [moduleName]));
        }
    }
};
//Find the script node currently in the 'interactive' readyState
//(an IE behavior), caching the most recent match.
function getInteractiveScript() {
    // Fast path: the cached node is still interactive.
    if (interactiveScript && interactiveScript.readyState === 'interactive') {
        return interactiveScript;
    }

    eachReverse(scripts(), function (candidate) {
        if (candidate.readyState === 'interactive') {
            interactiveScript = candidate;
            // Returning a truthy value stops the eachReverse scan.
            return interactiveScript;
        }
    });
    return interactiveScript;
}
//Look for a data-main script attribute, which could also adjust the baseUrl.
if (isBrowser && !cfg.skipDataMain) {
    //Figure out baseUrl. Get it from the script tag with require.js in it.
    eachReverse(scripts(), function (script) {
        //Set the 'head' where we can append children by
        //using the script's parent.
        if (!head) {
            head = script.parentNode;
        }

        //Look for a data-main attribute to set main script for the page
        //to load. If it is there, the path to data main becomes the
        //baseUrl, if it is not already set.
        dataMain = script.getAttribute('data-main');
        if (dataMain) {
            //Preserve dataMain in case it is a path (i.e. contains '?')
            mainScript = dataMain;

            //Set final baseUrl if there is not already an explicit one,
            //but only do so if the data-main value is not a loader plugin
            //module ID.
            if (!cfg.baseUrl && mainScript.indexOf('!') === -1) {
                //Pull off the directory of data-main for use as the
                //baseUrl.
                src = mainScript.split('/');
                mainScript = src.pop();
                subPath = src.length ? src.join('/') + '/' : './';

                cfg.baseUrl = subPath;
            }

            //Strip off any trailing .js since mainScript is now
            //like a module name.
            mainScript = mainScript.replace(jsSuffixRegExp, '');

            //If mainScript is still a path, fall back to dataMain
            if (req.jsExtRegExp.test(mainScript)) {
                mainScript = dataMain;
            }

            //Put the data-main script in the files to load.
            cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript];

            // Truthy return stops the eachReverse scan at the first
            // script tag carrying data-main.
            return true;
        }
    });
}
/**
 * The function that handles definitions of modules. Differs from
 * require() in that a string for the module should be the first argument,
 * and the function to execute after dependencies are loaded should
 * return a value to define the module corresponding to the first argument's
 * name.
 *
 * @param {String} [name] module id; omitted for anonymous modules.
 * @param {Array} [deps] dependency ids; may be omitted.
 * @param {Function|Object} callback factory function or value object.
 */
define = function (name, deps, callback) {
    var node, context;

    //Allow for anonymous modules
    if (typeof name !== 'string') {
        //Adjust args appropriately
        callback = deps;
        deps = name;
        name = null;
    }

    //This module may not have dependencies
    if (!isArray(deps)) {
        callback = deps;
        deps = null;
    }

    //If no name, and callback is a function, then figure out if it a
    //CommonJS thing with dependencies.
    if (!deps && isFunction(callback)) {
        deps = [];
        //Remove comments from the callback string,
        //look for require calls, and pull them into the dependencies,
        //but only if there are function args.
        if (callback.length) {
            callback
                .toString()
                .replace(commentRegExp, commentReplace)
                .replace(cjsRequireRegExp, function (match, dep) {
                    deps.push(dep);
                });

            //May be a CommonJS thing even without require calls, but still
            //could use exports, and module. Avoid doing exports and module
            //work though if it just needs require.
            //REQUIRES the function to expect the CommonJS variables in the
            //order listed below.
            deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps);
        }
    }

    //If in IE 6-8 and hit an anonymous define() call, do the interactive
    //work.
    if (useInteractive) {
        node = currentlyAddingScript || getInteractiveScript();
        if (node) {
            if (!name) {
                name = node.getAttribute('data-requiremodule');
            }
            context = contexts[node.getAttribute('data-requirecontext')];
        }
    }

    //Always save off evaluating the def call until the script onload handler.
    //This allows multiple modules to be in a file without prematurely
    //tracing dependencies, and allows for anonymous module support,
    //where the module name is not known until the script onload event
    //occurs. If no context, use the global queue, and get it processed
    //in the onscript load callback.
    if (context) {
        context.defQueue.push([name, deps, callback]);
        context.defQueueMap[name] = true;
    } else {
        globalDefQueue.push([name, deps, callback]);
    }
};
//Advertise that an AMD loader is present. The jQuery flag is the
//convention jQuery checks before registering itself as an AMD module.
define.amd = {
    jQuery: true
};
/**
 * Executes the text. Normally just uses eval, but can be modified
 * to use a better, environment-specific call. Only used for transpiling
 * loader plugins, not for plain JS modules.
 * NOTE: this is a direct eval, so the text executes in this function's
 * scope — never pass untrusted input.
 * @param {String} text the text to execute/evaluate.
 */
req.exec = function (text) {
    /*jslint evil: true */
    return eval(text);
};
//Set up with config info.
req(cfg);
}(this, (typeof setTimeout === 'undefined' ? undefined : setTimeout)));
// ---- Generated browserify bundle (do not edit by hand) ----
// The one-line prelude is the standard browser-pack CommonJS loader.
// Module 1 is the entry point; module 2 wires the mobile menu toggle;
// module 3 configures the slick carousel and equalizes slide heights.
// NOTE(review): modules 2 and 3 assume jQuery ($) and the slick plugin
// are loaded globally before this bundle runs — confirm against the page.
(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){
'use strict';
require('./modules/mobile-menu');
require('./modules/slick');
},{"./modules/mobile-menu":2,"./modules/slick":3}],2:[function(require,module,exports){
"use strict";
$(document).ready(function () {
    $(".menu").click(function () {
        $(".navbar").slideToggle(1);
        $(".fa-minus-circle").slideToggle(1);
        $(".fa-plus-circle").slideToggle(1);
    });
});
},{}],3:[function(require,module,exports){
'use strict';
$(document).ready(function () {
    $('.slick').slick({
        vertical: true,
        slidesToShow: 1,
        verticalSwiping: true,
        slidesToScroll: 1,
        infinite: false,
        accessibility: false,
        nextArrow: '<i class="fa fa-chevron-right"></i>',
        prevArrow: '<i class="fa fa-chevron-left"></i>'
    });
    var maxHeight = -1;
    $('.slick-slide').each(function () {
        if ($(this).height() > maxHeight) {
            maxHeight = $(this).height();
        }
    });
    $('.slick-slide').each(function () {
        if ($(this).height() < maxHeight) {
            $(this).css('margin', Math.ceil((maxHeight - $(this).height()) / 2) + 'px 0');
        }
    });
});
},{}]},{},[1])
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIm5vZGVfbW9kdWxlcy9icm93c2VyLXBhY2svX3ByZWx1ZGUuanMiLCJzcmMvanMvYXBwLmpzIiwic3JjL2pzL21vZHVsZXMvbW9iaWxlLW1lbnUuanMiLCJzcmMvanMvbW9kdWxlcy9zbGljay5qcyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQTs7O0FDSUE7O0FBQ0E7Ozs7O0FDTEEsRUFBRSxRQUFGLEVBQVksS0FBWixDQUFrQixZQUFXO0FBQ3pCLE1BQUUsT0FBRixFQUFXLEtBQVgsQ0FBaUIsWUFBVztBQUN4QixVQUFFLFNBQUYsRUFBYSxXQUFiLENBQXlCLENBQXpCO0FBQ0EsVUFBRSxrQkFBRixFQUFzQixXQUF0QixDQUFrQyxDQUFsQztBQUNBLFVBQUUsaUJBQUYsRUFBcUIsV0FBckIsQ0FBaUMsQ0FBakM7QUFDSCxLQUpEO0FBS0gsQ0FORDs7Ozs7QUNBQSxFQUFFLFFBQUYsRUFBWSxLQUFaLENBQWtCLFlBQVU7QUFDeEIsTUFBRSxRQUFGLEVBQVksS0FBWixDQUFrQjtBQUNkLGtCQUFVLElBREk7QUFFZCxzQkFBYyxDQUZBO0FBR2QseUJBQWlCLElBSEg7QUFJZCx3QkFBZ0IsQ0FKRjtBQUtkLGtCQUFVLEtBTEk7QUFNZCx1QkFBZSxLQU5EO0FBT2QsbUJBQVcscUNBUEc7QUFRZCxtQkFBVztBQVJHLEtBQWxCO0FBVUEsUUFBSSxZQUFZLENBQUMsQ0FBakI7QUFDQSxNQUFFLGNBQUYsRUFBa0IsSUFBbEIsQ0FBdUIsWUFBVztBQUM5QixZQUFJLEVBQUUsSUFBRixFQUFRLE1BQVIsS0FBbUIsU0FBdkIsRUFBa0M7QUFDOUIsd0JBQVksRUFBRSxJQUFGLEVBQVEsTUFBUixFQUFaO0FBQ0g7QUFDSixLQUpEO0FBS0EsTUFBRSxjQUFGLEVBQWtCLElBQWxCLENBQXVCLFlBQVc7QUFDOUIsWUFBSSxFQUFFLElBQUYsRUFBUSxNQUFSLEtBQW1CLFNBQXZCLEVBQWtDO0FBQ3RDLGNBQUUsSUFBRixFQUFRLEdBQVIsQ0FBWSxRQUFaLEVBQXNCLEtBQUssSUFBTCxDQUFVLENBQUMsWUFBVSxFQUFFLElBQUYsRUFBUSxNQUFSLEVBQVgsSUFBNkIsQ0FBdkMsSUFBNEMsTUFBbEU7QUFDQztBQUNKLEtBSkc7QUFLSCxDQXRCRCIsImZpbGUiOiJnZW5lcmF0ZWQuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlc0NvbnRlbnQiOlsiKGZ1bmN0aW9uKCl7ZnVuY3Rpb24gcihlLG4sdCl7ZnVuY3Rpb24gbyhpLGYpe2lmKCFuW2ldKXtpZighZVtpXSl7dmFyIGM9XCJmdW5jdGlvblwiPT10eXBlb2YgcmVxdWlyZSYmcmVxdWlyZTtpZighZiYmYylyZXR1cm4gYyhpLCEwKTtpZih1KXJldHVybiB1KGksITApO3ZhciBhPW5ldyBFcnJvcihcIkNhbm5vdCBmaW5kIG1vZHVsZSAnXCIraStcIidcIik7dGhyb3cgYS5jb2RlPVwiTU9EVUxFX05PVF9GT1VORFwiLGF9dmFyIHA9bltpXT17ZXhwb3J0czp7fX07ZVtpXVswXS5jYWxsKHAuZXhwb3J0cyxmdW5jdGlvbihyKXt2YXIgbj1lW2ldWzFdW3JdO3JldHVybiBvKG58fHIpfSxwLHAuZXhwb3J0cyxyLGUsbix0KX1yZXR1cm4gbltpXS5leHBvcnRzfWZvcih2YXIgdT1cImZ1bmN0aW9uXCI9
PXR5cGVvZiByZXF1aXJlJiZyZXF1aXJlLGk9MDtpPHQubGVuZ3RoO2krKylvKHRbaV0pO3JldHVybiBvfXJldHVybiByfSkoKSIsIi8vIFlvdSBjYW4gd3JpdGUgYSBjYWxsIGFuZCBpbXBvcnQgeW91ciBmdW5jdGlvbnMgaW4gdGhpcyBmaWxlLlxyXG4vL1xyXG4vLyBUaGlzIGZpbGUgd2lsbCBiZSBjb21waWxlZCBpbnRvIGFwcC5qcyBhbmQgd2lsbCBub3QgYmUgbWluaWZpZWQuXHJcbi8vIEZlZWwgZnJlZSB3aXRoIHVzaW5nIEVTNiBoZXJlLlxyXG5pbXBvcnQgJy4vbW9kdWxlcy9tb2JpbGUtbWVudSc7XHJcbmltcG9ydCAnLi9tb2R1bGVzL3NsaWNrJzsiLCIkKGRvY3VtZW50KS5yZWFkeShmdW5jdGlvbigpIHtcclxuICAgICQoXCIubWVudVwiKS5jbGljayhmdW5jdGlvbigpIHtcclxuICAgICAgICAkKFwiLm5hdmJhclwiKS5zbGlkZVRvZ2dsZSgxKTtcclxuICAgICAgICAkKFwiLmZhLW1pbnVzLWNpcmNsZVwiKS5zbGlkZVRvZ2dsZSgxKTtcclxuICAgICAgICAkKFwiLmZhLXBsdXMtY2lyY2xlXCIpLnNsaWRlVG9nZ2xlKDEpO1xyXG4gICAgfSk7XHJcbn0pOyIsIiQoZG9jdW1lbnQpLnJlYWR5KGZ1bmN0aW9uKCl7XHJcbiAgICAkKCcuc2xpY2snKS5zbGljayh7XHJcbiAgICAgICAgdmVydGljYWw6IHRydWUsXHJcbiAgICAgICAgc2xpZGVzVG9TaG93OiAxLFxyXG4gICAgICAgIHZlcnRpY2FsU3dpcGluZzogdHJ1ZSxcclxuICAgICAgICBzbGlkZXNUb1Njcm9sbDogMSxcclxuICAgICAgICBpbmZpbml0ZTogZmFsc2UsXHJcbiAgICAgICAgYWNjZXNzaWJpbGl0eTogZmFsc2UsXHJcbiAgICAgICAgbmV4dEFycm93OiAnPGkgY2xhc3M9XCJmYSBmYS1jaGV2cm9uLXJpZ2h0XCI+PC9pPicsXHJcbiAgICAgICAgcHJldkFycm93OiAnPGkgY2xhc3M9XCJmYSBmYS1jaGV2cm9uLWxlZnRcIj48L2k+JyxcclxuICAgIH0pO1xyXG4gICAgbGV0IG1heEhlaWdodCA9IC0xO1xyXG4gICAgJCgnLnNsaWNrLXNsaWRlJykuZWFjaChmdW5jdGlvbigpIHtcclxuICAgICAgICBpZiAoJCh0aGlzKS5oZWlnaHQoKSA+IG1heEhlaWdodCkge1xyXG4gICAgICAgICAgICBtYXhIZWlnaHQgPSAkKHRoaXMpLmhlaWdodCgpO1xyXG4gICAgICAgIH0gICAgICBcclxuICAgIH0pO1xyXG4gICAgJCgnLnNsaWNrLXNsaWRlJykuZWFjaChmdW5jdGlvbigpIHtcclxuICAgICAgICBpZiAoJCh0aGlzKS5oZWlnaHQoKSA8IG1heEhlaWdodCkge1xyXG4gICAgJCh0aGlzKS5jc3MoJ21hcmdpbicsIE1hdGguY2VpbCgobWF4SGVpZ2h0LSQodGhpcykuaGVpZ2h0KCkpLzIpICsgJ3B4IDAnKTtcclxuICAgIH1cclxufSk7XHJcbn0pOyJdfQ==
|
#!/usr/bin/env python
'''Parameters for a Monte Carlo HSA agent for the pegs on disks task.'''
# python
import os
# scipy
from scipy.io import loadmat, savemat
from numpy.random import choice, normal, uniform
from numpy import cos, pi
def Parameters(realization):
    '''Specifies simulation hyperparameters and writes them to parameters.mat.

    NOTE: every local variable defined in this function is saved via
    savemat(..., locals()), so each variable name below becomes a key in
    the .mat file. Renaming any local changes the saved schema.

    realization: run index; used as the random seed and embedded in the
        results file name.
    '''

    # === AGENT ===

    # system
    cwd = os.getcwd()
    # Select a device id from the working directory name (-1 when neither
    # known directory name matches).
    deviceId = 1 if "Seq6DofManip2" in cwd else (0 if "Seq6DofManip1" in cwd else -1)
    randomSeed = realization

    # curriculum
    tMax = 4
    nEpisodes = 100000
    trainEvery = 1000
    maxExperiences = 50000
    epsilonMin = 0.03
    # Episode index after which the policy is unbiased (last 5% of training).
    unbiasOnEpisode = 0.95 * nEpisodes

    # misc
    gamma = 0.25
    nGraspOrientations = 60
    nPlaceOrientations = 3

    # hand descriptor
    imP = 48
    imD = [0.1375, 0.2750, 0.2750, 0.2750, 0.2750] # maxObjHeight + handDepth / 2
    imW = [0.3600, 0.0900, 0.0900, 0.0900, 0.1000]
    selD = [0.1375, 0.09, 0.0225]
    selW = [0.3600, 0.09, 0.0225]

    # network parameters
    conv1KernelSize = 8
    conv1Outputs = 32
    conv1Stride = 2
    conv2KernelSize = 4
    conv2Outputs = 64
    conv2Stride = 2
    conv3KernelSize = 3
    conv3Outputs = 32
    conv3Stride = 2
    conv4KernelSize = 2
    conv4Outputs = 6
    conv4Stride = 1

    # optimization parameters
    nEpochs = 1
    batchSize = 64
    weightDecay = 0.0000
    baseLearningRate = 0.0001
    optimizer = "Adam"

    # === ENVIRONMENT ===

    # objects
    nObjects = 2
    nSupportObjects = 2
    # NOTE(review): machine-specific absolute data paths.
    objectFolder = "/home/mgualti/Data/Seq6DofManip/Pegs"
    supportObjectFolder = "/home/mgualti/Data/Seq6DofManip/Disks"
    # Orientation tolerance expressed as 1 - cos(theta) for theta = 1 degree.
    placeOrientTolerance = 1 - cos(1.0 * pi / 180.0)
    placeHeightTolerance = [0.02, 0.02]
    rewardCapGrasps = True

    # misc
    removeTable = False

    # === Visualization / Saving ===

    saveFileName = "results-{}.mat".format(realization)
    loadNetwork = False
    loadDatabase = False
    showViewer = False
    showSteps = False
    plotImages = False

    # visualize policy
    visualizePolicy = False
    if visualizePolicy:
        # Evaluation-only mode: new seed, training disabled (trainEvery
        # beyond the episode count), unbiased from the start, and the
        # previously trained network loaded for viewing.
        randomSeed = randomSeed + 1
        trainEvery = nEpisodes + 1
        unbiasOnEpisode = 0
        loadNetwork = True
        loadDatabase = False
        showViewer = True
        showSteps = True
        plotImages = False

    # save parameter file (all locals defined above become .mat keys)
    savemat("parameters.mat", locals())
|
/**
** @Author:pandayu
** @Version:1.0
** @DateTime:2018-09-09
** @Project:pandaCard CardGame
** @Contact: QQ:815099602
**/
/**
 * Sorting/display of the soldier list shown when opening the
 * add-companion (partner) dialog.
 * (Translated from the original Chinese comment.)
 */
//Creation of the soldier deployment ("into battle") selection layer.
var armyChoiseLayer = ModalDialog.extend({
LayerName:"armyChoiseLayer",
_item:null,
skillItem:null,
_skillList:null,
_intoSoldierArray:[],//包含伙伴上阵和士兵上阵的数组
luckArray:[],//上阵数组士兵缘分
soldiersArray:[],//未上阵的士兵数组
intoSoldierArray:[],//上阵的士兵,包括部队士兵和小伙伴上阵的士兵
sortSolArray:[],//排序之后的容器数组
soldierLuckAray:[],//存储士兵能激活缘分个数的对象数组
sortluckSolderArray:[],//缘分排序后的士兵数组
luckAttr:{},//记录激活缘分信息,用于伙伴上阵数值的计算
luckAttrID:[],
outSoldiers:[],//没突破改造的数组
//logotype为1代表从添加士兵处进入为2代表从添加小伙伴处进入,3代表更换士兵按钮进入
ctor:function(pos, logotype){
this._super();
//this.LayerName = "armyChoiseLayer";
//加载士兵上阵层
this.logotype = logotype;//用于判断是部队的士兵上阵还是伙伴的士兵上阵,1和3是部队的士兵上阵,2是伙伴的士兵上阵
if(logotype == 1 || logotype == 3){
this.pos = pos-1;
}else{
this.pos = pos;
}
return true;
},
onEnter:function(){
this._super();
},
initUI:function(){
this.uiArmyChoiseLayer = ccs.load(res.uiArmyChoiseLayer).node;
this.addChild(this.uiArmyChoiseLayer);
this._skillList = ccui.helper.seekWidgetByName(this.uiArmyChoiseLayer, 'armyChoiseList');
var root = ccs.load(res.uiIntoBattleLayer);
this.skillItem = ccui.helper.seekWidgetByName(root.node, 'skillItem');
var goBack = ccui.helper.seekWidgetByName(this.uiArmyChoiseLayer, "btnBack");
goBack.addTouchEventListener(this.backEvent, this);
this.initControl();
},
initControl:function(){
//上阵士兵数组
this.intoSoldierArray.length = 0;
for(var key in GLOBALDATA.army.battle){
if(GLOBALDATA.army.battle[key] != 0 && GLOBALDATA.army.battle[key] != -1){
this.intoSoldierArray.push(GLOBALDATA.army.battle[key]);
}
}
//this._intoSoldierArray数组没有初始化为0,不知道会不会出现叠加元素的事情
this._intoSoldierArray = this.intoSoldierArray.concat();//包含上陣士兵和上陣小伙伴的數組
for(var key in GLOBALDATA.army.companion[key] > 0){
this._intoSoldierArray.push( GLOBALDATA.army.companion[key]);
}
//未上阵士兵数组
this.soldiersArray.length = 0;
for(var key in GLOBALDATA.soldiers){
if(GLOBALDATA.soldiers[key].j == 0){
this.soldiersArray.push(parseInt(key));
}
}
this.outSoldiers = [];
this.outSoldiers = this.outSoldiers.concat(this.soldiersArray);
if(this.soldiersArray != 0){
//未上阵士兵数组改造和突破的判断
for(var i=0;i<this.soldiersArray.length;i++){
var tsolid = this.soldiersArray[i];
var soldier = GLOBALDATA.soldiers[this.soldiersArray[i]];
if(soldier.m > 0){
var qhero = Helper.findHeroById(tsolid);
tsolid = qhero.breakid || tsolid;
}
if(soldier.sq == 10){
var reformAtt = Helper.findHeroById(tsolid);
tsolid = reformAtt.reform || tsolid;
}
this.soldiersArray[i] = tsolid;
}
//根据品质把未上陣的士兵进行排序
for(var i=0;i<this.soldiersArray.length-1;i++){
for(var j=0;j<this.soldiersArray.length-1-i;j++){
if(ITEMCFG[this.soldiersArray[j]].quality < ITEMCFG[this.soldiersArray[j+1]].quality){
var temp = this.soldiersArray[j];
this.soldiersArray[j] = this.soldiersArray[j+1];
this.soldiersArray[j+1] = temp;
var outsolTemp = this.outSoldiers[j];
this.outSoldiers[j] = this.outSoldiers[j+1];
this.outSoldiers[j+1] = outsolTemp;
}
}
}
//计算上陣的士兵和小夥伴激活了幾個緣分
var sumluckNum = 0;
for(var i=0;i<this.intoSoldierArray.length;i++){
for(var key in ARMYRELATIONCFG){
if(this.intoSoldierArray[i] == ARMYRELATIONCFG[key].armyid ){
var luckSoldierArray = ARMYRELATIONCFG[key].relation_armyvalue.concat();
if(ITEMCFG[luckSoldierArray[0]].maintype == 2){
var soldierNum = 0;//计算士兵的个数是否达到缘分开启的士兵个数
for (var k = 0; k < luckSoldierArray.length; k++){
if (this._intoSoldierArray.indexOf(luckSoldierArray[k]) == -1){
break;
}else{
soldierNum++;//判断缘分数组里的士兵是否全在上阵士兵里
}
if (soldierNum == luckSoldierArray.length) {
sumluckNum++;//本士兵数组可以激活缘分的个数
}
}
}else{
break;
}
}
}
}
//创建士兵对应的能激活的缘分个数的数组
this.luckArray.length = 0;
this.luckAttr = {};
for(var i=0;i<this.outSoldiers.length;i++){//soldiersArray是未上阵士兵,intoSoldierArray是上阵士兵
var luckNum = 0;//计算本士兵可以开启几个缘分
var readySol = this.outSoldiers[i];
if(this.logotype == 1){
this.intoSoldierArray.push(parseInt(this.outSoldiers[i]));
this._intoSoldierArray.push(parseInt(this.outSoldiers[i]));
this.outSoldiers.splice(i, 1);//删除相应的士兵
}else if(this.logotype == 2){
this._intoSoldierArray.push(parseInt(this.outSoldiers[i]));
this.outSoldiers.splice(i, 1);//删除相应的士兵
}else if(this.logotype == 3){
this.intoSoldierArray[this.pos] = this.outSoldiers[i];
this._intoSoldierArray.length = 0;
this._intoSoldierArray = this.intoSoldierArray.concat();//包含小伙伴上阵的士兵
for(var key in GLOBALDATA.army.companion[key] > 0){
this._intoSoldierArray.push( GLOBALDATA.army.companion[key]);
}
}
this.luckAttrID = [];
for(var j=0;j<this.intoSoldierArray.length;j++){
for(var key in ARMYRELATIONCFG){
if(this.intoSoldierArray[j] == ARMYRELATIONCFG[key].armyid ){
var luckSoldierArray = ARMYRELATIONCFG[key].relation_armyvalue.concat();
if(ITEMCFG[luckSoldierArray[0]].maintype == 2){
var soldierNum = 0;//计算士兵的个数是否达到缘分开启的士兵个数
for (var k = 0; k < luckSoldierArray.length; k++){
if (this._intoSoldierArray.indexOf(luckSoldierArray[k]) == -1){
break;
}else{
soldierNum++;//判断缘分数组里的士兵是否全在上阵士兵里
}
if (soldierNum == luckSoldierArray.length) {
luckNum++;//缘分的个数
if(luckSoldierArray.indexOf(readySol) != -1){
this.luckAttrID.push(key);
}
//this.luckAttr[readySol] = ARMYRELATIONCFG[key].id;
}
}
}else{
break;
}
}
}
}
if(this.logotype == 1){
this.outSoldiers.splice(i, 0, readySol);//把上阵的士兵再添加回来
this.intoSoldierArray.pop();
this._intoSoldierArray.pop();
}else if(this.logotype == 2){
this.outSoldiers.splice(i, 0, readySol);//把上阵的士兵再添加回来
this._intoSoldierArray.pop();
}
if(luckNum - sumluckNum <= 0){
this.luckArray.push(0);
}else{
this.luckAttr[readySol] = this.luckAttrID;
this.luckArray.push(luckNum - sumluckNum);//士兵缘分数组
}
}
//在品质的基础上根据缘分进行排序
this.sortSolArray.length = 0;
this.soldierLuckAray.length = 0;
for(var i=0;i<this.soldiersArray.length;i++){
var qualityArray = [];//相同品质的士兵数组
var qualuckArray = [];//同品质的缘分数组
var num = HEROCFG[this.soldiersArray[i].toString()].armyquality;
for(var j=i;j<this.soldiersArray.length;j++){
if(HEROCFG[this.soldiersArray[j].toString()].armyquality == num){
qualityArray.push(this.soldiersArray[j]);
qualuckArray.push(this.luckArray[j]);
}else{
//明天在这个位置对同品质的士兵缘分进行排序
for(var x=0;x<qualityArray.length-1;x++){
for(var y=0; y<qualityArray.length-1-x;y++){
if(qualuckArray[y] < qualuckArray[y+1]){
var quaTemp = qualityArray [y];
qualityArray[y] = qualityArray[y+1];
qualityArray[y+1] = quaTemp;
var luckTemp = qualuckArray[y];
qualuckArray[y] = qualuckArray[y+1];
qualuckArray[y+1] = luckTemp;
}
}
}
i=j-1;
var _qualityArray = [];
_qualityArray = this.sortluck(qualityArray, qualuckArray).concat();
this.sortSolArray.push.apply(this.sortSolArray, _qualityArray);//用另一对象替换当前对象
this.soldierLuckAray.push.apply(this.soldierLuckAray, qualuckArray);
break;
}
}
if(this.soldiersArray.length == j){//如果品质数组里的缘分全部相同则跳出该循环
break;
}
}
if(qualityArray.length == 1){
var _qualityArray = this.sortluck(qualityArray, qualuckArray).concat();
this.sortSolArray.push.apply(this.sortSolArray, _qualityArray);
this.soldierLuckAray.push.apply(this.soldierLuckAray, qualuckArray);
}else{
for(var x=0;x<qualityArray.length-1;x++){
for(var y=0; y<qualityArray.length-1-x;y++){
if(qualuckArray[y] < qualuckArray[y+1]){
var quaTemp = qualityArray [y];
qualityArray[y] = qualityArray[y+1];
qualityArray[y+1] = quaTemp;
var luckTemp = qualuckArray[y];
qualuckArray[y] = qualuckArray[y+1];
qualuckArray[y+1] = luckTemp;
}
}
}
var _qualityArray = this.sortluck(qualityArray, qualuckArray).concat();
this.sortSolArray.push.apply(this.sortSolArray, _qualityArray);
this.soldierLuckAray.push.apply(this.soldierLuckAray, qualuckArray);
}
this._skillList.removeAllItems();
for(var i=0;i<this.sortSolArray.length;i++){
var _item = this.skillItem.clone();
this._skillList.pushBackCustomItem(_item);
var _solAttribute = Helper.findHeroById(this.sortSolArray[i]);
var solAttribute = Helper.findItemId(this.sortSolArray[i]);
//士兵头像的获取
var solImage = ccui.helper.seekWidgetByName(_item, "solBg");
Helper.LoadFrameImageWithPlist(solImage, solAttribute.quality);
var iSolHead = cc.spriteFrameCache.getSpriteFrame(ITEMCFG[this.sortSolArray[i]].icon);
var sprite = new cc.Sprite(iSolHead);
sprite.setPosition(solImage.getContentSize().width/2, solImage.getContentSize().height / 2);
sprite.setScale(0.8);
solImage.addChild(sprite);
//士兵名称的获取
if(_solAttribute.initid > 0){
if(GLOBALDATA.soldiers[_solAttribute.initid].q > 0){
var mastername = ccui.helper.seekWidgetByName(_item, "soldierName");
mastername.setString(solAttribute.itemname + " +" + GLOBALDATA.soldiers[solAttribute.itemid].q);
}else{
var mastername = ccui.helper.seekWidgetByName(_item, "soldierName");
mastername.setString(solAttribute.itemname);
}
}else{
if(GLOBALDATA.soldiers[solAttribute.itemid].q > 0){
var mastername = ccui.helper.seekWidgetByName(_item, "soldierName");
mastername.setString(solAttribute.itemname + " +" + GLOBALDATA.soldiers[solAttribute.itemid].q);
}else{
var mastername = ccui.helper.seekWidgetByName(_item, "soldierName");
mastername.setString(solAttribute.itemname);
}
}
/*if(GLOBALDATA.soldiers[solAttribute.itemid].q > 0){
var mastername = ccui.helper.seekWidgetByName(_item, "soldierName");
mastername.setString(solAttribute.itemname + " +" + GLOBALDATA.soldiers[solAttribute.itemid].q);
}else{
var mastername = ccui.helper.seekWidgetByName(_item, "soldierName");
mastername.setString(solAttribute.itemname);
}*/
Helper.setNamecolorByQuality(mastername,solAttribute.quality); //物品名字按品质设置颜色
var solRank = ccui.helper.seekWidgetByName(_item, "rank");
var armyNum = ccui.helper.seekWidgetByName(_item, "armyNum");//数量
if(_solAttribute.initid == 0 || _solAttribute.initid == null){
solRank.setString(GLOBALDATA.soldiers[this.sortSolArray[i]].l);
armyNum.setString(STRINGCFG[100044].string+":" + GLOBALDATA.soldiers[this.sortSolArray[i]].n);//100044 数量
}else{
solRank.setString(GLOBALDATA.soldiers[_solAttribute.initid].l);
armyNum.setString(STRINGCFG[100044].string+":" + GLOBALDATA.soldiers[_solAttribute.initid].n);//100044 数量
}
var solAptitude = ccui.helper.seekWidgetByName(_item, "aptitude");//品质
solAptitude.setString(_solAttribute.intelligence);
var luck = ccui.helper.seekWidgetByName(_item, "luck");//缘分加成
var Icon = ccui.helper.seekWidgetByName(_item, "Icon");
var attrCfg = HEROCFG[this.sortSolArray[i]];
Icon.loadTexture(StringFormat("common/i/i_039_$1.png", attrCfg.armytype), ccui.Widget.PLIST_TEXTURE);
if(this.soldierLuckAray[i] > 0){
luck.setVisible(true);
luck.setString(STRINGCFG[100043].string+"+"+this.soldierLuckAray[i]); //100043 可激活缘分
}else{
luck.setVisible(false);
}
var intoBtn = ccui.helper.seekWidgetByName(_item, "intoButton");//上阵按钮
if(_solAttribute.initid == 0 || _solAttribute.initid == null){
intoBtn.setTag(this.sortSolArray[i]);
}else{
intoBtn.setTag(_solAttribute.initid);
}
intoBtn.setUserData("guid_armyChoiseList_intoButton"+i);
intoBtn.addTouchEventListener(this.intoEvent, this);
var solHead = ccui.helper.seekWidgetByName(_item, "solBg");//头像
solHead.setTag(this.sortSolArray[i]);
solHead.addTouchEventListener(this.solHeadEvent, this);
}
}
},
    //Within the same quality and the same luck bonus, order soldiers by level
    sortluck:function(qualityArray, qualuckArray){
        // Re-orders `qualityArray` (soldier ids) so that runs of entries sharing
        // one luck value (parallel array `qualuckArray`) are sorted by soldier
        // level, descending. Results accumulate into this.sortluckSolderArray,
        // which is also the return value.
        this.sortluckSolderArray.length = 0;
        for(var i=0;i<qualuckArray.length;i++){
            var sameLuckArray = [];//luck values of the current same-luck run
            var sameLuckSolArray = [];//soldier ids of the current same-luck run
            var luck = qualuckArray[i];
            for(var j=i;j<qualuckArray.length;j++){
                if(qualuckArray[j] == luck){
                    sameLuckArray.push(qualuckArray[j]);
                    sameLuckSolArray.push(qualityArray[j]);
                }else{
                    // Run ended: bubble-sort it by soldier level, descending.
                    for(var x=0;x<sameLuckArray.length-1;x++){
                        for(var y=0;y<sameLuckArray.length-1-x;y++){
                            // initid > 0 means this entry is derived from a base
                            // soldier record; level is looked up via the base id.
                            if(HEROCFG[sameLuckSolArray[y]].initid > 0){
                                var soldierId1 = HEROCFG[sameLuckSolArray[y]].initid;
                            }else{
                                var soldierId1 = sameLuckSolArray[y]
                            }
                            if(HEROCFG[sameLuckSolArray[y+1]].initid > 0){
                                var soldierId2 = HEROCFG[sameLuckSolArray[y+1]].initid;
                            }else{
                                var soldierId2 = sameLuckSolArray[y+1]
                            }
                            if(GLOBALDATA.soldiers[soldierId1].l < GLOBALDATA.soldiers[soldierId2].l){
                                var luckTemp = sameLuckSolArray[y];
                                sameLuckSolArray[y]=sameLuckSolArray[y+1];
                                sameLuckSolArray[y+1]=luckTemp;
                            }
                        }
                    }
                    // Rewind the outer index so the scan resumes at the first
                    // entry of the next (different-luck) run.
                    i=j-1;
                    this.sortluckSolderArray.push.apply(this.sortluckSolderArray, sameLuckSolArray);
                    break;
                }
            }
            if(qualuckArray.length == j){//all remaining entries share one luck value; leave the outer loop
                break;
            }
        }
        // NOTE: sameLuckArray/sameLuckSolArray are function-scoped (var hoisting),
        // so below they refer to the trailing run that was never flushed above.
        if(sameLuckArray.length == 1){
            this.sortluckSolderArray.push.apply(this.sortluckSolderArray, sameLuckSolArray);
            return this.sortluckSolderArray;
        }else{
            // Sort the trailing same-luck run by level, descending (same as above).
            for(var x=0;x<sameLuckArray.length-1;x++){
                for(var y=0;y<sameLuckArray.length-1-x;y++){
                    if(HEROCFG[sameLuckSolArray[y]].initid > 0){
                        var soldierId1 = HEROCFG[sameLuckSolArray[y]].initid;
                    }else{
                        var soldierId1 = sameLuckSolArray[y]
                    }
                    if(HEROCFG[sameLuckSolArray[y+1]].initid > 0){
                        var soldierId2 = HEROCFG[sameLuckSolArray[y+1]].initid;
                    }else{
                        var soldierId2 = sameLuckSolArray[y+1]
                    }
                    if(GLOBALDATA.soldiers[soldierId1].l < GLOBALDATA.soldiers[soldierId2].l){
                        var luckTemp = sameLuckSolArray[y];
                        sameLuckSolArray[y]=sameLuckSolArray[y+1];
                        sameLuckSolArray[y+1]=luckTemp;
                    }
                }
            }
            this.sortluckSolderArray.push.apply(this.sortluckSolderArray, sameLuckSolArray);
            return this.sortluckSolderArray;
        }
    },
intoEvent:function(sender, type){
if(ccui.Widget.TOUCH_ENDED == type){
if(this.logotype == 1 || this.logotype == 3){
var solValue = parseInt(sender.getTag());
cc.log(solValue);
armyModel.changeBattle(solValue, this.pos+1);
}else if(this.logotype == 2){
var solValue = parseInt(sender.getTag());
armyModel.partnerAdd(solValue, this.pos+1);
if(this.luckAttr.hasOwnProperty(solValue.toString())){
var addAttrEvn = new cc.EventCustom('upParentData');
addAttrEvn.setUserData(this.luckAttr[solValue]);
cc.eventManager.dispatchEvent(addAttrEvn);
}
}
}
},
solHeadEvent:function(sender, type){
if(ccui.Widget.TOUCH_ENDED == type){
var solValue = sender.getTag();
cc.log(solValue);
var armyAttributeLayer = new armyAttriLayer(solValue,1);
this.addChild(armyAttributeLayer, 30);
}
},
backEvent:function(sender, type){
switch (type){
case ccui.Widget.TOUCH_ENDED:
this.removeFromParent(true);
break;
default:
break;
}
},
    // Cocos2d lifecycle hook: runs when the layer is removed from the scene graph.
    onExit:function () {
        this._super();
        cc.log('armyChoiseLayer onExit');
    }
//*************************************************************************************************
});
|
import os
# Absolute path to the bundled Arabic-words SQLite database, resolved relative
# to this module so lookups work regardless of the caller's working directory.
current_directory = os.path.dirname(os.path.abspath(__file__))
db = os.path.join(current_directory, 'hlpl_arabic_words.db')
import sqlite3
def get(case):
    """Return the word list for an Arabic word category from the bundled DB.

    Parameters
    ----------
    case : str
        An Arabic category key (see the table map below), or the literal
        string ``'connection'`` to obtain an open :class:`sqlite3.Connection`
        (the caller then owns and must close it).

    Returns
    -------
    list | sqlite3.Connection | None
        A list of words for a known category, the connection object for
        ``'connection'``, or ``None`` for an unknown key (matching the
        original implicit behavior).
    """
    # Category key -> table name; replaces the original nine-branch if-chain.
    # Table names come from this fixed map, never from user input, so the
    # string-formatted SELECT below is not an injection risk.
    tables = {
        'فاعل': 'ism_fa3il',
        'مفعول': 'ism_maf3ol',
        'مصدر': 'masdar',
        'فعل': 'ar_verbs_lst',
        'بداية-فعل': 'v_f',
        'نهاية-فعل': 'v_b',
        'بداية-اسم': 'n_f',
        'نهاية-اسم': 'n_b',
        'أداة': 'ar_articles_lst',
    }
    db_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                           'hlpl_arabic_words.db')
    conn = sqlite3.connect(db_path)
    # Collapse each row to its first column so fetchall() yields plain values.
    conn.row_factory = lambda cursor, row: row[0]
    if case == 'connection':
        return conn
    try:
        table = tables.get(case)
        if table is None:
            return None
        return conn.cursor().execute("SELECT * FROM %s" % table).fetchall()
    finally:
        # Bug fix: the original left the connection open on every query path;
        # rows are materialized by fetchall(), so closing here is safe.
        conn.close()
|
var iconFactory
module.exports = function (jazzicon) {
if (!iconFactory) {
iconFactory = new IconFactory(jazzicon)
}
return iconFactory
}
// Holds the jazzicon renderer plus a cache of generated identicon SVG
// elements, keyed by "address:diameter".
function IconFactory (jazzicon) {
  this.cache = {}
  this.jazzicon = jazzicon
}
// Produce an identicon for `address`: an <img> element when `imageify`
// is truthy, otherwise the raw SVG container element.
IconFactory.prototype.iconForAddress = function (address, diameter, imageify) {
  return imageify
    ? this.generateIdenticonImg(address, diameter)
    : this.generateIdenticonSvg(address, diameter)
}
// returns img dom element whose src is the identicon rendered as a data: URI
IconFactory.prototype.generateIdenticonImg = function (address, diameter) {
  var img = document.createElement('img')
  var svgMarkup = this.generateIdenticonSvg(address, diameter).innerHTML
  img.src = toDataUri(svgMarkup)
  return img
}
// returns svg dom element (a fresh clone, so callers may mutate it freely)
IconFactory.prototype.generateIdenticonSvg = function (address, diameter) {
  var cacheId = address + ':' + diameter
  var cached = this.cache[cacheId]
  if (!cached) {
    // lazily generate and remember the identicon for this address/size
    cached = this.generateNewIdenticon(address, diameter)
    this.cache[cacheId] = cached
  }
  return cached.cloneNode(true)
}
// creates a new identicon from the numeric seed derived from the address
IconFactory.prototype.generateNewIdenticon = function (address, diameter) {
  return this.jazzicon(diameter, jsNumberForAddress(address))
}
// util: derive a deterministic numeric seed from the first 8 hex digits
// of the address (skipping the leading "0x")
function jsNumberForAddress (address) {
  return parseInt(address.slice(2, 10), 16)
}
}
// util: wrap raw SVG markup in a data: URI suitable for an <img> src
function toDataUri (identiconSrc) {
  var prefix = 'data:image/svg+xml;charset=utf-8,'
  return prefix + encodeURIComponent(identiconSrc)
}
|
/*
* @Description:
* @Author: yamanashi12
* @Date: 2019-05-22 09:31:50
* @LastEditTime: 2020-03-24 10:17:49
* @LastEditors: Please set LastEditors
*/
// Routes for the page-management area; each entry lazy-loads its view
// through a named webpack chunk.
export default [
  {
    path: '/page/manage',
    name: 'pageManage',
    meta: {
      title: '页面管理'
    },
    component: () => import(/* webpackChunkName: "document-page-manage" */ '@/views/pageManage')
  },
  {
    path: '/page/count',
    name: 'pageCount',
    meta: {
      // NOTE(review): this title duplicates the /page/manage entry — presumably
      // it should be a distinct label (e.g. 页面统计); confirm with the route owner.
      title: '页面管理'
    },
    component: () => import(/* webpackChunkName: "document-page-Count" */ '@/views/pageCount/index')
  }
]
|
// db.js
var DB;
(function() {
if (DB === undefined) {
DB = function( mongo , name ){
this._mongo = mongo;
this._name = name;
}
}
DB.prototype.getMongo = function(){
assert( this._mongo , "why no mongo!" );
return this._mongo;
}
DB.prototype.getSiblingDB = function( name ){
return this.getMongo().getDB( name );
}
DB.prototype.getSisterDB = DB.prototype.getSiblingDB;
DB.prototype.getName = function(){
return this._name;
}
DB.prototype.stats = function(scale){
return this.runCommand( { dbstats : 1 , scale : scale } );
}
DB.prototype.getCollection = function( name ){
return new DBCollection( this._mongo , this , name , this._name + "." + name );
}
DB.prototype.commandHelp = function( name ){
var c = {};
c[name] = 1;
c.help = true;
var res = this.runCommand( c );
if ( ! res.ok )
throw Error(res.errmsg);
return res.help;
}
// Run a database command. A string `obj` is promoted to { obj: 1 } and
// optionally merged with the fields of `extra`; a document `obj` is passed
// through unchanged. The command is issued via the $cmd pseudo-collection.
DB.prototype.runCommand = function (obj, extra) {
    var cmdObj = obj;
    if (typeof obj === "string") {
        cmdObj = {};
        cmdObj[obj] = 1;
        if (extra && typeof extra === "object") {
            for (var key in extra) {
                cmdObj[key] = extra[key];
            }
        }
    }
    return this.getCollection("$cmd").findOne(cmdObj);
}
DB.prototype._dbCommand = DB.prototype.runCommand;
DB.prototype.adminCommand = function( obj ){
if ( this._name == "admin" )
return this.runCommand( obj );
return this.getSiblingDB( "admin" ).runCommand( obj );
}
DB.prototype._adminCommand = DB.prototype.adminCommand; // alias old name
/**
Create a new collection in the database. Normally, collection creation is automatic. You would
use this function if you wish to specify special options on creation.
If the collection already exists, no action occurs.
<p>Options:</p>
<ul>
<li>
size: desired initial extent size for the collection. Must be <= 1000000000.
for fixed size (capped) collections, this size is the total/max size of the
collection.
</li>
<li>
capped: if true, this is a capped collection (where old data rolls out).
</li>
<li> max: maximum number of objects if capped (optional).</li>
<li> usePowerOf2Sizes: if true, set usePowerOf2Sizes allocation for the collection.</li>
<li>
storageEngine: BSON document containing storage engine specific options. Format:
{
storageEngine: {
storageEngine1: {
...
},
storageEngine2: {
...
},
...
}
}
</li>
</ul>
<p>Example:</p>
<code>db.createCollection("movies", { size: 10 * 1024 * 1024, capped:true } );</code>
* @param {String} name Name of new collection to create
* @param {Object} options Object with options for call. Options are listed above.
* @return SOMETHING_FIXME
*/
DB.prototype.createCollection = function (name, opt) {
    // Build the `create` command, copying across only the options the
    // server understands (documented in the comment block above).
    var options = opt || {};
    var cmd = { create: name };
    // Options forwarded verbatim, in the order the original code emitted them.
    var passthrough = ["max", "autoIndexId", "capped", "size"];
    for (var i = 0; i < passthrough.length; i++) {
        var key = passthrough[i];
        if (options[key] != undefined) {
            cmd[key] = options[key];
        }
    }
    // usePowerOf2Sizes is exposed to the server as the `flags` bitfield.
    if (options.usePowerOf2Sizes != undefined) {
        cmd.flags = options.usePowerOf2Sizes ? 1 : 0;
    }
    if (options.storageEngine != undefined) {
        cmd.storageEngine = options.storageEngine;
    }
    return this._dbCommand(cmd);
}
/**
* @deprecated use getProfilingStatus
* Returns the current profiling level of this database
* @return SOMETHING_FIXME or null on error
*/
DB.prototype.getProfilingLevel = function() {
var res = this._dbCommand( { profile: -1 } );
return res ? res.was : null;
}
/**
* @return the current profiling status
* example { was : 0, slowms : 100 }
* @return SOMETHING_FIXME or null on error
*/
DB.prototype.getProfilingStatus = function() {
var res = this._dbCommand( { profile: -1 } );
if ( ! res.ok )
throw Error( "profile command failed: " + tojson( res ) );
delete res.ok
return res;
}
/**
Erase the entire database. (!)
* @return Object returned has member ok set to true if operation succeeds, false otherwise.
*/
DB.prototype.dropDatabase = function() {
if ( arguments.length )
throw Error("dropDatabase doesn't take arguments");
return this._dbCommand( { dropDatabase: 1 } );
}
/**
 * Shuts down the database. Must be run while using the admin database.
 * @param opts Options for shutdown. Possible options are:
 *   - force: (boolean) if the server should shut down, even if there is no
 *     up-to-date slave
 *   - timeoutSecs: (number) the server will continue checking over timeoutSecs
 *     if any other servers have caught up enough for it to shut down.
 */
DB.prototype.shutdownServer = function(opts) {
    if( "admin" != this._name ){
        return "shutdown command only works with the admin database; try 'use admin'";
    }

    // Bug fix: `cmd` was previously assigned without `var`, leaking a global.
    var cmd = {"shutdown" : 1};
    opts = opts || {};
    for (var o in opts) {
        cmd[o] = opts[o];
    }

    try {
        var res = this.runCommand(cmd);
        // A successful shutdown drops the connection before a reply arrives,
        // so any response here means the shutdown was refused.
        if( res )
            throw Error( "shutdownServer failed: " + res.errmsg );
        throw Error( "shutdownServer failed" );
    }
    catch ( e ){
        // The expected outcome: the connection died mid-command.
        assert( tojson( e ).indexOf( "error doing query: failed" ) >= 0 , "unexpected error: " + tojson( e ) );
        print( "server should be down..." );
    }
}
/**
Clone database on another server to here.
<p>
Generally, you should dropDatabase() first as otherwise the cloned information will MERGE
into whatever data is already present in this database. (That is however a valid way to use
clone if you are trying to do something intentionally, such as union three non-overlapping
databases into one.)
<p>
This is a low level administrative function will is not typically used.
* @param {String} from Where to clone from (dbhostname[:port]). May not be this database
(self) as you cannot clone to yourself.
* @return Object returned has member ok set to true if operation succeeds, false otherwise.
* See also: db.copyDatabase()
*/
DB.prototype.cloneDatabase = function(from) {
assert( isString(from) && from.length );
return this._dbCommand( { clone: from } );
}
/**
Clone collection on another server to here.
<p>
Generally, you should drop() first as otherwise the cloned information will MERGE
into whatever data is already present in this collection. (That is however a valid way to use
clone if you are trying to do something intentionally, such as union three non-overlapping
collections into one.)
<p>
This is a low level administrative function is not typically used.
* @param {String} from mongod instance from which to clnoe (dbhostname:port). May
not be this mongod instance, as clone from self is not allowed.
* @param {String} collection name of collection to clone.
* @param {Object} query query specifying which elements of collection are to be cloned.
* @return Object returned has member ok set to true if operation succeeds, false otherwise.
* See also: db.cloneDatabase()
*/
DB.prototype.cloneCollection = function(from, collection, query) {
assert( isString(from) && from.length );
assert( isString(collection) && collection.length );
collection = this._name + "." + collection;
query = query || {};
return this._dbCommand( { cloneCollection:collection, from:from, query:query } );
}
/**
Copy database from one server or name to another server or name.
Generally, you should dropDatabase() first as otherwise the copied information will MERGE
into whatever data is already present in this database (and you will get duplicate objects
in collections potentially.)
For security reasons this function only works when executed on the "admin" db. However,
if you have access to said db, you can copy any database from one place to another.
This method provides a way to "rename" a database by copying it to a new db name and
location. Additionally, it effectively provides a repair facility.
* @param {String} fromdb database name from which to copy.
* @param {String} todb database name to copy to.
* @param {String} fromhost hostname of the database (and optionally, ":port") from which to
copy the data. default if unspecified is to copy from self.
* @return Object returned has member ok set to true if operation succeeds, false otherwise.
* See also: db.clone()
*/
DB.prototype.copyDatabase = function(fromdb, todb, fromhost, username, password) {
assert( isString(fromdb) && fromdb.length );
assert( isString(todb) && todb.length );
fromhost = fromhost || "";
if ( username && password ) {
var n = this._adminCommand( { copydbgetnonce : 1, fromhost:fromhost } );
return this._adminCommand( { copydb:1, fromhost:fromhost, fromdb:fromdb, todb:todb, username:username, nonce:n.nonce, key:this.__pwHash( n.nonce, username, password ) } );
} else {
return this._adminCommand( { copydb:1, fromhost:fromhost, fromdb:fromdb, todb:todb } );
}
}
/**
Repair database.
* @return Object returned has member ok set to true if operation succeeds, false otherwise.
*/
DB.prototype.repairDatabase = function() {
return this._dbCommand( { repairDatabase: 1 } );
}
DB.prototype.help = function() {
print("DB methods:");
print("\tdb.adminCommand(nameOrDocument) - switches to 'admin' db, and runs command [ just calls db.runCommand(...) ]");
print("\tdb.auth(username, password)");
print("\tdb.cloneDatabase(fromhost)");
print("\tdb.commandHelp(name) returns the help for the command");
print("\tdb.copyDatabase(fromdb, todb, fromhost)");
print("\tdb.createCollection(name, { size : ..., capped : ..., max : ... } )");
print("\tdb.createUser(userDocument)");
print("\tdb.currentOp() displays currently executing operations in the db");
print("\tdb.dropDatabase()");
print("\tdb.eval(func, args) run code server-side");
print("\tdb.fsyncLock() flush data to disk and lock server for backups");
print("\tdb.fsyncUnlock() unlocks server following a db.fsyncLock()");
print("\tdb.getCollection(cname) same as db['cname'] or db.cname");
print("\tdb.getCollectionNames()");
print("\tdb.getLastError() - just returns the err msg string");
print("\tdb.getLastErrorObj() - return full status object");
print("\tdb.getLogComponents()");
print("\tdb.getMongo() get the server connection object");
print("\tdb.getMongo().setSlaveOk() allow queries on a replication slave server");
print("\tdb.getName()");
print("\tdb.getPrevError()");
print("\tdb.getProfilingLevel() - deprecated");
print("\tdb.getProfilingStatus() - returns if profiling is on and slow threshold");
print("\tdb.getReplicationInfo()");
print("\tdb.getSiblingDB(name) get the db at the same server as this one");
print("\tdb.getWriteConcern() - returns the write concern used for any operations on this db, inherited from server object if set");
print("\tdb.hostInfo() get details about the server's host");
print("\tdb.isMaster() check replica primary status");
print("\tdb.killOp(opid) kills the current operation in the db");
print("\tdb.listCommands() lists all the db commands");
print("\tdb.loadServerScripts() loads all the scripts in db.system.js");
print("\tdb.logout()");
print("\tdb.printCollectionStats()");
print("\tdb.printReplicationInfo()");
print("\tdb.printShardingStatus()");
print("\tdb.printSlaveReplicationInfo()");
print("\tdb.dropUser(username)");
print("\tdb.repairDatabase()");
print("\tdb.resetError()");
print("\tdb.runCommand(cmdObj) run a database command. if cmdObj is a string, turns it into { cmdObj : 1 }");
print("\tdb.serverStatus()");
print("\tdb.setLogLevel(level,<component>)");
print("\tdb.setProfilingLevel(level,<slowms>) 0=off 1=slow 2=all");
print("\tdb.setWriteConcern( <write concern doc> ) - sets the write concern for writes to the db");
print("\tdb.unsetWriteConcern( <write concern doc> ) - unsets the write concern for writes to the db");
print("\tdb.setVerboseShell(flag) display extra information in shell output");
print("\tdb.shutdownServer()");
print("\tdb.stats()");
print("\tdb.version() current version of the server");
return __magicNoPrint;
}
DB.prototype.printCollectionStats = function(scale) {
if (arguments.length > 1) {
print("printCollectionStats() has a single optional argument (scale)");
return;
}
if (typeof scale != 'undefined') {
if(typeof scale != 'number') {
print("scale has to be a number >= 1");
return;
}
if (scale < 1) {
print("scale has to be >= 1");
return;
}
}
var mydb = this;
this.getCollectionNames().forEach(
function(z) {
print( z );
printjson( mydb.getCollection(z).stats(scale) );
print( "---" );
}
);
}
/**
* <p> Set profiling level for your db. Profiling gathers stats on query performance. </p>
*
* <p>Default is off, and resets to off on a database restart -- so if you want it on,
* turn it on periodically. </p>
*
* <p>Levels :</p>
* <ul>
* <li>0=off</li>
* <li>1=log very slow operations; optional argument slowms specifies slowness threshold</li>
* <li>2=log all</li>
* @param {String} level Desired level of profiling
* @param {String} slowms For slow logging, query duration that counts as slow (default 100ms)
* @return SOMETHING_FIXME or null on error
*/
DB.prototype.setProfilingLevel = function(level,slowms) {
if (level < 0 || level > 2) {
var errorText = "input level " + level + " is out of range [0..2]";
var errorObject = new Error(errorText);
errorObject['dbSetProfilingException'] = errorText;
throw errorObject;
}
var cmd = { profile: level };
if ( isNumber( slowms ) )
cmd["slowms"] = slowms;
return this._dbCommand( cmd );
}
/**
* <p> Evaluate a js expression at the database server.</p>
*
* <p>Useful if you need to touch a lot of data lightly; in such a scenario
* the network transfer of the data could be a bottleneck. A good example
* is "select count(*)" -- can be done server side via this mechanism.
* </p>
*
* <p>
* If the eval fails, an exception is thrown of the form:
* </p>
* <code>{ dbEvalException: { retval: functionReturnValue, ok: num [, errno: num] [, errmsg: str] } }</code>
*
* <p>Example: </p>
* <code>print( "mycount: " + db.eval( function(){db.mycoll.find({},{_id:ObjId()}).length();} );</code>
*
* @param {Function} jsfunction Javascript function to run on server. Note this it not a closure, but rather just "code".
* @return result of your function, or null if error
*
*/
DB.prototype.eval = function(jsfunction) {
var cmd = { $eval : jsfunction };
if ( arguments.length > 1 ) {
cmd.args = argumentsToArray( arguments ).slice(1);
}
var res = this._dbCommand( cmd );
if (!res.ok)
throw Error( tojson( res ) );
return res.retval;
}
DB.prototype.dbEval = DB.prototype.eval;
/**
*
* <p>
* Similar to SQL group by. For example: </p>
*
* <code>select a,b,sum(c) csum from coll where active=1 group by a,b</code>
*
* <p>
* corresponds to the following in 10gen:
* </p>
*
* <code>
db.group(
{
ns: "coll",
key: { a:true, b:true },
// keyf: ...,
cond: { active:1 },
reduce: function(obj,prev) { prev.csum += obj.c; },
initial: { csum: 0 }
});
</code>
*
*
* <p>
* An array of grouped items is returned. The array must fit in RAM, thus this function is not
* suitable when the return set is extremely large.
* </p>
* <p>
* To order the grouped data, simply sort it client side upon return.
* <p>
Defaults
cond may be null if you want to run against all rows in the collection
keyf is a function which takes an object and returns the desired key. set either key or keyf (not both).
* </p>
*/
DB.prototype.groupeval = function(parmsObj) {
var groupFunction = function() {
var parms = args[0];
var c = db[parms.ns].find(parms.cond||{});
var map = new Map();
var pks = parms.key ? Object.keySet( parms.key ) : null;
var pkl = pks ? pks.length : 0;
var key = {};
while( c.hasNext() ) {
var obj = c.next();
if ( pks ) {
for ( var i=0; i<pkl; i++ ) {
var k = pks[i];
key[k] = obj[k];
}
}
else {
key = parms.$keyf(obj);
}
var aggObj = map.get(key);
if( aggObj == null ) {
var newObj = Object.extend({}, key); // clone
aggObj = Object.extend(newObj, parms.initial);
map.put( key, aggObj );
}
parms.$reduce(obj, aggObj);
}
return map.values();
}
return this.eval(groupFunction, this._groupFixParms( parmsObj ));
}
DB.prototype.groupcmd = function( parmsObj ){
var ret = this.runCommand( { "group" : this._groupFixParms( parmsObj ) } );
if ( ! ret.ok ){
throw Error( "group command failed: " + tojson( ret ) );
}
return ret.retval;
}
DB.prototype.group = DB.prototype.groupcmd;
DB.prototype._groupFixParms = function( parmsObj ){
var parms = Object.extend({}, parmsObj);
if( parms.reduce ) {
parms.$reduce = parms.reduce; // must have $ to pass to db
delete parms.reduce;
}
if( parms.keyf ) {
parms.$keyf = parms.keyf;
delete parms.keyf;
}
return parms;
}
DB.prototype.resetError = function(){
return this.runCommand( { reseterror : 1 } );
}
DB.prototype.forceError = function(){
return this.runCommand( { forceerror : 1 } );
}
DB.prototype.getLastError = function( w , wtimeout ){
var res = this.getLastErrorObj( w , wtimeout );
if ( ! res.ok )
throw Error( "getlasterror failed: " + tojson( res ) );
return res.err;
}
DB.prototype.getLastErrorObj = function( w , wtimeout ){
var cmd = { getlasterror : 1 };
if ( w ){
cmd.w = w;
if ( wtimeout )
cmd.wtimeout = wtimeout;
}
var res = this.runCommand( cmd );
if ( ! res.ok )
throw Error( "getlasterror failed: " + tojson( res ) );
return res;
}
DB.prototype.getLastErrorCmd = DB.prototype.getLastErrorObj;
/* Return the last error which has occurred, even if not the very last error.
Returns:
{ err : <error message>, nPrev : <how_many_ops_back_occurred>, ok : 1 }
result.err will be null if no error has occurred.
*/
DB.prototype.getPrevError = function(){
return this.runCommand( { getpreverror : 1 } );
}
DB.prototype._getCollectionNamesSystemNamespaces = function(){
var all = [];
var nsLength = this._name.length + 1;
var c = this.getCollection( "system.namespaces" ).find();
while ( c.hasNext() ){
var name = c.next().name;
if ( name.indexOf( "$" ) >= 0 && name.indexOf( ".oplog.$" ) < 0 )
continue;
all.push( name.substring( nsLength ) );
}
return all.sort();
}
DB.prototype._getCollectionNamesCommand = function() {
var res = this.runCommand( "listCollections" );
if ( res.code == 59 ) {
// command doesn't exist, old mongod
return null;
}
if ( !res.ok ) {
if ( res.errmsg && res.errmsg.startsWith( "no such cmd" ) ) {
return null;
}
throw Error( "listCollections failed: " + tojson( res ) );
}
var all = [];
for ( var i = 0; i < res.collections.length; i++ ) {
var name = res.collections[i].name;
all.push( name );
}
return all;
}
DB.prototype.getCollectionNames = function(){
var res = this._getCollectionNamesCommand();
if ( res ) {
return res;
}
return this._getCollectionNamesSystemNamespaces();
}
DB.prototype.tojson = function(){
return this._name;
}
DB.prototype.toString = function(){
return this._name;
}
DB.prototype.isMaster = function () { return this.runCommand("isMaster"); }
DB.prototype.currentOp = function( arg ){
var q = {}
if ( arg ) {
if ( typeof( arg ) == "object" )
Object.extend( q , arg );
else if ( arg )
q["$all"] = true;
}
return this.$cmd.sys.inprog.findOne( q );
}
DB.prototype.currentOP = DB.prototype.currentOp;
DB.prototype.killOp = function(op) {
if( !op )
throw Error("no opNum to kill specified");
return this.$cmd.sys.killop.findOne({'op':op});
}
DB.prototype.killOP = DB.prototype.killOp;
// Convert an oplog timestamp to seconds: a {t, i} Timestamp already carries
// seconds in `t`; otherwise the value is a packed 64-bit number whose high
// 32 bits hold the seconds.
DB.tsToSeconds = function (x) {
    if (x.t && x.i) {
        return x.t;
    }
    return x / 4294967296; // low 32 bits are ordinal #s within a second
}
/**
Get a replication log information summary.
<p>
This command is for the database/cloud administer and not applicable to most databases.
It is only used with the local database. One might invoke from the JS shell:
<pre>
use local
db.getReplicationInfo();
</pre>
It is assumed that this database is a replication master -- the information returned is
about the operation log stored at local.oplog.$main on the replication master. (It also
works on a machine in a replica pair: for replica pairs, both machines are "masters" from
an internal database perspective.
<p>
* @return Object timeSpan: time span of the oplog from start to end if slave is more out
* of date than that, it can't recover without a complete resync
*/
DB.prototype.getReplicationInfo = function() {
    var localdb = this.getSiblingDB("local");

    var result = { };
    var oplog;
    // Detect which oplog flavour this node uses: replica set vs master/slave.
    if (localdb.system.namespaces.findOne({name:"local.oplog.rs"}) != null) {
        oplog = 'oplog.rs';
    }
    else if (localdb.system.namespaces.findOne({name:"local.oplog.$main"}) != null) {
        oplog = 'oplog.$main';
    }
    else {
        result.errmsg = "neither master/slave nor replica set replication detected";
        return result;
    }

    var ol_entry = localdb.system.namespaces.findOne({name:"local."+oplog});
    if( ol_entry && ol_entry.options ) {
        result.logSizeMB = ol_entry.options.size / ( 1024 * 1024 );
    } else {
        result.errmsg = "local."+oplog+", or its options, not found in system.namespaces collection";
        return result;
    }

    // Bug fix: `ol` was previously assigned without `var`, leaking a global.
    var ol = localdb.getCollection(oplog);

    result.usedMB = ol.stats().size / ( 1024 * 1024 );
    result.usedMB = Math.ceil( result.usedMB * 100 ) / 100;

    var firstc = ol.find().sort({$natural:1}).limit(1);
    var lastc = ol.find().sort({$natural:-1}).limit(1);
    if( !firstc.hasNext() || !lastc.hasNext() ) {
        result.errmsg = "objects not found in local.oplog.$main -- is this a new and empty db instance?";
        result.oplogMainRowCount = ol.count();
        return result;
    }

    var first = firstc.next();
    var last = lastc.next();
    var tfirst = first.ts;
    var tlast = last.ts;

    if( tfirst && tlast ) {
        tfirst = DB.tsToSeconds( tfirst );
        tlast = DB.tsToSeconds( tlast );
        result.timeDiff = tlast - tfirst;
        result.timeDiffHours = Math.round(result.timeDiff / 36)/100;
        result.tFirst = (new Date(tfirst*1000)).toString();
        result.tLast = (new Date(tlast*1000)).toString();
        result.now = Date();
    }
    else {
        result.errmsg = "ts element not found in oplog objects";
    }

    return result;
};
DB.prototype.printReplicationInfo = function() {
var result = this.getReplicationInfo();
if( result.errmsg ) {
if (!this.isMaster().ismaster) {
print("this is a slave, printing slave replication info.");
this.printSlaveReplicationInfo();
return;
}
print(tojson(result));
return;
}
print("configured oplog size: " + result.logSizeMB + "MB");
print("log length start to end: " + result.timeDiff + "secs (" + result.timeDiffHours + "hrs)");
print("oplog first event time: " + result.tFirst);
print("oplog last event time: " + result.tLast);
print("now: " + result.now);
}
DB.prototype.printSlaveReplicationInfo = function() {
var startOptimeDate = null;
function getReplLag(st) {
assert( startOptimeDate , "how could this be null (getReplLag startOptimeDate)" );
print("\tsyncedTo: " + st.toString() );
var ago = (startOptimeDate-st)/1000;
var hrs = Math.round(ago/36)/100;
print("\t" + Math.round(ago) + " secs (" + hrs + " hrs) behind the primary ");
};
function getMaster(members) {
var found;
members.forEach(function(row) {
if (row.self) {
found = row;
return false;
}
});
if (found) {
return found;
}
};
function g(x) {
assert( x , "how could this be null (printSlaveReplicationInfo gx)" )
print("source: " + x.host);
if ( x.syncedTo ){
var st = new Date( DB.tsToSeconds( x.syncedTo ) * 1000 );
getReplLag(st);
}
else {
print( "\tdoing initial sync" );
}
};
function r(x) {
assert( x , "how could this be null (printSlaveReplicationInfo rx)" );
if ( x.state == 1 || x.state == 7 ) { // ignore primaries (1) and arbiters (7)
return;
}
print("source: " + x.name);
if ( x.optime ) {
getReplLag(x.optimeDate);
}
else {
print( "\tno replication info, yet. State: " + x.stateStr );
}
};
var L = this.getSiblingDB("local");
if (L.system.replset.count() != 0) {
var status = this.adminCommand({'replSetGetStatus' : 1});
startOptimeDate = getMaster(status.members).optimeDate;
status.members.forEach(r);
}
else if( L.sources.count() != 0 ) {
startOptimeDate = new Date();
L.sources.find().forEach(g);
}
else {
print("local.sources is empty; is this db a --slave?");
return;
}
}
DB.prototype.serverBuildInfo = function(){
return this._adminCommand( "buildinfo" );
}
DB.prototype.serverStatus = function( options ){
var cmd = { serverStatus : 1 };
if ( options ) {
Object.extend( cmd, options );
}
return this._adminCommand( cmd );
}
DB.prototype.hostInfo = function(){
return this._adminCommand( "hostInfo" );
}
DB.prototype.serverCmdLineOpts = function(){
return this._adminCommand( "getCmdLineOpts" );
}
DB.prototype.version = function(){
return this.serverBuildInfo().version;
}
DB.prototype.serverBits = function(){
return this.serverBuildInfo().bits;
}
// Print one help entry per server command, flagging adminOnly/slaveOk commands.
DB.prototype.listCommands = function(){
    var x = this.runCommand( "listCommands" );
    for ( var name in x.commands ){
        var c = x.commands[name];

        var s = name + ": ";

        if (c.adminOnly) s += " adminOnly ";
        // Bug fix: this previously re-tested c.adminOnly, so " slaveOk " was
        // printed for admin-only commands instead of slaveOk-capable ones.
        if (c.slaveOk) s += " slaveOk ";

        s += "\n ";
        s += c.help.replace(/\n/g, '\n ');
        s += "\n";

        print( s );
    }
}
// Print the sharding status summary read from the config database.
DB.prototype.printShardingStatus = function( verbose ){
    printShardingStatus( this.getSiblingDB( "config" ) , verbose );
}

// Flush pending writes to disk and lock the server against further writes.
DB.prototype.fsyncLock = function() {
    return this.adminCommand({fsync:1, lock:true});
}

// Release the fsyncLock lock (legacy admin.$cmd.sys.unlock pseudo-command).
DB.prototype.fsyncUnlock = function() {
    return this.getSiblingDB("admin").$cmd.sys.unlock.findOne()
}
DB.autocomplete = function(obj){
    // Shell tab-completion hook: offer only collection names composed of
    // characters that are valid in a dotted property access.
    var pattern = /^[a-zA-Z0-9_.\$]+$/;
    var completions = [];
    obj.getCollectionNames().forEach(function(collName) {
        if (collName.match(pattern)) {
            completions.push(collName);
        }
    });
    return completions;
}
// Set (or clear, by passing false) the slaveOk read flag for this database.
DB.prototype.setSlaveOk = function( value ) {
    // Calling with no argument means "enable".
    if( value == undefined ) value = true;
    this._slaveOk = value;
}

// Effective slaveOk flag: DB-level setting first, else the connection's.
DB.prototype.getSlaveOk = function() {
    if (this._slaveOk != undefined) return this._slaveOk;
    return this._mongo.getSlaveOk();
}
/* Loads any scripts contained in system.js into the client shell.
*/
DB.prototype.loadServerScripts = function(){
    // Each stored function becomes a global shell binding named after its _id.
    this.system.js.find().forEach(function(u){eval(u._id + " = " + u.value);});
}
////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////// Security shell helpers below //////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Default write concern for user/role management commands: majority, 30s timeout.
var _defaultWriteConcern = { w: 'majority', wtimeout: 30 * 1000 }

// Serialize a user document for display without leaking its password.
function getUserObjString(userObj) {
    // Temporarily strip pwd before tojson, then restore it on the caller's object.
    var pwd = userObj.pwd;
    delete userObj.pwd;
    var toreturn = tojson(userObj);
    userObj.pwd = pwd;
    return toreturn;
}
// If cmdObj carries a cleartext "pwd", decide where it gets hashed:
// passwordDigestor == "client" (the default) hashes it here and sets
// digestPassword=false; "server" sends it cleartext with digestPassword=true.
DB.prototype._modifyCommandToDigestPasswordIfNecessary = function(cmdObj, username) {
    if (!cmdObj["pwd"]) {
        return;
    }
    if (cmdObj.hasOwnProperty("digestPassword")) {
        throw Error("Cannot specify 'digestPassword' through the user management shell helpers, " +
                    "use 'passwordDigestor' instead");
    }
    var passwordDigestor = cmdObj["passwordDigestor"] ? cmdObj["passwordDigestor"] : "client";
    if (passwordDigestor == "server") {
        cmdObj["digestPassword"] = true;
    } else if (passwordDigestor == "client") {
        cmdObj["pwd"] = _hashPassword(username, cmdObj["pwd"]);
        cmdObj["digestPassword"] = false;
    } else {
        throw Error("'passwordDigestor' must be either 'server' or 'client', got: '" +
                    passwordDigestor + "'");
    }
    // Helper-only option; the server does not understand it.
    delete cmdObj["passwordDigestor"];
}
// Create a user on this database from userObj ({user, pwd, roles, ...}).
// Throws on failure, including a descriptive error for pre-2.6 servers.
DB.prototype.createUser = function(userObj, writeConcern) {
    var name = userObj["user"];
    var cmdObj = {createUser:name};
    cmdObj = Object.extend(cmdObj, userObj);
    delete cmdObj["user"];
    this._modifyCommandToDigestPasswordIfNecessary(cmdObj, name);
    cmdObj["writeConcern"] = writeConcern ? writeConcern : _defaultWriteConcern;
    var res = this.runCommand(cmdObj);
    if (res.ok) {
        // Echo the user back without its password.
        print("Successfully added user: " + getUserObjString(userObj));
        return;
    }
    if (res.errmsg == "no such cmd: createUser") {
        throw Error("'createUser' command not found. This is most likely because you are " +
                    "talking to an old (pre v2.6) MongoDB server");
    }
    if (res.errmsg == "timeout") {
        throw Error("timed out while waiting for user authentication to replicate - " +
                    "database will not be fully secured until replication finishes");
    }
    throw Error("couldn't add user: " + res.errmsg);
}
// MONGODB-CR credential digest: md5("<username>:mongo:<password>").
function _hashPassword(username, password) {
    if (typeof password != 'string') {
        throw Error("User passwords must be of type string. Was given password with type: " +
                    typeof(password));
    }
    return hex_md5(username + ":mongo:" + password);
}
/**
 * Used for updating users in systems with V1 style user information
 * (ie MongoDB v2.4 and prior): writes directly to <db>.system.users.
 */
DB.prototype._updateUserV1 = function(name, updateObject, writeConcern) {
    var setObj = {};
    if (updateObject.pwd) {
        setObj["pwd"] = _hashPassword(name, updateObject.pwd);
    }
    if (updateObject.extraData) {
        setObj["extraData"] = updateObject.extraData;
    }
    if (updateObject.roles) {
        setObj["roles"] = updateObject.roles;
    }
    // userSource null selects locally-defined users in the V1 schema.
    this.system.users.update({user : name, userSource : null},
                             {$set : setObj});
    var err = this.getLastError(writeConcern['w'], writeConcern['wtimeout']);
    if (err) {
        throw Error("Updating user failed: " + err);
    }
};
// Update an existing user; falls back to the 2.4 (V1) code path when the
// server does not know the updateUser command.
DB.prototype.updateUser = function(name, updateObject, writeConcern) {
    var cmdObj = {updateUser:name};
    cmdObj = Object.extend(cmdObj, updateObject);
    cmdObj['writeConcern'] = writeConcern ? writeConcern : _defaultWriteConcern;
    this._modifyCommandToDigestPasswordIfNecessary(cmdObj, name);
    var res = this.runCommand(cmdObj);
    if (res.ok) {
        return;
    }
    if (res.errmsg == "no such cmd: updateUser") {
        this._updateUserV1(name, updateObject, cmdObj['writeConcern']);
        return;
    }
    throw Error("Updating user failed: " + res.errmsg);
};

// Convenience wrapper: change only the password of `username`.
DB.prototype.changeUserPassword = function(username, password, writeConcern) {
    this.updateUser(username, {pwd:password}, writeConcern);
};

// Log out of this database on the current connection.
DB.prototype.logout = function(){
    return this.getMongo().logout(this.getName());
};
// For backwards compatibility
DB.prototype.removeUser = function( username, writeConcern ) {
    print("WARNING: db.removeUser has been deprecated, please use db.dropUser instead");
    return this.dropUser(username, writeConcern);
}

// Drop `username`; returns true when dropped, false when it did not exist.
DB.prototype.dropUser = function( username, writeConcern ){
    var cmdObj = {dropUser: username,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (res.ok) {
        return true;
    }
    if (res.code == 11) { // Code 11 = UserNotFound
        return false;
    }
    // Pre-2.6 servers: fall back to direct system.users manipulation.
    if (res.errmsg == "no such cmd: dropUsers") {
        return this._removeUserV1(username, cmdObj['writeConcern']);
    }
    throw Error(res.errmsg);
}
/**
 * Used for removing users in systems with V1 style user information
 * (ie MongoDB v2.4 and prior): removes directly from system.users.
 */
DB.prototype._removeUserV1 = function(username, writeConcern) {
    this.getCollection( "system.users" ).remove( { user : username } );
    var le = this.getLastErrorObj(writeConcern['w'], writeConcern['wtimeout']);
    if (le.err) {
        throw Error( "Couldn't remove user: " + le.err );
    }
    // n is the number of removed documents; 1 means the user existed.
    if (le.n == 1) {
        return true;
    } else {
        return false;
    }
}

// Drop every user of this database; returns the number of users dropped.
DB.prototype.dropAllUsers = function(writeConcern) {
    var res = this.runCommand({dropAllUsersFromDatabase:1,
                               writeConcern: writeConcern ? writeConcern : _defaultWriteConcern});
    if (!res.ok) {
        throw Error(res.errmsg);
    }
    return res.n;
}

// MONGODB-CR challenge response: md5(nonce + username + credential digest).
DB.prototype.__pwHash = function( nonce, username, pass ) {
    return hex_md5(nonce + username + _hashPassword(username, pass));
}
// Auth mechanism forced from the command line (null = auto-detect below).
DB.prototype._defaultAuthenticationMechanism = null;

DB.prototype._getDefaultAuthenticationMechanism = function() {
    // Use the default auth mechanism if set on the command line.
    if (this._defaultAuthenticationMechanism != null)
        return this._defaultAuthenticationMechanism;
    // Use MONGODB-CR for v2.6 and earlier (wire version < 3).
    if (this.isMaster().maxWireVersion < 3) {
        return "MONGODB-CR";
    }
    return "SCRAM-SHA-1";
}
// GSSAPI service name forced from the command line (null = server default).
DB.prototype._defaultGssapiServiceName = null;

// Authenticate against this database, throwing on bad arguments.
// Accepts (username, password) or a single {user, pwd, mechanism, ...} object.
DB.prototype._authOrThrow = function () {
    var params;
    if (arguments.length == 2) {
        params = { user: arguments[0], pwd: arguments[1] };
    }
    else if (arguments.length == 1) {
        if (typeof(arguments[0]) != "object")
            throw Error("Single-argument form of auth expects a parameter object");
        // Copy so we can add fields without mutating the caller's object.
        params = Object.extend({}, arguments[0]);
    }
    else {
        throw Error(
            "auth expects either (username, password) or ({ user: username, pwd: password })");
    }
    if (params.mechanism === undefined)
        params.mechanism = this._getDefaultAuthenticationMechanism();
    if (params.db !== undefined) {
        throw Error("Do not override db field on db.auth(). Use getMongo().auth(), instead.");
    }
    if (params.mechanism == "GSSAPI" &&
        params.serviceName == null &&
        this._defaultGssapiServiceName != null) {
        params.serviceName = this._defaultGssapiServiceName;
    }
    params.db = this.getName();
    var good = this.getMongo().auth(params);
    if (good) {
        // auth enabled, and should try to use isMaster and replSetGetStatus to build prompt
        this.getMongo().authStatus = {authRequired:true, isMaster:true, replSetGetStatus:true};
    }
    return good;
}
// Shell-friendly wrapper around _authOrThrow: returns 1 on success and 0 on
// failure (printing the error) instead of throwing.
DB.prototype.auth = function() {
    // FIX: dropped the dead `var ex;` — the catch clause binds its own `ex`,
    // so the outer declaration was never used.
    try {
        this._authOrThrow.apply(this, arguments);
    } catch (ex) {
        print(ex);
        return 0;
    }
    return 1;
}
// Grant the given roles to a user; throws on any server error.
DB.prototype.grantRolesToUser = function(username, roles, writeConcern) {
    var cmdObj = {grantRolesToUser: username,
                  roles: roles,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
}

// Revoke the given roles from a user; throws on any server error.
DB.prototype.revokeRolesFromUser = function(username, roles, writeConcern) {
    var cmdObj = {revokeRolesFromUser: username,
                  roles: roles,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
}
// Return the user document for `username`, or null when absent. `args` may
// add usersInfo options such as showCredentials / showPrivileges.
DB.prototype.getUser = function(username, args) {
    if (typeof username != "string") {
        throw Error("User name for getUser shell helper must be a string");
    }
    var cmdObj = {usersInfo: username};
    Object.extend(cmdObj, args);
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
    if (res.users.length == 0) {
        return null;
    }
    return res.users[0];
}

// Return all users of this database; falls back to reading system.users
// directly on servers that still use the 2.4 auth schema.
DB.prototype.getUsers = function(args) {
    var cmdObj = {usersInfo: 1};
    Object.extend(cmdObj, args);
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        var authSchemaIncompatibleCode = 69;
        if (res.code == authSchemaIncompatibleCode ||
            (res.code == null && res.errmsg == "no such cmd: usersInfo")) {
            // Working with 2.4 schema user data
            return this.system.users.find({}).toArray();
        }
        throw Error(res.errmsg);
    }
    return res.users;
}
// Create a role from roleObj ({role, privileges, roles, ...}).
DB.prototype.createRole = function(roleObj, writeConcern) {
    var name = roleObj["role"];
    var cmdObj = {createRole:name};
    cmdObj = Object.extend(cmdObj, roleObj);
    delete cmdObj["role"];
    cmdObj["writeConcern"] = writeConcern ? writeConcern : _defaultWriteConcern;
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
    // Echo the created role definition.
    printjson(roleObj);
}

// Apply updateObject (new privileges and/or roles) to an existing role.
DB.prototype.updateRole = function(name, updateObject, writeConcern) {
    var cmdObj = {updateRole:name};
    cmdObj = Object.extend(cmdObj, updateObject);
    cmdObj['writeConcern'] = writeConcern ? writeConcern : _defaultWriteConcern;
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
};

// Drop a role; returns true when dropped, false when it did not exist.
DB.prototype.dropRole = function(name, writeConcern) {
    var cmdObj = {dropRole:name,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (res.ok) {
        return true;
    }
    if (res.code == 31) { // Code 31 = RoleNotFound
        return false;
    }
    throw Error(res.errmsg);
};

// Drop every user-defined role of this database; returns the number dropped.
DB.prototype.dropAllRoles = function(writeConcern) {
    var res = this.runCommand({dropAllRolesFromDatabase:1,
                               writeConcern: writeConcern ? writeConcern : _defaultWriteConcern});
    if (!res.ok) {
        throw Error(res.errmsg);
    }
    return res.n;
}
// Grant roles to a role; throws on any server error.
DB.prototype.grantRolesToRole = function(rolename, roles, writeConcern) {
    var cmdObj = {grantRolesToRole: rolename,
                  roles: roles,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
}

// Revoke roles from a role; throws on any server error.
DB.prototype.revokeRolesFromRole = function(rolename, roles, writeConcern) {
    var cmdObj = {revokeRolesFromRole: rolename,
                  roles: roles,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
}

// Grant privileges to a role; throws on any server error.
DB.prototype.grantPrivilegesToRole = function(rolename, privileges, writeConcern) {
    var cmdObj = {grantPrivilegesToRole: rolename,
                  privileges: privileges,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
}

// Revoke privileges from a role; throws on any server error.
DB.prototype.revokePrivilegesFromRole = function(rolename, privileges, writeConcern) {
    var cmdObj = {revokePrivilegesFromRole: rolename,
                  privileges: privileges,
                  writeConcern: writeConcern ? writeConcern : _defaultWriteConcern};
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
}
// Return the role document for `rolename`, or null when absent. `args` may
// add rolesInfo options such as showPrivileges / showBuiltinRoles.
DB.prototype.getRole = function(rolename, args) {
    if (typeof rolename != "string") {
        throw Error("Role name for getRole shell helper must be a string");
    }
    var cmdObj = {rolesInfo:rolename};
    Object.extend(cmdObj, args);
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
    if (res.roles.length == 0) {
        return null;
    }
    return res.roles[0];
}

// Return all user-defined roles of this database.
DB.prototype.getRoles = function(args) {
    var cmdObj = {rolesInfo:1};
    Object.extend(cmdObj, args);
    var res = this.runCommand(cmdObj);
    if (!res.ok) {
        throw Error(res.errmsg);
    }
    return res.roles;
}
// Set this DB object's write concern, wrapping plain objects as needed.
DB.prototype.setWriteConcern = function( wc ) {
    if ( wc instanceof WriteConcern ) {
        this._writeConcern = wc;
    }
    else {
        this._writeConcern = new WriteConcern( wc );
    }
};

// Effective write concern: DB-level first, then connection-level, else null.
DB.prototype.getWriteConcern = function() {
    if (this._writeConcern)
        return this._writeConcern;
    if (this._mongo.getWriteConcern())
        return this._mongo.getWriteConcern();
    return null;
};

// Clear the DB-level write concern so the connection's takes effect again.
DB.prototype.unsetWriteConcern = function() {
    delete this._writeConcern;
};

// Read the server's per-component log verbosity settings.
DB.prototype.getLogComponents = function() {
    return this.getMongo().getLogComponents();
}

// Set log verbosity, optionally for a single component.
DB.prototype.setLogLevel = function(logLevel, component) {
    return this.getMongo().setLogLevel(logLevel, component);
}
}());
|
from rules import Rule
from scope import *
import math
import string
# Lexer token names that may form the type/identifier prefix of a variable
# declaration: the C reserved keywords plus IDENTIFIER.
keywords = [
    # C reserved keywords #
    "AUTO",
    "BREAK",
    "CASE",
    "CHAR",
    "CONST",
    "CONTINUE",
    "DEFAULT",
    "DO",
    "DOUBLE",
    "ELSE",
    "ENUM",
    "EXTERN",
    "FLOAT",
    "FOR",
    "GOTO",
    "IF",
    "INT",
    "LONG",
    "REGISTER",
    "RETURN",
    "SHORT",
    "SIGNED",
    "SIZEOF",
    "STATIC",
    "STRUCT",
    "SWITCH",
    "TYPEDEF",
    "UNION",
    "UNSIGNED",
    "VOID",
    "VOLATILE",
    "WHILE",
    "IDENTIFIER"
]

# Tokens that end the declaration part of a statement: any assignment
# operator, a statement/argument separator, or the end of the line.
assigns_or_eol = [
    "RIGHT_ASSIGN",
    "LEFT_ASSIGN",
    "ADD_ASSIGN",
    "SUB_ASSIGN",
    "MUL_ASSIGN",
    "DIV_ASSIGN",
    "MOD_ASSIGN",
    "AND_ASSIGN",
    "XOR_ASSIGN",
    "OR_ASSIGN",
    "ASSIGN",
    "SEMI_COLON",
    "NEWLINE",
    "COMMA"
]
class CheckVariableIndent(Rule):
    """Norm rule: every variable declared in a scope must have its identifier
    aligned on the same column; VLAs and wrong indent whitespace are errors."""

    def __init__(self):
        super().__init__()
        # Only runs on lines already classified as variable declarations.
        self.depends_on = ["IsVarDeclaration"]

    def check_tabs(self, context):
        # Pass 1: count type/identifier tokens up to the first assignment or
        # end of line, skipping nested (), {}, [] groups.
        i = 0
        current_indent = context.scope.indent
        type_identifier_nb = -1  # start at -1 so the declared name itself is excluded
        has_tab = False  # NOTE(review): incremented below but never read
        line_start = True
        id_length = 0
        buffer_len = 0
        while context.check_token(i, assigns_or_eol) is False:
            if context.check_token(i, keywords) is True:
                type_identifier_nb += 1
            if context.check_token(i, ["LPARENTHESIS", "LBRACE", "LBRACKET"]):
                i = context.skip_nest(i)
            i += 1
        # Pass 2: walk the same tokens again, validating the whitespace that
        # separates the type part from the identifier.
        i = 0
        while context.check_token(i, assigns_or_eol) is False:
            if context.check_token(i, "LBRACKET") is True:
                # An identifier with lowercase letters inside [] is treated
                # as a variable-length array size — forbidden by the norm.
                while context.check_token(i, "RBRACKET") is False:
                    if context.check_token(i, "IDENTIFIER") is True:
                        for c in context.peek_token(i).value:
                            if c in string.ascii_lowercase:
                                context.new_error("VLA_FORBIDDEN", context.peek_token(i))
                                continue
                        return True, i
                    i += 1
            if context.check_token(i, keywords) is True and type_identifier_nb > 0:
                line_start = False
                type_identifier_nb -= 1
                # Tokens reporting length 0 fall back to repr minus the quotes.
                if context.peek_token(i).length == 0:
                    id_length = len(str(context.peek_token(i))) - 2
                else:
                    id_length = context.peek_token(i).length
                # One indent level is 4 columns.
                current_indent += math.floor((id_length + buffer_len) / 4)
                buffer_len = 0
            elif context.check_token(i, "SPACE") is True and type_identifier_nb > 0:
                buffer_len += 1
            elif context.check_token(i, "SPACE") is True and type_identifier_nb == 0:
                # The identifier must be reached with tabs, not spaces.
                context.new_error("SPACE_REPLACE_TAB", context.peek_token(i))
                return True, i
            elif context.check_token(i, "TAB") is True and type_identifier_nb == 0:
                has_tab += 1
                current_indent += 1
                type_identifier_nb -= 1
            elif context.check_token(i, "TAB") and type_identifier_nb > 0 and \
                    line_start == False:
                # A tab between two type keywords is not allowed.
                context.new_error("TAB_REPLACE_SPACE", context.peek_token(i))
            i += 1
        return False, 0

    def run(self, context):
        """
        Each variable must be indented at the same level for its scope
        """
        i = 0
        identifier = None
        self.check_tabs(context)
        # Find the last identifier before the terminator: the declared variable.
        while context.peek_token(i) and context.check_token(i, ["SEMI_COLON", "COMMA", "ASSIGN"]) is False:
            if context.check_token(i, ["LBRACKET", "LBRACE"]) is True:
                i = context.skip_nest(i)
            if context.check_token(i, "IDENTIFIER") is True:
                ident = (context.peek_token(i), i)
            i += 1
        i = ident[1]
        identifier = ident[0]
        # Pointer/reference declarations: align on the leading '*'/'&'/'('.
        if context.check_token(i - 1, ["MULT", "BWISE_AND", "LPARENTHESIS"]) is True:
            i -= 1
            while context.check_token(i - 1, ["MULT", "BWISE_AND", "LPARENTHESIS"]) is True \
                    and context.is_operator(i) is False:
                i -= 1
            identifier = context.peek_token(i)
        # The first declaration fixes the alignment column for the scope.
        if context.scope.vars_alignment == 0:
            context.scope.vars_alignment = identifier.pos[1]
        elif context.scope.vars_alignment != identifier.pos[1]:
            context.new_error("MISALIGNED_VAR_DECL", context.peek_token(i))
            return True, i
        return False, 0
|
from .handlers import uri_rewrites
|
from sklearn.naive_bayes import MultinomialNB
from sklearn.utils.class_weight import compute_sample_weight
from sklearn.pipeline import Pipeline
from sklearn import metrics
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
import os

# Directory of this script (kept for path-relative lookups).
file_path = os.path.dirname(os.path.abspath(__file__))
import sys
sys.path.insert(0, '../../')
sys.path.insert(0, '../../pyfunctor')
import csv_handler as csv_handler
import transform as transformer
from sklearn import metrics
import time
import xgboost as xgb
from util.weight import WeightClassCSV

# CLI: argv[1]=train csv, argv[2]=dev csv, argv[3]=metrics log csv, argv[4]=seed
start_time = time.time()
train_set = csv_handler.csv_readlines(sys.argv[1])
dev_set = csv_handler.csv_readlines(sys.argv[2])
log_file_path = sys.argv[3]
seed = int(sys.argv[4])  # NOTE(review): parsed but never used below — confirm intent
# Class weights for labels '0'/'1'; index 1 balances the positive class.
label_weight = WeightClassCSV(sys.argv[1]).get_weights(['0', '1'])
def sep(dataset):
    """Split (id, sentence, label) rows into parallel (sentences, labels) lists."""
    sentences = transformer.map_func(dataset, lambda row: row[1])
    labels = transformer.map_func(dataset, lambda row: int(row[2]))
    return (sentences, labels)
(X_train, y_train) = sep(train_set)
(X_dev, y_dev) = sep(dev_set)

# Pipeline: bag-of-words (uni+bi-grams on whitespace tokens) -> tf-idf -> XGBoost.
# NOTE(review): token_pattern=r"*" is not a valid regex, but scikit-learn
# ignores token_pattern whenever an explicit tokenizer is supplied.
# NOTE(review): "num_estimators" is not an XGBClassifier parameter
# (n_estimators is); it is passed through as an unused extra kwarg — confirm.
xgb_clf = Pipeline([('vect',
                     #CountVectorizer(ngram_range=(1, 2))),
                     CountVectorizer(ngram_range=(1, 2), analyzer='word', tokenizer = lambda doc : doc.split(), token_pattern=r"*")),
                    ('tfidf', TfidfTransformer()),
                    #('xgboost', xgb.XGBClassifier(objective='binary:logistic', scale_pos_weight = label_weight[1], num_parallel_tree = 5)),
                    ('xgboost', xgb.XGBClassifier(objective='binary:logistic', scale_pos_weight = label_weight[1], num_estimators = 5)),
                    ])
xgb_clf.fit(X_train, y_train)
train_finish_time = time.time()
train_duration = train_finish_time - start_time
print("train time is " + str(train_finish_time - start_time))
predicted = xgb_clf.predict(X_dev)
test_duration = time.time() - train_finish_time
print("test time is " + str(time.time() - train_finish_time))
print(metrics.classification_report(y_dev, predicted))
# output metric: precision,recall,f1,train_time, test_time
(precision, recall, fscore, support) = metrics.precision_recall_fscore_support(y_dev, predicted)
print("F1 is " + str(fscore))
#csv_handler.append_row(log_file_path, ['dataset', 'precision', 'recall', 'fscore', 'train_time', 'test_time'])
# Append one metrics row (positive class, index 1) to the shared log CSV.
row = []
row.append(sys.argv[1])
row.append(precision[1])
row.append(recall[1])
row.append(fscore[1])
row.append(train_duration)
row.append(test_duration)
csv_handler.append_row(log_file_path, row)
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var React = _interopRequireWildcard(require("react"));
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M24 15c0-2.64-2.05-4.78-4.65-4.96C18.67 6.59 15.64 4 12 4c-1.33 0-2.57.36-3.65.97l1.49 1.49C10.51 6.17 11.23 6 12 6c3.04 0 5.5 2.46 5.5 5.5v.5H19c1.66 0 3 1.34 3 3 0 .99-.48 1.85-1.21 2.4l1.41 1.41c1.09-.92 1.8-2.27 1.8-3.81zM4.41 3.86 3 5.27l2.77 2.77h-.42C2.34 8.36 0 10.91 0 14c0 3.31 2.69 6 6 6h11.73l2 2 1.41-1.41L4.41 3.86zM6 18c-2.21 0-4-1.79-4-4s1.79-4 4-4h1.73l8 8H6z"
}), 'CloudOffSharp');
exports.default = _default;
|
function checkTime(i) {
if (i < 10) {
i = "0" + i;
}
return i;
}
// Remove the clicked element's parent node from the DOM.
function deletes(e) {
    // FIX: the original assigned to an undeclared identifier, leaking a
    // global `dom_parent`; declare it locally instead.
    var dom_parent = $(e).parent().remove();
}
// Reload the department checkboxes (#append_department) and the employee
// list (#scroll) for the given company / branch pair via a synchronous ajax
// POST to list_department_by_id.
function show_depament(id, id_small) {
    $('#scroll').html("");
    var data = new FormData();
    data.append('id', id);
    data.append('id_small', id_small);
    $.ajax({
        type: "POST",
        cache: false,
        contentType: false,
        processData: false,
        // enctype: 'multipart/form-data',
        url: '/company/Company_controller/list_department_by_id',
        data: data,
        dataType: "JSON",
        async: false,
        success: function(data) {
            if (data.result == true) {
                var html = ''
                $('.l_remove_department').remove();
                var a = '';
                // Rebuild the department checkbox list (or show an "add one" link).
                if (data.list.length > 0) {
                    for (let index = 0; index < data.list.length; index++) {
                        $('#append_department').append('<div class="col-md-4 col-sm-4 col-xs-12 d-input-radio-v1a l_remove_department ">' +
                            '<input type="checkbox" class="item_ca d-tao-lich2 l_curson" onchange = "show_staff(' + data.list[index].dep_id + ')" id ="' + data.list[index].dep_id + '" value="' + data.list[index].dep_id + '" name="department">' +
                            '<label for="' + data.list[index].dep_id + '" class="d-tao-lich2-v1 l_curson">' + data.list[index].dep_name + '</label>' +
                            '</div>');
                    }
                } else {
                    $('#append_department').append('<div class="col-md-4 col-sm-4 col-xs-12 d-input-radio-v1a l_remove_department">Thêm phòng ban<a class="l_margin_link" href="https://chamcong.timviec365.vn/quan-ly-cong-ty/phong-ban.html"> Tại đây</a></div>');
                }
                // Rebuild the employee list shipped in the same response.
                if (data.list.length > 0) {
                    for (let i = 0; i < data.show_staff.length; i++) {
                        var name_dep = 'chưa xác định';
                        if (data.show_staff[i].dep_name != null) {
                            name_dep = data.show_staff[i].dep_name;
                        }
                        var html = '<div class="col-md-6 col-sm-6 col-xs-12 d-tao-lich-v3a1 remobe_staff">' +
                            '<div class="d-tao-lich-v3-img">' +
                            '<img src="' + data.show_staff[i].ep_image + '" onerror=' + 'this.onerror=null;this.src="/images_staff/avatar_default.png";' + 'alt="ten_nv" class="nv-tao-lich-img">' +
                            '</div>' +
                            '<label for="st' + data.show_staff[i].ep_id + '">' +
                            '<div class="d-ten-nv">' +
                            '<p class="d-cham-cong-p">(' + data.show_staff[i].ep_id + ') ' + data.show_staff[i].ep_name + '</p>' +
                            '<p class="d-cham-cong-p1">Nhân viên ' + name_dep + '</p>' +
                            '</div>' +
                            '</label>' +
                            '<div class="d-input-nv">' +
                            '<input type="checkbox" name="staff[]" class="item_cty d-tao-lich2 l_curson" id="st' + data.show_staff[i].ep_id + '" data-name="1" value="' + data.show_staff[i].ep_id + '">' +
                            '</div>' +
                            '</div>';
                        $('#scroll').append(html);
                    }
                } else {
                    $('#scroll').append('<div class="col-md-4 col-sm-4 col-xs-12 d-input-radio-v1a remobe_staff">Thêm nhân viên<a class="l_margin_link" href="https://chamcong.timviec365.vn/quan-ly-cong-ty/nhan-vien.html"> Tại đây</a></div>');
                }
            } else {
                return false;
            }
        }
    });
}
// Reload the employee checkbox list (#scroll) for all currently-checked
// departments (".item_ca"); `a` only triggers the call and is unused.
function show_staff(a) {
    var data = new FormData();
    var arr_staff = [];
    $(".item_ca").each(function() {
        if ($(this).is(":checked")) {
            arr_staff.push($(this).val());
        }
    });
    data.append('id', arr_staff);
    $.ajax({
        type: "POST",
        cache: false,
        contentType: false,
        processData: false,
        // enctype: 'multipart/form-data',
        url: '/company/Company_controller/show_staff_by_department',
        data: data,
        dataType: "JSON",
        async: false,
        success: function(data) {
            if (data.result == true) {
                $('.remobe_staff').remove();
                for (let index = 0; index < data.list.length; index++) {
                    var html = '<div class="col-md-6 col-sm-6 col-xs-12 d-tao-lich-v3a1 remobe_staff">' +
                        '<div class="d-tao-lich-v3-img">' +
                        '<img src="' + data.list[index].ep_image + '" onerror=' + 'this.onerror=null;this.src="/images_staff/avatar_default.png";' + 'alt="ten_nv" class="nv-tao-lich-img">' +
                        '</div>' +
                        '<label for="st' + data.list[index].ep_id + '">' +
                        '<div class="d-ten-nv">' +
                        '<p class="d-cham-cong-p">(' + data.list[index].ep_id + ') ' + data.list[index].ep_name + '</p>' +
                        '<p class="d-cham-cong-p1">Nhân viên ' + data.list[index].dep_name + '</p>' +
                        '</div>' +
                        '</label>' +
                        '<div class="d-input-nv">' +
                        '<input type="checkbox" name="staff[]" class="item_cty d-tao-lich2" id="st' + data.list[index].ep_id + '" data-name="1" value="' + data.list[index].ep_id + '">' +
                        '</div>' +
                        '</div>';
                    $('#scroll').append(html);
                }
                // FIX: the original kept a counter (`dem`) that could only be
                // incremented inside a loop guarded by list.length > 0, so the
                // "empty" placeholder effectively showed iff the list was empty
                // (0 == 0). State that intent directly; behavior is unchanged.
                if (data.list.length == 0) {
                    $('#scroll').append('<div class="col-md-4 col-sm-4 col-xs-12 d-input-radio-v1a remobe_staff">Thêm nhân viên<a class="l_margin_link" href="/quan-ly-nhan-vien-cong-ty.html/1"> Tại đây</a></div>');
                }
            } else {
                return false;
            }
        }
    });
}
$(document).ready(function() {
    // NOTE(review): the "`+base_url+`" fragments below are literal text inside
    // the strings (leftover server-template syntax), so these src values are
    // broken as plain JS — confirm against the original template.
    $('.d-dropdown').hover(function() {
        $(this).attr('src', "`+base_url+`/assets/images/them1.svg");
    },
    function() {
        $(this).attr('src', '`+base_url+`/assets/images/them.svg');
    });
    var check_num = $('.d-dichuyen-input');
    var diemdung = check_num.length;
    // Append one more "stop" input row before the add-button container.
    $('#them_diem_dung').click(function() {
        $('.l_remove').css('display', 'block');
        html = `
<div class="d-dichuyen2">
<p class="d-dichuyen-p"><img src="/assets/images/dot_blue.svg" alt="dot" class="d-dichuyen-img "> <span class="d-dichuyen-sp">Đến điểm dừng:</span></p>
<input type="text" name="diemden[]" class="d-dichuyen-input" placeholder="Nhập điểm dừng">
<img src="/assets/images/Delete.svg" alt="xóa" class="d-delete-img l_remove" onClick="deletes(this)">
</div>`;
        $(".d-them-diem-dung").before(html);
        diemdung++;
    });
    // Mirror checkbox state into data attributes used elsewhere.
    $(".item_ca").on('click', function() {
        if (!$(this).hasClass('active')) {
            $(this).addClass('active').attr('data-chon', 1);
        } else {
            $(this).removeClass('active').attr('data-chon', 0);
        }
    });
    $(".item_cty").on('click', function() {
        if (!$(this).hasClass('active')) {
            $(this).addClass('active').attr('data-nv', 1);
        } else {
            $(this).removeClass('active').attr('data-nv', 0);
        }
    });
    // Validate and submit the "create schedule" form via ajax.
    $("#them_lich").submit(function(event) {
        event.preventDefault();
        var form_oke = true;
        var arr_id_to_focus = [];
        var tao_lich = $.trim($("#tao_lich").val());
        // Selected employees (checked ".item_cty" boxes).
        var arr_staff = [];
        $(".item_cty").each(function() {
            if ($(this).is(":checked")) {
                arr_staff.push($(this).val());
            }
        });
        var date_start = $.trim($("#date_start").val());
        var date_end = $.trim($("#date_end").val());
        var ghi_chu = $.trim($("#ghi_chu").val());
        // Concatenate all non-empty stop inputs as "a;b;c;".
        var input_place = $('.d-dichuyen-input');
        var place = '';
        for (var i = 0; i < input_place.length; i++) {
            if (input_place[i].value != '') {
                place += input_place[i].value + ';';
            }
        }
        var form_data = new FormData();
        if (tao_lich == "") {
            $("#err_lich").html("Bạn chưa nhập tên lịch trình");
            arr_id_to_focus.push("#tao_lich");
            form_oke = false;
        } else {
            $("#err_lich").html("");
            form_data.append('tao_lich', tao_lich);
        }
        var cty = [];
        $(".d-tao-lich2").each(function() {
            if ($(this).is(":checked")) {
                cty.push($(this).val());
            }
        });
        if (cty.length == 0) {
            $("#err_choose_cty").html("Bạn chưa chọn công ty");
            arr_id_to_focus.push('#err_choose_cty');
            form_oke = false;
        } else {
            $("#err_choose_cty").html("");
        }
        var cty = [];
        $(".item_ca").each(function() {
            if ($(this).is(":checked")) {
                cty.push($(this).val());
            }
        });
        // if (cty.length == 0) {
        // $("#err_phongban").html("Bạn chưa chọn phòng ban");
        // arr_id_to_focus.push('#err_phongban');
        // form_oke = false;
        // } else {
        // $("#err_phongban").html("");
        // }
        if (arr_staff.length == 0) {
            $('#err_choose_nv').html('Bạn chưa chọn nhân viên');
            arr_id_to_focus.push('#err_choose_nv');
            form_oke = false;
        } else {
            $('#err_choose_nv').html('');
            form_data.append('chon_nv', arr_staff);
        }
        // Start date must be today or later (string compare on yyyy-mm-dd).
        if (date_start == "" || date_start == null) {
            $('#err_date_start').html("Bạn chưa chọn ngày bắt đầu");
            form_oke = false;
            arr_id_to_focus.push('#date_start');
        } else {
            var today = new Date();
            var date = today.getFullYear() + '-' + checkTime((today.getMonth() + 1)) + '-' + checkTime((today.getDate()));
            if (date_start >= date) {
                $('#err_date_start').html("");
                form_data.append('date_start', date_start);
            } else {
                $('#err_date_start').html("Ngày bắt đầu phải sau ngày tạo lịch");
                form_oke = false;
                arr_id_to_focus.push('#date_start');
            }
        }
        // End date must be after the start date and after today.
        if (date_end == "" || date_end == null) {
            $('#err_date_end').html("Bạn chưa chọn ngày kết thúc");
            form_oke = false;
            arr_id_to_focus.push('#date_end');
        } else {
            if (date_end <= date_start) {
                $('#err_date_end').html("Ngày kết thúc phải sau ngày bắt đầu");
                form_oke = false;
                arr_id_to_focus.push('#date_end');
            } else {
                var today = new Date();
                var date = today.getFullYear() + '-' + checkTime((today.getMonth() + 1)) + '-' + checkTime((today.getDate()));
                if (date_end > date) {
                    $('#err_date_end').html("");
                    form_data.append('date_end', date_end);
                } else {
                    $('#err_date_end').html("Ngày kết thúc phải sau ngày tạo lịch");
                    form_oke = false;
                    arr_id_to_focus.push('#date_end');
                }
            }
        }
        if (place == '') {
            $('#err_dd').html("Bạn phải nhập đủ điểm đầu, điểm cuối")
            form_oke = false;
            arr_id_to_focus.push('.d-dichuyen-input');
        } else {
            $('#err_dd').html("");
            form_data.append('place', place);
        }
        form_data.append('ghi_chu', ghi_chu);
        if (form_oke == true) {
            $.ajax({
                type: "POST",
                cache: false,
                contentType: false,
                processData: false,
                // enctype: 'multipart/form-data',
                url: '/company/Company_controller/create_schedule_post',
                data: form_data,
                dataType: "JSON",
                async: false,
                success: function(data) {
                    // Show a transient success banner, then go to the list page.
                    if (data.result == true) {
                        $("#alert").append('<div class="alert-success">' + data.message + '</div>');
                        setTimeout(function() {
                            $(".alert-success").fadeOut(1000, function() {
                                $(".alert-success").remove();
                                window.location.href = "/quan-ly-lich-trinh.html";
                            });
                        }, 1500);
                    } else {
                        return false;
                    }
                }
            });
        }
        // Focus the first invalid field, if any.
        $(arr_id_to_focus[0]).focus();
        return false;
    });
});
// Validate and submit the "update schedule" form for schedule `id` via ajax.
// Mirrors the create handler above but posts to update_schedule_post and does
// not compare the dates against today.
function capnhatlichtrinh(id) {
    // NOTE(review): relies on the global `event` object instead of a handler
    // parameter — confirm this is always invoked from an inline handler.
    event.preventDefault();
    var form_oke = true;
    var arr_id_to_focus = [];
    var tao_lich = $.trim($("#tao_lich").val());
    var arr_staff = [];
    $(".item_cty").each(function() {
        if ($(this).is(":checked")) {
            arr_staff.push($(this).val());
        }
    });
    var date_start = $.trim($("#date_start").val());
    var date_end = $.trim($("#date_end").val());
    var ghi_chu = $.trim($("#ghi_chu").val());
    // Concatenate all non-empty stop inputs as "a;b;c;".
    var input_place = $('.d-dichuyen-input');
    var place = '';
    for (var i = 0; i < input_place.length; i++) {
        if (input_place[i].value != '') {
            place += input_place[i].value + ';';
        }
    }
    var form_data = new FormData();
    if (id != '' && id != 0) {
        form_data.append('id', id);
    }
    if (tao_lich == "") {
        $("#err_lich").html("Bạn chưa nhập tên lịch trình");
        arr_id_to_focus.push("#tao_lich");
        form_oke = false;
    } else {
        $("#err_lich").html("");
        form_data.append('tao_lich', tao_lich);
    }
    var cty = [];
    $(".d-tao-lich2").each(function() {
        if ($(this).is(":checked")) {
            cty.push($(this).val());
        }
    });
    if (cty.length == 0) {
        $("#err_choose_cty").html("Bạn chưa chọn công ty");
        arr_id_to_focus.push('#err_choose_cty');
        form_oke = false;
    } else {
        $("#err_choose_cty").html("");
    }
    var cty = [];
    $(".item_ca").each(function() {
        if ($(this).is(":checked")) {
            cty.push($(this).val());
        }
    });
    if (cty.length == 0) {
        $("#err_phongban").html("Bạn chưa chọn phòng ban");
        arr_id_to_focus.push('#err_phongban');
        form_oke = false;
    } else {
        $("#err_phongban").html("");
    }
    if (arr_staff.length == 0) {
        $('#err_choose_nv').html('Bạn chưa chọn nhân viên');
        arr_id_to_focus.push('#err_choose_nv');
        form_oke = false;
    } else {
        $('#err_choose_nv').html('');
        form_data.append('chon_nv', arr_staff);
    }
    if (date_start == "" || date_start == null) {
        $('#err_date_start').html("Bạn chưa chọn ngày bắt đầu");
        form_oke = false;
        arr_id_to_focus.push('#date_start');
    } else {
        $('#err_date_start').html("");
        form_data.append('date_start', date_start);
    }
    // End date only has to be >= start date on update.
    if (date_end == "" || date_end == null) {
        $('#err_date_end').html("Bạn chưa chọn ngày kết thúc");
        form_oke = false;
        arr_id_to_focus.push('#date_end');
    } else {
        if (date_end < date_start) {
            $('#err_date_end').html("Ngày kết thúc phải sau ngày bắt đầu");
            form_oke = false;
            arr_id_to_focus.push('#date_end');
        } else {
            $('#err_date_end').html("");
            form_data.append('date_end', date_end);
        }
    }
    form_data.append('ghi_chu', ghi_chu);
    if (place == '') {
        $('#err_dd').html("Bạn phải nhập đủ điểm đầu, điểm cuối")
        form_oke = false;
        arr_id_to_focus.push('.d-dichuyen-input');
    } else {
        $('#err_dd').html("");
        form_data.append('place', place);
    }
    if (form_oke == true) {
        $.ajax({
            type: "POST",
            cache: false,
            contentType: false,
            processData: false,
            // enctype: 'multipart/form-data',
            url: '/company/Company_controller/update_schedule_post',
            data: form_data,
            dataType: "JSON",
            async: false,
            success: function(data) {
                // Show a transient success banner, then go to the list page.
                if (data.result == true) {
                    $("#alert").append('<div class="alert-success">' + data.message + '</div>');
                    setTimeout(function() {
                        $(".alert-success").fadeOut(1000, function() {
                            $(".alert-success").remove();
                            window.location.href = "/quan-ly-lich-trinh.html";
                        });
                    }, 1500);
                } else {
                    return false;
                }
            }
        });
    }
    // Focus the first invalid field, if any.
    $(arr_id_to_focus[0]).focus();
    return false;
}
|
# -*- coding: utf-8 -*-
import unittest
from pyparsing import ParseException
from tests.utils.grammar import get_record_grammar
"""
CWR Performing Artist grammar tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
class TestPerformingArtistGrammar(unittest.TestCase):
    """
    Tests that the Performing Artist (PER) record grammar accepts valid input.
    """

    def setUp(self):
        # Grammar under test, built by the project's record-grammar factory.
        self.grammar = get_record_grammar('performing_artist')

    def test_valid_full(self):
        """
        Tests that a PER record with every field populated parses correctly
        and that each fixed-width field maps to the expected result attribute.
        """
        record = 'PER0000123400000023LAST NAME FIRST NAME 00014107338I-000000229-7'
        result = self.grammar.parseString(record)[0]
        self.assertEqual('PER', result.record_type)
        self.assertEqual(1234, result.transaction_sequence_n)
        self.assertEqual(23, result.record_sequence_n)
        self.assertEqual('LAST NAME', result.performing_artist_last_name)
        self.assertEqual('FIRST NAME', result.performing_artist_first_name)
        self.assertEqual(14107338, result.performing_artist_ipi_name_n)
        self.assertEqual('I-000000229-7', result.performing_artist_ipi_base_n)
class TestPerformingArtistGrammarException(unittest.TestCase):
    """
    Tests that the Performing Artist (PER) record grammar rejects invalid input.
    """

    def setUp(self):
        # Grammar under test, built by the project's record-grammar factory.
        self.grammar = get_record_grammar('performing_artist')

    def test_empty(self):
        """
        Tests that an exception is thrown when the record string is empty.
        """
        record = ''
        self.assertRaises(ParseException, self.grammar.parseString, record)

    def test_invalid(self):
        """
        Tests that an exception is thrown for an arbitrary invalid string.
        """
        record = 'This is an invalid string'
        self.assertRaises(ParseException, self.grammar.parseString, record)
|
#!/usr/bin/env python3
# Build script: compiles the Rust crate to WebAssembly with wasm-pack and
# prepares packaging a versioned module zip under ./artifact.
import json
from pathlib import PurePath, Path
import subprocess
import tempfile
import zipfile
# wasm-pack binary installed by cargo in the user's home directory.
wasm_pack = Path("~/.cargo/bin/wasm-pack").expanduser()
# Metadata files copied from the repository root into the zip root.
root_files = ["module.json", "README.md", "CHANGELOG.md", "LICENSE"]
# Artifacts produced by wasm-pack that ship inside the module's wasm/ dir.
wasm_files = ["gridless_pathfinding_bg.wasm", "gridless_pathfinding.js"]
output_dir = Path("artifact")
# Directories whose entire contents are copied into the zip recursively.
copy_everything_directories = ["js", "lang", "templates"]
wasm_dir = Path("wasm")
root_dir = Path(".")
rust_dir = Path("rust")
# Temporary wasm-pack output dir; removed when this object is finalized.
build_dir_tmp = tempfile.TemporaryDirectory()
build_dir = Path(build_dir_tmp.name)
# The module manifest supplies the package name and version for the zip.
with open("module.json", "r") as file:
    manifest = json.load(file)
zip_root = PurePath(f'{manifest["name"]}')
filename = f'{manifest["name"]}-{manifest["version"]}.zip'
# Compile the Rust crate to a web-targeted wasm bundle.
result = subprocess.run([wasm_pack, "build", "--target", "web", "--out-dir", build_dir, root_dir / rust_dir])
if result.returncode != 0:
    raise Exception("Wasm build failed")
output_dir.mkdir(parents=True, exist_ok=True)
def write_directory(archive, d):
    """Recursively add every file below ``root_dir / d`` to *archive*.

    Entries are stored beneath ``zip_root`` so the archive unpacks into a
    single module folder; subdirectories are descended into recursively.
    """
    for entry in (root_dir / d).iterdir():
        if entry.is_dir():
            write_directory(archive, entry)
            continue
        assert entry.is_file()
        archive.write(entry, arcname=zip_root / d / entry.name)
# Assemble the release zip: root metadata files, the static asset
# directories, and the freshly built wasm artifacts.
with zipfile.ZipFile(output_dir / filename, mode="w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as archive:
    for f in root_files:
        archive.write(root_dir / f, arcname=zip_root / f)
    for d in copy_everything_directories:
        write_directory(archive, d)
    for f in wasm_files:
        # wasm artifacts come from the temporary build dir but are placed
        # under wasm/ inside the module.
        archive.write(build_dir / f, arcname=zip_root / wasm_dir / f)
print(f"Successfully built {output_dir / filename}")
|
$(document).ready(function() {
    // Smooth-scroll every in-page anchor link and track the active nav item.
    $("a").on('click', function(event) {
        var targetHash = this.hash;
        // Links without a fragment keep their default browser behavior.
        if (targetHash === "") {
            return;
        }
        event.preventDefault();

        // Highlight only the clicked navigation link.
        $(".nav-link").removeClass("active");
        $(this).addClass("active");

        // Animate the scroll over 800ms, then write the fragment back into
        // the URL so deep-linking still works after the animation.
        $('html, body').animate(
            { scrollTop: $(targetHash).offset().top },
            800,
            function() {
                window.location.hash = targetHash;
            }
        );
    });
});
|
#include <dsverifier.h>
/* Second-order digital system under verification: transfer-function
 * numerator (b) and denominator (a) coefficients, lowest-order first. */
digital_system ds = {
        .b = {4.244336814021699e-05, 8.488673628043397e-05, 4.244336814021699e-05},
        .b_size = 3,
        .a = { 1.000000000000000, -1.981488509144574, 9.816582826171342e-01},
        .a_size = 3
        };
/* Fixed-point implementation: <2,5> format (2 integer bits, 5 fractional
 * bits), dynamic range [-1.6, 1.6], and maximum allowed error of 5.0. */
implementation impl = {
        .int_bits = 2,
        .frac_bits = 5,
        .min = -1.6,
        .max = 1.6,
        .max_error = 5.0
        };
/* Frequency-response specification (type 1 = low-pass): magnitude bounds
 * Ac/Ap/Ar and the matching normalized frequencies wp/wc/wr. */
filter_parameters filter = {
        .Ac = 0.707945784384138,
        .Ap = 0.707945784384138,
        .Ar = 0.707945784384138,
        /* BUG FIX: the original was missing the comma after .wp, which made
         * this initializer a compile error. */
        .wp = 0.0,
        .wc = 0.0041,
        .wr = 0.1041,
        .type = 1
        };
|
const path = require('path');
const fixtures = require('require-all')({
dirname: __dirname,
filter: /.json$/,
map: function (__, path) {
return `${path}`;
}
})
module.exports = Object.keys(fixtures).map(item => {
return {
fileName: path.basename(item, '.json'),
data: fixtures[item]
};
});
|
// Build configuration for the MintSideNavbar component, driven by cooking
// (a webpack wrapper). Produces a UMD bundle plus an extracted stylesheet.
var cooking = require('cooking');
var path = require('path');
// Shared build settings (extends/alias/externals) for the whole repo.
var config = require('../../build/config');
cooking.set({
  entry: {
    index: path.join(__dirname, 'index.js')
  },
  dist: path.join(__dirname, 'lib'),
  template: false,
  format: 'umd',
  moduleName: 'MintSideNavbar',
  extractCSS: 'style.css',
  extends: config.extends,
  alias: config.alias,
  externals: config.externals
});
// Hand the resolved webpack configuration to the build tooling.
module.exports = cooking.resolve();
import random
import time
from enos.message.upstream.tsl.MeasurepointPostBatchRequest import MeasurepointPostBatchRequest
from enos.message.upstream.status.SubDeviceLoginRequest import SubDeviceLoginRequest
from enos.message.upstream.tsl.MeasurepointPostRequest import MeasurepointPostRequest
from enos.sample.SampleHelper import SampleHelper
from enos.core.MqttClient import MqttClient
def post_measure_points():
    """Publish a single measure point for the directly connected device.

    Sends one random 'wywpoint2' value stamped with the current time.
    CONSISTENCY FIX: the timestamp is now ``int(time.time() * 1000)``
    (milliseconds), matching the batch helpers below; the original used
    second resolution, 1000x smaller than its siblings.
    """
    measure_point_request = MeasurepointPostRequest.builder() \
        .add_measurepoint('wywpoint2', random.randint(100, 200)) \
        .set_timestamp(int(time.time() * 1000)) \
        .build()
    # client is the module-level MqttClient created in the __main__ block.
    measure_point_response = client.publish(measure_point_request)
    if measure_point_response:
        print('measurepoint post response code: {}'.format(measure_point_response.get_code()))
def post_measure_points_batch(allow, skip):
    """Directly connected devices report data with different timestamps.

    Publishes two measure-point requests for the gateway device in a single
    batch: one stamped with the current time in milliseconds and one with a
    fixed historical timestamp.

    :param allow: forwarded to set_allow_offline_sub_device on the batch
    :param skip: forwarded to set_skip_invalid_measurepoints on the batch
    """
    measure_point_post_requests = list()
    # Current-time sample (epoch milliseconds).
    measure_point_post_requests.append(MeasurepointPostRequest.builder()
                                       .set_product_key(SampleHelper.GW1_PRODUCT_KEY)
                                       .set_device_key(SampleHelper.GW1_DEVICE_KEY)
                                       .add_measurepoint('wywpoint2', random.randint(100, 200))
                                       .set_timestamp(int(time.time() * 1000))
                                       .build())
    # Historical sample: fixed timestamp plus the shared sample point set.
    measure_point_post_requests.append(MeasurepointPostRequest.builder()
                                       .set_product_key(SampleHelper.GW1_PRODUCT_KEY)
                                       .set_device_key(SampleHelper.GW1_DEVICE_KEY)
                                       .add_measurepoints(SampleHelper.MEASURE_POINTS)
                                       .set_timestamp(1573061486173)
                                       .build())
    measure_point_post_batch_request = MeasurepointPostBatchRequest.builder() \
        .set_requests(measure_point_post_requests) \
        .set_allow_offline_sub_device(allow) \
        .set_skip_invalid_measurepoints(skip) \
        .build()
    # client is the module-level MqttClient created in the __main__ block.
    measure_point_post_batch_response = client.publish(measure_point_post_batch_request)
    if measure_point_post_batch_response:
        print('post_measure_batch_point_response: %s' % measure_point_post_batch_response.get_code())
def post_measure_points_batch_sub(allow, skip):
    """This sample shows how sub-devices login and publish measure points to broker.

    The gateway device acts as a proxy: two sub-devices are logged in first,
    then a batch of measure points is published on behalf of the first one.

    :param allow: forwarded to set_allow_offline_sub_device on the batch
    :param skip: forwarded to set_skip_invalid_measurepoints on the batch
    """
    # Log in sub-device 3 through the gateway connection.
    sub_device_login_request = SubDeviceLoginRequest.builder() \
        .set_sub_device_info(SampleHelper.SUB3_PRODUCT_KEY,
                             SampleHelper.SUB3_DEVICE_KEY,
                             SampleHelper.SUB3_DEVICE_SECRET) \
        .build()
    sub_device_login_response = client.publish(sub_device_login_request)
    if sub_device_login_response:
        print('sub_device_login_response: %s' % sub_device_login_response.get_code())
    # Log in sub-device 1 the same way.
    sub_device_login_request = SubDeviceLoginRequest.builder() \
        .set_sub_device_info(SampleHelper.SUB1_PRODUCT_KEY,
                             SampleHelper.SUB1_DEVICE_KEY,
                             SampleHelper.SUB1_DEVICE_SECRET) \
        .build()
    sub_device_login_response = client.publish(sub_device_login_request)
    if sub_device_login_response:
        print('sub_device_login_response: %s' % sub_device_login_response.get_code())
    # Build a batch containing one request for the first configured sub-device.
    measure_point_post_requests = list()
    measure_point_post_requests.append(MeasurepointPostRequest.builder()
                                       .set_product_key(SampleHelper.SUB_DEVICES[0]
                                                        .get_product_key())
                                       .set_device_key(SampleHelper.SUB_DEVICES[0].get_device_key())
                                       .add_measurepoint('wywpoint2', random.randint(100, 200))
                                       .add_measurepoints(SampleHelper.MEASURE_POINTS)
                                       .build())
    measure_point_post_batch_request = MeasurepointPostBatchRequest.builder() \
        .set_requests(measure_point_post_requests) \
        .set_allow_offline_sub_device(allow) \
        .set_skip_invalid_measurepoints(skip) \
        .build()
    # client is the module-level MqttClient created in the __main__ block.
    measure_point_post_batch_response = client.publish(measure_point_post_batch_request)
    if measure_point_post_batch_response:
        print('post_measure_batch_points_response: %s' % measure_point_post_batch_response.get_code())
if __name__ == '__main__':
    # Connect to the EnOS broker as the gateway device, then run each sample.
    client = MqttClient(SampleHelper.TCP_SERVER_URL, SampleHelper.GW1_PRODUCT_KEY, SampleHelper.GW1_DEVICE_KEY,
                        SampleHelper.GW1_DEVICE_SECRET)
    client.get_profile().set_auto_reconnect(True)  # if the connection drops, the client reconnects automatically
    client.setup_basic_logger('INFO')
    client.connect()  # connect in sync (blocks until connected)
    post_measure_points()
    post_measure_points_batch_sub(False, False)
    post_measure_points_batch(False, False)
import os
# Database connection string; defaults to a local SQLite file for development.
DB_URI = os.environ.get('DB_URI', 'sqlite:///app.db')
# Base URL of the database API service; defaults to a local instance.
DB_API = os.environ.get('DB_API', 'http://localhost:57302')
|
// drawing.js
// This file contains functions to draw on the HTML5 canvas
// Wipe the entire drawing surface (the full logical world rectangle).
var clearScreen = function(game) {
  var world = game.world;
  game.ctx.clearRect(0, 0, world.width, world.height);
};
// Redraw the whole frame: white background, then either a centered status
// message or the current stimulus (objects plus a role-specific highlight).
var drawScreen = function(game, mouseX, mouseY) {
  // NOTE(review): player is a stub object, so player.message is always
  // undefined and the message branch never runs; the original lookup is
  // kept commented out below — confirm before re-enabling.
  var player = {}; //game.get_player(globalGame.my_id)

  // draw background
  game.ctx.fillStyle = "#FFFFFF";
  game.ctx.fillRect(0, 0, game.viewport.width, game.viewport.height);

  // Draw message in center (for countdown, e.g.)
  if (player.message) {
    game.ctx.font = "bold 23pt Helvetica";
    game.ctx.fillStyle = 'blue';
    game.ctx.textAlign = 'center';
    wrapText(game, player.message,
             game.world.width/2, game.world.width/4,
             game.world.width*4/5,
             25);
  } else {
    drawObjects(game);
    if (game.my_role === game.playerRoleNames.role1) { // Speaker
      drawTargetBox(game);
    } else if (game.my_role === game.playerRoleNames.role2) { // Listener (FIX: strict === instead of loose ==)
      drawHoverBox(game, mouseX, mouseY);
    }
    drawDividers(game);
  }
};
// Draw the vertical black lines separating the object columns.
var drawDividers = function(game) {
  var ctx = game.ctx;
  var count = game.currStim.objs.length;
  var columnWidth = game.world.width / count;
  // One divider between each pair of adjacent columns (none at the edges).
  for (var idx = 1; idx < count; idx++) {
    var x = columnWidth * idx;
    ctx.strokeStyle = 'black';
    ctx.lineWidth = 5;
    ctx.beginPath();
    ctx.moveTo(x, 0);
    ctx.lineTo(x, game.world.height);
    ctx.closePath();
    ctx.stroke();
  }
};
// Map an x pixel coordinate to the index of the object column beneath it.
// (mouseY is accepted for signature parity with the other handlers but the
// columns span the full height, so only x matters.)
var getHoverIndex = function(game, mouseX, mouseY) {
  var columnWidth = game.world.width / game.currStim.objs.length;
  return Math.floor(mouseX / columnWidth);
};
// Highlight (blue) the object column currently under the listener's cursor,
// and return its index.
var drawHoverBox = function(game, mouseX, mouseY) {
  // FIX: bail out when EITHER coordinate is missing. The original guard used
  // && and only skipped when both were undefined, so a single undefined
  // coordinate fell through and produced a NaN column index.
  if (typeof(mouseX) == 'undefined' || typeof(mouseY) == 'undefined')
    return undefined;
  var hoverObj = getHoverIndex(game, mouseX, mouseY);
  if (game.my_role !== game.playerRoleNames.role1) { // Listener only
    drawBox(game, hoverObj, "rgba(0, 0, 255, 0.8)");
  }
  return hoverObj;
};
// Outline (red) the column containing the target object. Speaker and
// listener see the objects in different orders, so the index is looked up
// in the order array for the viewer's role.
var drawTargetBox = function(game) {
  var stim = game.currStim;
  var isSpeaker = game.my_role === game.playerRoleNames.role1;
  var order = isSpeaker ? stim.speakerOrder : stim.listenerOrder;
  drawBox(game, order.indexOf(stim.target), "rgba(255, 0, 0, 0.8)");
};
// After a listener click: draw a green box if the clicked column is the
// target, otherwise reveal the target in red. Returns the clicked index.
var drawClickedCorrectBox = function(game, mouseX, mouseY) {
  var stim = game.currStim;
  var columnWidth = game.world.width / stim.objs.length;
  var clickedObj = Math.floor(mouseX / columnWidth);
  if (game.my_role !== game.playerRoleNames.role1) { // Listener only
    var targetIndex = stim.listenerOrder.indexOf(stim.target);
    if (targetIndex == clickedObj) {
      drawBox(game, clickedObj, "rgba(0, 255, 0, 0.8)"); // correct: green
    } else {
      drawTargetBox(game); // incorrect: show the real target
    }
  }
  return clickedObj;
};
// Stroke a thick rectangle around the column at objIndex, inset by half the
// line width so the stroke stays inside the column bounds.
var drawBox = function(game, objIndex, color) {
  var ctx = game.ctx;
  var columnWidth = game.world.width / game.currStim.objs.length;
  ctx.strokeStyle = color;
  ctx.lineWidth = 20;
  var inset = ctx.lineWidth / 2.0;
  ctx.beginPath();
  ctx.rect(columnWidth * objIndex + inset,            // top-left x
           inset,                                     // top-left y
           columnWidth - ctx.lineWidth,               // width
           game.world.height - ctx.lineWidth);        // height
  ctx.closePath();
  ctx.stroke();
};
// Render every object of the current stimulus, one per column, using the
// viewing order for this client's role. Each object is a list of filled
// polygons ("shapes") whose points are given in column-local coordinates.
var drawObjects = function(game) {
  var stim = game.currStim;
  var columnWidth = game.world.width / stim.objs.length;
  var isSpeaker = game.my_role === game.playerRoleNames.role1;
  var order = isSpeaker ? stim.speakerOrder : stim.listenerOrder;
  for (var col = 0; col < stim.speakerOrder.length; col++) {
    var shiftX = columnWidth * col;
    var obj = stim.objs[order[col]];
    for (var s = 0; s < obj.shapeCount; s++) {
      var shape = obj.shapes[s];
      // Colors are stored as [hue, saturation%, lightness%].
      game.ctx.fillStyle = ('hsl(' + shape.color[0] + ',' + shape.color[1] + '%, ' + shape.color[2] + '%)');
      game.ctx.beginPath();
      game.ctx.moveTo(shiftX + shape.points[0].x, shape.points[0].y);
      for (var p = 1; p < shape.points.length; p++) {
        game.ctx.lineTo(shiftX + shape.points[p].x, shape.points[p].y);
      }
      game.ctx.closePath();
      game.ctx.fill();
    }
  }
};
// This is a helper function to write a text string onto the HTML5 canvas.
// It automatically figures out how to break the text into lines that will fit
// Input:
// * game: the game object (containing the ctx canvas object)
// * text: the string of text you want to writ
// * x: the x coordinate of the point you want to start writing at (in pixels)
// * y: the y coordinate of the point you want to start writing at (in pixels)
// * maxWidth: the maximum width you want to allow the text to span (in pixels)
// * lineHeight: the vertical space you want between lines (in pixels)
function wrapText(game, text, x, y, maxWidth, lineHeight) {
    // Respect explicit line breaks first; each chunk is word-wrapped separately.
    var cars = text.split("\n");
    // Blank the whole viewport so the message is drawn on a clean background.
    game.ctx.fillStyle = 'white';
    game.ctx.fillRect(0, 0, game.viewport.width, game.viewport.height);
    game.ctx.fillStyle = 'red';
    for (var ii = 0; ii < cars.length; ii++) {
        var line = "";
        var words = cars[ii].split(" ");
        for (var n = 0; n < words.length; n++) {
            var testLine = line + words[n] + " ";
            // measureText uses the current canvas font; callers set it first.
            var metrics = game.ctx.measureText(testLine);
            var testWidth = metrics.width;
            if (testWidth > maxWidth) {
                // The word would overflow: flush the current line and start
                // the next one with the word that did not fit.
                game.ctx.fillText(line, x, y);
                line = words[n] + " ";
                y += lineHeight;
            }
            else {
                line = testLine;
            }
        }
        // Flush whatever remains of this paragraph.
        game.ctx.fillText(line, x, y);
        y += lineHeight;
    }
};
|