text
stringlengths 3
1.05M
|
|---|
var at__quick__exit_8c =
[
[ "__attribute__", "d8/d48/at__quick__exit_8c.html#af9aace1b44b73111e15aa39f06f43456", null ]
];
|
import React from 'react';
import App, { Container } from 'next/app';
import { ThemeProvider } from 'emotion-theming';
// Global emotion theme shared by every page of the app.
const theme = {
color: 'red',
backgroundColor: '#252627',
};
// Custom Next.js App: wraps every page in the emotion ThemeProvider so all
// styled components can read `theme` via context.
export default class VortexDotNameApp extends App {
render() {
// Component is the active page; pageProps are its preloaded props.
const { Component, pageProps } = this.props;
return (
<ThemeProvider theme={theme}>
<Container>
<Component {...pageProps} />
</Container>
</ThemeProvider>
);
}
}
|
/*
* This file is part of the MicroPython project, http://micropython.org/
*
* The MIT License (MIT)
*
* Copyright (c) 2013, 2014 Damien P. George
* Copyright (c) 2015 Daniel Campora
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <stdint.h>
#include <string.h>
#include "simplelink.h"
#include "py/mpconfig.h"
#include "py/obj.h"
#include "py/objstr.h"
#include "py/runtime.h"
#include "py/stream.h"
#include "py/mphal.h"
#include "shared/netutils/netutils.h"
#include "modnetwork.h"
#include "modusocket.h"
/******************************************************************************/
// The following set of macros and functions provide a glue between the CC3100
// simplelink layer and the functions/methods provided by the usocket module.
// They were historically in a separate file because usocket was designed to
// work with multiple NICs, and the wlan_XXX functions just provided one
// particular NIC implementation (that of the CC3100). But the CC3200 port only
// supports a single NIC (being the CC3100) so it's unnecessary and inefficient
// to provide an intermediate wrapper layer. Hence the wlan_XXX functions
// are provided below as static functions so they can be inlined directly by
// the corresponding usocket calls.
// Hard limits of the CC3100 transceiver for a single recv/send operation.
#define WLAN_MAX_RX_SIZE 16000
#define WLAN_MAX_TX_SIZE 1476
// Declare and fill a SlSockAddr_t named `addr` from a 4-byte ip buffer and a
// host-order port.  Note the ip bytes are stored reversed (sa_data[2..5] get
// ip[3..0]) to match the little-endian convention used by netutils below.
#define MAKE_SOCKADDR(addr, ip, port) SlSockAddr_t addr; \
addr.sa_family = SL_AF_INET; \
addr.sa_data[0] = port >> 8; \
addr.sa_data[1] = port; \
addr.sa_data[2] = ip[3]; \
addr.sa_data[3] = ip[2]; \
addr.sa_data[4] = ip[1]; \
addr.sa_data[5] = ip[0];
// Inverse of MAKE_SOCKADDR: extract ip bytes and port from a SlSockAddr_t.
#define UNPACK_SOCKADDR(addr, ip, port) port = (addr.sa_data[0] << 8) | addr.sa_data[1]; \
ip[0] = addr.sa_data[5]; \
ip[1] = addr.sa_data[4]; \
ip[2] = addr.sa_data[3]; \
ip[3] = addr.sa_data[2];
// Granularity of the busy-wait loop used to emulate socket timeouts.
#define SOCKET_TIMEOUT_QUANTA_MS (20)
// SimpleLink reports errors as negative codes; MicroPython's errno values are
// positive, so negate before handing the code back through *_errno.
STATIC int convert_sl_errno(int sl_errno) {
return -sl_errno;
}
// This function is left as non-static so it's not inlined.
int check_timedout(mod_network_socket_obj_t *s, int ret, uint32_t *timeout_ms, int *_errno) {
if (*timeout_ms == 0 || ret != SL_EAGAIN) {
if (s->sock_base.timeout_ms > 0 && ret == SL_EAGAIN) {
*_errno = MP_ETIMEDOUT;
} else {
*_errno = convert_sl_errno(ret);
}
return -1;
}
mp_hal_delay_ms(SOCKET_TIMEOUT_QUANTA_MS);
if (*timeout_ms < SOCKET_TIMEOUT_QUANTA_MS) {
*timeout_ms = 0;
} else {
*timeout_ms -= SOCKET_TIMEOUT_QUANTA_MS;
}
return 0;
}
// Resolve `name` via the SimpleLink DNS client.  The resolved IPv4 address
// comes back as a 32-bit value; it is stored little-endian into the caller's
// 4-byte out_ip buffer.  Returns the raw SimpleLink status code.
STATIC int wlan_gethostbyname(const char *name, mp_uint_t len, uint8_t *out_ip, uint8_t family) {
    uint32_t ip_addr;
    int status = sl_NetAppDnsGetHostByName((_i8 *)name, (_u16)len, (_u32 *)&ip_addr, (_u8)family);
    // Unpack the 32-bit address one byte at a time, least significant first.
    for (int i = 0; i < 4; i++) {
        out_ip[i] = (uint8_t)(ip_addr >> (8 * i));
    }
    return status;
}
// Create the underlying SimpleLink socket for `s`.  On success stores the new
// descriptor in s->sock_base.sd and returns 0; on failure stores the negative
// SimpleLink code in *_errno and returns -1.
STATIC int wlan_socket_socket(mod_network_socket_obj_t *s, int *_errno) {
int16_t sd = sl_Socket(s->sock_base.u_param.domain, s->sock_base.u_param.type, s->sock_base.u_param.proto);
if (sd < 0) {
*_errno = sd;
return -1;
}
s->sock_base.sd = sd;
return 0;
}
// Close the socket and remove it from the module's tracking table.
STATIC void wlan_socket_close(mod_network_socket_obj_t *s) {
// this is to prevent the finalizer from closing a socket that failed when being created
if (s->sock_base.sd >= 0) {
modusocket_socket_delete(s->sock_base.sd);
sl_Close(s->sock_base.sd);
// mark as closed so a second close (e.g. from the finalizer) is a no-op
s->sock_base.sd = -1;
}
}
// Bind the socket to the given ip/port.  Returns 0 on success, otherwise -1
// with the raw SimpleLink error code in *_errno.
STATIC int wlan_socket_bind(mod_network_socket_obj_t *s, byte *ip, mp_uint_t port, int *_errno) {
MAKE_SOCKADDR(addr, ip, port)
int ret = sl_Bind(s->sock_base.sd, &addr, sizeof(addr));
if (ret != 0) {
*_errno = ret;
return -1;
}
return 0;
}
// Put the socket into listening mode with the given backlog.  Returns 0 on
// success, otherwise -1 with the raw SimpleLink error code in *_errno.
STATIC int wlan_socket_listen(mod_network_socket_obj_t *s, mp_int_t backlog, int *_errno) {
int ret = sl_Listen(s->sock_base.sd, backlog);
if (ret != 0) {
*_errno = ret;
return -1;
}
return 0;
}
// Accept an incoming connection on `s`, storing the new descriptor in `s2`
// and the peer's address in ip/port.  Loops on SL_EAGAIN until the socket's
// configured timeout expires (see check_timedout).
STATIC int wlan_socket_accept(mod_network_socket_obj_t *s, mod_network_socket_obj_t *s2, byte *ip, mp_uint_t *port, int *_errno) {
// accept incoming connection
int16_t sd;
SlSockAddr_t addr;
SlSocklen_t addr_len = sizeof(addr);
uint32_t timeout_ms = s->sock_base.timeout_ms;
for (;;) {
sd = sl_Accept(s->sock_base.sd, &addr, &addr_len);
if (sd >= 0) {
// save the socket descriptor
s2->sock_base.sd = sd;
// return ip and port
UNPACK_SOCKADDR(addr, ip, *port);
return 0;
}
if (check_timedout(s, sd, &timeout_ms, _errno)) {
return -1;
}
}
}
// Connect the socket to ip/port.  Handles the three outcomes of the
// underlying non-blocking sl_Connect: success, in-progress (SL_EALREADY),
// and hard error.
STATIC int wlan_socket_connect(mod_network_socket_obj_t *s, byte *ip, mp_uint_t port, int *_errno) {
MAKE_SOCKADDR(addr, ip, port)
uint32_t timeout_ms = s->sock_base.timeout_ms;
// For a non-blocking connect the CC3100 will return SL_EALREADY while the
// connection is in progress.
for (;;) {
int ret = sl_Connect(s->sock_base.sd, &addr, sizeof(addr));
if (ret == 0) {
return 0;
}
// Check if we are in non-blocking mode and the connection is in progress
if (s->sock_base.timeout_ms == 0 && ret == SL_EALREADY) {
// To match BSD we return EINPROGRESS here
*_errno = MP_EINPROGRESS;
return -1;
}
// We are in blocking mode, so if the connection isn't in progress then error out
if (ret != SL_EALREADY) {
*_errno = convert_sl_errno(ret);
return -1;
}
// Still in progress: wait one quantum (or fail with ETIMEDOUT) and retry.
if (check_timedout(s, SL_EAGAIN, &timeout_ms, _errno)) {
return -1;
}
}
}
// Send up to `len` bytes, retrying on SL_EAGAIN until the socket timeout
// expires.  Returns the number of bytes sent, or -1 with *_errno set.
STATIC int wlan_socket_send(mod_network_socket_obj_t *s, const byte *buf, mp_uint_t len, int *_errno) {
// sl_Send of zero bytes is avoided entirely
if (len == 0) {
return 0;
}
uint32_t timeout_ms = s->sock_base.timeout_ms;
for (;;) {
int ret = sl_Send(s->sock_base.sd, (const void *)buf, len, 0);
if (ret > 0) {
return ret;
}
if (check_timedout(s, ret, &timeout_ms, _errno)) {
return -1;
}
}
}
// Receive up to `len` bytes (capped at the transceiver's WLAN_MAX_RX_SIZE),
// retrying on errors until the socket timeout expires.  Returns the byte
// count (0 means peer closed), or -1 with *_errno set.
STATIC int wlan_socket_recv(mod_network_socket_obj_t *s, byte *buf, mp_uint_t len, int *_errno) {
uint32_t timeout_ms = s->sock_base.timeout_ms;
for (;;) {
int ret = sl_Recv(s->sock_base.sd, buf, MIN(len, WLAN_MAX_RX_SIZE), 0);
if (ret >= 0) {
return ret;
}
if (check_timedout(s, ret, &timeout_ms, _errno)) {
return -1;
}
}
}
// Datagram send to a specific ip/port, with the same retry/timeout behaviour
// as wlan_socket_send.  Returns bytes sent, or -1 with *_errno set.
STATIC int wlan_socket_sendto( mod_network_socket_obj_t *s, const byte *buf, mp_uint_t len, byte *ip, mp_uint_t port, int *_errno) {
MAKE_SOCKADDR(addr, ip, port)
uint32_t timeout_ms = s->sock_base.timeout_ms;
for (;;) {
int ret = sl_SendTo(s->sock_base.sd, (byte*)buf, len, 0, (SlSockAddr_t*)&addr, sizeof(addr));
if (ret >= 0) {
return ret;
}
if (check_timedout(s, ret, &timeout_ms, _errno)) {
return -1;
}
}
}
// Datagram receive; on success fills ip/port with the sender's address.
// Same retry/timeout behaviour as wlan_socket_recv.
STATIC int wlan_socket_recvfrom(mod_network_socket_obj_t *s, byte *buf, mp_uint_t len, byte *ip, mp_uint_t *port, int *_errno) {
SlSockAddr_t addr;
SlSocklen_t addr_len = sizeof(addr);
uint32_t timeout_ms = s->sock_base.timeout_ms;
for (;;) {
int ret = sl_RecvFrom(s->sock_base.sd, buf, MIN(len, WLAN_MAX_RX_SIZE), 0, &addr, &addr_len);
if (ret >= 0) {
UNPACK_SOCKADDR(addr, ip, *port);
return ret;
}
if (check_timedout(s, ret, &timeout_ms, _errno)) {
return -1;
}
}
}
// Thin wrapper over sl_SetSockOpt.  Returns 0 on success, otherwise -1 with
// the raw (negative) SimpleLink code in *_errno.
STATIC int wlan_socket_setsockopt(mod_network_socket_obj_t *s, mp_uint_t level, mp_uint_t opt, const void *optval, mp_uint_t optlen, int *_errno) {
int ret = sl_SetSockOpt(s->sock_base.sd, level, opt, optval, optlen);
if (ret < 0) {
*_errno = ret;
return -1;
}
return 0;
}
// Configure the socket's timeout behaviour:
//   timeout_s == 0            -> non-blocking
//   timeout_s == -1 (all-ones, since mp_uint_t is unsigned) -> fully blocking
//   otherwise                 -> SimpleLink socket is made non-blocking and the
//                                timeout is synthesized by the retry loops above
STATIC int wlan_socket_settimeout(mod_network_socket_obj_t *s, mp_uint_t timeout_s, int *_errno) {
SlSockNonblocking_t option;
if (timeout_s == 0 || timeout_s == -1) {
if (timeout_s == 0) {
// set non-blocking mode
option.NonblockingEnabled = 1;
} else {
// set blocking mode
option.NonblockingEnabled = 0;
}
// in both cases the soft timeout is disabled
timeout_s = 0;
} else {
// synthesize timeout via non-blocking behaviour with a loop
option.NonblockingEnabled = 1;
}
int ret = sl_SetSockOpt(s->sock_base.sd, SL_SOL_SOCKET, SL_SO_NONBLOCKING, &option, sizeof(option));
if (ret != 0) {
*_errno = convert_sl_errno(ret);
return -1;
}
// store the soft timeout in milliseconds for check_timedout()
s->sock_base.timeout_ms = timeout_s * 1000;
return 0;
}
// Stream-protocol ioctl handler: implements MP_STREAM_POLL via sl_Select and
// MP_STREAM_CLOSE via wlan_socket_close.  Any other request yields EINVAL.
STATIC int wlan_socket_ioctl (mod_network_socket_obj_t *s, mp_uint_t request, mp_uint_t arg, int *_errno) {
mp_int_t ret;
if (request == MP_STREAM_POLL) {
mp_uint_t flags = arg;
ret = 0;
int32_t sd = s->sock_base.sd;
// init fds
SlFdSet_t rfds, wfds, xfds;
SL_FD_ZERO(&rfds);
SL_FD_ZERO(&wfds);
SL_FD_ZERO(&xfds);
// set fds if needed
if (flags & MP_STREAM_POLL_RD) {
SL_FD_SET(sd, &rfds);
}
if (flags & MP_STREAM_POLL_WR) {
SL_FD_SET(sd, &wfds);
}
if (flags & MP_STREAM_POLL_HUP) {
SL_FD_SET(sd, &xfds);
}
// call simplelink's select with minimum timeout (1us) so poll never blocks
SlTimeval_t tv;
tv.tv_sec = 0;
tv.tv_usec = 1;
int32_t nfds = sl_Select(sd + 1, &rfds, &wfds, &xfds, &tv);
// check for errors
if (nfds == -1) {
*_errno = nfds;
return -1;
}
// translate the ready fd sets back into MP_STREAM_POLL_* flags
if (SL_FD_ISSET(sd, &rfds)) {
ret |= MP_STREAM_POLL_RD;
}
if (SL_FD_ISSET(sd, &wfds)) {
ret |= MP_STREAM_POLL_WR;
}
if (SL_FD_ISSET(sd, &xfds)) {
ret |= MP_STREAM_POLL_HUP;
}
} else if (request == MP_STREAM_CLOSE) {
wlan_socket_close(s);
ret = 0;
} else {
*_errno = MP_EINVAL;
ret = MP_STREAM_ERROR;
}
return ret;
}
/******************************************************************************
DEFINE PRIVATE CONSTANTS
******************************************************************************/
// Maximum number of simultaneously tracked sockets.
#define MOD_NETWORK_MAX_SOCKETS 10
/******************************************************************************
DEFINE PRIVATE TYPES
******************************************************************************/
// One slot of the socket tracking table: the SimpleLink descriptor (or -1 if
// the slot is free) and whether the socket was created by user code.
typedef struct {
int16_t sd;
bool user;
} modusocket_sock_t;
/******************************************************************************
DEFINE PRIVATE DATA
******************************************************************************/
STATIC const mp_obj_type_t socket_type;
// Lock protecting the socket table against concurrent access.
STATIC OsiLockObj_t modusocket_LockObj;
// All slots start free (sd == -1).
STATIC modusocket_sock_t modusocket_sockets[MOD_NETWORK_MAX_SOCKETS] = {{.sd = -1}, {.sd = -1}, {.sd = -1}, {.sd = -1}, {.sd = -1},
{.sd = -1}, {.sd = -1}, {.sd = -1}, {.sd = -1}, {.sd = -1}};
/******************************************************************************
DEFINE PUBLIC FUNCTIONS
******************************************************************************/
// Early boot initialization (placed in the .boot section): create and release
// the lock that guards the socket table.
__attribute__ ((section (".boot")))
void modusocket_pre_init (void) {
// create the wlan lock
ASSERT(OSI_OK == sl_LockObjCreate(&modusocket_LockObj, "SockLock"));
sl_LockObjUnlock (&modusocket_LockObj);
}
// Record a newly created socket descriptor in the first free slot of the
// tracking table (a slot is free when its sd is negative).  If the table is
// full the descriptor is silently not tracked, matching previous behaviour.
void modusocket_socket_add (int16_t sd, bool user) {
    sl_LockObjLock (&modusocket_LockObj, SL_OS_WAIT_FOREVER);
    int slot = 0;
    while (slot < MOD_NETWORK_MAX_SOCKETS) {
        if (modusocket_sockets[slot].sd < 0) {
            modusocket_sockets[slot].sd = sd;
            modusocket_sockets[slot].user = user;
            break;
        }
        slot++;
    }
    sl_LockObjUnlock (&modusocket_LockObj);
}
// Remove a socket descriptor from the tracking table (marks its slot free).
void modusocket_socket_delete (int16_t sd) {
sl_LockObjLock (&modusocket_LockObj, SL_OS_WAIT_FOREVER);
for (int i = 0; i < MOD_NETWORK_MAX_SOCKETS; i++) {
if (modusocket_sockets[i].sd == sd) {
modusocket_sockets[i].sd = -1;
break;
}
}
sl_LockObjUnlock (&modusocket_LockObj);
}
// Block until any tracked socket becomes readable, allowing the system to
// sleep and be woken by network activity.
void modusocket_enter_sleep (void) {
    SlFdSet_t socketset;
    int16_t maxfd = 0;
    // BUGFIX: the fd set must be cleared before any SL_FD_SET call,
    // otherwise sl_Select() operates on uninitialized stack memory.
    SL_FD_ZERO(&socketset);
    for (int i = 0; i < MOD_NETWORK_MAX_SOCKETS; i++) {
        int16_t sd;
        if ((sd = modusocket_sockets[i].sd) >= 0) {
            SL_FD_SET(sd, &socketset);
            maxfd = (maxfd > sd) ? maxfd : sd;
        }
    }
    if (maxfd > 0) {
        // wait (with no timeout) for any of the sockets to become ready...
        sl_Select(maxfd + 1, &socketset, NULL, NULL, NULL);
    }
}
// Close every tracked socket that was created by user code (e.g. on soft
// reset), leaving system-owned sockets untouched.
void modusocket_close_all_user_sockets (void) {
sl_LockObjLock (&modusocket_LockObj, SL_OS_WAIT_FOREVER);
for (int i = 0; i < MOD_NETWORK_MAX_SOCKETS; i++) {
if (modusocket_sockets[i].sd >= 0 && modusocket_sockets[i].user) {
sl_Close(modusocket_sockets[i].sd);
modusocket_sockets[i].sd = -1;
}
}
sl_LockObjUnlock (&modusocket_LockObj);
}
/******************************************************************************/
// socket class
// constructor socket(family=AF_INET, type=SOCK_STREAM, proto=IPPROTO_TCP, fileno=None)
// Python constructor: socket([family[, type[, proto[, fileno]]]]).
// Defaults to an IPv4 TCP stream socket; raises OSError if the underlying
// SimpleLink socket cannot be created.
STATIC mp_obj_t socket_make_new(const mp_obj_type_t *type, size_t n_args, size_t n_kw, const mp_obj_t *args) {
mp_arg_check_num(n_args, n_kw, 0, 4, false);
// create socket object
mod_network_socket_obj_t *s = m_new_obj_with_finaliser(mod_network_socket_obj_t);
s->base.type = (mp_obj_t)&socket_type;
s->sock_base.u_param.domain = SL_AF_INET;
s->sock_base.u_param.type = SL_SOCK_STREAM;
s->sock_base.u_param.proto = SL_IPPROTO_TCP;
s->sock_base.u_param.fileno = -1;
s->sock_base.timeout_ms = 0;
s->sock_base.cert_req = false;
// each positional argument overrides the corresponding default
if (n_args > 0) {
s->sock_base.u_param.domain = mp_obj_get_int(args[0]);
if (n_args > 1) {
s->sock_base.u_param.type = mp_obj_get_int(args[1]);
if (n_args > 2) {
s->sock_base.u_param.proto = mp_obj_get_int(args[2]);
if (n_args > 3) {
s->sock_base.u_param.fileno = mp_obj_get_int(args[3]);
}
}
}
}
// create the socket
int _errno;
// _errno holds a negative SimpleLink code here, hence the negation
if (wlan_socket_socket(s, &_errno) != 0) {
mp_raise_OSError(-_errno);
}
// add the socket to the list
modusocket_socket_add(s->sock_base.sd, true);
return s;
}
// method socket.bind(address)
// Bind the socket to the (ip, port) tuple in addr_in; raises OSError on failure.
STATIC mp_obj_t socket_bind(mp_obj_t self_in, mp_obj_t addr_in) {
mod_network_socket_obj_t *self = self_in;
// get address
uint8_t ip[MOD_NETWORK_IPV4ADDR_BUF_SIZE];
mp_uint_t port = netutils_parse_inet_addr(addr_in, ip, NETUTILS_LITTLE);
// call the NIC to bind the socket
int _errno = 0;
if (wlan_socket_bind(self, ip, port, &_errno) != 0) {
mp_raise_OSError(-_errno);
}
return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_bind_obj, socket_bind);
// method socket.listen([backlog])
// listen([backlog]) — negative backlogs are clamped to 0; raises OSError on failure.
STATIC mp_obj_t socket_listen(size_t n_args, const mp_obj_t *args) {
mod_network_socket_obj_t *self = args[0];
int32_t backlog = MICROPY_PY_USOCKET_LISTEN_BACKLOG_DEFAULT;
if (n_args > 1) {
backlog = mp_obj_get_int(args[1]);
backlog = (backlog < 0) ? 0 : backlog;
}
int _errno;
if (wlan_socket_listen(self, backlog, &_errno) != 0) {
mp_raise_OSError(-_errno);
}
return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(socket_listen_obj, 1, 2, socket_listen);
// method socket.accept()
// accept() — returns a (client_socket, (ip, port)) tuple; raises OSError on
// failure or timeout.
STATIC mp_obj_t socket_accept(mp_obj_t self_in) {
mod_network_socket_obj_t *self = self_in;
// create new socket object
mod_network_socket_obj_t *socket2 = m_new_obj_with_finaliser(mod_network_socket_obj_t);
// the new socket inherits all properties from its parent
memcpy (socket2, self, sizeof(mod_network_socket_obj_t));
// accept the incoming connection
uint8_t ip[MOD_NETWORK_IPV4ADDR_BUF_SIZE];
mp_uint_t port = 0;
int _errno = 0;
if (wlan_socket_accept(self, socket2, ip, &port, &_errno) != 0) {
mp_raise_OSError(_errno);
}
// add the socket to the list
modusocket_socket_add(socket2->sock_base.sd, true);
// make the return value
mp_obj_tuple_t *client = mp_obj_new_tuple(2, NULL);
client->items[0] = socket2;
client->items[1] = netutils_format_inet_addr(ip, port, NETUTILS_LITTLE);
return client;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_1(socket_accept_obj, socket_accept);
// method socket.connect(address)
// connect(address) — raises OSError on failure, except that an unverified
// secure connection (SL_ESECSNOVERIFY) is tolerated when certificate
// verification was not requested.
STATIC mp_obj_t socket_connect(mp_obj_t self_in, mp_obj_t addr_in) {
mod_network_socket_obj_t *self = self_in;
// get address
uint8_t ip[MOD_NETWORK_IPV4ADDR_BUF_SIZE];
mp_uint_t port = netutils_parse_inet_addr(addr_in, ip, NETUTILS_LITTLE);
// connect the socket
int _errno;
if (wlan_socket_connect(self, ip, port, &_errno) != 0) {
if (!self->sock_base.cert_req && _errno == SL_ESECSNOVERIFY) {
return mp_const_none;
}
mp_raise_OSError(_errno);
}
return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_connect_obj, socket_connect);
// method socket.send(bytes)
// send(bytes) — returns the number of bytes sent; raises OSError on failure.
STATIC mp_obj_t socket_send(mp_obj_t self_in, mp_obj_t buf_in) {
mod_network_socket_obj_t *self = self_in;
mp_buffer_info_t bufinfo;
mp_get_buffer_raise(buf_in, &bufinfo, MP_BUFFER_READ);
int _errno;
mp_int_t ret = wlan_socket_send(self, bufinfo.buf, bufinfo.len, &_errno);
if (ret < 0) {
mp_raise_OSError(_errno);
}
return mp_obj_new_int_from_uint(ret);
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_send_obj, socket_send);
// method socket.recv(bufsize)
// recv(bufsize) — returns a bytes object of up to bufsize bytes; an empty
// bytes object means the peer closed the connection.
STATIC mp_obj_t socket_recv(mp_obj_t self_in, mp_obj_t len_in) {
mod_network_socket_obj_t *self = self_in;
mp_int_t len = mp_obj_get_int(len_in);
vstr_t vstr;
vstr_init_len(&vstr, len);
int _errno;
mp_int_t ret = wlan_socket_recv(self, (byte*)vstr.buf, len, &_errno);
if (ret < 0) {
mp_raise_OSError(_errno);
}
if (ret == 0) {
return mp_const_empty_bytes;
}
// shrink the vstr to the number of bytes actually received
vstr.len = ret;
vstr.buf[vstr.len] = '\0';
return mp_obj_new_str_from_vstr(&mp_type_bytes, &vstr);
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_recv_obj, socket_recv);
// method socket.sendto(bytes, address)
// sendto(bytes, address) — returns bytes sent; raises OSError on failure.
STATIC mp_obj_t socket_sendto(mp_obj_t self_in, mp_obj_t data_in, mp_obj_t addr_in) {
mod_network_socket_obj_t *self = self_in;
// get the data
mp_buffer_info_t bufinfo;
mp_get_buffer_raise(data_in, &bufinfo, MP_BUFFER_READ);
// get address
uint8_t ip[MOD_NETWORK_IPV4ADDR_BUF_SIZE];
mp_uint_t port = netutils_parse_inet_addr(addr_in, ip, NETUTILS_LITTLE);
// call the nic to sendto
int _errno = 0;
mp_int_t ret = wlan_socket_sendto(self, bufinfo.buf, bufinfo.len, ip, port, &_errno);
if (ret < 0) {
mp_raise_OSError(_errno);
}
return mp_obj_new_int(ret);
}
STATIC MP_DEFINE_CONST_FUN_OBJ_3(socket_sendto_obj, socket_sendto);
// method socket.recvfrom(bufsize)
// recvfrom(bufsize) — returns (bytes, (ip, port)); bytes is empty when the
// peer closed the connection.
STATIC mp_obj_t socket_recvfrom(mp_obj_t self_in, mp_obj_t len_in) {
mod_network_socket_obj_t *self = self_in;
vstr_t vstr;
vstr_init_len(&vstr, mp_obj_get_int(len_in));
byte ip[4];
mp_uint_t port = 0;
int _errno = 0;
mp_int_t ret = wlan_socket_recvfrom(self, (byte*)vstr.buf, vstr.len, ip, &port, &_errno);
if (ret < 0) {
mp_raise_OSError(_errno);
}
mp_obj_t tuple[2];
if (ret == 0) {
tuple[0] = mp_const_empty_bytes;
} else {
// shrink the vstr to the number of bytes actually received
vstr.len = ret;
vstr.buf[vstr.len] = '\0';
tuple[0] = mp_obj_new_str_from_vstr(&mp_type_bytes, &vstr);
}
tuple[1] = netutils_format_inet_addr(ip, port, NETUTILS_LITTLE);
return mp_obj_new_tuple(2, tuple);
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_recvfrom_obj, socket_recvfrom);
// method socket.setsockopt(level, optname, value)
// setsockopt(level, optname, value) — value may be an integer or a buffer.
STATIC mp_obj_t socket_setsockopt(size_t n_args, const mp_obj_t *args) {
mod_network_socket_obj_t *self = args[0];
mp_int_t level = mp_obj_get_int(args[1]);
mp_int_t opt = mp_obj_get_int(args[2]);
const void *optval;
mp_uint_t optlen;
mp_int_t val;
if (mp_obj_is_integer(args[3])) {
// integer option values are passed by address
val = mp_obj_get_int_truncated(args[3]);
optval = &val;
optlen = sizeof(val);
} else {
// buffer option values are passed through directly
mp_buffer_info_t bufinfo;
mp_get_buffer_raise(args[3], &bufinfo, MP_BUFFER_READ);
optval = bufinfo.buf;
optlen = bufinfo.len;
}
int _errno;
if (wlan_socket_setsockopt(self, level, opt, optval, optlen, &_errno) != 0) {
mp_raise_OSError(-_errno);
}
return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(socket_setsockopt_obj, 4, 4, socket_setsockopt);
// method socket.settimeout(value)
// timeout=0 means non-blocking
// timeout=None means blocking
// otherwise, timeout is in seconds
// settimeout(value) — None maps to -1 (blocking) for wlan_socket_settimeout.
STATIC mp_obj_t socket_settimeout(mp_obj_t self_in, mp_obj_t timeout_in) {
mod_network_socket_obj_t *self = self_in;
mp_uint_t timeout;
if (timeout_in == mp_const_none) {
timeout = -1;
} else {
timeout = mp_obj_get_int(timeout_in);
}
int _errno = 0;
if (wlan_socket_settimeout(self, timeout, &_errno) != 0) {
mp_raise_OSError(_errno);
}
return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_settimeout_obj, socket_settimeout);
// method socket.setblocking(flag)
// setblocking(flag): setblocking(True) == settimeout(None),
// setblocking(False) == settimeout(0).
STATIC mp_obj_t socket_setblocking(mp_obj_t self_in, mp_obj_t blocking) {
    mp_obj_t timeout = mp_obj_is_true(blocking) ? mp_const_none : MP_OBJ_NEW_SMALL_INT(0);
    return socket_settimeout(self_in, timeout);
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(socket_setblocking_obj, socket_setblocking);
// makefile(...) — on this port the socket object is already a stream, so just
// return it; all extra arguments are ignored.
STATIC mp_obj_t socket_makefile(size_t n_args, const mp_obj_t *args) {
(void)n_args;
return args[0];
}
STATIC MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(socket_makefile_obj, 1, 6, socket_makefile);
// Method table of the socket class.  close/__del__ go through the generic
// stream close, which ends up in wlan_socket_ioctl(MP_STREAM_CLOSE).
STATIC const mp_rom_map_elem_t socket_locals_dict_table[] = {
{ MP_ROM_QSTR(MP_QSTR___del__), MP_ROM_PTR(&mp_stream_close_obj) },
{ MP_ROM_QSTR(MP_QSTR_close), MP_ROM_PTR(&mp_stream_close_obj) },
{ MP_ROM_QSTR(MP_QSTR_bind), MP_ROM_PTR(&socket_bind_obj) },
{ MP_ROM_QSTR(MP_QSTR_listen), MP_ROM_PTR(&socket_listen_obj) },
{ MP_ROM_QSTR(MP_QSTR_accept), MP_ROM_PTR(&socket_accept_obj) },
{ MP_ROM_QSTR(MP_QSTR_connect), MP_ROM_PTR(&socket_connect_obj) },
{ MP_ROM_QSTR(MP_QSTR_send), MP_ROM_PTR(&socket_send_obj) },
{ MP_ROM_QSTR(MP_QSTR_sendall), MP_ROM_PTR(&socket_send_obj) },
{ MP_ROM_QSTR(MP_QSTR_recv), MP_ROM_PTR(&socket_recv_obj) },
{ MP_ROM_QSTR(MP_QSTR_sendto), MP_ROM_PTR(&socket_sendto_obj) },
{ MP_ROM_QSTR(MP_QSTR_recvfrom), MP_ROM_PTR(&socket_recvfrom_obj) },
{ MP_ROM_QSTR(MP_QSTR_setsockopt), MP_ROM_PTR(&socket_setsockopt_obj) },
{ MP_ROM_QSTR(MP_QSTR_settimeout), MP_ROM_PTR(&socket_settimeout_obj) },
{ MP_ROM_QSTR(MP_QSTR_setblocking), MP_ROM_PTR(&socket_setblocking_obj) },
{ MP_ROM_QSTR(MP_QSTR_makefile), MP_ROM_PTR(&socket_makefile_obj) },
// stream methods
{ MP_ROM_QSTR(MP_QSTR_read), MP_ROM_PTR(&mp_stream_read1_obj) },
{ MP_ROM_QSTR(MP_QSTR_readinto), MP_ROM_PTR(&mp_stream_readinto_obj) },
{ MP_ROM_QSTR(MP_QSTR_readline), MP_ROM_PTR(&mp_stream_unbuffered_readline_obj) },
{ MP_ROM_QSTR(MP_QSTR_write), MP_ROM_PTR(&mp_stream_write_obj) },
};
MP_DEFINE_CONST_DICT(socket_locals_dict, socket_locals_dict_table);
// Stream-protocol read: delegate to wlan_socket_recv, treating a closed
// secure socket as end-of-stream rather than an error.
STATIC mp_uint_t socket_read(mp_obj_t self_in, void *buf, mp_uint_t size, int *errcode) {
mod_network_socket_obj_t *self = self_in;
mp_int_t ret = wlan_socket_recv(self, buf, size, errcode);
if (ret < 0) {
// we need to ignore the socket closed error here because a read() without params
// only returns when the socket is closed by the other end
if (*errcode != -SL_ESECCLOSED) {
ret = MP_STREAM_ERROR;
} else {
ret = 0;
}
}
return ret;
}
// Stream-protocol write: delegate to wlan_socket_send; errcode is already set
// by the callee on failure.
STATIC mp_uint_t socket_write(mp_obj_t self_in, const void *buf, mp_uint_t size, int *errcode) {
mod_network_socket_obj_t *self = self_in;
mp_int_t ret = wlan_socket_send(self, buf, size, errcode);
if (ret < 0) {
ret = MP_STREAM_ERROR;
}
return ret;
}
// Stream-protocol ioctl: delegate directly to wlan_socket_ioctl (poll/close).
STATIC mp_uint_t socket_ioctl(mp_obj_t self_in, mp_uint_t request, mp_uint_t arg, int *errcode) {
mod_network_socket_obj_t *self = self_in;
return wlan_socket_ioctl(self, request, arg, errcode);
}
// Stream protocol vtable: lets socket objects be used with the generic
// MicroPython stream functions (read/write/readline/poll/close).
const mp_stream_p_t socket_stream_p = {
.read = socket_read,
.write = socket_write,
.ioctl = socket_ioctl,
.is_text = false,
};
// The usocket.socket type object.
STATIC const mp_obj_type_t socket_type = {
{ &mp_type_type },
.name = MP_QSTR_socket,
.make_new = socket_make_new,
.protocol = &socket_stream_p,
.locals_dict = (mp_obj_t)&socket_locals_dict,
};
/******************************************************************************/
// usocket module
// function usocket.getaddrinfo(host, port)
/// \function getaddrinfo(host, port)
// getaddrinfo(host, port) — IPv4 only; returns a single-element list with a
// 5-tuple (family, type, proto, canonname, sockaddr) like CPython's socket
// module.  Raises OSError if DNS resolution fails.
STATIC mp_obj_t mod_usocket_getaddrinfo(mp_obj_t host_in, mp_obj_t port_in) {
size_t hlen;
const char *host = mp_obj_str_get_data(host_in, &hlen);
mp_int_t port = mp_obj_get_int(port_in);
// ipv4 only
uint8_t out_ip[MOD_NETWORK_IPV4ADDR_BUF_SIZE];
int32_t result = wlan_gethostbyname(host, hlen, out_ip, SL_AF_INET);
if (result < 0) {
mp_raise_OSError(-result);
}
mp_obj_tuple_t *tuple = mp_obj_new_tuple(5, NULL);
tuple->items[0] = MP_OBJ_NEW_SMALL_INT(SL_AF_INET);
tuple->items[1] = MP_OBJ_NEW_SMALL_INT(SL_SOCK_STREAM);
tuple->items[2] = MP_OBJ_NEW_SMALL_INT(0);
// canonical name is left empty
tuple->items[3] = MP_OBJ_NEW_QSTR(MP_QSTR_);
tuple->items[4] = netutils_format_inet_addr(out_ip, port, NETUTILS_LITTLE);
return mp_obj_new_list(1, (mp_obj_t*)&tuple);
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(mod_usocket_getaddrinfo_obj, mod_usocket_getaddrinfo);
// Module-level globals: the socket class, getaddrinfo, and the address
// family / socket type / protocol constants mapped to SimpleLink values.
STATIC const mp_rom_map_elem_t mp_module_usocket_globals_table[] = {
{ MP_ROM_QSTR(MP_QSTR___name__), MP_ROM_QSTR(MP_QSTR_usocket) },
{ MP_ROM_QSTR(MP_QSTR_socket), MP_ROM_PTR(&socket_type) },
{ MP_ROM_QSTR(MP_QSTR_getaddrinfo), MP_ROM_PTR(&mod_usocket_getaddrinfo_obj) },
// class constants
{ MP_ROM_QSTR(MP_QSTR_AF_INET), MP_ROM_INT(SL_AF_INET) },
{ MP_ROM_QSTR(MP_QSTR_SOCK_STREAM), MP_ROM_INT(SL_SOCK_STREAM) },
{ MP_ROM_QSTR(MP_QSTR_SOCK_DGRAM), MP_ROM_INT(SL_SOCK_DGRAM) },
{ MP_ROM_QSTR(MP_QSTR_IPPROTO_SEC), MP_ROM_INT(SL_SEC_SOCKET) },
{ MP_ROM_QSTR(MP_QSTR_IPPROTO_TCP), MP_ROM_INT(SL_IPPROTO_TCP) },
{ MP_ROM_QSTR(MP_QSTR_IPPROTO_UDP), MP_ROM_INT(SL_IPPROTO_UDP) },
};
STATIC MP_DEFINE_CONST_DICT(mp_module_usocket_globals, mp_module_usocket_globals_table);
const mp_obj_module_t mp_module_usocket = {
.base = { &mp_type_module },
.globals = (mp_obj_dict_t*)&mp_module_usocket_globals,
};
MP_REGISTER_MODULE(MP_QSTR_usocket, mp_module_usocket, 1);
|
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
// Auto-generated Material-UI icon: "ViewStream" in the two-tone style.
export default createSvgIcon(
<React.Fragment><path fill="none" d="M0 0h24v24H0V0z" /><g><g opacity=".3"><path d="M6 13h13v3H6zM6 8h13v3H6z" /></g><path d="M4 6v12h17V6H4zm15 10H6v-3h13v3zm0-5H6V8h13v3z" /></g></React.Fragment>
, 'ViewStreamTwoTone');
|
inherit ROOM;
// Room setup: an open field outside the city walls (Xiangyang area).
void create()
{
// short/long descriptions shown to players (kept in the original language)
set("short", "城外空地");
set("long", @LONG
這是城外的空地,一些蒙古兵的奸細和箭手常在這帶遊弋,以尋
找他們感興趣的獵物,不時有幾枚冷箭從頭頂嗖地飛過,令人防不勝
防。
LONG );
// outdoor zone identifier used by the weather/area systems
set("outdoors", "xiangyang");
set("no_clean_up", 0);
set("step",2);
// flying out of this room is not allowed
set("no_fly",1);
set("exits", ([
"east" : __DIR__"east_out8",
"west" : __DIR__"east_out6",
]));
// world-map coordinates of this room
set("coor/x", -7790);
set("coor/y", -770);
set("coor/z", 0);
setup();
}
|
import React, { useEffect, useState } from "react";
import { useParams, useHistory } from "react-router-dom";
import axios from "axios";
import { axiosWithAuth } from "../../../utils/axiosWithAuth";
import Sharednav from "../Sharednav";
import { makeStyles } from "@material-ui/core/styles";
import TextField from "@material-ui/core/TextField";
import DateFnsUtils from "@date-io/date-fns";
import {
MuiPickersUtilsProvider,
KeyboardTimePicker,
KeyboardDatePicker,
} from "@material-ui/pickers";
// Material-UI styles: uniform spacing and width for every direct child of the form.
const useStyles = makeStyles((theme) => ({
root: {
"& > *": {
margin: theme.spacing(3),
width: "25ch",
},
},
}));
// Initial (empty) shape of the class being edited; mirrors the API payload.
const values = {
name: "",
type: "",
location: "",
start_time: "",
intensity: "",
status: "",
price: "",
duration: "",
max_class_size: "",
description: "",
};
// Initial value for the time picker state.  BUGFIX: this used to be
// `new Date().toLocaleDateString()` (a string), but submitNewValues calls
// `selectedTime.toLocaleTimeString()`, which only exists on Date objects —
// submitting the form without touching the time picker crashed.  Keep a Date.
const getTime = new Date();
// Edit form for an instructor's class: loads the existing class, lets the
// instructor change its fields, upload a new image (Cloudinary) and pick a
// new date/time, then PUTs the merged values back to the API.
const EditClass = () => {
// route params: instructor id and class id
const { id, c_id } = useParams();
const history = useHistory();
// current form values, seeded with the empty template until the fetch lands
const [classe, setClasse] = useState(values);
// secure URL of the uploaded class image (empty until an upload succeeds)
const [img, setImg] = useState("");
const classes = useStyles();
const [selectedTime, setSelectedTime] = useState(getTime);
const [selectedDate, setSelectedDate] = useState(
new Date("2014-08-18T21:11:54")
);
// GET /api/instructors/:id/classes/:class_id
useEffect(() => {
axiosWithAuth()
.get(`/api/instructors/${id}/classes/${c_id}`)
.then((res) => {
setClasse(res.data);
})
.catch((err) => {
console.log(err);
});
}, [setClasse, id, c_id]);
// Upload the chosen file to Cloudinary and remember its secure URL.
const uploadImage = (e) => {
const files = e.target.files[0];
const formData = new FormData();
formData.append("upload_preset", "pl2czq6m");
formData.append("file", files);
axios
.post(`https://api.cloudinary.com/v1_1/dedps0vtx/image/upload`, formData)
.then((res) => {
setImg(res.data.secure_url);
})
.catch((err) => [console.log(err)]);
};
const handleTimeChange = (date) => {
setSelectedTime(date);
};
// NOTE(review): stores a locale date STRING while the time handler stores a
// Date — presumably intentional for the submit formatting below; verify.
const handleDateChange = (date) => {
setSelectedDate(date.toLocaleDateString());
};
// Generic controlled-input handler: writes e.target.value under e.target.name.
const handleValueChange = (e) => {
const newObj = e.target.value;
setClasse({
...classe,
[e.target.name]: newObj,
});
};
// Combine the picked date and time, merge with the edited fields and PUT
// the result; on success navigate back to the instructor's account page.
const submitNewValues = (e) => {
e.preventDefault();
const datesFormatted = `${selectedDate} ${selectedTime.toLocaleTimeString()}`;
const {
name,
type,
location,
start_time,
intensity,
price,
duration,
max_class_size,
description,
image_url,
} = classe;
const newValues = {
name,
type,
location,
start_time: datesFormatted,
intensity,
price,
duration,
max_class_size,
description,
image_url: img,
};
// /api/instructors/:id/classes/:class_id
axiosWithAuth()
.put(`/api/instructors/${id}/classes/${c_id}`, newValues)
.then((res) => {
history.push(`/account/instructor/${id}`);
})
.catch((err) => {
console.log(err);
});
};
return (
<div className="EditClass">
<Sharednav />
<form className={classes.root} onSubmit={submitNewValues}>
<label htmlFor="name">
<TextField
type="text"
id="name"
name="name"
placeholder="class name"
value={classe.name}
onChange={handleValueChange}
/>
</label>
<label htmlFor="type">
<TextField
type="text"
id="type"
name="type"
placeholder="class type"
value={classe.type}
onChange={handleValueChange}
/>
</label>
<label htmlFor="location">
<TextField
type="text"
id="location"
name="location"
placeholder="class location"
value={classe.location}
onChange={handleValueChange}
/>
</label>
<label htmlFor="intensity">
<TextField
type="number"
id="intensity"
name="intensity"
placeholder="class intensity"
value={classe.intensity}
onChange={handleValueChange}
/>
</label>
<label htmlFor="status">
<TextField
type="text"
id="status"
name="status"
placeholder="class status 'optional' "
value={classe.status}
onChange={handleValueChange}
/>
</label>
<label htmlFor="price">
<TextField
type="number"
id="price"
name="price"
placeholder="class price"
value={classe.price}
onChange={handleValueChange}
/>
</label>
<label htmlFor="duration">
<TextField
type="number"
id="duration"
name="duration"
placeholder="class duration"
value={classe.duration}
onChange={handleValueChange}
/>
</label>
<label htmlFor="max_class_size">
<TextField
type="number"
id="max_class_size"
name="max_class_size"
placeholder="max class size"
value={classe.max_class_size}
onChange={handleValueChange}
/>
</label>
<label htmlFor="description">
<TextField
type="text"
id="description"
name="description"
placeholder="desctioption of class"
value={classe.description}
onChange={handleValueChange}
/>
</label>
<TextField
className="file"
type="file"
name="file"
onChange={uploadImage}
/>
<MuiPickersUtilsProvider utils={DateFnsUtils}>
<KeyboardTimePicker
className="time-picker"
margin="normal"
id="time-picker"
label="class start time"
value={selectedTime}
onChange={handleTimeChange}
KeyboardButtonProps={{
"aria-label": "change time",
}}
/>
<KeyboardDatePicker
margin="normal"
id="date-picker-dialog"
label="class date"
format="MM/dd/yyyy"
value={selectedDate}
onChange={handleDateChange}
KeyboardButtonProps={{
"aria-label": "change date",
}}
/>
</MuiPickersUtilsProvider>
<div className="btn-add-edit">
<button type="submit">Update</button>
</div>
</form>
</div>
);
};
|
import { h } from 'vue'
// Auto-generated Vue 3 icon component for the MDI "clock-time-eight" glyph.
export default {
name: "ClockTimeEight",
vendor: "Mdi",
type: "",
tags: ["clock","time","eight"],
render() {
// render the raw SVG path via innerHTML; fill follows currentColor
return h(
"svg",
{"xmlns":"http://www.w3.org/2000/svg","width":"24","height":"24","viewBox":"0 0 24 24","class":"v-icon","fill":"currentColor","data-name":"mdi-clock-time-eight","innerHTML":"<path d='M12 2C6.5 2 2 6.5 2 12C2 17.5 6.5 22 12 22C17.5 22 22 17.5 22 12S17.5 2 12 2M7.7 15.5L7 14.2L11 11.9V7H12.5V12.8L7.7 15.5Z' />"},
)
}
}
|
// "-discrim" command: list up to 5 cached users sharing a discriminator —
// the one given as an argument, or the author's own when no argument.
client.on('message', message => {
if(message.content.startsWith('-discrim') ) {
// guild-only command
if(!message.channel.guild) return message.reply('** This command only for servers **')
var args = message.content.split(" ").slice(1);
let sent = 0
let count = 1;
if(args){
client.users.filter(u => u.discriminator == args[0]).forEach(u => {
// cap the output at 5 messages
if(sent > 4){
return
}
sent = sent + 1
message.channel.send(`
** ${count}? ${u.tag}**
`)
count++;
})
}
}
// bare "-discrim": search for the author's own discriminator
if(message.content ===('-discrim') ) {
if(!message.channel.guild) return message.reply('** This command only for servers **')
let sent = 0
let count = 1;
client.users.filter(u => u.discriminator == message.author.discriminator).forEach(u => {
if(sent > 4){
return
}
sent = sent + 1
message.channel.send(`
** ${count}? ${u.tag}**
`)
count++;
})
}
});
|
import { useAllPodsByChainId } from 'lib/hooks/useAllPodsByChainId'

/**
 * Returns every pod across all chains, sorted by total prize value in USD,
 * largest first. While the underlying query has not resolved, the raw
 * (unsorted) payload is passed through unchanged.
 */
export const usePrizeSortedPods = () => {
  const { data: pods, isFetched, ...remainder } = useAllPodsByChainId()

  if (!isFetched) {
    return { data: pods, isFetched, ...remainder }
  }

  // Flatten the per-chain arrays into a single list, then order by prize size.
  const byPrizeDesc = (a, b) =>
    Number(b.prizePool.prize.totalValueUsd) - Number(a.prizePool.prize.totalValueUsd)
  const sortedPods = Object.values(pods).flat().sort(byPrizeDesc)

  return { data: sortedPods, isFetched, ...remainder }
}
|
/* @flow */
import isEqual from 'lodash.isequal';
import { REHYDRATE } from 'redux-persist/constants';
import type { ChatState, Action } from '../types';
import config from '../config';
import {
APP_REFRESH,
LOGOUT,
LOGIN_SUCCESS,
ACCOUNT_SWITCH,
SWITCH_NARROW,
MESSAGE_FETCH_COMPLETE,
EVENT_NEW_MESSAGE,
EVENT_REACTION_ADD,
EVENT_REACTION_REMOVE,
EVENT_UPDATE_MESSAGE,
} from '../actionConstants';
import { homeNarrow, isMessageInNarrow, getNarrowFromMessage } from '../utils/narrow';
import chatUpdater from './chatUpdater';
import { getMessagesById } from '../selectors';
import { NULL_ARRAY, NULL_OBJECT } from '../nullObjects';
// Chat slice: the currently-active narrow plus all fetched messages, keyed by
// the JSON-stringified narrow they belong to.
const initialState: ChatState = {
  narrow: homeNarrow,
  messages: NULL_OBJECT,
};

export default (state: ChatState = initialState, action: Action) => {
  switch (action.type) {
    // Any session boundary (refresh, login/logout, account switch) drops all
    // cached chat state.
    case APP_REFRESH:
    case LOGOUT:
    case LOGIN_SUCCESS:
    case ACCOUNT_SWITCH:
      return initialState;

    // On rehydration, restore a startup narrow (e.g. from a notification tap)
    // if one was recorded in config.
    case REHYDRATE:
      if (!config.startup.narrow) return state;
      return {
        ...state,
        narrow: config.startup.narrow,
      };

    case SWITCH_NARROW: {
      return {
        ...state,
        narrow: action.narrow,
      };
    }

    case MESSAGE_FETCH_COMPLETE: {
      if (action.messages.length === 0) {
        return state;
      }
      const key = JSON.stringify(action.narrow);
      const messages = state.messages[key] || NULL_ARRAY;
      const messagesById = getMessagesById(state);
      // replaceExisting: keep the identical cached object where possible so
      // reference equality is preserved for unchanged messages; otherwise
      // merge only unseen messages into the existing list, sorted by id.
      const newMessages = action.replaceExisting
        ? action.messages.map(
            item =>
              messagesById[item.id]
                ? isEqual(messagesById[item.id], item) ? messagesById[item.id] : item
                : item,
          )
        : action.messages
            .filter(x => !messagesById[x.id])
            .concat(messages)
            .sort((a, b) => a.id - b.id);
      return {
        ...state,
        messages: {
          ...state.messages,
          [key]: newMessages,
        },
      };
    }

    case EVENT_REACTION_ADD:
      return chatUpdater(state, action.messageId, oldMessage => ({
        ...oldMessage,
        reactions: oldMessage.reactions.concat({
          emoji_name: action.emoji,
          user: action.user,
        }),
      }));

    case EVENT_REACTION_REMOVE:
      return chatUpdater(state, action.messageId, oldMessage => ({
        ...oldMessage,
        reactions: oldMessage.reactions.filter(
          x => !(x.emoji_name === action.emoji && x.user.email === action.user.email),
        ),
      }));

    case EVENT_NEW_MESSAGE: {
      let stateChange = false;
      // Append the new message to every cached narrow it belongs to, but only
      // when that narrow is caught up on the newer side and doesn't already
      // contain the message (avoids duplicates from races with fetches).
      let newState = {
        ...state,
        messages: Object.keys(state.messages).reduce((msg, key) => {
          const isInNarrow = isMessageInNarrow(action.message, JSON.parse(key), action.ownEmail);
          if (
            isInNarrow &&
            (action.caughtUp[key] && action.caughtUp[key].newer) &&
            state.messages[key].find(item => action.message.id === item.id) === undefined
          ) {
            stateChange = true;
            msg[key] = [...state.messages[key], action.message];
          } else {
            msg[key] = state.messages[key];
          }
          return msg;
        }, {}),
      };
      const { message } = action;
      const key = JSON.stringify(getNarrowFromMessage(message, action.ownEmail));
      if (!stateChange && state.messages[key] === undefined) {
        // new message is in new narrow in which we don't have any message
        stateChange = true;
        newState = {
          ...state,
          messages: {
            ...state.messages,
            [key]: [action.message],
          },
        };
      }
      // Return the original state when nothing was added, preserving
      // reference equality for connected components.
      return stateChange ? newState : state;
    }

    // Apply a server-side edit: update content/subject and prepend the prior
    // version to the message's edit history.
    case EVENT_UPDATE_MESSAGE:
      return chatUpdater(state, action.message_id, oldMessage => ({
        ...oldMessage,
        content: action.rendered_content || oldMessage.content,
        subject: action.subject || oldMessage.subject,
        subject_links: action.subject_links || oldMessage.subject_links,
        edit_history: [
          action.orig_rendered_content
            ? action.orig_subject
              ? {
                  prev_rendered_content: action.orig_rendered_content,
                  prev_subject: oldMessage.subject,
                  timestamp: action.edit_timestamp,
                  prev_rendered_content_version: action.prev_rendered_content_version,
                  user_id: action.user_id,
                }
              : {
                  prev_rendered_content: action.orig_rendered_content,
                  timestamp: action.edit_timestamp,
                  prev_rendered_content_version: action.prev_rendered_content_version,
                  user_id: action.user_id,
                }
            : {
                prev_subject: oldMessage.subject,
                timestamp: action.edit_timestamp,
                user_id: action.user_id,
              },
          ...(oldMessage.edit_history || NULL_ARRAY),
        ],
        last_edit_timestamp: action.edit_timestamp,
      }));

    default:
      return state;
  }
};
|
/* *
 * fatfs_sd.h — MMC/SD card access over SPI for FatFs. *
 * *
 * Author: Cezary Sobczak https://github.com/Cezarus27 *
 * Date: 04.09.2020 *
 * Reference article: elm-chan.org/docs/mmc/mmc_e.html *
 * */
/* NOTE(review): DSTATUS/DRESULT/BYTE/DWORD/UINT below come from the FatFs
 * headers (diskio.h / integer.h); include them before this header — confirm
 * the project's include order. */
#ifndef INC_FATFS_SD_H_
#define INC_FATFS_SD_H_
/* Definitions for MMC/SDC command (0x40 = start bit + transmission bit) */
#define CMD0 (0x40+0) /* GO_IDLE_STATE */
#define CMD1 (0x40+1) /* SEND_OP_COND */
#define CMD8 (0x40+8) /* SEND_IF_COND */
#define CMD9 (0x40+9) /* SEND_CSD */
#define CMD10 (0x40+10) /* SEND_CID */
#define CMD12 (0x40+12) /* STOP_TRANSMISSION */
#define CMD13 (0x40+13) /* SD_STATUS (SDC) */
#define CMD16 (0x40+16) /* SET_BLOCKLEN */
#define CMD17 (0x40+17) /* READ_SINGLE_BLOCK */
#define CMD18 (0x40+18) /* READ_MULTIPLE_BLOCK */
#define CMD23 (0x40+23) /* SET_BLOCK_COUNT */
#define CMD24 (0x40+24) /* WRITE_BLOCK */
#define CMD25 (0x40+25) /* WRITE_MULTIPLE_BLOCK */
#define CMD41 (0x40+41) /* SEND_OP_COND (ACMD) */
#define CMD55 (0x40+55) /* APP_CMD */
#define CMD58 (0x40+58) /* READ_OCR */

/* FatFs disk-I/O glue implemented by the .c counterpart of this header. */
DSTATUS SD_disk_initialize (BYTE pdrv);
DSTATUS SD_disk_status (BYTE pdrv);
DRESULT SD_disk_read (BYTE pdrv, BYTE* buff, DWORD sector, UINT count);
DRESULT SD_disk_write (BYTE pdrv, const BYTE* buff, DWORD sector, UINT count);
DRESULT SD_disk_ioctl (BYTE pdrv, BYTE cmd, void* buff);

/* Timeout (loop iterations / ms, per implementation) for SPI transfers. */
#define SPI_TIMEOUT 1000

/* Card types detected during initialization. */
#define SDv2_BLOCK 1
#define SDv2_BYTE 2
#define SDv1 3
#define MMCv3 4

#endif /* INC_FATFS_SD_H_ */
|
/*******************************************************************\
Module: Command line interpretation for goto-cc
Author: Daniel Kroening
Date: April 2010
\*******************************************************************/
/// \file
/// Command line interpretation for goto-cc
#ifndef CPROVER_GOTO_CC_GOTO_CC_CMDLINE_H
#define CPROVER_GOTO_CC_GOTO_CC_CMDLINE_H
#include <util/cmdline.h>
/// Shared command-line base for the various goto-cc front-end modes.
/// Extends cmdlinet with multi-value option storage and a parallel copy of
/// argv that remembers which arguments were input file names.
class goto_cc_cmdlinet:public cmdlinet
{
public:
  ~goto_cc_cmdlinet();

  using cmdlinet::parse;
  /// Parse \p argv; each concrete mode (gcc, cl, ld, ...) implements this.
  virtual bool parse(int argc, const char **argv)=0;

  /// \return true iff \p option appears in the NULL-terminated \p list.
  static bool in_list(const char *option, const char **list);
  /// As in_list, but matches by prefix; the matched prefix is written to
  /// \p prefix.
  static bool prefix_in_list(
    const char *option,
    const char **list,
    std::string &prefix);

  // never fails, will add if not found
  std::size_t get_optnr(const std::string &option);

  /// Set option \p option to \p value.
  void set(const std::string &opt, const char *value) override
  {
    set(opt, std::string{value});
  }

  /// Record \p value for option \p opt; repeated calls accumulate values.
  void set(const std::string &opt, const std::string &value) override
  {
    std::size_t nr=get_optnr(opt);
    options[nr].isset=true;
    options[nr].values.push_back(value);
  }

  /// Set/clear the boolean flag \p opt.
  void set(const std::string &opt, bool value = true) override
  {
    options[get_optnr(opt)].isset = value;
  }

  // This lets you distinguish input file name arguments
  // from others, but is otherwise identical to the
  // original command line.
  struct argt
  {
  public:
    argt():is_infile_name(false) { }
    explicit argt(const std::string &_arg):is_infile_name(false), arg(_arg) { }
    bool is_infile_name;
    std::string arg;
  };

  typedef std::list<argt> parsed_argvt;
  parsed_argvt parsed_argv;

  /// \return true iff at least one parsed argument is an input file name.
  bool have_infile_arg() const
  {
    for(parsed_argvt::const_iterator
        it=parsed_argv.begin(); it!=parsed_argv.end(); it++)
      if(it->is_infile_name)
        return true;
    return false;
  }

  // Name of the temporary file used when input arrives on stdin.
  std::string stdin_file;

protected:
  /// Append a non-input-file argument to parsed_argv.
  void add_arg(const std::string &arg)
  {
    parsed_argv.push_back(argt(arg));
  }

  void add_infile_arg(const std::string &arg);
};
#endif // CPROVER_GOTO_CC_GOTO_CC_CMDLINE_H
|
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once

#include "CoreMinimal.h"

// Physical-surface and trace-channel aliases used by the gameplay code.
// NOTE: "FLESHDEFAULG" is a historical typo kept for backward compatibility;
// new code should use the correctly-spelled alias below.
#define SURFACE_FLESHDEFAULG SurfaceType1
#define SURFACE_FLESHDEFAULT SURFACE_FLESHDEFAULG
#define SURFACE_FLESHVULNERABLE SurfaceType2
#define WEAPONCOLLISIONCHANNEL ECC_GameTraceChannel1
|
/******************************************************************************
QtAV: Multimedia framework based on Qt and FFmpeg
Copyright (C) 2012-2016 Wang Bin <wbsecg1@gmail.com>
* This file is part of QtAV (from 2014)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
******************************************************************************/
#ifndef QTAV_SURFACEINTEROPVAAPI_H
#define QTAV_SURFACEINTEROPVAAPI_H
#include <QtCore/qglobal.h>
#include "vaapi_helper.h"
#define VA_X11_INTEROP 1
#ifndef QT_NO_OPENGL
#include <QtCore/QMap>
#include <QtCore/QSharedPointer>
#include "QtAV/SurfaceInterop.h"
namespace QtAV {
namespace vaapi {
// Runtime probes for the available EGL interop paths.
bool checkEGL_DMA();
bool checkEGL_Pixmap();

// Strategy interface for mapping a decoded VA-API surface into an OpenGL
// texture; concrete subclasses use GLX, X11 pixmaps, or EGL dma-buf.
class InteropResource
{
public:
    virtual ~InteropResource() {}
    // egl supports yuv extension
    /*!
     * \brief map
     * \param surface va decoded surface
     * \param tex opengl texture
     * \param w frame width(visual width) without alignment, <= dxva surface width
     * \param h frame height(visual height)
     * \param plane useless now
     * \return true if success
     */
    virtual bool map(const surface_ptr &surface, GLuint tex, int w, int h, int plane) = 0;
    // Default unmap is a no-op success; subclasses override when they need
    // per-frame cleanup.
    virtual bool unmap(const surface_ptr &surface, GLuint tex) {
        Q_UNUSED(surface);
        Q_UNUSED(tex);
        return true;
    }
};
typedef QSharedPointer<InteropResource> InteropResourcePtr;
// Per-frame interop object handed to the video renderer; delegates GL mapping
// to the shared InteropResource and can also copy the frame back to host
// memory via mapToHost.
class SurfaceInteropVAAPI Q_DECL_FINAL: public VideoSurfaceInterop
{
public:
    SurfaceInteropVAAPI(const InteropResourcePtr& res) : frame_width(0), frame_height(0), m_resource(res) {}
    void setSurface(const surface_ptr& surface, int w, int h); // use surface->width/height if w/h is 0
    void* map(SurfaceType type, const VideoFormat& fmt, void* handle, int plane) Q_DECL_OVERRIDE;
    void unmap(void *handle) Q_DECL_OVERRIDE;
protected:
    // Copy the surface into host memory (used when GL interop is unavailable).
    void* mapToHost(const VideoFormat &format, void *handle, int plane);
private:
    int frame_width, frame_height;
    // NOTE: must ensure va-x11/va-glx is unloaded after all va calls(don't know why, but it's true), for example vaTerminate(), to avoid crash
    // so declare InteropResourcePtr first then surface_ptr. InteropResource (va-xxx.so) will be destroyed later than surface_t (vaTerminate())
    // also call vaInitialize() before vaTerminate() can avoid such crashes. Don't know why.
    InteropResourcePtr m_resource;
    surface_ptr m_surface;
};
// load/resolve symbols only once in decoder and pass a VAAPI_XXX ptr
// or use pool
// GLX-based interop: associates one GLX surface per destination texture.
class GLXInteropResource Q_DECL_FINAL: public InteropResource, protected VAAPI_GLX
{
public:
    bool map(const surface_ptr &surface, GLuint tex, int w, int h, int) Q_DECL_OVERRIDE;
private:
    // Lazily create/fetch the GLX surface bound to texture `tex` on `dpy`.
    surface_glx_ptr surfaceGLX(const display_ptr& dpy, GLuint tex);
    QMap<GLuint,surface_glx_ptr> glx_surfaces; // render to different texture. surface_glx_ptr is created with texture
};
class X11;
// X11-pixmap-based interop: renders the VA surface to a pixmap sized
// width x height and binds it to the GL texture.
class X11InteropResource Q_DECL_FINAL: public InteropResource, protected VAAPI_X11
{
public:
    X11InteropResource();
    ~X11InteropResource();
    bool map(const surface_ptr &surface, GLuint tex, int w, int h, int) Q_DECL_OVERRIDE;
    bool unmap(const surface_ptr &surface, GLuint tex) Q_DECL_OVERRIDE;
private:
    // (Re)create the backing pixmap when the frame size changes.
    bool ensurePixmaps(int w, int h);
    Display *xdisplay;
    int width, height;
    X11 *x11;
};
#if QTAV_HAVE(EGL_CAPI)
// libva-egl is dead and not complete. here we use dma
class EGL;
// EGL dma-buf based interop (zero-copy import of the VA surface).
class EGLInteropResource Q_DECL_FINAL : public InteropResource
{
public:
    EGLInteropResource();
    ~EGLInteropResource();
    bool map(const surface_ptr &surface, GLuint tex, int w, int h, int plane) Q_DECL_OVERRIDE;
    bool unmap(const surface_ptr &surface, GLuint tex) Q_DECL_OVERRIDE;
private:
    // Resolve EGL extension entry points; returns false if unsupported.
    bool ensure();
    void destroy(VADisplay va_dpy); //destroy dma buffer and egl images
    uintptr_t vabuf_handle;
    VAImage va_image;
    QMap<GLuint, int> mapped;
    EGL *egl;
};
#endif //QTAV_HAVE(EGL_CAPI)
} //namespace vaapi
} //namespace QtAV
#endif //QT_NO_OPENGL
#endif // QTAV_SURFACEINTEROPVAAPI_H
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from contextlib import contextmanager
import os
import re
from collections import namedtuple
# Components of a parsed FS URL, e.g. "ftp://user:pw@host!/path":
# protocol ("ftp"), username, password, resource ("host"), path ("/path").
# username/password are None when no credentials were supplied.
ParseResult = namedtuple(
    'ParseResult',
    [
        'protocol',
        'username',
        'password',
        'resource',
        'path'
    ]
)
# Matches "<protocol>://[<user>:<pass>@]<resource>[!<path>]".
# Groups: 1=protocol, 2=credentials, 3=resource (with creds),
# 4=resource (without creds), 5=optional path after '!'.
_RE_FS_URL = re.compile(r'''
^
(.*?)
:\/\/
(?:
(?:(.*?)@(.*?))
|(.*?)
)
(?:
!(.*?)$
)*$
''', re.VERBOSE)
@contextmanager
def manage_fs(fs_url, create=False, writeable=True, cwd='.'):
    '''
    A context manager opens / closes a filesystem.

    :param fs_url: A FS instance or a FS URL.
    :type fs_url: str or FS
    :param bool create: If ``True``, then create the filesytem if it
        doesn't already exist.
    :param bool writeable: If ``True``, then the filesystem should be
        writeable.
    :param str cwd: The current working directory, if opening a
        :class:`~fs.osfs.OSFS`.

    Sometimes it is convenient to be able to pass either a FS object
    *or* an FS URL to a function. This context manager handles the
    required logic for that.

    Here's an example::

        def print_ls(list_fs):
            """List a directory."""
            with manage_fs(list_fs) as fs:
                print(" ".join(fs.listdir()))

    This function may be used in two ways. You may either pass either a
    ``str``, as follows::

        print_list('zip://projects.zip')

    Or, an FS instance::

        from fs.osfs import OSFS
        projects_fs = OSFS('~/')
        print_list(projects_fs)

    '''
    from .base import FS
    if isinstance(fs_url, FS):
        # Caller owns the FS instance; don't close it here.
        yield fs_url
    else:
        _fs = open_fs(
            fs_url,
            create=create,
            writeable=writeable,
            cwd=cwd
        )
        # The original had a redundant bare `except: raise` clause;
        # try/finally alone already guarantees the FS we opened is closed.
        try:
            yield _fs
        finally:
            _fs.close()
class ParseError(ValueError):
    """Raised when attempting to parse an invalid FS URL (see :func:`parse`)."""


class OpenerError(Exception):
    """Base class for opener related errors."""


class Unsupported(OpenerError):
    """May be raised by opener if the opener fails to open a FS."""
def parse(fs_url):
    """
    Parse a Filesystem URL and return a :class:`ParseResult`, or raise
    :class:`ParseError` (subclass of ValueError) if the FS URL is
    not valid.

    :param fs_url: A filesystem URL
    :type fs_url: str
    :rtype: :class:`ParseResult`

    """
    match = _RE_FS_URL.match(fs_url)
    if match is None:
        raise ParseError('{!r} is not a fs2 url'.format(fs_url))

    fs_name, credentials, url1, url2, path = match.groups()
    # The regex yields the resource in group 3 (url1) when credentials are
    # present, otherwise in group 4 (url2).
    if credentials:
        username, _, password = credentials.partition(':')
        url = url1
    else:
        username = None
        password = None
        url = url2
    return ParseResult(
        fs_name,
        username,
        password,
        url,
        path
    )
class Opener(object):
    """
    The opener base class.

    An opener is responsible for opening a filesystems from one or more
    protocols. A list of supported protocols is supplied in a class
    attribute called `protocols`.

    Openers should be registered with a :class:`~fs.opener.Registry`
    object, which picks an appropriate opener object for a given FS URL.

    """
    # Protocol names handled by this opener; subclasses must override.
    protocols = []

    def __repr__(self):
        return "<opener {!r}>".format(self.protocols)

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        """
        Open a filesystem object from a FS URL.

        Abstract: subclasses must override; this base implementation
        intentionally does nothing (returns None).

        :param str fs_url: A filesystem URL
        :param parse_result: A parsed filesystem URL.
        :type parse_result: :class:`ParseResult`
        :param bool writeable: True if the filesystem must be writeable.
        :param bool create: True if the filesystem should be created if
            it does not exist.
        :param str cwd: The current working directory (generally only
            relevant for OS filesystems).
        :returns: :class:`~fs.base.FS` object
        """
class Registry(object):
    """
    A registry for `Opener` instances.
    """

    def __init__(self, default_opener='osfs'):
        """
        Create a registry object.

        :param default_opener: The protocol to use, if one is not
            supplied. The default is to use 'osfs', so that the FS URL
            is treated as a system path if no protocol is given.
        """
        # NOTE(review): `open`/`open_fs` take their own `default_protocol`
        # parameter and do not consult this attribute — confirm intent.
        self.default_opener = default_opener
        self.protocols = {}

    def install(self, opener):
        """
        Install an opener.

        :param opener: An :class:`Opener` instance, or a callable
            that returns an opener instance.
        :returns: the ``opener`` argument, unchanged.

        May be used as a class decorator. For example::

            registry = Registry()

            @registry.install
            class ArchiveOpener(Opener):
                protocols = ['zip', 'tar']

        """
        _opener = opener if isinstance(opener, Opener) else opener()
        assert _opener.protocols, "must list one or more protocols"
        for protocol in _opener.protocols:
            self.protocols[protocol] = _opener
        # BUG FIX: return the original argument so that decorator usage
        # (`@registry.install` on a class) leaves the class name bound to the
        # class instead of rebinding it to None.
        return opener

    def open(self,
             fs_url,
             writeable=True,
             create=False,
             cwd=".",
             default_protocol='osfs'):
        """
        Open a filesystem from a FS URL. Returns a tuple of a filesystem
        object and a path. If there is no path in the FS URL, the path
        value will be ``None``.

        :param str fs_url: A filesystem URL
        :param bool writeable: True if the filesystem must be writeable.
        :param bool create: True if the filesystem should be created if
            it does not exist.
        :param cwd: The current working directory.
        :type cwd: str or None
        :param str default_protocol: Protocol prepended when ``fs_url``
            contains no ``://`` separator (i.e. is a plain path).
        :rtype: Tuple of ``(<filesystem>, <path from url>)``

        """
        if '://' not in fs_url:
            # URL may just be a path
            fs_url = "{}://{}".format(default_protocol, fs_url)

        parse_result = parse(fs_url)
        protocol = parse_result.protocol
        open_path = parse_result.path

        opener = self.protocols.get(protocol, None)

        if not opener:
            raise Unsupported(
                "protocol '{}' is not supported".format(protocol)
            )

        open_fs = opener.open_fs(
            fs_url,
            parse_result,
            writeable,
            create,
            cwd
        )
        return open_fs, open_path

    def open_fs(self,
                fs_url,
                writeable=True,
                create=False,
                cwd=".",
                default_protocol='osfs'):
        """
        Open a filesystem object from a FS URL (ignoring the path
        component). If ``fs_url`` is already an FS instance it is
        returned unchanged.

        :param str fs_url: A filesystem URL
        :param bool writeable: True if the filesystem must be writeable.
        :param bool create: True if the filesystem should be created if
            it does not exist.
        :param str cwd: The current working directory (generally only
            relevant for OS filesystems).
        :param str default_protocol: The protocol to use if one is not
            supplied in the FS URL (defaults to ``"osfs"``).
        :returns: :class:`~fs.base.FS` object
        """
        from .base import FS
        if isinstance(fs_url, FS):
            _fs = fs_url
        else:
            _fs, _path = self.open(
                fs_url,
                writeable=writeable,
                create=create,
                cwd=cwd,
                default_protocol=default_protocol
            )
        return _fs
# Module-level singleton registry; its bound methods are the module's public
# `open_fs` and `open` functions (note: `open` shadows the builtin here).
registry = Registry()
open_fs = registry.open_fs
open = registry.open
@registry.install
class OSFSOpener(Opener):
    """Opens OS filesystems for ``file://`` and ``osfs://`` URLs."""
    protocols = ['file', 'osfs']

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        from .osfs import OSFS
        # Resolve the resource relative to the supplied working directory.
        resource_path = os.path.normpath(os.path.join(cwd, parse_result.resource))
        return OSFS(resource_path, create=create)
@registry.install
class TempOpener(Opener):
    """Opens temporary filesystems for ``temp://`` URLs."""
    protocols = ['temp']

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        from .tempfs import TempFS
        # The URL resource (if any) is used as the temp-dir identifier.
        return TempFS(identifier=parse_result.resource)
@registry.install
class MemOpener(Opener):
    """Opens in-memory filesystems for ``mem://`` URLs."""
    protocols = ['mem']

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        from .memoryfs import MemoryFS
        return MemoryFS()
@registry.install
class ZipOpener(Opener):
    """Opens zip-archive filesystems for ``zip://`` URLs."""
    protocols = ['zip']

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        from .zipfs import ZipFS
        # `create` selects write mode (new archive) vs read mode.
        return ZipFS(parse_result.resource, write=create)
@registry.install
class FTPOpener(Opener):
    """Opens FTP filesystems for ``ftp://`` URLs."""
    protocols = ['ftp']

    def open_fs(self, fs_url, parse_result, writeable, create, cwd):
        from .ftpfs import FTPFS
        # resource looks like "host[:port]/optional/dir"
        host_part, _, dir_path = parse_result.resource.partition('/')
        host, _, port_text = host_part.partition(':')
        port = int(port_text) if port_text.isdigit() else 21
        fs_instance = FTPFS(
            host,
            port=port,
            user=parse_result.username,
            passwd=parse_result.password,
        )
        # Scope the filesystem to the sub-directory when one was given.
        if dir_path:
            fs_instance = fs_instance.opendir(dir_path)
        return fs_instance
|
/*! Copyright (c) 2018-2021 Nicolas Barriquand <nicolas.barriquand@outlook.fr>. MIT licensed. */
/**
* This module is a simple wrapper of the poppy-robot-core module in order to:
*
* - Automatically manage a set of common flags dedicated to robot connections via node cli,
* - Read poppy rc file created neither by hand or using the cli tool.
*
* The flags in order to set the connection to poppy:
*
* option | desc | value | default
* --- | --- | --- | ---
* -i/--ip | Set the Poppy hostname/ip | string | poppy.local
* -p/--port | Set the port to the REST API served by the http server on Poppy | integer | 8080
*
* Note it re-exports all the exported features of interest of the poppy-robot-core module.
*
* @module poppy-robot-cli
* @typicalname P
* @version 8.0.3
* @see {@link https://github.com/nbarikipoulos/poppy-robot-core.git}
*/
'use strict'

const yargs = require('yargs')

const {
  createScript, Script, Poppy,
  ExtMotorRequest, RawMotorRequest, PoppyRequestHandler
} = require('poppy-robot-core')

const { addPoppyConnectionOptions } = require('./cli/cli-helper')
const { createPoppy, createRequestHandler, createDescriptor } = require('./lib/ext-poppy-factory')

// ////////////////////////////////
// Automatically add CLI options for
// Poppy configuration to any script
// ////////////////////////////////
// NOTE: this runs at require time — merely importing this module registers
// the connection flags (-i/--ip, -p/--port) and parses process.argv.

yargs
  .locale('en')
  .alias('h', 'help')
  .help('h')

// Add common cli options for poppy settings
addPoppyConnectionOptions()

yargs
  .wrap(yargs.terminalWidth())
  .parse()

// ////////////////////////////////
// ////////////////////////////////
// Public API
// ////////////////////////////////
// ////////////////////////////////

// Re-export poppy-robot-core features plus the rc-file-aware factories.
module.exports = {
  createPoppy,
  createRequestHandler,
  createScript,
  createDescriptor,
  Script,
  Poppy,
  ExtMotorRequest,
  RawMotorRequest,
  PoppyRequestHandler
}
|
var debug = require('debug')('bindings');
var events = require('events');
var util = require('util');
var AclStream = require('./acl-stream');
var Gatt = require('./gatt');
var Gap = require('./gap');
var Hci = require('./hci');
// Bindings between noble's public API and the raw HCI/GAP layers.
// Per-peripheral state is keyed by uuid (the address with ':' stripped);
// per-connection state is keyed by the HCI connection handle.
var NobleBindings = function() {
  this._state = null;

  this._addresses = {};
  this._addresseTypes = {};
  this._connectable = {};

  // Only one LE connection attempt may be pending at a time; extra requests
  // wait in _connectionQueue.
  this._pendingConnection = false;
  this._connectionQueue = [];

  // _handles maps uuid -> handle AND handle -> uuid (bidirectional).
  this._handles = {};
  this._gatts = {};
  this._aclStreams = {};

  this._hci = new Hci();
  this._gap = new Gap(this._hci);
};

util.inherits(NobleBindings, events.EventEmitter);
// Begin scanning; the optional service-uuid filter is applied in onDiscover.
NobleBindings.prototype.startScanning = function(serviceUuids, allowDuplicates) {
  this._scanServiceUuids = serviceUuids || [];

  this._gap.startScanning(allowDuplicates);
};

NobleBindings.prototype.stopScanning = function() {
  this._gap.stopScanning();
};

// Initiate an LE connection, or queue it if one is already in flight
// (the queue is drained in onLeConnComplete).
NobleBindings.prototype.connect = function(peripheralUuid) {
  var address = this._addresses[peripheralUuid];
  var addressType = this._addresseTypes[peripheralUuid];

  if (!this._pendingConnection) {
    this._pendingConnection = true;

    this._hci.createLeConn(address, addressType);
  } else {
    this._connectionQueue.push(peripheralUuid);
  }
};

NobleBindings.prototype.disconnect = function(peripheralUuid) {
  this._hci.disconnect(this._handles[peripheralUuid]);
};

NobleBindings.prototype.updateRssi = function(peripheralUuid) {
  this._hci.readRssi(this._handles[peripheralUuid]);
};
// Wire up GAP/HCI event handlers, process signal handlers, and start the HCI
// layer. Must be called once before any other method.
NobleBindings.prototype.init = function() {
  this.onSigIntBinded = this.onSigInt.bind(this);
  process.on('SIGINT', this.onSigIntBinded);
  process.on('exit', this.onExit.bind(this));

  this._gap.on('scanStart', this.onScanStart.bind(this));
  this._gap.on('scanStop', this.onScanStop.bind(this));
  this._gap.on('discover', this.onDiscover.bind(this));

  this._hci.on('stateChange', this.onStateChange.bind(this));
  this._hci.on('leConnComplete', this.onLeConnComplete.bind(this));
  this._hci.on('leConnUpdateComplete', this.onLeConnUpdateComplete.bind(this));
  this._hci.on('rssiRead', this.onRssiRead.bind(this));
  this._hci.on('disconnComplete', this.onDisconnComplete.bind(this));
  this._hci.on('encryptChange', this.onEncryptChange.bind(this));
  this._hci.on('aclDataPkt', this.onAclDataPkt.bind(this));

  this._hci.init();
};

NobleBindings.prototype.onSigInt = function() {
  var sigIntListeners = process.listeners('SIGINT');

  if (sigIntListeners[sigIntListeners.length - 1] === this.onSigIntBinded) {
    // we are the last listener, so exit
    // this will trigger onExit, and clean up
    process.exit(1);
  }
};

// Best-effort cleanup on process exit: stop scanning and tear down every
// open connection.
NobleBindings.prototype.onExit = function() {
  this.stopScanning();

  for (var handle in this._aclStreams) {
    this._hci.disconnect(handle);
  }
};
// Forward adapter state changes, with console hints for the two common
// misconfigurations (missing privileges, no BLE support).
NobleBindings.prototype.onStateChange = function(state) {
  if (this._state === state) {
    return;
  }
  this._state = state;

  if (state === 'unauthorized') {
    console.log('noble warning: adapter state unauthorized, please run as root or with sudo');
    console.log('               or see README for information on running without root/sudo:');
    console.log('               https://github.com/sandeepmistry/noble#running-on-linux');
  } else if (state === 'unsupported') {
    console.log('noble warning: adapter does not support Bluetooth Low Energy (BLE, Bluetooth Smart).');
    console.log('               Try to run with environment variable:');
    console.log('               [sudo] NOBLE_HCI_DEVICE_ID=x node ...');
  }

  this.emit('stateChange', state);
};

NobleBindings.prototype.onScanStart = function() {
  this.emit('scanStart');
};

NobleBindings.prototype.onScanStop = function() {
  this.emit('scanStop');
};

// Apply the service-uuid filter from startScanning; on a match, cache the
// peripheral's address info and emit 'discover'.
NobleBindings.prototype.onDiscover = function(status, address, addressType, connectable, advertisement, rssi) {
  if (this._scanServiceUuids === undefined) {
    return;
  }

  var serviceUuids = advertisement.serviceUuids;
  // An empty filter means "accept everything".
  var hasScanServiceUuids = (this._scanServiceUuids.length === 0);

  if (!hasScanServiceUuids) {
    for (var i in serviceUuids) {
      hasScanServiceUuids = (this._scanServiceUuids.indexOf(serviceUuids[i]) !== -1);

      if (hasScanServiceUuids) {
        break;
      }
    }
  }

  if (hasScanServiceUuids) {
    var uuid = address.split(':').join('');
    this._addresses[uuid] = address;
    this._addresseTypes[uuid] = addressType;
    this._connectable[uuid] = connectable;

    this.emit('discover', uuid, address, addressType, connectable, advertisement, rssi);
  }
};
// HCI LE-connection-complete handler: on success, set up the ACL stream and
// GATT client for the new connection, register all GATT event forwarding,
// and start the MTU exchange; on failure, emit 'connect' with an error.
// Finally, kick off the next queued connection attempt, if any.
NobleBindings.prototype.onLeConnComplete = function(status, handle, role, addressType, address, interval, latency, supervisionTimeout, masterClockAccuracy) {
  var uuid = address.split(':').join('').toLowerCase();

  var error = null;

  if (status === 0) {
    var aclStream = new AclStream(this._hci, handle, this._hci.addressType, this._hci.address, addressType, address);
    var gatt = new Gatt(address, aclStream);

    // Index the gatt by both uuid and handle; map uuid<->handle both ways.
    this._gatts[uuid] = this._gatts[handle] = gatt;
    this._aclStreams[handle] = aclStream;
    this._handles[uuid] = handle;
    this._handles[handle] = uuid;

    this._gatts[handle].on('mtu', this.onMtu.bind(this));
    this._gatts[handle].on('servicesDiscover', this.onServicesDiscovered.bind(this));
    this._gatts[handle].on('includedServicesDiscover', this.onIncludedServicesDiscovered.bind(this));
    this._gatts[handle].on('characteristicsDiscover', this.onCharacteristicsDiscovered.bind(this));
    this._gatts[handle].on('read', this.onRead.bind(this));
    this._gatts[handle].on('write', this.onWrite.bind(this));
    this._gatts[handle].on('broadcast', this.onBroadcast.bind(this));
    this._gatts[handle].on('notify', this.onNotify.bind(this));
    this._gatts[handle].on('notification', this.onNotification.bind(this));
    this._gatts[handle].on('descriptorsDiscover', this.onDescriptorsDiscovered.bind(this));
    this._gatts[handle].on('valueRead', this.onValueRead.bind(this));
    this._gatts[handle].on('valueWrite', this.onValueWrite.bind(this));
    this._gatts[handle].on('handleRead', this.onHandleRead.bind(this));
    this._gatts[handle].on('handleWrite', this.onHandleWrite.bind(this));
    this._gatts[handle].on('handleNotify', this.onHandleNotify.bind(this));

    this._gatts[handle].exchangeMtu(256);
  } else {
    error = new Error(Hci.STATUS_MAPPER[status] || ('Unknown (' + status + ')'));
  }

  this.emit('connect', uuid, error);

  // Drain the next queued connection request, or clear the pending flag.
  if (this._connectionQueue.length > 0) {
    var peripheralUuid = this._connectionQueue.shift();

    address = this._addresses[peripheralUuid];
    addressType = this._addresseTypes[peripheralUuid];

    this._hci.createLeConn(address, addressType);
  } else {
    this._pendingConnection = false;
  }
};
NobleBindings.prototype.onLeConnUpdateComplete = function(handle, interval, latency, supervisionTimeout) {
  // no-op
};

// Tear down all per-connection state for a closed connection and emit
// 'disconnect' for the corresponding peripheral.
NobleBindings.prototype.onDisconnComplete = function(handle, reason) {
  var uuid = this._handles[handle];

  if (uuid) {
    this._aclStreams[handle].push(null, null);
    this._gatts[handle].removeAllListeners();

    delete this._gatts[uuid];
    delete this._gatts[handle];
    delete this._aclStreams[handle];
    delete this._handles[uuid];
    delete this._handles[handle];

    this.emit('disconnect', uuid); // TODO: handle reason?
  } else {
    console.warn('noble warning: unknown handle ' + handle + ' disconnected!');
  }
};

NobleBindings.prototype.onEncryptChange = function(handle, encrypt) {
  var aclStream = this._aclStreams[handle];

  if (aclStream) {
    aclStream.pushEncrypt(encrypt);
  }
};

// MTU exchange result; intentionally unused here.
NobleBindings.prototype.onMtu = function(address, mtu) {

};

NobleBindings.prototype.onRssiRead = function(handle, rssi) {
  this.emit('rssiUpdate', this._handles[handle], rssi);
};

// Route incoming ACL data packets to the stream for their connection.
NobleBindings.prototype.onAclDataPkt = function(handle, cid, data) {
  var aclStream = this._aclStreams[handle];

  if (aclStream) {
    aclStream.push(cid, data);
  }
};
NobleBindings.prototype.discoverServices = function(peripheralUuid, uuids) {
var handle = this._handles[peripheralUuid];
var gatt = this._gatts[handle];
if (gatt) {
gatt.discoverServices(uuids || []);
} else {
console.warn('noble warning: unknown peripheral ' + peripheralUuid);
}
};
NobleBindings.prototype.onServicesDiscovered = function(address, serviceUuids) {
var uuid = address.split(':').join('').toLowerCase();
this.emit('servicesDiscover', uuid, serviceUuids);
};
NobleBindings.prototype.discoverIncludedServices = function(peripheralUuid, serviceUuid, serviceUuids) {
var handle = this._handles[peripheralUuid];
var gatt = this._gatts[handle];
if (gatt) {
gatt.discoverIncludedServices(serviceUuid, serviceUuids || []);
} else {
console.warn('noble warning: unknown peripheral ' + peripheralUuid);
}
};
NobleBindings.prototype.onIncludedServicesDiscovered = function(address, serviceUuid, includedServiceUuids) {
var uuid = address.split(':').join('').toLowerCase();
this.emit('includedServicesDiscover', uuid, serviceUuid, includedServiceUuids);
};
NobleBindings.prototype.discoverCharacteristics = function(peripheralUuid, serviceUuid, characteristicUuids) {
var handle = this._handles[peripheralUuid];
var gatt = this._gatts[handle];
if (gatt) {
gatt.discoverCharacteristics(serviceUuid, characteristicUuids || []);
} else {
console.warn('noble warning: unknown peripheral ' + peripheralUuid);
}
};
NobleBindings.prototype.onCharacteristicsDiscovered = function(address, serviceUuid, characteristics) {
var uuid = address.split(':').join('').toLowerCase();
this.emit('characteristicsDiscover', uuid, serviceUuid, characteristics);
};
// Read a characteristic value from a connected peripheral.
NobleBindings.prototype.read = function(peripheralUuid, serviceUuid, characteristicUuid) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.read(serviceUuid, characteristicUuid);
};

// Relay a completed read (not a notification, hence the trailing false).
NobleBindings.prototype.onRead = function(address, serviceUuid, characteristicUuid, data) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('read', uuid, serviceUuid, characteristicUuid, data, false);
};

// Write a characteristic value, optionally without waiting for a response.
NobleBindings.prototype.write = function(peripheralUuid, serviceUuid, characteristicUuid, data, withoutResponse) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.write(serviceUuid, characteristicUuid, data, withoutResponse);
};

// Relay a completed write up to noble.
NobleBindings.prototype.onWrite = function(address, serviceUuid, characteristicUuid) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('write', uuid, serviceUuid, characteristicUuid);
};
// Enable/disable broadcast for a characteristic.
NobleBindings.prototype.broadcast = function(peripheralUuid, serviceUuid, characteristicUuid, broadcast) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.broadcast(serviceUuid, characteristicUuid, broadcast);
};

// Relay a broadcast state change up to noble.
NobleBindings.prototype.onBroadcast = function(address, serviceUuid, characteristicUuid, state) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('broadcast', uuid, serviceUuid, characteristicUuid, state);
};

// Subscribe/unsubscribe to characteristic notifications.
NobleBindings.prototype.notify = function(peripheralUuid, serviceUuid, characteristicUuid, notify) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.notify(serviceUuid, characteristicUuid, notify);
};

// Relay a notification-subscription state change up to noble.
NobleBindings.prototype.onNotify = function(address, serviceUuid, characteristicUuid, state) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('notify', uuid, serviceUuid, characteristicUuid, state);
};

// Incoming notification data is surfaced as a 'read' event with the
// isNotification flag set to true.
NobleBindings.prototype.onNotification = function(address, serviceUuid, characteristicUuid, data) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('read', uuid, serviceUuid, characteristicUuid, data, true);
};
// Discover descriptors for a characteristic on a connected peripheral.
NobleBindings.prototype.discoverDescriptors = function(peripheralUuid, serviceUuid, characteristicUuid) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.discoverDescriptors(serviceUuid, characteristicUuid);
};

// Relay discovered descriptor UUIDs up to noble.
NobleBindings.prototype.onDescriptorsDiscovered = function(address, serviceUuid, characteristicUuid, descriptorUuids) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('descriptorsDiscover', uuid, serviceUuid, characteristicUuid, descriptorUuids);
};

// Read a descriptor's value.
NobleBindings.prototype.readValue = function(peripheralUuid, serviceUuid, characteristicUuid, descriptorUuid) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.readValue(serviceUuid, characteristicUuid, descriptorUuid);
};

// Relay a completed descriptor read up to noble.
NobleBindings.prototype.onValueRead = function(address, serviceUuid, characteristicUuid, descriptorUuid, data) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('valueRead', uuid, serviceUuid, characteristicUuid, descriptorUuid, data);
};

// Write a descriptor's value.
NobleBindings.prototype.writeValue = function(peripheralUuid, serviceUuid, characteristicUuid, descriptorUuid, data) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.writeValue(serviceUuid, characteristicUuid, descriptorUuid, data);
};

// Relay a completed descriptor write up to noble.
NobleBindings.prototype.onValueWrite = function(address, serviceUuid, characteristicUuid, descriptorUuid) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('valueWrite', uuid, serviceUuid, characteristicUuid, descriptorUuid);
};
// Read directly from an ATT handle.
NobleBindings.prototype.readHandle = function(peripheralUuid, attHandle) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.readHandle(attHandle);
};

// Relay a completed handle read up to noble.
NobleBindings.prototype.onHandleRead = function(address, handle, data) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('handleRead', uuid, handle, data);
};

// Write directly to an ATT handle, optionally without a response.
NobleBindings.prototype.writeHandle = function(peripheralUuid, attHandle, data, withoutResponse) {
  var gatt = this._gatts[this._handles[peripheralUuid]];
  if (!gatt) {
    console.warn('noble warning: unknown peripheral ' + peripheralUuid);
    return;
  }
  gatt.writeHandle(attHandle, data, withoutResponse);
};

// Relay a completed handle write up to noble.
NobleBindings.prototype.onHandleWrite = function(address, handle) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('handleWrite', uuid, handle);
};

// Relay an unsolicited handle notification up to noble.
NobleBindings.prototype.onHandleNotify = function(address, handle, data) {
  var uuid = address.replace(/:/g, '').toLowerCase();
  this.emit('handleNotify', uuid, handle, data);
};

module.exports = new NobleBindings();
|
/*
* Driver for PC-speaker like devices found on various Sparc systems.
*
* Copyright (c) 2002 Vojtech Pavlik
* Copyright (c) 2002, 2006, 2008 David S. Miller (davem@davemloft.net)
*/
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/input.h>
#include <linux/of_device.h>
#include <linux/slab.h>
#include <asm/io.h>
MODULE_AUTHOR("David S. Miller <davem@davemloft.net>");
MODULE_DESCRIPTION("Sparc Speaker beeper driver");
MODULE_LICENSE("GPL");
/* Mapped registers for the "grover" beeper variant. */
struct grover_beep_info {
	void __iomem *freq_regs;	/* PIT-style counter/command registers */
	void __iomem *enable_reg;	/* enable/disable control register */
};
/* State for the BBC beeper variant. */
struct bbc_beep_info {
	u32 clock_freq;		/* "clock-frequency" read from the DT root node */
	void __iomem *regs;	/* mapped beep registers (6 bytes) */
};
/*
 * Per-device driver state shared by both beeper variants; the union
 * holds whichever variant-specific register info applies.
 */
struct sparcspkr_state {
	const char *name;		/* input device name */
	int (*event)(struct input_dev *dev, unsigned int type, unsigned int code, int value);	/* variant's EV_SND handler */
	spinlock_t lock;		/* serializes beeper register access */
	struct input_dev *input_dev;	/* registered input device */
	union {
		struct grover_beep_info grover;
		struct bbc_beep_info bbc;
	} u;
};
/*
 * Map a requested timer count onto the BBC beeper's power-of-two
 * frequency register encoding, clamping at both ends of the range the
 * hardware supports (register values 1<<10 .. 1<<18).
 */
static u32 bbc_count_to_reg(struct bbc_beep_info *info, unsigned int count)
{
	u32 val, clock_freq = info->clock_freq;
	int i;

	/* Zero count means silence; no register value needed. */
	if (!count)
		return 0;

	/* Clamp counts outside the representable range. */
	if (count <= clock_freq >> 20)
		return 1 << 18;

	if (count >= clock_freq >> 12)
		return 1 << 10;

	/* Halve the register value until the count fits the divisor. */
	val = 1 << 18;
	for (i = 19; i >= 11; i--) {
		val >>= 1;
		if (count <= clock_freq >> i)
			break;
	}

	return val;
}
/*
 * Input event handler for the BBC beeper: programs the beep registers
 * for SND_BELL/SND_TONE requests, or silences the device for value 0.
 * Returns 0 on success, -1 for unsupported event types/codes.
 */
static int bbc_spkr_event(struct input_dev *dev, unsigned int type, unsigned int code, int value)
{
	struct sparcspkr_state *state = dev_get_drvdata(dev->dev.parent);
	struct bbc_beep_info *info = &state->u.bbc;
	unsigned int count = 0;
	unsigned long flags;

	if (type != EV_SND)
		return -1;

	switch (code) {
	case SND_BELL: if (value) value = 1000;
		/* fallthrough: a bell is just a 1 kHz tone */
	case SND_TONE: break;
	default: return -1;
	}

	/* Convert frequency (Hz) into a divisor of the 1.193182 MHz clock. */
	if (value > 20 && value < 32767)
		count = 1193182 / value;

	count = bbc_count_to_reg(info, count);

	spin_lock_irqsave(&state->lock, flags);

	if (count) {
		/* Enable the beeper and load the count registers. */
		outb(0x01, info->regs + 0);
		outb(0x00, info->regs + 2);
		outb((count >> 16) & 0xff, info->regs + 3);
		outb((count >> 8) & 0xff, info->regs + 4);
		outb(0x00, info->regs + 5);
	} else {
		/* Disable the beeper. */
		outb(0x00, info->regs + 0);
	}

	spin_unlock_irqrestore(&state->lock, flags);

	return 0;
}
/*
 * Input event handler for the grover beeper: programs the PIT-style
 * counter 2 for SND_BELL/SND_TONE requests, or disables it for value 0.
 * Returns 0 on success, -1 for unsupported event types/codes.
 */
static int grover_spkr_event(struct input_dev *dev, unsigned int type, unsigned int code, int value)
{
	struct sparcspkr_state *state = dev_get_drvdata(dev->dev.parent);
	struct grover_beep_info *info = &state->u.grover;
	unsigned int count = 0;
	unsigned long flags;

	if (type != EV_SND)
		return -1;

	switch (code) {
	case SND_BELL: if (value) value = 1000;
		/* fallthrough: a bell is just a 1 kHz tone */
	case SND_TONE: break;
	default: return -1;
	}

	/* Convert frequency (Hz) into a divisor of the 1.193182 MHz clock. */
	if (value > 20 && value < 32767)
		count = 1193182 / value;

	spin_lock_irqsave(&state->lock, flags);

	if (count) {
		/* enable counter 2 */
		outb(inb(info->enable_reg) | 3, info->enable_reg);
		/* set command for counter 2, 2 byte write */
		outb(0xB6, info->freq_regs + 1);
		/* select desired HZ */
		outb(count & 0xff, info->freq_regs + 0);
		outb((count >> 8) & 0xff, info->freq_regs + 0);
	} else {
		/* disable counter 2 */
		outb(inb_p(info->enable_reg) & 0xFC, info->enable_reg);
	}

	spin_unlock_irqrestore(&state->lock, flags);

	return 0;
}
/*
 * Shared probe helper: allocates and registers the input device that
 * exposes the beeper via EV_SND (SND_BELL/SND_TONE) events.  Called by
 * the variant-specific platform probe routines after the driver state
 * has been attached to @dev as drvdata.
 */
static int sparcspkr_probe(struct device *dev)
{
	struct sparcspkr_state *state = dev_get_drvdata(dev);
	struct input_dev *input_dev;
	int error;

	input_dev = input_allocate_device();
	if (!input_dev)
		return -ENOMEM;

	input_dev->name = state->name;
	input_dev->phys = "sparc/input0";
	input_dev->id.bustype = BUS_ISA;
	input_dev->id.vendor = 0x001f;
	input_dev->id.product = 0x0001;
	input_dev->id.version = 0x0100;
	input_dev->dev.parent = dev;

	/* Only sound events (bell/tone) are supported. */
	input_dev->evbit[0] = BIT_MASK(EV_SND);
	input_dev->sndbit[0] = BIT_MASK(SND_BELL) | BIT_MASK(SND_TONE);

	input_dev->event = state->event;

	error = input_register_device(input_dev);
	if (error) {
		input_free_device(input_dev);
		return error;
	}

	state->input_dev = input_dev;

	return 0;
}
/* Platform shutdown hook: make sure the beeper is silenced. */
static void sparcspkr_shutdown(struct platform_device *dev)
{
	struct sparcspkr_state *state = platform_get_drvdata(dev);
	struct input_dev *input_dev = state->input_dev;

	/* turn off the speaker */
	state->event(input_dev, EV_SND, SND_BELL, 0);
}
/*
 * Probe for the BBC beeper: allocate driver state, read the system
 * clock frequency from the device-tree root node, map the beep
 * registers and register the input device.
 */
static int bbc_beep_probe(struct platform_device *op)
{
	struct sparcspkr_state *state;
	struct bbc_beep_info *info;
	struct device_node *dp;
	int err = -ENOMEM;

	state = kzalloc(sizeof(*state), GFP_KERNEL);
	if (!state)
		goto out_err;

	state->name = "Sparc BBC Speaker";
	state->event = bbc_spkr_event;
	spin_lock_init(&state->lock);

	dp = of_find_node_by_path("/");
	err = -ENODEV;
	if (!dp)
		goto out_free;

	info = &state->u.bbc;
	info->clock_freq = of_getintprop_default(dp, "clock-frequency", 0);
	/*
	 * Fix: of_find_node_by_path() returns the node with an elevated
	 * refcount; drop it once the property has been read, on all paths.
	 */
	of_node_put(dp);
	if (!info->clock_freq)
		goto out_free;

	info->regs = of_ioremap(&op->resource[0], 0, 6, "bbc beep");
	if (!info->regs)
		goto out_free;

	platform_set_drvdata(op, state);

	err = sparcspkr_probe(&op->dev);
	if (err)
		goto out_clear_drvdata;

	return 0;

out_clear_drvdata:
	of_iounmap(&op->resource[0], info->regs, 6);

out_free:
	kfree(state);
out_err:
	return err;
}
/* Detach the BBC beeper: silence it, unregister and free resources. */
static int bbc_remove(struct platform_device *op)
{
	struct sparcspkr_state *state = platform_get_drvdata(op);
	struct input_dev *input_dev = state->input_dev;
	struct bbc_beep_info *info = &state->u.bbc;

	/* turn off the speaker */
	state->event(input_dev, EV_SND, SND_BELL, 0);

	input_unregister_device(input_dev);

	of_iounmap(&op->resource[0], info->regs, 6);

	kfree(state);

	return 0;
}
/* Device-tree match table for the BBC beeper node. */
static const struct of_device_id bbc_beep_match[] = {
	{
		.name = "beep",
		.compatible = "SUNW,bbc-beep",
	},
	{},
};
/* Platform driver glue for the BBC beeper variant. */
static struct platform_driver bbc_beep_driver = {
	.driver = {
		.name = "bbcbeep",
		.owner = THIS_MODULE,
		.of_match_table = bbc_beep_match,
	},
	.probe = bbc_beep_probe,
	.remove = bbc_remove,
	.shutdown = sparcspkr_shutdown,
};
/*
 * Probe for the grover beeper: allocate driver state, map the
 * frequency and enable registers and register the input device.
 */
static int grover_beep_probe(struct platform_device *op)
{
	struct sparcspkr_state *state;
	struct grover_beep_info *info;
	int err = -ENOMEM;

	state = kzalloc(sizeof(*state), GFP_KERNEL);
	if (!state)
		goto out_err;

	state->name = "Sparc Grover Speaker";
	state->event = grover_spkr_event;
	spin_lock_init(&state->lock);

	info = &state->u.grover;
	info->freq_regs = of_ioremap(&op->resource[2], 0, 2, "grover beep freq");
	if (!info->freq_regs)
		goto out_free;

	info->enable_reg = of_ioremap(&op->resource[3], 0, 1, "grover beep enable");
	if (!info->enable_reg)
		goto out_unmap_freq_regs;

	platform_set_drvdata(op, state);

	err = sparcspkr_probe(&op->dev);
	if (err)
		goto out_clear_drvdata;

	return 0;

out_clear_drvdata:
	of_iounmap(&op->resource[3], info->enable_reg, 1);

out_unmap_freq_regs:
	of_iounmap(&op->resource[2], info->freq_regs, 2);

out_free:
	kfree(state);
out_err:
	return err;
}
/* Detach the grover beeper: silence it, unregister and free resources. */
static int grover_remove(struct platform_device *op)
{
	struct sparcspkr_state *state = platform_get_drvdata(op);
	struct grover_beep_info *info = &state->u.grover;
	struct input_dev *input_dev = state->input_dev;

	/* turn off the speaker */
	state->event(input_dev, EV_SND, SND_BELL, 0);

	input_unregister_device(input_dev);

	of_iounmap(&op->resource[3], info->enable_reg, 1);
	of_iounmap(&op->resource[2], info->freq_regs, 2);

	kfree(state);

	return 0;
}
/* Device-tree match table for the grover (SMBus) beeper node. */
static const struct of_device_id grover_beep_match[] = {
	{
		.name = "beep",
		.compatible = "SUNW,smbus-beep",
	},
	{},
};
/* Platform driver glue for the grover beeper variant. */
static struct platform_driver grover_beep_driver = {
	.driver = {
		.name = "groverbeep",
		.owner = THIS_MODULE,
		.of_match_table = grover_beep_match,
	},
	.probe = grover_beep_probe,
	.remove = grover_remove,
	.shutdown = sparcspkr_shutdown,
};
/*
 * Module init: register both beeper drivers; if the second registration
 * fails, roll back the first so the module loads all-or-nothing.
 */
static int __init sparcspkr_init(void)
{
	int err;

	err = platform_driver_register(&bbc_beep_driver);
	if (err)
		return err;

	err = platform_driver_register(&grover_beep_driver);
	if (err)
		platform_driver_unregister(&bbc_beep_driver);

	return err;
}
/* Module exit: unregister both platform drivers. */
static void __exit sparcspkr_exit(void)
{
	platform_driver_unregister(&bbc_beep_driver);
	platform_driver_unregister(&grover_beep_driver);
}

module_init(sparcspkr_init);
module_exit(sparcspkr_exit);
|
// Development webpack configuration: extends the shared base config with
// a dev server, error overlay and fast source maps.
const merge = require('webpack-merge');
const baseConfig = require('./base.config');
const config = require('./config');
const ErrorOverlayPlugin = require('error-overlay-webpack-plugin');

module.exports = merge(baseConfig, {
  devServer: {
    contentBase: config.buildPath,
    noInfo: true,
    overlay: true,
    hot: true,
  },
  // Fast rebuilds with usable stack traces in the browser.
  devtool: 'cheap-module-source-map',
  plugins: [
    new ErrorOverlayPlugin(),
  ],
  module: {
    rules: [
      { test: /\.tsx?$/, loader: 'awesome-typescript-loader' },
      // Re-emit source maps produced by already-compiled dependencies.
      { enforce: 'pre', test: /\.js$/, loader: 'source-map-loader' },
      {
        test: /\.jsx?$/,
        exclude: /node_modules/,
        use: [
          {
            loader: 'babel-loader',
            options: {
              // NOTE(review): bare 'react' preset implies Babel 6; if the
              // project is on Babel 7 this should be '@babel/preset-react'
              // — confirm against package.json.
              presets: ['react'],
            },
          },
        ],
      },
      { test: /\.css$/, use: ['style-loader', 'css-loader'] },
      {
        test: /\.scss$/,
        use: [
          'style-loader',
          {
            loader: 'css-loader',
            options: {
              // CSS modules with readable, hash-suffixed class names.
              modules: true,
              localIdentName: '[name]__[local]___[hash:base64:5]',
            },
          },
          'sass-loader',
          {
            loader: 'sass-resources-loader',
            options: {
              // Inject the shared Sass files into every compiled sheet.
              resources: config.baseSassFiles,
              sourceMap: true,
            },
          },
        ],
      },
      {
        test: /\.(html)$/,
        use: { loader: 'html-loader', options: { attrs: [':data-src', 'img:src'] } },
      },
      { test: /\.(png|jpg|gif|svg|woff(2)?|ttf|eot)$/, loader: 'file-loader' },
    ],
  },
});
|
/*
 You can modify its contents.
*/
// Page-level wrapper around the generated PickerContainer design.
const extend = require('js-base/core/extend');
const PickerContainerDesign = require('library/PickerContainer');

const PickerContainer = extend(PickerContainerDesign)(
  //constructor
  function(_super, props, pageName){
    // initializes the super class for this scope, falling back to the
    // design's default properties when none are supplied
    _super(this, props || PickerContainerDesign.defaults );
    // remember which page instantiated this container
    this.pageName = pageName;
  }
);

module && (module.exports = PickerContainer);
|
import torch
from mmaction.models import build_localizer
from ..base import get_localizer_cfg
def test_pem():
    """Smoke-test the BSN PEM localizer in train and inference modes."""
    pem_cfg, _, _ = get_localizer_cfg(
        'bsn/bsn_pem_400x100_1x16_20e_activitynet_feature.py')
    pem = build_localizer(pem_cfg)

    # Training forward pass: the model must report its losses as a dict.
    features = torch.rand(8, 100, 32)
    ref_iou = torch.rand(8, 100)
    assert isinstance(pem(features, ref_iou), dict)

    # Inference forward pass, one sample at a time.
    boundaries = {
        name: torch.rand(100)
        for name in ('tmin', 'tmax', 'tmin_score', 'tmax_score')
    }
    meta = [
        dict(
            video_name='v_test',
            duration_second=100,
            duration_frame=1000,
            annotations=[{
                'segment': [0.3, 0.6],
                'label': 'Rock climbing'
            }],
            feature_frame=900)
    ]
    with torch.no_grad():
        for sample in features:
            pem(sample.reshape(1, 100, 32),
                video_meta=meta,
                return_loss=False,
                **boundaries)
|
module.exports = {
extends: 'eslint:recommended',
rules: {
// A temporary hack related to IDE not resolving correct package.json
'import/no-extraneous-dependencies': 'off',
"import/prefer-default-export": "off"
},
parserOptions: {
ecmaVersion: 2020,
sourceType: 'module',
project: './tsconfig.json',
tsconfigRootDir: __dirname,
createDefaultProgram: true,
},
settings: {
'import/resolver': {
// See https://github.com/benmosher/eslint-plugin-import/issues/1396#issuecomment-575727774 for line below
node: {},
webpack: {
config: require.resolve('./configs/webpack.config.eslint.js'),
},
},
'import/parsers': {
'@typescript-eslint/parser': ['.ts', '.tsx'],
},
},
};
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Open-source TensorFlow Inception v3 Example."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.contrib.slim.nets import inception
# Command-line flags: training hyper-parameters, data source selection,
# TPU topology and input image geometry.
flags.DEFINE_float('learning_rate', 0.02, 'Learning rate.')
flags.DEFINE_float('depth_multiplier', 1.0, 'Depth Multiplier on Inception')
flags.DEFINE_integer('train_steps', 800,
                     'Total number of steps. Note that the actual number of '
                     'steps is the next multiple of --iterations greater '
                     'than this value.')
flags.DEFINE_integer('save_checkpoints_secs', None,
                     'Seconds between checkpoint saves')
flags.DEFINE_bool('use_tpu', True, 'Use TPUs rather than plain CPUs')
flags.DEFINE_string('use_data', 'fake', 'Data from "fake","real"')
flags.DEFINE_string('data_dir', '', 'Path of the data (for use_data=real)')
flags.DEFINE_string('master', 'local',
                    'BNS name of the TensorFlow master to use.')
flags.DEFINE_string('model_dir', None, 'Estimator model_dir')
flags.DEFINE_integer('iterations', 40,
                     'Number of iterations per TPU training loop.')
flags.DEFINE_string('optimizer', 'momentum',
                    'optimizer (one of sgd, rms, momentum)')
flags.DEFINE_integer('num_shards', 8, 'Number of shards (TPU chips).')
flags.DEFINE_integer('batch_size', 64,
                     'Global batch_size, not the per-shard batch_size')
flags.DEFINE_integer('num_labels', 1024, 'number of classes to distinguish')
flags.DEFINE_integer('width', 304, 'width of input image')
flags.DEFINE_integer('height', 304, 'height of input image')

FLAGS = flags.FLAGS
def inception_v3_arg_scope(is_training=True,
                           weight_decay=0.00004,
                           stddev=0.1,
                           batch_norm_var_collection='moving_vars'):
  """Defines the default InceptionV3 arg scope.

  Args:
    is_training: Whether or not we're training the model.
    weight_decay: The weight decay to use for regularizing the model.
    stddev: The standard deviation of the truncated normal weight initializer.
    batch_norm_var_collection: The name of the collection for the batch norm
      variables.

  Returns:
    An `arg_scope` to use for the inception v3 model.
  """
  batch_norm_params = {
      'is_training': is_training,
      # Decay for the moving averages.
      'decay': 0.9997,
      # epsilon to prevent 0s in variance.
      'epsilon': 0.001,
      # collection containing the moving mean and moving variance.
      'variables_collections': {
          'beta': None,
          'gamma': None,
          'moving_mean': [batch_norm_var_collection],
          'moving_variance': [batch_norm_var_collection],
      }
  }

  # Set weight_decay for weights in Conv and FC layers.
  with slim.arg_scope([slim.conv2d, slim.fully_connected],
                      weights_regularizer=slim.l2_regularizer(weight_decay)):
    with slim.arg_scope(
        [slim.conv2d],
        weights_initializer=tf.truncated_normal_initializer(stddev=stddev),
        activation_fn=tf.nn.relu,
        normalizer_fn=slim.batch_norm,
        normalizer_params=batch_norm_params) as sc:
      return sc
def model_fn(features, labels, mode, params):
  """Inception v3 model using Estimator API.

  Args:
    features: Batch of input images.
    labels: Integer class labels for the batch.
    mode: A `tf.estimator.ModeKeys` value; only TRAIN is supported.
    params: Unused TPUEstimator parameters.

  Returns:
    A `TPUEstimatorSpec` carrying the total loss and training op.

  Raises:
    RuntimeError: If `mode` is anything other than TRAIN.
  """
  del params

  if mode != tf.estimator.ModeKeys.TRAIN:
    raise RuntimeError('mode {} is not supported yet'.format(mode))

  num_labels = FLAGS.num_labels

  with slim.arg_scope(inception_v3_arg_scope(is_training=True)):
    logits, end_points = inception.inception_v3(
        features,
        num_labels,
        is_training=True,
        depth_multiplier=FLAGS.depth_multiplier)

  onehot_labels = tf.one_hot(
      indices=tf.cast(labels, tf.int32), depth=num_labels)

  # Auxiliary classifier loss, weighted down as in the Inception v3 setup.
  if 'AuxLogits' in end_points:
    tf.losses.softmax_cross_entropy(end_points['AuxLogits'],
                                    onehot_labels,
                                    label_smoothing=0.1,
                                    weights=0.4,
                                    scope='aux_loss')
  tf.losses.softmax_cross_entropy(logits,
                                  onehot_labels,
                                  label_smoothing=0.1,
                                  weights=1.0)
  loss = tf.losses.get_total_loss()

  if FLAGS.optimizer == 'sgd':
    tf.logging.info('Using SGD optimizer')
    optimizer = tf.train.GradientDescentOptimizer(
        learning_rate=FLAGS.learning_rate)
  elif FLAGS.optimizer == 'momentum':
    tf.logging.info('Using Momentum optimizer')
    optimizer = tf.train.MomentumOptimizer(
        learning_rate=FLAGS.learning_rate, momentum=0.9)
  else:
    # Bug fix: the message previously had no format specifier, so the extra
    # argument was not interpolated into the logged text; logging uses lazy
    # %-style formatting of its arguments.
    tf.logging.fatal('Unknown optimizer: %s', FLAGS.optimizer)

  if FLAGS.use_tpu:
    # Aggregate gradients across TPU shards.
    optimizer = contrib_tpu.CrossShardOptimizer(optimizer)

  train_op = optimizer.minimize(
      loss, global_step=tf.train.get_or_create_global_step())

  return contrib_tpu.TPUEstimatorSpec(mode=mode, loss=loss, train_op=train_op)
def input_fn(params):
  """Create a single batch of input data for the model.

  Depending on --use_data this either parses real ImageNet TFRecords
  from --data_dir or synthesizes random tensors of the right shape.

  Args:
    params: TPUEstimator-provided dict; 'batch_size' is the per-shard size.

  Returns:
    A tuple (images, labels) with statically known shapes.
  """
  batch_size = params['batch_size']
  height = FLAGS.height
  width = FLAGS.width

  def preprocess(image, bbox):
    """Preprocesses the image by resizing and rescaling it."""
    del bbox
    # Convert to float32
    image = tf.image.convert_image_dtype(image, dtype=tf.float32)
    # Crop the central region of the image with an area containing 87.5% of
    # the original image.
    # TODO(jhseu): Distortion
    image = tf.image.central_crop(image, central_fraction=0.875)
    # Resize the image to the original height and width.
    image = tf.expand_dims(image, 0)
    image = tf.image.resize_bilinear(image, [height, width],
                                    align_corners=False)
    image = tf.squeeze(image, [0])
    # Rescale to [-1,1] instead of [0, 1)
    image = tf.subtract(image, 0.5)
    image = tf.multiply(image, 2.0)
    return image

  def parser(value):
    """Parse an Imagenet record from value."""
    keys_to_features = {
        'image/encoded':
            tf.FixedLenFeature((), tf.string, default_value=''),
        'image/format':
            tf.FixedLenFeature((), tf.string, default_value='jpeg'),
        'image/class/label':
            tf.FixedLenFeature([], dtype=tf.int64, default_value=-1),
        'image/class/text':
            tf.FixedLenFeature([], dtype=tf.string, default_value=''),
        'image/object/bbox/xmin':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymin':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/xmax':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymax':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/class/label':
            tf.VarLenFeature(dtype=tf.int64),
    }
    parsed = tf.parse_single_example(value, keys_to_features)

    encoded_image = tf.reshape(
        parsed['image/encoded'], shape=[], name='encoded_image')
    image_format = parsed['image/format']
    xmin = tf.expand_dims(parsed['image/object/bbox/xmin'].values, 0)
    ymin = tf.expand_dims(parsed['image/object/bbox/ymin'].values, 0)
    xmax = tf.expand_dims(parsed['image/object/bbox/xmax'].values, 0)
    ymax = tf.expand_dims(parsed['image/object/bbox/ymax'].values, 0)

    # Note that we impose an ordering of (y, x) just to make life difficult.
    bbox = tf.concat([ymin, xmin, ymax, xmax], 0)

    # Force the variable number of bounding boxes into the shape
    # [1, num_boxes, coords].
    bbox = tf.expand_dims(bbox, 0)
    bbox = tf.transpose(bbox, [0, 2, 1])

    def decode_png():
      return tf.image.decode_png(encoded_image, 3)

    def decode_jpg():
      return tf.image.decode_jpeg(encoded_image, 3)

    # If image format is PNG, use decode_png, default to jpg.
    pred_fn_pairs = {
        tf.logical_or(
            tf.equal(image_format, 'png'), tf.equal(image_format, 'PNG')):
            decode_png
    }
    image = tf.case(pred_fn_pairs, default=decode_jpg, exclusive=True)
    image.set_shape([None, None, 3])

    image = preprocess(image, bbox)

    label = tf.cast(
        tf.reshape(parsed['image/class/label'], shape=[]),
        dtype=tf.int32,
        name='cast_label')
    label = tf.reshape(label, [1])
    return tf.cast(image, tf.float32), label

  if FLAGS.use_data == 'real':
    data_dir = FLAGS.data_dir
    filenames = [
        os.path.join(data_dir, 'train-%05d-of-01024' % i)
        for i in range(0, 984)
    ]
    dataset = tf.data.TFRecordDataset(filenames)
    dataset = dataset.repeat().map(parser).batch(batch_size)
    images, labels = dataset.make_one_shot_iterator().get_next()
  else:
    # Fake data path: random images/labels of the expected shapes.
    images = tf.random_uniform(
        [batch_size, height, width, 3], minval=-1, maxval=1)
    labels = tf.random_uniform(
        [batch_size], minval=0, maxval=999, dtype=tf.int32)

  # Reshape to give inputs statically known shapes.
  return (
      tf.reshape(images, [batch_size, height, width, 3]),
      tf.reshape(labels, [batch_size]),
  )
def main(unused_argv):
  """Build the TPU RunConfig/Estimator and run training."""
  del unused_argv  # Unused

  tf.logging.set_verbosity(tf.logging.INFO)

  run_config = contrib_tpu.RunConfig(
      master=FLAGS.master,
      model_dir=FLAGS.model_dir,
      save_checkpoints_secs=FLAGS.save_checkpoints_secs,
      session_config=tf.ConfigProto(),
      tpu_config=contrib_tpu.TPUConfig(FLAGS.iterations, FLAGS.num_shards),
  )

  estimator = contrib_tpu.TPUEstimator(
      model_fn=model_fn,
      use_tpu=FLAGS.use_tpu,
      config=run_config,
      train_batch_size=FLAGS.batch_size)
  estimator.train(input_fn=input_fn, max_steps=FLAGS.train_steps)


if __name__ == '__main__':
  app.run(main)
|
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Classes for making VMware VI SOAP calls.
"""
import httplib
import urllib2
from oslo.config import cfg
import suds
from nova.openstack.common.gettextutils import _
from nova import utils
from nova.virt.vmwareapi import error_util
RESP_NOT_XML_ERROR = 'Response is "text/html", not "text/xml"'
CONN_ABORT_ERROR = 'Software caused connection abort'
ADDRESS_IN_USE_ERROR = 'Address already in use'
vmwareapi_wsdl_loc_opt = cfg.StrOpt('wsdl_location',
help='Optional VIM Service WSDL Location '
'e.g http://<server>/vimService.wsdl. '
'Optional over-ride to default location for bug work-arounds')
CONF = cfg.CONF
CONF.register_opt(vmwareapi_wsdl_loc_opt, 'vmware')
def get_moref(value, type):
    """Get managed object reference."""
    ref = suds.sudsobject.Property(value)
    # Tag the property with the managed object type expected by the VI SDK.
    ref._type = type
    return ref
def object_to_dict(obj, list_depth=1):
    """Convert Suds object into serializable format.

    The calling function can limit the amount of list entries that
    are converted.
    """
    result = {}
    for key, value in suds.sudsobject.asdict(obj).iteritems():
        if hasattr(value, '__keylist__'):
            # Nested suds object: recurse.
            result[key] = object_to_dict(value, list_depth=list_depth)
        elif isinstance(value, list):
            # Keep at most list_depth entries of the list.
            converted = []
            for index, item in enumerate(value):
                if index >= list_depth:
                    break
                if hasattr(item, '__keylist__'):
                    converted.append(object_to_dict(item,
                                                    list_depth=list_depth))
                else:
                    converted.append(item)
            result[key] = converted
        else:
            result[key] = value
    return result
class VIMMessagePlugin(suds.plugin.MessagePlugin):
    """suds plugin that fixes up outgoing SOAP envelopes for the VI SDK."""

    def addAttributeForValue(self, node):
        # suds does not handle AnyType properly.
        # VI SDK requires type attribute to be set when AnyType is used
        if node.name == 'value':
            node.set('xsi:type', 'xsd:string')

    def marshalled(self, context):
        """suds will send the specified soap envelope.
        Provides the plugin with the opportunity to prune empty
        nodes and fixup nodes before sending it to the server.
        """
        # suds builds the entire request object based on the wsdl schema.
        # VI SDK throws server errors if optional SOAP nodes are sent
        # without values, e.g. <test/> as opposed to <test>test</test>
        context.envelope.prune()
        context.envelope.walk(self.addAttributeForValue)
class Vim:
"""The VIM Object."""
def __init__(self,
protocol="https",
host="localhost",
port=443):
"""Creates the necessary Communication interfaces and gets the
ServiceContent for initiating SOAP transactions.
protocol: http or https
host : ESX IPAddress or Hostname
port : port for connection
"""
if not suds:
raise Exception(_("Unable to import suds."))
self._protocol = protocol
self._host_name = host
self.wsdl_url = Vim.get_wsdl_url(protocol, host, port)
self.url = Vim.get_soap_url(protocol, host, port)
self.client = suds.client.Client(self.wsdl_url, location=self.url,
plugins=[VIMMessagePlugin()])
self._service_content = self.retrieve_service_content()
def retrieve_service_content(self):
return self.RetrieveServiceContent("ServiceInstance")
@staticmethod
def get_wsdl_url(protocol, host_name, port):
"""Allows override of the wsdl location, making this static
means we can test the logic outside of the constructor
without forcing the test environment to have multiple valid
wsdl locations to test against.
:param protocol: https or http
:param host_name: localhost or other server name
:param port: port for connection
:return: string to WSDL location for vSphere WS Management API
"""
# optional WSDL location over-ride for work-arounds
if CONF.vmware.wsdl_location:
return CONF.vmware.wsdl_location
# calculate default WSDL location if no override supplied
return Vim.get_soap_url(protocol, host_name, port) + "/vimService.wsdl"
@staticmethod
def get_soap_url(protocol, host_name, port):
"""Calculates the location of the SOAP services
for a particular server. Created as a static
method for testing.
:param protocol: https or http
:param host_name: localhost or other vSphere server name
:param port: port for connection
:return: the url to the active vSphere WS Management API
"""
if utils.is_valid_ipv6(host_name):
return '%s://[%s]:%d/sdk' % (protocol, host_name, port)
return '%s://%s:%d/sdk' % (protocol, host_name, port)
def get_service_content(self):
"""Gets the service content object."""
return self._service_content
def __getattr__(self, attr_name):
"""Makes the API calls and gets the result."""
def vim_request_handler(managed_object, **kwargs):
"""Builds the SOAP message and parses the response for fault
checking and other errors.
managed_object : Managed Object Reference or Managed
Object Name
**kwargs : Keyword arguments of the call
"""
# Dynamic handler for VI SDK Calls
try:
request_mo = self._request_managed_object_builder(
managed_object)
request = getattr(self.client.service, attr_name)
response = request(request_mo, **kwargs)
# To check for the faults that are part of the message body
# and not returned as Fault object response from the ESX
# SOAP server
if hasattr(error_util.FaultCheckers,
attr_name.lower() + "_fault_checker"):
fault_checker = getattr(error_util.FaultCheckers,
attr_name.lower() + "_fault_checker")
fault_checker(response)
return response
# Catch the VimFaultException that is raised by the fault
# check of the SOAP response
except error_util.VimFaultException:
raise
except suds.MethodNotFound:
raise
except suds.WebFault as excep:
doc = excep.document
fault_string = doc.childAtPath("/Envelope/Body/Fault/"
"faultstring").getText()
detail = doc.childAtPath("/Envelope/Body/Fault/detail")
fault_list = []
details = {}
if detail:
for fault in detail.getChildren():
fault_list.append(fault.get("type"))
for child in fault.getChildren():
details[child.name] = child.getText()
raise error_util.VimFaultException(fault_list, fault_string,
details)
except AttributeError as excep:
raise error_util.VimAttributeError(_("No such SOAP method "
"'%s' provided by VI SDK") % (attr_name), excep)
except (httplib.CannotSendRequest,
httplib.ResponseNotReady,
httplib.CannotSendHeader) as excep:
raise error_util.SessionOverLoadException(_("httplib "
"error in %s: ") % (attr_name), excep)
except (urllib2.URLError,
urllib2.HTTPError) as excep:
raise error_util.SessionConnectionException(_("urllib2 "
"error in %s: ") % (attr_name), excep)
except Exception as excep:
# Socket errors which need special handling for they
# might be caused by ESX API call overload
if (str(excep).find(ADDRESS_IN_USE_ERROR) != -1 or
str(excep).find(CONN_ABORT_ERROR)) != -1:
raise error_util.SessionOverLoadException(_("Socket "
"error in %s: ") % (attr_name), excep)
# Type error that needs special handling for it might be
# caused by ESX host API call overload
elif str(excep).find(RESP_NOT_XML_ERROR) != -1:
raise error_util.SessionOverLoadException(_("Type "
"error in %s: ") % (attr_name), excep)
else:
raise error_util.VimException(
_("Exception in %s ") % (attr_name), excep)
return vim_request_handler
def _request_managed_object_builder(self, managed_object):
"""Builds the request managed object."""
# Request Managed Object Builder
if isinstance(managed_object, str):
mo = suds.sudsobject.Property(managed_object)
mo._type = managed_object
else:
mo = managed_object
return mo
def __repr__(self):
return "VIM Object"
    def __str__(self):
        # Keep str() consistent with repr() for this stateless API wrapper.
        return "VIM Object"
|
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.superrpc = {}));
})(this, (function (exports) { 'use strict';
/**
* Stores proxy objects/functions that represent remote objects - **used internally**.
*
* On the other side the corresponding "host" object/function is held in a registry by a strong reference,
* and in order to be able to remove it and not leak the reference, we need a way to inform the other side
* when the proxy object is "no longer used". For this we use the WeakRef and FinalizationRegistry features.
*
* We hold the proxy object/function with a weak reference, and when it is garbage collected, we can be sure that
* it will not be used (called) anymore, so we remove it from our object registry and send a message
* to the other side to remove the corresponding local object from the hostObjectRegistry as well.
* @module
* @internal
*/
const rpc_disposed = Symbol('rpc_disposed');
const rpc_disposeFunc = Symbol('rpc_dispose');
class ProxyObjectRegistry {
registry = new Map();
objectFinalized = new FinalizationRegistry((rpc_dispose) => rpc_dispose());
/**
* Register an object.
* @param dispose Called when the object is removed from the registry (either explicitly or by the GC)
*/
register(objId, obj, dispose) {
const unregToken = {};
obj[rpc_disposed] = false;
obj[rpc_disposeFunc] = () => {
this.remoteObjectDisposed(objId, unregToken);
obj[rpc_disposed] = true;
dispose?.();
};
this.objectFinalized.register(obj, obj[rpc_disposeFunc], unregToken);
this.registry.set(objId, new WeakRef(obj));
}
has(objId) {
return this.registry.has(objId);
}
delete(objId) {
this.registry.delete(objId);
}
get(objId) {
return this.registry.get(objId)?.deref();
}
remoteObjectDisposed(objId, uregToken) {
this.objectFinalized.unregister(uregToken);
this.registry.delete(objId);
}
}
/**
* The descriptors are used to describe what properties/functions to expose on an object
* and what are the function return behaviors.
* @module
*/
// util functions
function getPropName(descriptor) {
return typeof descriptor === 'string' ? descriptor : descriptor.name || '';
}
function getArgumentDescriptor(descriptor, idx) {
return typeof descriptor === 'object' ? descriptor.arguments?.find(arg => arg.idx == null || arg.idx === idx) : undefined;
}
function getFunctionDescriptor(descriptor, funcName) {
return descriptor?.functions?.find(func => typeof func === 'object' && func.name === funcName);
}
function getPropertyDescriptor(descriptor, propName) {
return descriptor?.proxiedProperties?.find(prop => typeof prop === 'object' && prop.name === propName);
}
function getEventDescriptor(descriptor, eventName) {
return descriptor?.events?.find(evt => typeof evt === 'object' && evt.name === eventName);
}
function isFunctionDescriptor(descriptor) {
return descriptor?.type === 'function';
}
function processFunctionDescriptor(descriptor, func) {
if (typeof descriptor === 'string')
descriptor = { name: descriptor, type: 'function' };
descriptor ??= { type: 'function' };
descriptor.name ??= func.name;
return descriptor;
}
function processObjectDescriptor(descriptor, obj) {
descriptor ??= { type: 'object' };
if (obj && descriptor.functions) {
for (const [idx, fdescr] of descriptor.functions.entries()) {
descriptor.functions[idx] = processFunctionDescriptor(fdescr, obj[getPropName(fdescr)]);
}
}
return descriptor;
}
const hostObjectId = Symbol('hostObjectId');
const proxyObjectId = Symbol('proxyObjectId');
const classIdSym = Symbol('classId');
/**
* The SuperRPC is the central piece. An instance must be created on both sides.
*
* Objects, functions or classes can be registered on the "host" side
* (see [[registerHostObject]], [[registerHostClass]]) and then functions/properties can be
* called from the "client" side (see [[getProxyObject]], [[getProxyClass]]).
*
* The RPC service is symmetric, so depending on the use-case (and the channel),
* both side can be "host" and "client" at the same time.
*
* The constructor needs a function to generate unique IDs for objects.
* In order to have no dependencies this needs to be passed in.
* For convenience the examples use [nanoid](https://www.npmjs.com/package/nanoid).
*/
class SuperRPC {
objectIdGenerator;
channel;
remoteObjectDescriptors;
remoteFunctionDescriptors;
remoteClassDescriptors;
remoteDescriptorsCallbacks;
asyncCallbacks = new Map();
callId = 0;
proxyObjectRegistry = new ProxyObjectRegistry();
proxyClassRegistry = new Map();
hostObjectRegistry = new Map();
hostFunctionRegistry = new Map();
hostClassRegistry = new Map();
/**
* @param objectIdGenerator A function to generate a unique ID for an object.
*
* When sending an object to the other side that can not be serialized, we
* generate an ID and send that instead. The other side creates a proxy object
* that represents the remote object.
*/
constructor(objectIdGenerator) {
this.objectIdGenerator = objectIdGenerator;
}
/**
* Stores the current "context" object that is passed to the callback of the [[RPCChannel.receive]] function.
*/
currentContext;
/**
* Connect the service to a channel.
*/
connect(channel) {
this.channel = channel;
channel.receive?.(this.messageReceived.bind(this));
}
/**
* Register an object in the service to be called remotely.
* @param objId An ID that the "client" side uses to identify this object.
* @param target The target object
* @param descriptor Describes which functions/properties to expose
*/
registerHostObject(objId, target, descriptor) {
descriptor.type = 'object';
target[hostObjectId] = objId;
this.hostObjectRegistry.set(objId, { target, descriptor });
}
/**
* Register a function in the service to be called remotely.
* @param objId An ID that the "client" side uses to identify this function.
* @param target The target function
* @param descriptor Describes arguments and return behavior ([[FunctionReturnBehavior]])
*/
registerHostFunction(objId, target, descriptor = {}) {
descriptor.type = 'function';
target[hostObjectId] = objId;
this.hostFunctionRegistry.set(objId, { target, descriptor });
}
/**
* Register a class in the service.
*
* When an instance of this class is passed to the other side, only the "readonlyProperties" are sent (see [[ClassDescriptor]]).
* Functions and proxied properties are generated there and those call back to the original object.
*
* Even the constructor can be proxied.
*
* Note: static functions/properties act as if the class was a normal host object.
*
* @param classId An ID to identify the class on the client side.
* @param classCtor The class itself (its constructor function)
* @param descriptor What properties/functions to expose
*/
registerHostClass(classId, classCtor, descriptor) {
descriptor.type = 'class';
descriptor.classId = classId;
if (descriptor.static) {
this.registerHostObject(classId, classCtor, descriptor.static);
}
if (descriptor.ctor) {
this.registerHostFunction(classId, classCtor, descriptor.ctor);
}
classCtor[classIdSym] = classId;
this.hostClassRegistry.set(classId, { target: classCtor, descriptor });
}
/**
* Send a request to get the descriptors for the registered host objects from the other side.
* Uses synchronous communication if possible and returns `true`/`false` based on if the descriptors were received.
* If sync is not available, it uses async messaging and returns a Promise.
*/
requestRemoteDescriptors() {
if (this.channel?.sendSync) {
const response = this.sendSync({ action: 'get_descriptors' });
return this.setRemoteDescriptors(response);
}
return new Promise((resolve, reject) => {
this.sendAsync({ action: 'get_descriptors' });
this.remoteDescriptorsCallbacks = { resolve, reject };
});
}
setRemoteDescriptors(response) {
if (typeof response !== 'object')
return false;
if (response.classes) {
this.remoteClassDescriptors = response.classes;
}
if (response.objects) {
this.remoteObjectDescriptors = response.objects;
}
if (response.functions) {
this.remoteFunctionDescriptors = response.functions;
}
return true;
}
/**
* Send the descriptors for the registered host objects to the other side.
* If possible, the message is sent synchronously.
* This is a "push" style message, for "pull" see [[requestRemoteDescriptors]].
*/
sendRemoteDescriptors(replyChannel = this.channel) {
this.sendSyncIfPossible({
action: 'descriptors',
objects: this.getLocalDescriptors(this.hostObjectRegistry, processObjectDescriptor),
functions: this.getLocalDescriptors(this.hostFunctionRegistry, processFunctionDescriptor),
classes: this.getLocalDescriptors(this.hostClassRegistry),
}, replyChannel);
}
getLocalDescriptors(registry, processFn) {
const descriptors = {};
for (const key of registry.keys()) {
// .get() could return undefined, but we know it will never do that, since we iterate over existing keys
// therefore it is safe to cast it to the entry types
const entry = registry.get(key);
if (!entry.descriptor)
continue;
let descr = { ...entry.descriptor };
descr = processFn?.(descr, entry.target) ?? descr;
descriptors[key] = descr;
if (entry.descriptor.type === 'object' && entry.descriptor.readonlyProperties) {
const props = {};
for (const prop of entry.descriptor.readonlyProperties) {
props[prop] = entry.target[prop];
}
descr.props = props;
}
}
return descriptors;
}
sendSync(message, channel = this.channel) {
this.addMarker(message);
return channel?.sendSync?.(message);
}
sendAsync(message, channel = this.channel) {
this.addMarker(message);
channel?.sendAsync?.(message);
}
sendSyncIfPossible(message, channel = this.channel) {
return channel?.sendSync ? this.sendSync(message, channel) : this.sendAsync(message, channel);
}
sendAsyncIfPossible(message, channel = this.channel) {
return channel?.sendAsync ? this.sendAsync(message, channel) : this.sendSync(message, channel);
}
addMarker(message) {
message.rpc_marker = 'srpc';
}
checkMarker(message) {
return typeof message === 'object' && message.rpc_marker === 'srpc';
}
callTargetFunction(msg, replyChannel = this.channel) {
const entry = (msg.action === 'fn_call' || msg.action === 'ctor_call' ? this.hostFunctionRegistry : this.hostObjectRegistry).get(msg.objId);
let result;
let success = true;
try {
if (!entry)
throw new Error(`No object found with ID '${msg.objId}'`);
let scope = null;
let { descriptor, target } = entry;
let args;
switch (msg.action) {
case 'prop_get': {
result = target[msg.prop];
break;
}
case 'prop_set': {
const descr = getPropertyDescriptor(descriptor, msg.prop);
const result = this.processAfterDeserialization(msg.args[0], replyChannel, descr?.set?.arguments?.[0]);
// special case for when the property getter is async and the setter gets a Promise
if (result?.constructor === Promise && (descr?.get?.returns === 'async' || !replyChannel.sendSync)) {
result.then((value) => target[msg.prop] = value);
}
else {
target[msg.prop] = result;
}
break;
}
case 'method_call': {
scope = target;
descriptor = getFunctionDescriptor(descriptor, msg.prop);
if (!descriptor && !target[msg.prop]) {
// check if it's an event (add_EvtName or remove_EvtName)
// map it to addEventListener/removeEventListener(eventName, listener)
const [addOrRemove, eventName] = msg.prop.split('_');
if (eventName && (addOrRemove === 'add' || addOrRemove === 'remove') &&
typeof (target = target[addOrRemove + 'EventListener']) === 'function') {
const evtDescriptor = getEventDescriptor(descriptor, eventName);
args = [eventName, ...this.deserializeFunctionArgs(evtDescriptor, msg.args, replyChannel)];
}
}
else {
target = target[msg.prop];
}
if (typeof target !== 'function')
throw new Error(`Property ${msg.prop} is not a function on object ${msg.objId}`);
// NO break here!
}
// eslint-disable-next-line no-fallthrough
case 'fn_call': {
args ??= this.deserializeFunctionArgs(descriptor, msg.args, replyChannel);
result = target.apply(scope, args);
break;
}
case 'ctor_call': {
args = this.deserializeFunctionArgs(descriptor, msg.args, replyChannel);
result = new target(...args);
break;
}
}
if (msg.callType === 'async') {
Promise.resolve(result)
.then(value => result = this.processBeforeSerialization(value, replyChannel), err => { result = err?.toString?.(); success = false; })
.then(() => this.sendAsync({ action: 'fn_reply', callType: 'async', success, result, callId: msg.callId }, replyChannel));
}
else if (msg.callType === 'sync') {
result = this.processBeforeSerialization(result, replyChannel);
}
}
catch (err) {
success = false;
result = err?.toString?.();
}
if (msg.callType === 'sync') {
this.sendSync({ action: 'fn_reply', callType: 'sync', success, result }, replyChannel);
}
else if (msg.callType === 'async' && !success) {
this.sendAsync({ action: 'fn_reply', callType: 'async', success, result, callId: msg.callId }, replyChannel);
}
}
messageReceived(message, replyChannel = this.channel, context) {
this.currentContext = context;
if (this.checkMarker(message)) {
switch (message.action) {
case 'get_descriptors': {
this.sendRemoteDescriptors(replyChannel);
break;
}
case 'descriptors': {
const success = this.setRemoteDescriptors(message);
this.remoteDescriptorsCallbacks?.[success ? 'resolve' : 'reject']();
this.remoteDescriptorsCallbacks = undefined;
break;
}
case 'prop_get':
case 'prop_set':
case 'ctor_call':
case 'fn_call':
case 'method_call': {
this.callTargetFunction(message, replyChannel);
break;
}
case 'obj_died': {
this.hostObjectRegistry.delete(message.objId);
this.hostFunctionRegistry.delete(message.objId);
break;
}
case 'fn_reply': {
if (message.callType === 'async') {
const result = this.processAfterDeserialization(message.result, replyChannel);
const callbacks = this.asyncCallbacks.get(message.callId);
callbacks?.[message.success ? 'resolve' : 'reject'](result);
this.asyncCallbacks.delete(message.callId);
}
break;
}
}
}
}
serializeFunctionArgs(func, args, replyChannel) {
return args.map((arg, idx) => this.processBeforeSerialization(arg, replyChannel, getArgumentDescriptor(func, idx)));
}
deserializeFunctionArgs(func, args, replyChannel) {
return args.map((arg, idx) => this.processAfterDeserialization(arg, replyChannel, getArgumentDescriptor(func, idx)));
}
createVoidProxyFunction(objId, func, action, replyChannel) {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const _this = this;
const fn = function (...args) {
if (fn[rpc_disposed])
throw new Error('Remote function has been disposed');
_this.sendAsyncIfPossible({
action,
callType: 'void',
objId: objId ?? this[proxyObjectId],
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
prop: func.name,
args: _this.serializeFunctionArgs(func, args, replyChannel)
}, replyChannel);
};
return fn;
}
createSyncProxyFunction(objId, func, action, replyChannel) {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const _this = this;
const fn = function (...args) {
if (fn[rpc_disposed])
throw new Error('Remote function has been disposed');
const response = _this.sendSync({
action,
callType: 'sync',
objId: objId ?? this[proxyObjectId],
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
prop: func.name,
args: _this.serializeFunctionArgs(func, args, replyChannel)
}, replyChannel);
if (!response)
throw new Error('No response received');
if (!_this.checkMarker(response))
throw new Error(`Invalid response ${JSON.stringify(response)}`);
if (!response.success)
throw new Error(response.result);
return _this.processAfterDeserialization(response.result, replyChannel);
};
return fn;
}
createAsyncProxyFunction(objId, func, action, replyChannel) {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const _this = this;
const fn = function (...args) {
return new Promise((resolve, reject) => {
if (fn[rpc_disposed])
throw new Error('Remote function has been disposed');
_this.callId++;
_this.sendAsync({
action,
callType: 'async',
objId: objId ?? this[proxyObjectId],
callId: _this.callId.toString(),
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
prop: func.name,
args: _this.serializeFunctionArgs(func, args, replyChannel)
}, replyChannel);
_this.asyncCallbacks.set(_this.callId.toString(), { resolve, reject });
});
};
return fn;
}
createProxyFunction(objId, prop, action, defaultCallType = 'async', replyChannel = this.channel) {
const descriptor = (typeof prop === 'object') ? prop : { name: prop };
let callType = descriptor?.returns || defaultCallType;
if (callType === 'async' && !replyChannel.sendAsync)
callType = 'sync';
if (callType === 'sync' && !replyChannel.sendSync)
callType = 'async';
switch (callType) {
case 'void': return this.createVoidProxyFunction(objId, descriptor, action, replyChannel);
case 'sync': return this.createSyncProxyFunction(objId, descriptor, action, replyChannel);
default: return this.createAsyncProxyFunction(objId, descriptor, action, replyChannel);
}
}
/**
* Gets or creates a proxy object that represents a host object from the other side.
*
* This side must have the descriptor for the object.
* See [[sendRemoteDescriptors]], [[requestRemoteDescriptors]].
*/
getProxyObject(objId) {
let obj = this.proxyObjectRegistry.get(objId);
if (obj)
return obj;
const descriptor = this.remoteObjectDescriptors?.[objId];
if (!descriptor) {
throw new Error(`No object registered with ID '${objId}'`);
}
obj = this.createProxyObject(objId, descriptor);
this.proxyObjectRegistry.register(objId, obj);
return obj;
}
/**
* Gets or creates a proxy function that represents a host object from the other side.
*
* This side must have the descriptor for the function.
* See [[sendRemoteDescriptors]], [[requestRemoteDescriptors]].
*/
getProxyFunction(objId) {
let obj = this.proxyObjectRegistry.get(objId);
if (obj)
return obj;
const descriptor = this.remoteFunctionDescriptors?.[objId];
if (!descriptor) {
throw new Error(`No function registered with ID '${objId}'`);
}
obj = this.createProxyFunction(objId, descriptor, 'fn_call');
this.proxyObjectRegistry.register(objId, obj);
return obj;
}
/**
* Gets or creates a proxy "class" that will serve multiple purposes.
* - Static functions/properties on the class are proxied the same way as on a regular "host" object
* - If specified the constructor actually constructs an instance of the registered host class on the other side
* and the returned instance will represent the remote instance, with the specified functions/properties working
* on its prototype as expected.
* - If an instance of the registered host class is being sent from the other side,
* an instance of this proxy class will be created and passed on this side.
*/
getProxyClass(classId) {
let clazz = this.proxyClassRegistry.get(classId);
if (clazz)
return clazz;
const descriptor = this.remoteClassDescriptors?.[classId];
if (!descriptor) {
throw new Error(`No class registered with ID '${classId}'`);
}
clazz = (descriptor.ctor ? this.createProxyFunction(classId, descriptor.ctor, 'ctor_call', 'sync')
: function () { throw new Error(`Constructor of class '${classId}' is not defined`); });
// create the proxy functions/properties on the prototype with no objId, so each function will look up "proxyObjectId" on "this"
// so the prototype will work with multiple instances
this.createProxyObject(null, descriptor.instance, clazz.prototype);
// add static functions/props
const staticDescr = descriptor.static ?? {};
const objDescr = this.remoteObjectDescriptors?.[classId];
if (!isFunctionDescriptor(objDescr)) {
staticDescr.props = objDescr?.props;
}
this.createProxyObject(classId, staticDescr, clazz);
this.proxyClassRegistry.set(classId, clazz);
return clazz;
}
createProxyObject(objId, descriptor, obj = {}) {
Object.assign(obj, descriptor?.props);
for (const prop of descriptor?.functions ?? []) {
obj[getPropName(prop)] = this.createProxyFunction(objId, prop, 'method_call');
}
const setterCallType = this.channel.sendSync ? 'sync' : 'void';
for (const prop of descriptor?.proxiedProperties ?? []) {
const descr = typeof prop === 'string' ? { name: prop } : prop;
Object.defineProperty(obj, descr.name, {
get: this.createProxyFunction(objId, { ...descr.get, name: descr.name }, 'prop_get', 'sync'),
set: descr.getOnly ? undefined : this.createProxyFunction(objId, { ...descr.set, name: descr.name }, 'prop_set', setterCallType)
});
}
if (descriptor?.events && descriptor.events.length > 0) {
const eventNames = descriptor.events.map(descr => typeof descr === 'object' ? descr.name : descr);
const addListenerFunctions = new Map();
const removeListenerFunctions = new Map();
// eslint-disable-next-line @typescript-eslint/no-this-alias
const _this = this;
obj.addEventListener = function (eventName, listener) {
if (!eventNames.includes(eventName))
throw new Error(`No "${eventName}" event found on object "${objId}".`);
let proxyFunc = addListenerFunctions.get(eventName);
if (!proxyFunc) {
const descr = { ...getEventDescriptor(descriptor, eventName), name: 'add_' + eventName };
proxyFunc = _this.createProxyFunction(objId, descr, 'method_call');
addListenerFunctions.set(eventName, proxyFunc);
}
proxyFunc(listener);
};
obj.removeEventListener = function (eventName, listener) {
if (!eventNames.includes(eventName))
throw new Error(`No "${eventName}" event found on object "${objId}".`);
let proxyFunc = removeListenerFunctions.get(eventName);
if (!proxyFunc) {
const descr = { ...getEventDescriptor(descriptor, eventName), name: 'remove_' + eventName };
proxyFunc = _this.createProxyFunction(objId, descr, 'method_call');
removeListenerFunctions.set(eventName, proxyFunc);
}
proxyFunc(listener);
};
}
obj[proxyObjectId] = objId;
return obj;
}
registerLocalObj(obj, descriptor) {
let objId = obj[hostObjectId];
if (!this.hostObjectRegistry.has(objId)) {
objId = this.objectIdGenerator();
this.hostObjectRegistry.set(objId, { target: obj, descriptor });
obj[hostObjectId] = objId;
}
return objId;
}
registerLocalFunc(obj, descriptor) {
let objId = obj[hostObjectId];
if (!this.hostFunctionRegistry.has(objId)) {
objId = this.objectIdGenerator();
this.hostFunctionRegistry.set(objId, { target: obj, descriptor });
obj[hostObjectId] = objId;
}
return objId;
}
processBeforeSerialization(obj, replyChannel, descriptor) {
if (obj?.[proxyObjectId]) {
return { _rpc_type: 'host' + (typeof obj), objId: obj[proxyObjectId] };
}
switch (typeof obj) {
case 'object': {
if (!obj)
break;
// special case for Promise
if (obj.constructor === Promise) {
if (!this.hostObjectRegistry.has(obj[hostObjectId])) {
let result;
let success;
obj.then((value) => { result = value; success = true; }, (value) => { result = value; success = false; }).finally(() => this.sendAsyncIfPossible({ action: 'fn_reply', callType: 'async', success, result, callId: objId }, replyChannel));
}
const objId = this.registerLocalObj(obj, {});
return { _rpc_type: 'object', objId, classId: 'Promise' };
}
const entry = this.hostClassRegistry.get(obj.constructor?.[classIdSym]);
if (entry) {
const objId = this.registerLocalObj(obj, entry.descriptor.instance ?? {});
const props = {};
for (const prop of entry.descriptor.instance?.readonlyProperties ?? []) {
const propName = getPropName(prop);
props[propName] = this.processBeforeSerialization(obj[propName], replyChannel);
}
return { _rpc_type: 'object', classId: entry.descriptor.classId, props, objId };
}
for (const key of Object.keys(obj)) {
obj[key] = this.processBeforeSerialization(obj[key], replyChannel);
}
break;
}
case 'function': {
const objId = this.registerLocalFunc(obj, descriptor);
return { _rpc_type: 'function', objId };
}
}
return obj;
}
processAfterDeserialization(obj, replyChannel, descriptor) {
if (typeof obj !== 'object' || !obj)
return obj;
switch (obj._rpc_type) {
case 'object': {
return this.getOrCreateProxyInstance(obj.objId, obj.classId, obj.props, replyChannel);
}
case 'function': {
return this.getOrCreateProxyFunction(obj.objId, replyChannel, descriptor);
}
case 'hostobject': {
return this.hostObjectRegistry.get(obj.objId)?.target;
}
case 'hostfunction': {
return this.hostFunctionRegistry.get(obj.objId)?.target;
}
}
for (const key of Object.keys(obj)) {
obj[key] = this.processAfterDeserialization(obj[key], replyChannel, getPropertyDescriptor(descriptor, key));
}
return obj;
}
sendObjectDied(objId, replyChannel = this.channel) {
this.sendAsyncIfPossible({ action: 'obj_died', objId }, replyChannel);
}
getOrCreateProxyInstance(objId, classId, props, replyChannel) {
let obj = this.proxyObjectRegistry.get(objId);
if (obj)
return obj;
obj = props ?? {};
// special case for Promise
if (classId === 'Promise') {
obj = new Promise((resolve, reject) => this.asyncCallbacks.set(objId, { resolve, reject }));
}
else {
obj[proxyObjectId] = objId;
const clazz = this.getProxyClass(classId);
Object.setPrototypeOf(obj, clazz.prototype);
}
this.proxyObjectRegistry.register(objId, obj, () => this.sendObjectDied(objId, replyChannel));
return obj;
}
getOrCreateProxyFunction(objId, replyChannel, descriptor) {
let fn = this.proxyObjectRegistry.get(objId);
if (fn)
return fn;
if (descriptor)
descriptor.type = 'function';
fn = this.createProxyFunction(objId, descriptor, 'fn_call', 'async', replyChannel);
fn[proxyObjectId] = objId;
this.proxyObjectRegistry.register(objId, fn, () => this.sendObjectDied(objId, replyChannel));
return fn;
}
}
exports.SuperRPC = SuperRPC;
exports.getArgumentDescriptor = getArgumentDescriptor;
exports.getEventDescriptor = getEventDescriptor;
exports.getFunctionDescriptor = getFunctionDescriptor;
exports.getPropName = getPropName;
exports.getPropertyDescriptor = getPropertyDescriptor;
exports.isFunctionDescriptor = isFunctionDescriptor;
exports.processFunctionDescriptor = processFunctionDescriptor;
exports.processObjectDescriptor = processObjectDescriptor;
Object.defineProperty(exports, '__esModule', { value: true });
}));
//# sourceMappingURL=super-rpc.umd.js.map
|
import omemo
from omemo_backend_signal import BACKEND as SignalBackend
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "tests")))
from deletingotpkpolicy import DeletingOTPKPolicy
from dr_chat import mainLoop
import example_data
X3DHDoubleRatchet = omemo.make_X3DHDoubleRatchet(SignalBackend)
def _establish_session(active_state, passive_state, passive_bundle,
                       active_jid, active_device_id, initial_message):
    """Create a Double Ratchet session pair between two parties.

    The active party derives the shared secret from the passive party's
    public bundle. If initial_message is not None, the initial message is
    additionally round-tripped through the Signal wire format (encrypt,
    serialize, deserialize, decrypt, authenticate); otherwise the passive
    session is initialized directly from the session init data.

    Returns (active_dr, passive_dr, initial_message_plaintext), where the
    plaintext is None when the wire format was not used.
    """
    # Prepare the session init data and the DoubleRatchet from the active part
    session_init_data = active_state.getSharedSecretActive(passive_bundle)
    active_dr = session_init_data["dr"]
    session_init_data = session_init_data["to_other"]

    if initial_message is None:
        # No wireformat requested: initialize the passive session directly
        passive_dr = passive_state.getSharedSecretPassive(
            session_init_data,
            active_jid,
            active_device_id,
            DeletingOTPKPolicy
        )
        return active_dr, passive_dr, None

    # Encrypt the initial message
    initial_message_encrypted = active_dr.encryptMessage(
        initial_message.encode("UTF-8")
    )

    # Prepare the message
    initial_message_serialized = SignalBackend.WireFormat.messageToWire(
        initial_message_encrypted["ciphertext"],
        initial_message_encrypted["header"],
        {
            "DoubleRatchet": initial_message_encrypted["additional"]
        }
    )

    # Bundle the session init data and the initial message into a pre_key packet
    pre_key_message_serialized = SignalBackend.WireFormat.preKeyMessageToWire(
        session_init_data,
        initial_message_serialized,
        {
            "DoubleRatchet": initial_message_encrypted["additional"]
        }
    )

    # Send to the receiver...

    # Unpack the session init data into the initial message
    pre_key_message = SignalBackend.WireFormat.preKeyMessageFromWire(
        pre_key_message_serialized
    )
    initial_message_serialized = pre_key_message["message"]

    # Unpack the contained message
    initial_message_encrypted = SignalBackend.WireFormat.messageFromWire(
        initial_message_serialized
    )

    # Create the session for the passive part
    passive_dr = passive_state.getSharedSecretPassive(
        pre_key_message["session_init_data"],
        active_jid,
        active_device_id,
        DeletingOTPKPolicy
    )

    # Decrypt the initial message
    initial_message_plaintext = passive_dr.decryptMessage(
        initial_message_encrypted["ciphertext"],
        initial_message_encrypted["header"]
    )

    # Check the authentication
    SignalBackend.WireFormat.finalizeMessageFromWire(
        initial_message_serialized,
        {
            "DoubleRatchet": initial_message_plaintext["additional"],
            "WireFormat": initial_message_encrypted["additional"]
        }
    )

    plaintext = initial_message_plaintext["plaintext"].decode("UTF-8")
    return active_dr, passive_dr, plaintext


def main(who, use_wireformat = False):
    """Run the Double Ratchet demo chat.

    who: "a" if Alice should actively initialize the session, "b" for Bob.
    use_wireformat: when True, the initial message is serialized through the
        Signal wire format before the interactive loop starts.

    The active/passive session setup used to be duplicated verbatim for both
    the "a" and "b" cases; it is now shared via _establish_session.
    """
    alice_state = X3DHDoubleRatchet()
    bob_state = X3DHDoubleRatchet()

    alice_public_bundle = alice_state.getPublicBundle()
    bob_public_bundle = bob_state.getPublicBundle()

    # Ask for the initial message to send (only needed for the wireformat path)
    initial_message = input("Initial message: ") if use_wireformat else None

    if who == "a":
        alice_dr, bob_dr, initial_message_plaintext = _establish_session(
            alice_state, bob_state, bob_public_bundle,
            example_data.ALICE_BARE_JID, example_data.ALICE_DEVICE_ID,
            initial_message
        )
    if who == "b":
        bob_dr, alice_dr, initial_message_plaintext = _establish_session(
            bob_state, alice_state, alice_public_bundle,
            example_data.BOB_BARE_JID, example_data.BOB_DEVICE_ID,
            initial_message
        )

    if use_wireformat:
        print("Initial message received: " + initial_message_plaintext)

    mainLoop(alice_dr, bob_dr, use_wireformat)
if __name__ == "__main__":
    def _prompt(question, choices):
        # Keep asking until one of the accepted answers is entered.
        while True:
            answer = input(question)
            if answer in choices:
                return answer

    if len(sys.argv) < 3:
        # Interactive mode: ask for both settings on stdin.
        who = _prompt("Who should actively initialize the session? (a or b): ", ["a", "b"])
        use_wireformat = _prompt("Use the wireformat? (y or n): ", ["y", "n"])
    else:
        # Non-interactive mode: take both settings from the command line.
        who = sys.argv[1]
        use_wireformat = sys.argv[2]
    main(who, use_wireformat == "y")
|
# Lint as: python3
# Copyright 2021 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Supporting functions for data loading."""
import numpy as np
from PIL import Image
import tensorflow as tf
from delf.python import utils as image_loading_utils
def pil_imagenet_loader(path, imsize, bounding_box=None, preprocess=True):
  """Pillow loader for the images.

  Args:
    path: Path to image to be loaded.
    imsize: Integer, defines the maximum size of longer image side.
    bounding_box: (x1,y1,x2,y2) tuple to crop the query image.
    preprocess: Bool, whether to preprocess the images in respect to the
      ImageNet dataset.

  Returns:
    image: `Tensor`, image in ImageNet suitable format.
  """
  img = image_loading_utils.RgbLoader(path)

  if bounding_box is not None:
    imfullsize = max(img.size)
    img = img.crop(bounding_box)
    # Scale the target size so the crop keeps its relative resolution.
    imsize = imsize * max(img.size) / imfullsize

  # Unlike `resize`, `thumbnail` resizes to the largest size that preserves
  # the aspect ratio, making sure that the output image does not exceed the
  # original image size and the size specified in the arguments of thumbnail.
  img.thumbnail((imsize, imsize), Image.ANTIALIAS)
  img = np.array(img)

  if preprocess:
    # Preprocessing for ImageNet data. Converts the images from RGB to BGR,
    # then zero-centers each color channel with respect to the ImageNet
    # dataset, without scaling.
    # FIX: `preprocess_input` only modifies its input in place when the input
    # is already a compatible float array; for the uint8 array produced above
    # it returns a *new* array. The original code discarded the return value,
    # silently skipping preprocessing, so assign the result back.
    img = tf.keras.applications.imagenet_utils.preprocess_input(
        img, mode='caffe')

  return img
def default_loader(path, imsize, bounding_box=None, preprocess=True):
  """Load an image using the default (Pillow-based) loader.

  Thin convenience wrapper that forwards all arguments to
  `pil_imagenet_loader`.

  Args:
    path: Path to image to be loaded.
    imsize: Integer, defines the maximum size of longer image side.
    bounding_box: (x1,y1,x2,y2) tuple to crop the query image.
    preprocess: Bool, whether to preprocess the images in respect to the
      ImageNet dataset.

  Returns:
    image: `Tensor`, image in ImageNet suitable format.
  """
  return pil_imagenet_loader(
      path, imsize, bounding_box=bounding_box, preprocess=preprocess)
|
const express = require('express');
const mongoose = require('mongoose');
const bodyParser = require('body-parser');
const constants = require('./config/constants');

// --- Database ---------------------------------------------------------
// Use native promises and verbose query logging for mongoose, then connect.
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.connect(constants.mongoURI, {
  keepAlive: true,
  reconnectTries: Number.MAX_VALUE,
  useMongoClient: true,
});
require('./models/UrlShorten');

// --- HTTP server ------------------------------------------------------
const app = express();
app.use(bodyParser.json());

// Permissive CORS middleware: answer preflight requests immediately,
// forward everything else to the route handlers.
app.use((req, res, next) => {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE');
  res.header('Access-Control-Allow-Headers', 'Content-type,Accept,x-access-token,X-Key');
  if (req.method === 'OPTIONS') {
    res.status(200).end();
    return;
  }
  next();
});

require('./routes/urlshorten')(app);
require('./services/cache');

const PORT = 6500;
app.listen(PORT, () => {
  console.log(`Server started on port`, PORT);
});
|
#!/usr/bin/env python
#
# Copyright (c) 2015 - 2021, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
Run Pennant with the monitor agent.
'''
import argparse
from experiment.monitor import monitor
from experiment import machine
from apps.pennant import pennant
if __name__ == '__main__':
    # Combine the monitor experiment's and Pennant's command line options
    # into one parser; unknown options are collected into extra_args and
    # forwarded to the experiment.
    parser = argparse.ArgumentParser()
    monitor.setup_run_args(parser)
    pennant.setup_run_args(parser)
    args, extra_args = parser.parse_known_args()
    # Describe the machine, prepare the output directory, then launch
    # Pennant under the monitor agent.
    mach = machine.init_output_dir(args.output_dir)
    app_conf = pennant.create_appconf(mach, args)
    monitor.launch(app_conf=app_conf, args=args,
                   experiment_cli_args=extra_args)
|
/*
* Copyright (C) Roman Arutyunyan
* Copyright (C) Nginx, Inc.
*/
#include <ngx_config.h>
#include <ngx_core.h>
#include <ngx_http.h>
/* Per-location configuration for the mirror module. */
typedef struct {
    ngx_array_t  *mirror;        /* array of ngx_str_t mirror URIs;
                                    NULL after "mirror off" */
    ngx_flag_t    request_body;  /* "mirror_request_body" directive flag */
} ngx_http_mirror_loc_conf_t;
/* Per-request state used while the request body is being read. */
typedef struct {
    ngx_int_t     status;        /* result recorded by the body handler */
} ngx_http_mirror_ctx_t;
static ngx_int_t ngx_http_mirror_handler(ngx_http_request_t *r);
static void ngx_http_mirror_body_handler(ngx_http_request_t *r);
static ngx_int_t ngx_http_mirror_handler_internal(ngx_http_request_t *r);
static void *ngx_http_mirror_create_loc_conf(ngx_conf_t *cf);
static char *ngx_http_mirror_merge_loc_conf(ngx_conf_t *cf, void *parent,
void *child);
static char *ngx_http_mirror(ngx_conf_t *cf, ngx_command_t *cmd, void *conf);
static ngx_int_t ngx_http_mirror_init(ngx_conf_t *cf);
static ngx_command_t ngx_http_mirror_commands[] = {
{ ngx_string("mirror"),
NGX_HTTP_MAIN_CONF|NGX_HTTP_SRV_CONF|NGX_HTTP_LOC_CONF|NGX_CONF_TAKE1,
ngx_http_mirror,
NGX_HTTP_LOC_CONF_OFFSET,
0,
NULL },
{ ngx_string("mirror_request_body"),
NGX_HTTP_MAIN_CONF|NGX_HTTP_SRV_CONF|NGX_HTTP_LOC_CONF|NGX_CONF_FLAG,
ngx_conf_set_flag_slot,
NGX_HTTP_LOC_CONF_OFFSET,
offsetof(ngx_http_mirror_loc_conf_t, request_body),
NULL },
ngx_null_command
};
static ngx_http_module_t ngx_http_mirror_module_ctx = {
NULL, /* preconfiguration */
ngx_http_mirror_init, /* postconfiguration */
NULL, /* create main configuration */
NULL, /* init main configuration */
NULL, /* create server configuration */
NULL, /* merge server configuration */
ngx_http_mirror_create_loc_conf, /* create location configuration */
ngx_http_mirror_merge_loc_conf /* merge location configuration */
};
ngx_module_t ngx_http_mirror_module = {
NGX_MODULE_V1,
&ngx_http_mirror_module_ctx, /* module context */
ngx_http_mirror_commands, /* module directives */
NGX_HTTP_MODULE, /* module type */
NULL, /* init master */
NULL, /* init module */
NULL, /* init process */
NULL, /* init thread */
NULL, /* exit thread */
NULL, /* exit process */
NULL, /* exit master */
NGX_MODULE_V1_PADDING
};
/* Precontent-phase handler: mirrors eligible requests to the configured
 * mirror locations, reading the request body first when required.
 * Returns NGX_DECLINED so normal processing of the original request
 * continues. */
static ngx_int_t
ngx_http_mirror_handler(ngx_http_request_t *r)
{
    ngx_int_t                    rc;
    ngx_http_mirror_ctx_t       *ctx;
    ngx_http_mirror_loc_conf_t  *mlcf;
    /* Skip subrequests (only the main request is mirrored) and locations
     * where no "mirror" directive is configured. */
    if (r != r->main) {
        return NGX_DECLINED;
    }
    mlcf = ngx_http_get_module_loc_conf(r, ngx_http_mirror_module);
    if (mlcf->mirror == NULL) {
        return NGX_DECLINED;
    }
    ngx_log_debug0(NGX_LOG_DEBUG_HTTP, r->connection->log, 0, "mirror handler");
    /* If the body must be mirrored too, the complete request body has to
     * be received before the subrequests are created. */
    if (mlcf->request_body) {
        ctx = ngx_http_get_module_ctx(r, ngx_http_mirror_module);
        if (ctx) {
            /* Second pass, after the body was read: report the status
             * recorded by the body handler. */
            return ctx->status;
        }
        ctx = ngx_pcalloc(r->pool, sizeof(ngx_http_mirror_ctx_t));
        if (ctx == NULL) {
            return NGX_ERROR;
        }
        ctx->status = NGX_DONE;
        ngx_http_set_ctx(r, ctx, ngx_http_mirror_module);
        /* Start reading the request body asynchronously;
         * ngx_http_mirror_body_handler runs when it is complete. */
        rc = ngx_http_read_client_request_body(r, ngx_http_mirror_body_handler);
        if (rc >= NGX_HTTP_SPECIAL_RESPONSE) {
            return rc;
        }
        ngx_http_finalize_request(r, NGX_DONE);
        return NGX_DONE;
    }
    /* No body needed: create the mirror subrequests right away and resume
     * normal processing of the main request. */
    return ngx_http_mirror_handler_internal(r);
}
/* Called once the client request body has been read completely. */
static void
ngx_http_mirror_body_handler(ngx_http_request_t *r)
{
    ngx_http_mirror_ctx_t  *ctx;
    ctx = ngx_http_get_module_ctx(r, ngx_http_mirror_module);
    /* Create the mirror subrequests and remember the result for the second
     * invocation of ngx_http_mirror_handler. */
    ctx->status = ngx_http_mirror_handler_internal(r);
    /* Keep the request body (possibly stored in a temporary file) from
     * being deleted while the mirror subrequests still need it. */
    r->preserve_body = 1;
    r->write_event_handler = ngx_http_core_run_phases;
    ngx_http_core_run_phases(r);
}
/* Create one background subrequest per configured mirror location. */
static ngx_int_t
ngx_http_mirror_handler_internal(ngx_http_request_t *r)
{
    ngx_str_t                   *name;
    ngx_uint_t                   i;
    ngx_http_request_t          *sr;
    ngx_http_mirror_loc_conf_t  *mlcf;
    mlcf = ngx_http_get_module_loc_conf(r, ngx_http_mirror_module);
    name = mlcf->mirror->elts;
    /* One subrequest per "mirror" directive value. */
    for (i = 0; i < mlcf->mirror->nelts; i++) {
        /* Background subrequests do not contribute to the client response. */
        if (ngx_http_subrequest(r, &name[i], &r->args, &sr, NULL,
                                NGX_HTTP_SUBREQUEST_BACKGROUND)
            != NGX_OK)
        {
            return NGX_HTTP_INTERNAL_SERVER_ERROR;
        }
        /* The mirror's response is discarded, so only headers are needed;
         * the method is copied from the original request. */
        sr->header_only = 1;
        sr->method = r->method;
        sr->method_name = r->method_name;
    }
    /* Pass the original request on to the next handler in this phase. */
    return NGX_DECLINED;
}
/* Allocate and zero the per-location configuration. */
static void *
ngx_http_mirror_create_loc_conf(ngx_conf_t *cf)
{
    ngx_http_mirror_loc_conf_t  *conf;

    conf = ngx_pcalloc(cf->pool, sizeof(ngx_http_mirror_loc_conf_t));
    if (conf == NULL) {
        return NULL;
    }

    /* Mark both settings as unset so merging can detect inheritance. */
    conf->mirror = NGX_CONF_UNSET_PTR;
    conf->request_body = NGX_CONF_UNSET;

    return conf;
}
static char *
ngx_http_mirror_merge_loc_conf(ngx_conf_t *cf, void *parent, void *child)
{
    ngx_http_mirror_loc_conf_t *prev = parent;
    ngx_http_mirror_loc_conf_t *conf = child;
    /* "mirror" is an array-type directive and may appear several times in
     * one scope.  A scope that does not set it explicitly inherits the
     * outer scope's value; mirror_request_body defaults to on (1). */
    ngx_conf_merge_ptr_value(conf->mirror, prev->mirror, NULL);
    ngx_conf_merge_value(conf->request_body, prev->request_body, 1);
    return NGX_CONF_OK;
}
/* Parse the "mirror" directive: either "mirror off" or "mirror <uri>". */
static char *
ngx_http_mirror(ngx_conf_t *cf, ngx_command_t *cmd, void *conf)
{
    ngx_http_mirror_loc_conf_t *mlcf = conf;
    ngx_str_t  *value, *s;
    value = cf->args->elts;
    if (ngx_strcmp(value[1].data, "off") == 0) {
        /* "off" is only valid as the first mirror setting in this scope. */
        if (mlcf->mirror != NGX_CONF_UNSET_PTR) {
            return "is duplicate";
        }
        mlcf->mirror = NULL;
        return NGX_CONF_OK;
    }
    /* NULL means an earlier "mirror off" in this scope; mixing "off" with
     * a mirror URI is rejected as a duplicate. */
    if (mlcf->mirror == NULL) {
        return "is duplicate";
    }
    if (mlcf->mirror == NGX_CONF_UNSET_PTR) {
        /* First URI in this scope: create the array of mirror targets. */
        mlcf->mirror = ngx_array_create(cf->pool, 4, sizeof(ngx_str_t));
        if (mlcf->mirror == NULL) {
            return NGX_CONF_ERROR;
        }
    }
    s = ngx_array_push(mlcf->mirror);
    if (s == NULL) {
        return NGX_CONF_ERROR;
    }
    *s = value[1];
    return NGX_CONF_OK;
}
/* Postconfiguration: register the mirror handler in the precontent phase. */
static ngx_int_t
ngx_http_mirror_init(ngx_conf_t *cf)
{
    ngx_http_core_main_conf_t  *cmcf;
    ngx_http_handler_pt        *handler;

    cmcf = ngx_http_conf_get_module_main_conf(cf, ngx_http_core_module);

    handler = ngx_array_push(&cmcf->phases[NGX_HTTP_PRECONTENT_PHASE].handlers);
    if (handler == NULL) {
        return NGX_ERROR;
    }

    *handler = ngx_http_mirror_handler;

    return NGX_OK;
}
|
#!/usr/bin/env python
# Author: Brendan Le Foll <brendan.le.foll@intel.com>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
import mraa as m
import random as rand
# Excuse the super boring example, I was out of fun devices to play with, this
# will write and read the same data back to itself, a few 100 times, just short
# MISO & MOSI on your board
dev = m.Spi(0)

# Send 100 random 4-byte buffers; with MISO shorted to MOSI, every byte
# written should be read straight back.
for x in range(0, 100):
    txbuf = bytearray(4)
    for y in range(0, 4):
        txbuf[y] = rand.randrange(0, 256)
    rxbuf = dev.write(txbuf)
    if rxbuf != txbuf:
        print("We have an error captain!")
        # Fix: the original code had `break` followed by an unreachable
        # exit(1); exit with a failure status directly instead.
        exit(1)
|
"""
WSGI config for lochlanandcatherinecom project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already
# specifies a settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lochlanandcatherinecom.settings")
# Module-level WSGI callable discovered by the application server.
application = get_wsgi_application()
|
const path = require('path');
const svgDim = require('svg-dimensions');
const { reportFileGenerateSuccess, reportFileGenerateError } = require('../utils/reportFileGenerate');
const { generateFileName } = require('../utils/generateFileName');
const { processVector } = require('./vector/processVector');
// Copy an SVG into the image cache, recording its intrinsic dimensions and
// output file names in the manifest entry. Processing failures are reported
// via reportFileGenerateError; dimension-probe failures reject the promise.
const generateVector = async (hexo, currentManifest, imageCacheDir, file, hash) => {
  // SVG is resolution independent, so a single '2x' entry suffices.
  const targetFileName = generateFileName(file, hash, '2x', 'svg');
  const targetPath = path.join(imageCacheDir, targetFileName);

  // Promisify the callback-style svg-dimensions API.
  const { width, height } = await new Promise((resolve, reject) => {
    svgDim.get(file, (error, dimensions) => {
      if (error) {
        reject(error);
      } else {
        resolve(dimensions);
      }
    });
  });

  try {
    await processVector(file, targetPath);
    currentManifest.dimensions = {
      '2x': {
        w: width,
        h: height
      }
    };
    currentManifest.originalType = 'svg';
    currentManifest.files = {
      default: targetFileName,
      svg: {
        '2x': targetFileName
      }
    };
    reportFileGenerateSuccess(hexo, file, targetPath, `image/svg`);
  } catch (error) {
    reportFileGenerateError(hexo, file, targetFileName, error);
  }
};
// Entry point used by the processor registry; note the argument-order
// difference between this signature and generateVector's.
const vectorProcessor = (hexo, imageCacheDir, file, currentManifest, shortFileHash) =>
  generateVector(hexo, currentManifest, imageCacheDir, file, shortFileHash);
module.exports = { vectorProcessor };
|
import { Typography } from "antd";
import styled from "styled-components";
import { theme } from "constants/theme";
const { Text } = Typography;
// Styled wrapper around antd's Typography.Text. Appearance is driven by
// optional props — color, size, weight, textAlign and block (block=true
// renders as display:block) — each falling back to a theme default.
const TextLabel = styled(Text)`
  &.ant-typography {
    font-family: ${theme.typography.heading.font};
    color: ${(props) => props.color || "black"};
    font-size: ${(props) => props.size || theme.typography.size.medium};
    font-weight: ${(props) => props.weight || "normal"};
    text-align: ${(props) => props.textAlign || "left"};
    display: ${(props) => (!props.block ? "" : "block")};
  }
`;
export default TextLabel;
|
from django.contrib import admin
from simple_history.admin import SimpleHistoryAdmin
from wiki.models import Article, Alias, Comment
@admin.register(Article)
class ArticleAdmin(SimpleHistoryAdmin, admin.ModelAdmin):
    """Article admin with simple_history revision support."""
    # Drill-down navigation by last modification date.
    date_hierarchy = 'modified'
    list_display = ('__str__', 'created', 'modified')
@admin.register(Alias)
class AliasAdmin(admin.ModelAdmin):
    """Plain list admin for article aliases."""
    list_display = ('name', 'slug', 'created', 'modified')
@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
    """Comment admin with a bulk action that redacts rather than deletes."""
    date_hierarchy = 'created'
    list_display = ('id', 'article', 'user', '__str__', 'removed')
    list_display_links = ('id', '__str__')
    list_filter = ('article', 'user')
    actions = ['remove_comment']

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        # 'user' and 'article' appear to be forward relations (they are used
        # in list_filter), so select_related fetches them with a JOIN in the
        # same query instead of prefetch_related's extra per-relation query.
        return qs.select_related('user', 'article')

    def remove_comment(self, request, queryset):
        # Redact the comment text and flag it as removed; the row itself is
        # kept so the thread structure stays intact.
        queryset.update(
            comment="Removed by {0}".format(request.user.username),
            removed=True
        )
    remove_comment.short_description = 'Remove the comment'
|
/*jslint node: true */
'use strict';
/*
'use strict' is not required but helpful for turning syntactical errors into true errors in the program flow
http://www.w3schools.com/js/js_strict.asp
*/
/*
Modules make it possible to import JavaScript files into your application. Modules are imported
using 'require' statements that give you a reference to the module.
It is a good idea to list the modules that your application depends on in the package.json in the project root
*/
var util = require('util');
/*
Once you 'require' a module you can reference the things that it exports. These are defined in module.exports.
For a controller in a127 (which this is) you should export the functions referenced in your Swagger document by name.
Either:
- The HTTP Verb of the corresponding operation (get, put, post, delete, etc)
- Or the operationId associated with the operation in your Swagger document
In the starter/skeleton project the 'get' operation on the '/hello' path has an operationId named 'hello'. Here,
we specify that in the exports of this module that 'hello' maps to the function named 'hello'
*/
module.exports = {
hello: hello,
};
/*
Functions in a127 controllers used for operations should take two parameters:
Param 1: a handle to the request object
Param 2: a handle to the response object
*/
function hello(req, res) {
// variables defined in the Swagger document can be referenced using req.swagger.params.{parameter_name}
var name = req.swagger.params.name.value || 'stranger';
var helloname = util.format('Hello there, %s!', name);
// this sends back a JSON response which is a single string
res.json(helloname);
}
|
#!/usr/bin/python -tt
# List of token names. This is always required
from Number import *
from Infinity import *
import sys
import re
from SyntaxException import *
reserved = {
}
tokens = [
'FRAC',
'NUMBER',
'PLUS',
'MINUS',
'TIMES',
'DIVIDE',
'LPAREN',
'RPAREN',
'LBRACKET',
'RBRACKET',
'LBRACE',
'RBRACE',
'LFLOOR',
'RFLOOR',
'LCEIL',
'RCEIL',
'ASINH',
'SINH',
'ASIN',
'SIN',
'ACOSH',
'COSH',
'ACOS',
'COS',
'ATANH',
'TANH',
'ATAN',
'TAN',
'ASEC',
'SEC',
'ACSC',
'CSC',
'ACOTH',
'COTH',
'ACOT',
'COT',
'SQRT',
'LOG',
'LN',
'EXP',
'MOD',
'CARET',
'COMMA',
'ID',
'PIPE',
'INFINITY',
'UNDERLINE',
'INTEGRAL',
'DIFFERENTIAL',
'D',
'I',
'E',
'PARTIAL',
'SUM',
'PROD',
'IN',
'DOTS',
'EQ',
'NEQ',
'LT',
'LE',
'GT',
'GE',
'FACTORIAL',
'PERCENT',
'ETA_LOWER',
'ZETA_LOWER',
'PHI_LOWER',
'PSI_LOWER',
'SIGMA_LOWER',
'DELTA_LOWER',
'THETA_LOWER',
'LAMBDA_LOWER',
'EPSILON_LOWER',
'TAU_LOWER',
'KAPPA_LOWER',
'OMEGA_LOWER',
'ALPHA_LOWER',
'XI_LOWER',
'CHI_LOWER',
'NU_LOWER',
'RHO_LOWER',
'OMICRON_LOWER',
'UPSILON_LOWER',
'IOTA_LOWER',
'BETA_LOWER',
'GAMMA_LOWER',
'MU_LOWER',
'PI_UPPER',
'PI',
'BETA',
'GAMMA',
'MU',
'KAPPA',
'OMICRON',
'OMEGA',
'LAMBDA',
'IOTA',
'PSI',
'PHI',
'SIGMA',
'ETA',
'ZETA',
'THETA',
'EPSILON',
'TAU',
'ALPHA',
'XI',
'CHI',
'NU',
'RHO',
'UPSILON',
'LIMIT',
'TO',
'PRIME',
'GCD',
'DEG',
'CHOOSE',
'GRADIENT',
'LAPLACIAN',
'BEGIN_CASE',
'END_CASE',
'BACKSLASHES',
'BEGIN_BMATRIX',
'END_BMATRIX',
'BEGIN_PMATRIX',
'END_PMATRIX',
'BEGIN_VMATRIX',
'END_VMATRIX',
'BEGIN_NMATRIX',
'END_NMATRIX',
'AMPERSAND',
'DETERMINANT',
'CROSS',
'DOT'
] + list(reserved.values())
# Define a rule so we can track line numbers
def t_newline(t):
r'\n+'
t.lexer.lineno += len(t.value)
# A string containing ignored characters (spaces and tabs)
t_ignore = ' \t\r'
t_UNDERLINE = r'_'
def t_PRIME(t):
r'\\prime|\''
return t
def t_LIMIT(t):
r'\\lim'
return t
def t_TO(t):
r'\\to'
return t
t_PHI_LOWER = r'\\phi'
t_SIGMA_LOWER = r'\\sigma'
t_ETA_LOWER = r'\\eta'
t_ZETA_LOWER = r'\\zeta'
t_PSI_LOWER = r'\\psi'
t_DELTA_LOWER = r'\\delta'
t_THETA_LOWER = r'\\theta'
t_LAMBDA_LOWER = r'\\lambda'
t_EPSILON_LOWER = r'\\epsilon'
t_TAU_LOWER = r'\\tau'
t_KAPPA_LOWER = r'\\kappa'
t_OMEGA_LOWER = r'\\omega'
t_ALPHA_LOWER = r'\\alpha'
t_XI_LOWER = r'\\xi'
t_CHI_LOWER = r'\\chi'
t_NU_LOWER = r'\\nu'
t_RHO_LOWER = r'\\rho'
t_OMICRON_LOWER = r'\\omicron'
t_UPSILON_LOWER = r'\\upsilon'
t_IOTA_LOWER = r'\\iota'
t_BETA_LOWER = r'\\beta'
t_GAMMA_LOWER = r'\\gamma'
t_MU_LOWER = r'\\mu'
t_BETA = r'\\Beta'
t_GAMMA = r'\\Gamma'
t_KAPPA = r'\\Kappa'
t_MU = r'\\Mu'
t_IOTA = r'\\Iota'
t_OMICRON = r'\\Omicron'
t_LAMBDA = r'\\Lambda'
t_OMEGA = r'\\Omega'
t_PSI = r'\\Psi'
t_PHI = r'\\Phi'
t_SIGMA = r'\\Sigma'
t_ETA = r'\\Eta'
t_ZETA = r'\\Zeta'
t_THETA = r'\\Theta'
t_EPSILON = r'\\Epsilon'
t_TAU = r'\\Tau'
t_ALPHA = r'\\Alpha'
t_XI = r'\\Xi'
t_CHI = r'\\Chi'
t_NU = r'\\Nu'
t_RHO = r'\\Rho'
t_UPSILON = r'\\Upsilon'
t_PI = r'\\pi'
t_PI_UPPER = r'\\Pi'
t_EQ = r'='
t_NEQ = r'\\neq'
t_LE = r'\\leq'
t_LT = r'<'
t_GE = r'\\geq'
t_GT = r'>'
t_CROSS = r'\\times'
t_SUM = r'\\sum'
t_PROD = r'\\prod'
t_IN = r'\\in'
t_FACTORIAL = r'!'
def t_PERCENT(t):
r'\\text\{\s*\%\s*\}'
return t
def t_MOD(t):
r'\\mod|\\bmod'
return t
def t_DOTS(t):
r'\\cdots|\\ldots|\\dots|\.\.\.'
return t
def t_INFINITY(t):
r'\\infty'
t.value = Infinity()
return t
def t_COMMENT(t):
r'\%[^\n]*'
pass
# Regular expression rules for simple tokens
t_PLUS = r'\+'
t_MINUS = r'-'
def t_TIMES(t):
r'\*|\\ast'
return t
def t_DOT(t):
r'\\cdot'
return t
def t_DIVIDE(t):
r'/|\\div'
return t
def t_FRAC(t):
r'\\frac'
return t
t_LPAREN = r'\('
t_RPAREN = r'\)'
def t_LBRACKET(t):
r'\['
return t
def t_RBRACKET(t):
r'\]'
return t
def t_LBRACE(t):
r'\{'
return t
def t_RBRACE(t):
r'\}'
return t
t_CARET = r'\^'
t_LFLOOR = r'\\lfloor'
t_RFLOOR = r'\\rfloor'
t_LCEIL = r'\\lceil'
t_RCEIL = r'\\rceil'
t_ASINH = r'\\sinh\^\{-1\}'
t_ASIN = r'\\sin\^\{-1\}|\\arcsin'
t_SINH = r'\\sinh'
t_SIN = r'\\sin'
t_ACOSH = r'\\cosh\^\{-1\}'
t_ACOS = r'\\cos\^\{-1\}|\\arccos'
t_COSH = r'\\cosh'
t_COS = r'\\cos'
t_ATANH = r'\\tanh\^\{-1\}'
t_ATAN = r'\\tan\^\{-1\}|\\arctan'
t_TANH = r'\\tanh'
t_TAN = r'\\tan'
t_ASEC = r'\\sec\^\{-1\}'
t_SEC = r'\\sec'
t_ACSC = r'\\csc\^\{-1\}'
t_CSC = r'\\csc'
t_ACOTH = r'\\coth\^\{-1\}'
t_ACOT = r'\\cot\^\{-1\}'
t_COTH = r'\\coth'
t_COT = r'\\cot'
t_SQRT = r'\\sqrt'
t_LOG = r'\\log'
t_LN = r'\\ln'
t_EXP = r'\\exp'
t_GCD = r'\\gcd'
t_DEG = r'\\deg'
t_CHOOSE = r'\\choose'
t_GRADIENT = r'\\nabla'
t_LAPLACIAN = r'\\Delta'
t_AMPERSAND = r'&'
t_DETERMINANT = r'\\det'
def t_BEGIN_CASE(t):
r'\\begin\{cases\}'
return t
def t_END_CASE(t):
r'\\end\{cases\}'
return t
def t_BEGIN_BMATRIX(t):
r'\\begin\{bmatrix\}'
return t
def t_END_BMATRIX(t):
r'\\end\{bmatrix\}'
return t
def t_BEGIN_PMATRIX(t):
r'\\begin\{pmatrix\}'
return t
def t_END_PMATRIX(t):
r'\\end\{pmatrix\}'
return t
def t_BEGIN_VMATRIX(t):
r'\\begin\{vmatrix\}'
return t
def t_END_VMATRIX(t):
r'\\end\{vmatrix\}'
return t
def t_BEGIN_NMATRIX(t):
r'\\begin\{Vmatrix\}'
return t
def t_END_NMATRIX(t):
r'\\end\{Vmatrix\}'
return t
def t_BACKSLASHES(t):
r'\\\\'
return t
def t_INTEGRAL(t):
r'\\int'
return t
def t_D(t):
r'd(?!\\_|[a-zA-Z0-9])'
return t
def t_I(t):
r'i(?!\\_|[a-zA-Z0-9])'
return t
def t_E(t):
r'e(?!\\_|[a-zA-Z0-9])'
return t
def t_DIFFERENTIAL(t):
r'd[a-zA-Z](?!\\_|[a-zA-Z0-9])'
return t
def t_PARTIAL(t):
r'\\partial'
return t
def t_PIPE(t):
r'\\mid|\\vert|\|'
return t
def t_ignore_LIMITS(t):
r'\\limits'
pass
def t_ignore_BEGIN_EQUATION(t):
r'\\begin\{equation\}'
pass
def t_ignore_END_EQUATION(t):
r'\\end\{equation\}'
pass
def t_ignore_BEGIN_SPLIT(t):
r'\\begin\{split\}'
pass
def t_ignore_END_SPLIT(t):
r'\\end\{split\}'
pass
def t_ignore_DISPLAYSTYLE(t):
r'\\displaystyle'
pass
def t_ignore_QUAD(t):
r'\\quad'
pass
def t_ignore_MATHCLAP(t):
r'\\mathclap'
pass
def t_ignored_LEFT(t):
r'\\left'
pass
def t_ignored_RIGHT(t):
r'\\right'
pass
t_COMMA = r','
def t_ID(t):
    r'\\text\{\s*(\\_)*[a-zA-Z]((\\_)*[a-zA-Z0-9]*)*\s*\}|(\\_)*[a-zA-Z]((\\_)*[a-zA-Z0-9]*)*'
    # NOTE: the docstring above is the PLY token regex and must stay as-is.
    # Reserved words would override the generic ID type (none defined yet).
    t.type = reserved.get(t.value, 'ID')
    # Unwrap \text{...} to its inner content, then strip backslashes and
    # surrounding whitespace from the identifier.
    wrapped = re.search(r"\\text\{\s*(.+)\s*\}", t.value)
    if wrapped:
        t.value = wrapped.group(1)
    t.value = t.value.replace("\\", "").strip()
    return t
# A regular expression rule with some action code
def t_NUMBER(t):
r'[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?'
t.value = Number(t.value)
return t
def t_ignore_TEXT(t):
r'\\text\{\s*\}|\\text'
pass
# Error handling rule
def t_error(t):
    # Report the offending character on stderr (newline-terminated so
    # consecutive messages do not run together), then skip past it so
    # lexing can continue.
    sys.stderr.write("Illegal character '%s'\n" % t.value[0])
    t.lexer.skip(1)
|
import operator
from numpy.testing import assert_raises
import numpy as np
from .. import ones, asarray, result_type, all, equal
from .._array_object import Array
from .._dtypes import (
_all_dtypes,
_boolean_dtypes,
_floating_dtypes,
_integer_dtypes,
_integer_or_boolean_dtypes,
_numeric_dtypes,
int8,
int16,
int32,
int64,
uint64,
)
def test_validate_index():
# The indexing tests in the official array API test suite test that the
# array object correctly handles the subset of indices that are required
# by the spec. But the NumPy array API implementation specifically
# disallows any index not required by the spec, via Array._validate_index.
# This test focuses on testing that non-valid indices are correctly
# rejected. See
# https://data-apis.org/array-api/latest/API_specification/indexing.html
# and the docstring of Array._validate_index for the exact indexing
# behavior that should be allowed. This does not test indices that are
# already invalid in NumPy itself because Array will generally just pass
# such indices directly to the underlying np.ndarray.
a = ones((3, 4))
# Out of bounds slices are not allowed
assert_raises(IndexError, lambda: a[:4])
assert_raises(IndexError, lambda: a[:-4])
assert_raises(IndexError, lambda: a[:3:-1])
assert_raises(IndexError, lambda: a[:-5:-1])
assert_raises(IndexError, lambda: a[4:])
assert_raises(IndexError, lambda: a[-4:])
assert_raises(IndexError, lambda: a[4::-1])
assert_raises(IndexError, lambda: a[-4::-1])
assert_raises(IndexError, lambda: a[...,:5])
assert_raises(IndexError, lambda: a[...,:-5])
assert_raises(IndexError, lambda: a[...,:5:-1])
assert_raises(IndexError, lambda: a[...,:-6:-1])
assert_raises(IndexError, lambda: a[...,5:])
assert_raises(IndexError, lambda: a[...,-5:])
assert_raises(IndexError, lambda: a[...,5::-1])
assert_raises(IndexError, lambda: a[...,-5::-1])
# Boolean indices cannot be part of a larger tuple index
assert_raises(IndexError, lambda: a[a[:,0]==1,0])
assert_raises(IndexError, lambda: a[a[:,0]==1,...])
assert_raises(IndexError, lambda: a[..., a[0]==1])
assert_raises(IndexError, lambda: a[[True, True, True]])
assert_raises(IndexError, lambda: a[(True, True, True),])
# Integer array indices are not allowed (except for 0-D)
idx = asarray([[0, 1]])
assert_raises(IndexError, lambda: a[idx])
assert_raises(IndexError, lambda: a[idx,])
assert_raises(IndexError, lambda: a[[0, 1]])
assert_raises(IndexError, lambda: a[(0, 1), (0, 1)])
assert_raises(IndexError, lambda: a[[0, 1]])
assert_raises(IndexError, lambda: a[np.array([[0, 1]])])
# np.newaxis is not allowed
assert_raises(IndexError, lambda: a[None])
assert_raises(IndexError, lambda: a[None, ...])
assert_raises(IndexError, lambda: a[..., None])
# Multiaxis indices must contain exactly as many indices as dimensions
assert_raises(IndexError, lambda: a[()])
assert_raises(IndexError, lambda: a[0,])
assert_raises(IndexError, lambda: a[0])
assert_raises(IndexError, lambda: a[:])
def test_operators():
# For every operator, we test that it works for the required type
# combinations and raises TypeError otherwise
binary_op_dtypes = {
"__add__": "numeric",
"__and__": "integer_or_boolean",
"__eq__": "all",
"__floordiv__": "numeric",
"__ge__": "numeric",
"__gt__": "numeric",
"__le__": "numeric",
"__lshift__": "integer",
"__lt__": "numeric",
"__mod__": "numeric",
"__mul__": "numeric",
"__ne__": "all",
"__or__": "integer_or_boolean",
"__pow__": "floating",
"__rshift__": "integer",
"__sub__": "numeric",
"__truediv__": "floating",
"__xor__": "integer_or_boolean",
}
# Recompute each time because of in-place ops
def _array_vals():
for d in _integer_dtypes:
yield asarray(1, dtype=d)
for d in _boolean_dtypes:
yield asarray(False, dtype=d)
for d in _floating_dtypes:
yield asarray(1.0, dtype=d)
for op, dtypes in binary_op_dtypes.items():
ops = [op]
if op not in ["__eq__", "__ne__", "__le__", "__ge__", "__lt__", "__gt__"]:
rop = "__r" + op[2:]
iop = "__i" + op[2:]
ops += [rop, iop]
for s in [1, 1.0, False]:
for _op in ops:
for a in _array_vals():
# Test array op scalar. From the spec, the following combinations
# are supported:
# - Python bool for a bool array dtype,
# - a Python int within the bounds of the given dtype for integer array dtypes,
# - a Python int or float for floating-point array dtypes
# We do not do bounds checking for int scalars, but rather use the default
# NumPy behavior for casting in that case.
if ((dtypes == "all"
or dtypes == "numeric" and a.dtype in _numeric_dtypes
or dtypes == "integer" and a.dtype in _integer_dtypes
or dtypes == "integer_or_boolean" and a.dtype in _integer_or_boolean_dtypes
or dtypes == "boolean" and a.dtype in _boolean_dtypes
or dtypes == "floating" and a.dtype in _floating_dtypes
)
# bool is a subtype of int, which is why we avoid
# isinstance here.
and (a.dtype in _boolean_dtypes and type(s) == bool
or a.dtype in _integer_dtypes and type(s) == int
or a.dtype in _floating_dtypes and type(s) in [float, int]
)):
# Only test for no error
getattr(a, _op)(s)
else:
assert_raises(TypeError, lambda: getattr(a, _op)(s))
# Test array op array.
for _op in ops:
for x in _array_vals():
for y in _array_vals():
# See the promotion table in NEP 47 or the array
# API spec page on type promotion. Mixed kind
# promotion is not defined.
if (x.dtype == uint64 and y.dtype in [int8, int16, int32, int64]
or y.dtype == uint64 and x.dtype in [int8, int16, int32, int64]
or x.dtype in _integer_dtypes and y.dtype not in _integer_dtypes
or y.dtype in _integer_dtypes and x.dtype not in _integer_dtypes
or x.dtype in _boolean_dtypes and y.dtype not in _boolean_dtypes
or y.dtype in _boolean_dtypes and x.dtype not in _boolean_dtypes
or x.dtype in _floating_dtypes and y.dtype not in _floating_dtypes
or y.dtype in _floating_dtypes and x.dtype not in _floating_dtypes
):
assert_raises(TypeError, lambda: getattr(x, _op)(y))
# Ensure in-place operators only promote to the same dtype as the left operand.
elif (
_op.startswith("__i")
and result_type(x.dtype, y.dtype) != x.dtype
):
assert_raises(TypeError, lambda: getattr(x, _op)(y))
# Ensure only those dtypes that are required for every operator are allowed.
elif (dtypes == "all" and (x.dtype in _boolean_dtypes and y.dtype in _boolean_dtypes
or x.dtype in _numeric_dtypes and y.dtype in _numeric_dtypes)
or (dtypes == "numeric" and x.dtype in _numeric_dtypes and y.dtype in _numeric_dtypes)
or dtypes == "integer" and x.dtype in _integer_dtypes and y.dtype in _numeric_dtypes
or dtypes == "integer_or_boolean" and (x.dtype in _integer_dtypes and y.dtype in _integer_dtypes
or x.dtype in _boolean_dtypes and y.dtype in _boolean_dtypes)
or dtypes == "boolean" and x.dtype in _boolean_dtypes and y.dtype in _boolean_dtypes
or dtypes == "floating" and x.dtype in _floating_dtypes and y.dtype in _floating_dtypes
):
getattr(x, _op)(y)
else:
assert_raises(TypeError, lambda: getattr(x, _op)(y))
unary_op_dtypes = {
"__abs__": "numeric",
"__invert__": "integer_or_boolean",
"__neg__": "numeric",
"__pos__": "numeric",
}
for op, dtypes in unary_op_dtypes.items():
for a in _array_vals():
if (
dtypes == "numeric"
and a.dtype in _numeric_dtypes
or dtypes == "integer_or_boolean"
and a.dtype in _integer_or_boolean_dtypes
):
# Only test for no error
getattr(a, op)()
else:
assert_raises(TypeError, lambda: getattr(a, op)())
# Finally, matmul() must be tested separately, because it works a bit
# different from the other operations.
def _matmul_array_vals():
for a in _array_vals():
yield a
for d in _all_dtypes:
yield ones((3, 4), dtype=d)
yield ones((4, 2), dtype=d)
yield ones((4, 4), dtype=d)
# Scalars always error
for _op in ["__matmul__", "__rmatmul__", "__imatmul__"]:
for s in [1, 1.0, False]:
for a in _matmul_array_vals():
if (type(s) in [float, int] and a.dtype in _floating_dtypes
or type(s) == int and a.dtype in _integer_dtypes):
# Type promotion is valid, but @ is not allowed on 0-D
# inputs, so the error is a ValueError
assert_raises(ValueError, lambda: getattr(a, _op)(s))
else:
assert_raises(TypeError, lambda: getattr(a, _op)(s))
for x in _matmul_array_vals():
for y in _matmul_array_vals():
if (x.dtype == uint64 and y.dtype in [int8, int16, int32, int64]
or y.dtype == uint64 and x.dtype in [int8, int16, int32, int64]
or x.dtype in _integer_dtypes and y.dtype not in _integer_dtypes
or y.dtype in _integer_dtypes and x.dtype not in _integer_dtypes
or x.dtype in _floating_dtypes and y.dtype not in _floating_dtypes
or y.dtype in _floating_dtypes and x.dtype not in _floating_dtypes
or x.dtype in _boolean_dtypes
or y.dtype in _boolean_dtypes
):
assert_raises(TypeError, lambda: x.__matmul__(y))
assert_raises(TypeError, lambda: y.__rmatmul__(x))
assert_raises(TypeError, lambda: x.__imatmul__(y))
elif x.shape == () or y.shape == () or x.shape[1] != y.shape[0]:
assert_raises(ValueError, lambda: x.__matmul__(y))
assert_raises(ValueError, lambda: y.__rmatmul__(x))
if result_type(x.dtype, y.dtype) != x.dtype:
assert_raises(TypeError, lambda: x.__imatmul__(y))
else:
assert_raises(ValueError, lambda: x.__imatmul__(y))
else:
x.__matmul__(y)
y.__rmatmul__(x)
if result_type(x.dtype, y.dtype) != x.dtype:
assert_raises(TypeError, lambda: x.__imatmul__(y))
elif y.shape[0] != y.shape[1]:
# This one fails because x @ y has a different shape from x
assert_raises(ValueError, lambda: x.__imatmul__(y))
else:
x.__imatmul__(y)
def test_python_scalar_construtors():
    """bool()/int()/float()/index() work only on 0-D arrays of matching dtype."""
    b = asarray(False)
    i = asarray(0)
    f = asarray(0.0)

    # 0-D arrays convert to the corresponding Python scalar.
    assert bool(b) == False
    assert int(i) == 0
    assert float(f) == 0.0
    assert operator.index(i) == 0

    # bool/int/float should only be allowed on 0-D arrays.
    for convert, sample in [
        (bool, [False]),
        (int, [0]),
        (float, [0.0]),
        (operator.index, [0]),
    ]:
        assert_raises(TypeError, lambda c=convert, v=sample: c(asarray(v)))

    # bool/int/float should only be allowed on arrays of the corresponding
    # dtype.
    for convert, wrong in [
        (bool, i),
        (bool, f),
        (int, b),
        (int, f),
        (float, b),
        (float, i),
    ]:
        assert_raises(ValueError, lambda c=convert, a=wrong: c(a))
    assert_raises(TypeError, lambda: operator.index(b))
    assert_raises(TypeError, lambda: operator.index(f))
def test_device_property():
    """Arrays report device 'cpu'; any other device string is rejected."""
    a = ones((3, 4))
    assert a.device == 'cpu'

    # Moving/constructing on 'cpu' is a no-op that preserves the values.
    for same_device in (lambda: a.to_device('cpu'), lambda: asarray(a, device='cpu')):
        assert all(equal(same_device(), a))

    # Any other device is unsupported.
    assert_raises(ValueError, lambda: a.to_device('gpu'))
    assert_raises(ValueError, lambda: asarray(a, device='gpu'))
def test_array_properties():
    """.T is defined for exactly 2-D arrays; .mT swaps the last two axes."""
    a = ones((1, 2, 3))
    b = ones((2, 3))

    # .T is restricted to 2-D arrays.
    assert_raises(ValueError, lambda: a.T)
    assert isinstance(b.T, Array)
    assert b.T.shape == (3, 2)

    # .mT works for any array of rank >= 2.
    for arr, expected_shape in ((a, (1, 3, 2)), (b, (3, 2))):
        transposed = arr.mT
        assert isinstance(transposed, Array)
        assert transposed.shape == expected_shape
|
/* Examples */
// Animated circular progress indicator for the ".forth.circle" element.
// Fix: the original options object declared `lineCap: 'round'` twice
// (a duplicate key — a SyntaxError in strict mode and a lint error
// otherwise); only one declaration is kept.
(function($) {
    var c4 = $('.forth.circle');
    c4.circleProgress({
        startAngle: -Math.PI / 2 * 9,
        value: 0.5,
        lineCap: 'round',
        emptyFill: 'rgba(204, 204, 204,0.2)',
        fill: {color: '#6259ca'}
    });
    // Animate the gauge: 50% -> 70% -> 100% -> back to 50%.
    setTimeout(function() { c4.circleProgress('value', 0.7); }, 1000);
    setTimeout(function() { c4.circleProgress('value', 1.0); }, 1100);
    setTimeout(function() { c4.circleProgress('value', 0.5); }, 2100);
})(jQuery);
|
/***** includes *****/
#include "lfds720_freelist_smrg_internal.h"
/***** private prototypes *****/
void lfds720_freelist_smrg_internal_state_cleaned_callback( struct lfds720_smrg_thread_state *smrgts,
struct lfds720_smrg_allocation_state *smrgas,
void *allocation,
void *smr_user_state,
void *allocation_user_state );
/****************************************************************************/
void lfds720_freelist_smrg_cleanup( struct lfds720_freelist_smrg_state *fsgs,
                                    void (*element_cleanup_callback)( struct lfds720_freelist_smrg_state *fsgs, struct lfds720_freelist_smrg_element *fsge ),
                                    void (*state_cleanup_callback)( struct lfds720_freelist_smrg_state *fsgs ),
                                    struct lfds720_smrg_thread_state *smrgts )
{
  LFDS720_PAL_ASSERT( fsgs != NULL );
  // TRD : element_cleanup_callback can be NULL
  // TRD : state_cleanup_callback can be NULL
  LFDS720_PAL_ASSERT( smrgts != NULL );
  // TRD : load barrier so this thread observes the freelist's current contents
  LFDS720_MISC_BARRIER_LOAD;
  // TRD : stash the callbacks; they are invoked later from the SMR cleaned callback
  fsgs->state_cleanup_callback = state_cleanup_callback;
  fsgs->element_cleanup_callback = element_cleanup_callback;
  // TRD : submit the freelist SMR state for cleanup - when it emerges, we're free to deallocate the whole list
  lfds720_smrg_submit_dirty_allocation( fsgs->smrgs, smrgts, &fsgs->smrgas, lfds720_freelist_smrg_internal_state_cleaned_callback, fsgs, NULL );
  return;
}
/****************************************************************************/
#pragma warning( disable : 4100 )  // MSVC : unreferenced formal parameter - several args are assert-only here

// TRD : invoked by SMR once the freelist state is safe to tear down
//       (submitted via lfds720_freelist_smrg_cleanup)
void lfds720_freelist_smrg_internal_state_cleaned_callback( struct lfds720_smrg_thread_state *smrgts,
                                                            struct lfds720_smrg_allocation_state *smrgas,
                                                            void *allocation,
                                                            void *smr_user_state,
                                                            void *allocation_user_state )
{
  struct lfds720_freelist_smrg_element
    *fsge,
    *fsge_temp;
  struct lfds720_freelist_smrg_state
    *fsgs;
  LFDS720_PAL_ASSERT( smrgts != NULL );
  LFDS720_PAL_ASSERT( smrgas != NULL );
  LFDS720_PAL_ASSERT( allocation != NULL );
  LFDS720_PAL_ASSERT( smr_user_state == NULL );
  LFDS720_PAL_ASSERT( allocation_user_state == NULL );
  // TRD : the "allocation" handed back by SMR is the freelist state itself
  fsgs = (struct lfds720_freelist_smrg_state *) allocation;
  if( fsgs->element_cleanup_callback != NULL )
  {
    // TRD : walk the list, advancing BEFORE the callback runs, since the
    //       callback may free the element out from under us
    fsge = fsgs->top;
    while( fsge != NULL )
    {
      fsge_temp = fsge;
      fsge = fsge->next;
      fsgs->element_cleanup_callback( fsgs, fsge_temp );
    }
  }
  if( fsgs->state_cleanup_callback != NULL )
    fsgs->state_cleanup_callback( fsgs );
  return;
}

#pragma warning( default : 4100 )
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,too-many-locals,unused-variable,unused-argument
"""cuda batch_matmul operators"""
import tvm
from tvm import autotvm
from tvm import te
from tvm.contrib import cublas
from tvm.autotvm.task.space import SplitEntity, OtherOptionEntity
from .. import nn, generic
from ..utils import traverse_inline, get_const_tuple, get_max_power2_factor
from .tensor_intrin import dp4a
@autotvm.register_topi_compute("batch_matmul.cuda")
def batch_matmul(cfg, x, y, out_shape=None, out_dtype=None, transpose_a=False, transpose_b=True):
    """Compute batch matrix multiplication of `x` and `y`.
    Both `x` and `y` can be transposed. For legacy reason, we use NT format
    (transpose_a=False, transpose_b=True) by default.
    Parameters
    ----------
    cfg : ConfigSpace
        Autotvm tuning space config file.
    x : tvm.te.Tensor
        3-D with shape [batch, M, K] or [batch, K, M].
    y : tvm.te.Tensor
        3-D with shape [batch, K, N] or [batch, N, K].
    out_shape : List[Optional]
        Explicit intended output shape of the computation. Can be useful in cases
        with dynamic input shapes.
    out_dtype : Optional[str]
        Specifies the output data type for mixed precision batch matmul.
    transpose_a : Optional[bool] = False
        Whether the first tensor is in transposed format.
    transpose_b : Optional[bool] = True
        Whether the second tensor is in transposed format.
    Returns
    -------
    output : tvm.te.Tensor
        3-D with shape [batch, M, N]
    """
    # Thin wrapper: the generic topi compute handles all transpose
    # combinations; `cfg` is unused here but required by the autotvm API.
    return nn.batch_matmul(
        x,
        y,
        oshape=out_shape,
        out_dtype=out_dtype,
        transpose_a=transpose_a,
        transpose_b=transpose_b,
    )
@autotvm.register_topi_schedule("batch_matmul.cuda")
def schedule_batch_matmul(cfg, outs):
    """Schedule for batch_matmul
    Parameters
    ----------
    outs: Array of Tensor
        The computation graph description of batch_matmul
        in the format of an array of tensors.
    Returns
    -------
    s: Schedule
        The computation schedule for the op.
    """
    outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs
    s = te.create_schedule([x.op for x in outs])
    def _schedule(cfg, op):
        # Tile and bind one batch_matmul op: stage A and B through shared
        # memory and registers ("local"), accumulate C in registers.
        C = op.output(0)
        A, B = s[C].op.input_tensors
        _, M, N = get_const_tuple(C.shape)
        AA = s.cache_read(A, "shared", [C])
        AL = s.cache_read(AA, "local", [C])
        BB = s.cache_read(B, "shared", [C])
        BL = s.cache_read(BB, "local", [C])
        CC = s.cache_write(C, "local")
        if op not in s.outputs:
            # A fused epilogue follows the matmul: inline it and schedule
            # the final output tensor instead.
            s[C].compute_inline()
            C = s.outputs[0].output(0)
        b, y, x = s[C].op.axis
        (k,) = s[CC].op.reduce_axis
        # Tunable knobs: 3-way split on M and N (block/thread/inner),
        # 2-way split on K, plus unrolling options.
        cfg.define_split("tile_y", y, num_outputs=3)
        cfg.define_split("tile_x", x, num_outputs=3)
        cfg.define_split("tile_k", k, num_outputs=2)
        cfg.define_knob("auto_unroll_max_step", [8, 16, 32, 64])
        target = tvm.target.Target.current()
        if target.kind.name in ["nvptx", "rocm"]:
            # llvm-based backends cannot do non-explicit unrolling
            cfg.define_knob("unroll_explicit", [1])
        else:
            cfg.define_knob("unroll_explicit", [0, 1])
        if cfg.is_fallback:
            # No tuning log available: pick power-of-two tiles capped at 64,
            # with at most 8 threads per tiled dimension.
            y_bn = get_max_power2_factor(M, 64)
            x_bn = get_max_power2_factor(N, 64)
            y_nthreads = min(y_bn, 8)
            x_nthreads = min(x_bn, 8)
            cfg["tile_x"] = SplitEntity([-1, x_nthreads, x_bn // x_nthreads])
            cfg["tile_y"] = SplitEntity([-1, y_nthreads, y_bn // y_nthreads])
            cfg["tile_k"] = SplitEntity([-1, 8])
            cfg["auto_unroll_max_step"] = OtherOptionEntity(16)
        # Decompose the output into (block, thread, inner) per dimension;
        # the batch axis maps to blockIdx.z.
        by, ty, yi = cfg["tile_y"].apply(s, C, y)
        bx, tx, xi = cfg["tile_x"].apply(s, C, x)
        thread_x = te.thread_axis("threadIdx.x")
        thread_y = te.thread_axis("threadIdx.y")
        s[C].reorder(b, by, bx, ty, tx, yi, xi)
        s[C].bind(b, te.thread_axis("blockIdx.z"))
        s[C].bind(by, te.thread_axis("blockIdx.y"))
        s[C].bind(bx, te.thread_axis("blockIdx.x"))
        s[C].bind(ty, thread_y)
        s[C].bind(tx, thread_x)
        s[C].pragma(yi, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
        s[C].pragma(yi, "unroll_explicit", cfg["unroll_explicit"].val)
        # Register-level accumulation with a split reduction axis.
        s[CC].compute_at(s[C], tx)
        _, yi, xi = s[CC].op.axis
        ko, ki = cfg["tile_k"].apply(s, CC, k)
        s[CC].reorder(ko, ki, yi, xi)
        s[CC].pragma(ki, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
        s[CC].pragma(ki, "unroll_explicit", cfg["unroll_explicit"].val)
        # Shared loads happen per outer-K iteration; register loads per inner-K.
        s[AA].compute_at(s[CC], ko)
        s[AL].compute_at(s[CC], ki)
        s[BB].compute_at(s[CC], ko)
        s[BL].compute_at(s[CC], ki)
        # Cooperative load of an A tile into shared memory.
        _, y, k = s[AA].op.axis
        ty, yi = s[AA].split(y, nparts=cfg["tile_y"].size[1])
        tx, ki = s[AA].split(k, nparts=cfg["tile_x"].size[1])
        s[AA].reorder(ty, tx, yi, ki)
        s[AA].bind(ty, thread_y)
        s[AA].bind(tx, thread_x)
        s[AA].pragma(yi, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
        s[AA].pragma(yi, "unroll_explicit", cfg["unroll_explicit"].val)
        # Cooperative load of a B tile into shared memory.
        _, x, k = s[BB].op.axis
        ty, xi = s[BB].split(x, nparts=cfg["tile_y"].size[1])
        tx, ki = s[BB].split(k, nparts=cfg["tile_x"].size[1])
        s[BB].bind(ty, thread_y)
        s[BB].bind(tx, thread_x)
        s[BB].reorder(ty, tx, xi, ki)
        s[BB].pragma(xi, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
        s[BB].pragma(xi, "unroll_explicit", cfg["unroll_explicit"].val)
    def _callback(op):
        if "batch_matmul" in op.tag:
            _schedule(cfg, op)
    traverse_inline(s, outs[0].op, _callback)
    return s
@autotvm.register_topi_compute("batch_matmul_cublas.cuda")
def batch_matmul_cublas(
    cfg, x, y, out_shape=None, out_dtype=None, transpose_a=False, transpose_b=True
):
    """Compute batch matrix multiplication of `x` and `y` via cuBLAS.

    Both `x` and `y` can be transposed. For legacy reason, we use NT format
    (transpose_a=False, transpose_b=True) by default.

    Parameters
    ----------
    cfg : ConfigSpace
        Autotvm tuning space config file.
    x : tvm.te.Tensor
        3-D with shape [batch, M, K] or [batch, K, M].
    y : tvm.te.Tensor
        3-D with shape [batch, K, N] or [batch, N, K].
    out_shape : List[Optional]
        Explicit intended output shape of the computation. Can be useful in cases
        with dynamic input shapes.
    out_dtype : Optional[str]
        Specifies the output data type for mixed precision batch matmul.
    transpose_a : Optional[bool] = False
        Whether the first tensor is in transposed format.
    transpose_b : Optional[bool] = True
        Whether the second tensor is in transposed format.

    Returns
    -------
    output : tvm.te.Tensor
        3-D with shape [batch, M, N]
    """
    # Recover the logical M from x and (batch, N, K) from y, honouring the
    # transpose flags.
    _, x_dim1, x_dim2 = get_const_tuple(x.shape)
    m = x_dim2 if transpose_a else x_dim1
    b, y_dim1, y_dim2 = get_const_tuple(y.shape)
    n, k = (y_dim1, y_dim2) if transpose_b else (y_dim2, y_dim1)
    # Record the flop count only when every dimension is static.
    if all(isinstance(dim, int) for dim in (b, m, n, k)):
        cfg.add_flop(2 * b * m * n * k)
    return cublas.batch_matmul(x, y, transa=transpose_a, transb=transpose_b)
@autotvm.register_topi_schedule("batch_matmul_cublas.cuda")
def schedule_batch_matmul_cublas(_, outs):
    """Schedule batch_matmul operator using CUBLAS"""
    # cuBLAS calls are extern ops, so the generic extern schedule suffices.
    schedule = generic.schedule_extern(outs)
    return schedule
@autotvm.register_topi_compute("batch_matmul_int8.cuda")
def batch_matmul_int8(
    cfg, x, y, out_shape=None, out_dtype=None, transpose_a=False, transpose_b=True
):
    """Batch Matmul operator for int8 on CUDA.
    Parameters
    ----------
    cfg : ConfigSpace
        Autotvm tuning space config file.
    x : tvm.te.Tensor
        3-D with shape [batch, M, K] or [batch, K, M].
    y : tvm.te.Tensor
        3-D with shape [batch, K, N] or [batch, N, K].
    out_shape : List[Optional]
        Explicit intended output shape of the computation. Can be useful in cases
        with dynamic input shapes.
    out_dtype : Optional[str]
        Specifies the output data type for mixed precision batch matmul.
    transpose_a : Optional[bool] = False
        Whether the first tensor is in transposed format.
    transpose_b : Optional[bool] = True
        Whether the second tensor is in transposed format.
    Returns
    -------
    output : tvm.te.Tensor
        3-D with shape [batch, M, N]
    """
    del out_shape
    # TODO(jcf94): Deal with different transpose combinations
    assert not transpose_a and transpose_b
    if out_dtype is None:
        out_dtype = x.dtype
    x_shape = get_const_tuple(x.shape)
    y_shape = get_const_tuple(y.shape)
    assert len(x_shape) == 3 and len(y_shape) == 3, "only support 3-dim batch_matmul"
    XB, M, XK = x.shape
    YB, N, YK = y.shape
    # A batch dim of 1 on either side is broadcast against the other.
    assert XB == YB or XB == 1 or YB == 1, "batch dimension doesn't match"
    assert XK == YK, "shapes of x and y is inconsistent"
    nB = tvm.te.max(XB, YB)
    # Round K up to a multiple of 4 so dp4a can consume 4 int8 lanes at once.
    nK = ((XK + 3) // 4) * 4
    reduce_k = te.reduce_axis((0, nK), name="k")
    # pad for _dp4a vectorize
    pad_x = te.compute(
        (XB, M, nK),
        lambda b, i, j: tvm.te.if_then_else(
            j >= XK, tvm.runtime.convert(0).astype(x.dtype), x[b, i, j]
        ),
    )
    pad_y = te.compute(
        (YB, N, nK),
        lambda b, i, j: tvm.te.if_then_else(
            j >= YK, tvm.runtime.convert(0).astype(y.dtype), y[b, i, j]
        ),
    )
    # NT matmul; index 0 on the batch axis when that side is broadcast.
    out = te.compute(
        (nB, M, N),
        lambda b, i, j: te.sum(
            pad_x[b if XB != 1 else 0, i, reduce_k].astype(out_dtype)
            * pad_y[b if YB != 1 else 0, j, reduce_k].astype(out_dtype),
            axis=[reduce_k],
        ),
        tag="batch_matmul_int8",
    )
    # NOTE(review): flop count uses XB (not the broadcast nB) and the padded
    # nK, so it slightly misstates work when broadcasting or when XK % 4 != 0
    # — confirm this is intentional.
    cfg.add_flop(XB * M * N * nK * 2)
    return out
@autotvm.register_topi_schedule("batch_matmul_int8.cuda")
def schedule_batch_matmul_int8(cfg, outs):
    """Batch Matmul schedule for int8 on CUDA"""
    if isinstance(outs, te.tensor.Tensor):
        outs = [outs]
    sch = te.create_schedule([out.op for out in outs])

    def _callback(op):
        # Only ops produced by batch_matmul_int8 get the int8 schedule.
        if "batch_matmul_int8" in op.tag:
            _schedule_batch_matmul_int8(cfg, sch, op.output(0))

    traverse_inline(sch, outs[0].op, _callback)
    return sch
# dp4a tensor intrinsic; the three arguments are storage scopes — both
# operands staged in shared memory, accumulator in "local" (registers).
_dp4a = dp4a("shared", "shared", "local")
def _schedule_batch_matmul_int8(cfg, s, output):
    """Tile, dp4a-tensorize and thread-bind one batch_matmul_int8 op in-place."""
    input_x, input_y = s[output].op.input_tensors
    B, M, K = get_const_tuple(input_x.shape)
    _, N, _ = get_const_tuple(input_y.shape)
    # Vector width for the shared-memory loads; the compute definition
    # already padded K to a multiple of 4, so the assert should always hold.
    k_factor = 4
    assert K % k_factor == 0, "Input dimension must divide {}".format(k_factor)
    if K % 16 == 0:
        k_factor = 16
    # 4-way splits (block / vthread / thread / inner) on batch, M and N;
    # 2-way split on the (already dp4a-grouped) reduction axis.
    cfg.define_split("tile_f", B, num_outputs=4)
    cfg.define_split("tile_m", M, num_outputs=4)
    cfg.define_split("tile_n", N, num_outputs=4)
    cfg.define_split("tile_k", K // k_factor, num_outputs=2)
    cfg.define_knob("auto_unroll_max_step", [0, 256, 512, 1024])
    batch_matmul_op = s.outputs[0]
    s[input_x].compute_inline()
    s[input_y].compute_inline()
    x_cache = s.cache_read(input_x, "shared", [batch_matmul_op])
    y_cache = s.cache_read(input_y, "shared", [batch_matmul_op])
    batch_matmul_cache = s.cache_write(batch_matmul_op.output(0), "local")
    # tile reduce axis
    ko = batch_matmul_cache.op.reduce_axis[0]
    ko, ki = s[batch_matmul_cache].split(ko, factor=4)
    ko, kt = cfg["tile_k"].apply(s, batch_matmul_cache, ko)
    # dp4a tensorize
    s[batch_matmul_cache].tensorize(ki, _dp4a)
    # tile axis
    f, m, n = batch_matmul_op.axis
    # kernel_scope is a size-1 outer axis used only to attach unroll pragmas.
    kernel_scope, f = s[batch_matmul_op].split(f, nparts=1)
    bf, vf, tf, fi = cfg["tile_f"].apply(s, batch_matmul_op, f)
    bm, vm, tm, mi = cfg["tile_m"].apply(s, batch_matmul_op, m)
    bn, vn, tn, ni = cfg["tile_n"].apply(s, batch_matmul_op, n)
    s[batch_matmul_op].reorder(bf, bm, bn, vf, vm, vn, tf, tm, tn, fi, mi, ni)
    # bind axis
    s[batch_matmul_op].bind(bf, tvm.te.thread_axis("blockIdx.z"))
    s[batch_matmul_op].bind(bm, tvm.te.thread_axis("blockIdx.y"))
    s[batch_matmul_op].bind(bn, tvm.te.thread_axis("blockIdx.x"))
    s[batch_matmul_op].bind(vf, tvm.te.thread_axis("vthread"))
    s[batch_matmul_op].bind(vm, tvm.te.thread_axis("vthread"))
    s[batch_matmul_op].bind(vn, tvm.te.thread_axis("vthread"))
    s[batch_matmul_op].bind(tf, tvm.te.thread_axis("threadIdx.z"))
    s[batch_matmul_op].bind(tm, tvm.te.thread_axis("threadIdx.y"))
    s[batch_matmul_op].bind(tn, tvm.te.thread_axis("threadIdx.x"))
    # cache compute at
    s[batch_matmul_cache].compute_at(s[batch_matmul_op], tn)
    fo, mo, no = batch_matmul_cache.op.axis[:3]
    s[batch_matmul_cache].reorder(ko, kt, fo, mo, no, ki)
    # for load in [splited_x_op, splited_y_op]
    for load in [x_cache, y_cache]:
        # Vectorize the innermost k_factor elements, then spread the
        # remaining iterations across the thread block.
        s[load].compute_at(s[batch_matmul_cache], ko)
        outer, inner = s[load].split(s[load].op.axis[-1], factor=k_factor)
        s[load].vectorize(inner)
        fused = s[load].op.axis[:-1] + [outer]
        fused = s[load].fuse(*fused)
        fused, tx = s[load].split(fused, factor=cfg["tile_n"].size[2])
        fused, ty = s[load].split(fused, factor=cfg["tile_m"].size[2])
        fused, tz = s[load].split(fused, factor=cfg["tile_f"].size[2])
        s[load].bind(tz, tvm.te.thread_axis("threadIdx.z"))
        s[load].bind(ty, tvm.te.thread_axis("threadIdx.y"))
        s[load].bind(tx, tvm.te.thread_axis("threadIdx.x"))
    # max unroll
    s[batch_matmul_op].pragma(kernel_scope, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
    s[batch_matmul_op].pragma(kernel_scope, "unroll_explicit", False)
    return s
|
"""
Django settings for app project.
Generated by 'django-admin startproject' using Django 2.1.15.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3r*=!7l=avc=^y9j$zndxnu%6zsqtup*5n%c3mei0b-)4$2rwr'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'link_shortener'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': os.environ.get('DB_HOST'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
|
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_CANCELABLE_TASK_H_
#define V8_CANCELABLE_TASK_H_
#include <map>
#include "include/v8-platform.h"
#include "src/base/atomic-utils.h"
#include "src/base/macros.h"
#include "src/base/platform/condition-variable.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
class Cancelable;
class Isolate;
// Keeps track of cancelable tasks. It is possible to register and remove tasks
// from any fore- and background task/thread.
class V8_EXPORT_PRIVATE CancelableTaskManager {
 public:
  CancelableTaskManager();
  // Registers a new cancelable {task}. Returns the unique {id} of the task that
  // can be used to try to abort a task by calling {Abort}.
  // Must not be called after CancelAndWait.
  uint32_t Register(Cancelable* task);
  // Try to abort running a task identified by {id}. The possible outcomes are:
  // (1) The task is already finished running or was canceled before and
  //     thus has been removed from the manager.
  // (2) The task is currently running and cannot be canceled anymore.
  // (3) The task is not yet running (or finished) so it is canceled and
  //     removed.
  //
  enum TryAbortResult { kTaskRemoved, kTaskRunning, kTaskAborted };
  TryAbortResult TryAbort(uint32_t id);
  // Cancels all remaining registered tasks and waits for tasks that are
  // already running. This disallows subsequent Register calls.
  void CancelAndWait();
 private:
  // Only called by {Cancelable} destructor. The task is done with executing,
  // but needs to be removed.
  void RemoveFinishedTask(uint32_t id);
  // To mitigate the ABA problem, the api refers to tasks through an id.
  // Incremented on every {Register} call to produce unique ids.
  uint32_t task_id_counter_;
  // A set of cancelable tasks that are currently registered.
  std::map<uint32_t, Cancelable*> cancelable_tasks_;
  // Mutex and condition variable enabling concurrent register and removing, as
  // well as waiting for background tasks on {CancelAndWait}.
  base::ConditionVariable cancelable_tasks_barrier_;
  base::Mutex mutex_;
  // Set by {CancelAndWait}; once true, further {Register} calls are disallowed.
  bool canceled_;
  friend class Cancelable;
  DISALLOW_COPY_AND_ASSIGN(CancelableTaskManager);
};
// Base class for tasks that can be canceled through a {CancelableTaskManager}.
class V8_EXPORT_PRIVATE Cancelable {
 public:
  explicit Cancelable(CancelableTaskManager* parent);
  virtual ~Cancelable();
  // Never invoke after handing over the task to the platform! The reason is
  // that {Cancelable} is used in combination with {v8::Task} and handed to
  // a platform. This step transfers ownership to the platform, which destroys
  // the task after running it. Since the exact time is not known, we cannot
  // access the object after handing it to a platform.
  uint32_t id() { return id_; }
 protected:
  // Atomically moves the task from {kWaiting} to {kRunning}; returns false if
  // the task was already canceled or is running.
  bool TryRun() { return status_.TrySetValue(kWaiting, kRunning); }
  bool IsRunning() { return status_.Value() == kRunning; }
  // Number of failed cancellation attempts so far (see {cancel_counter_}).
  intptr_t CancelAttempts() { return cancel_counter_.Value(); }
 private:
  // Identifies the state a cancelable task is in:
  // |kWaiting|: The task is scheduled and waiting to be executed. {TryRun} will
  //   succeed.
  // |kCanceled|: The task has been canceled. {TryRun} will fail.
  // |kRunning|: The task is currently running and cannot be canceled anymore.
  enum Status {
    kWaiting,
    kCanceled,
    kRunning,
  };
  // Use {CancelableTaskManager} to abort a task that has not yet been
  // executed.
  bool Cancel() {
    if (status_.TrySetValue(kWaiting, kCanceled)) {
      return true;
    }
    // Too late to cancel: record the attempt for {CancelAttempts}.
    cancel_counter_.Increment(1);
    return false;
  }
  CancelableTaskManager* parent_;
  base::AtomicValue<Status> status_;
  uint32_t id_;
  // The counter is incremented for failing tries to cancel a task. This can be
  // used by the task itself as an indication how often external entities tried
  // to abort it.
  base::AtomicNumber<intptr_t> cancel_counter_;
  friend class CancelableTaskManager;
  DISALLOW_COPY_AND_ASSIGN(Cancelable);
};
// Multiple inheritance can be used because Task is a pure interface.
class CancelableTask : public Cancelable, public Task {
 public:
  explicit CancelableTask(Isolate* isolate);
  // Task overrides.
  // Runs the subclass body only if the task was not canceled beforehand.
  void Run() final {
    if (TryRun()) {
      RunInternal();
    }
  }
  // Implemented by subclasses with the actual work to perform.
  virtual void RunInternal() = 0;
  Isolate* isolate() { return isolate_; }
 private:
  Isolate* isolate_;
  DISALLOW_COPY_AND_ASSIGN(CancelableTask);
};
// Multiple inheritance can be used because IdleTask is a pure interface.
class CancelableIdleTask : public Cancelable, public IdleTask {
 public:
  explicit CancelableIdleTask(Isolate* isolate);
  // IdleTask overrides.
  // Runs the subclass body only if the task was not canceled beforehand.
  void Run(double deadline_in_seconds) final {
    if (TryRun()) {
      RunInternal(deadline_in_seconds);
    }
  }
  // Implemented by subclasses; should respect {deadline_in_seconds}.
  virtual void RunInternal(double deadline_in_seconds) = 0;
  Isolate* isolate() { return isolate_; }
 private:
  Isolate* isolate_;
  DISALLOW_COPY_AND_ASSIGN(CancelableIdleTask);
};
} // namespace internal
} // namespace v8
#endif // V8_CANCELABLE_TASK_H_
|
"""Support for sending data to an Influx database."""
from __future__ import annotations
from contextlib import suppress
from dataclasses import dataclass
import logging
import math
import queue
import threading
import time
from typing import Any, Callable
from influxdb import InfluxDBClient, exceptions
from influxdb_client import InfluxDBClient as InfluxDBClientV2
from influxdb_client.client.write_api import ASYNCHRONOUS, SYNCHRONOUS
from influxdb_client.rest import ApiException
import requests.exceptions
import urllib3.exceptions
import voluptuous as vol
from homeassistant.const import (
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TIMEOUT,
CONF_UNIT_OF_MEASUREMENT,
CONF_URL,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from homeassistant.helpers import event as event_helper, state as state_helper
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_values import EntityValues
from homeassistant.helpers.entityfilter import (
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
convert_include_exclude_filter,
)
from .const import (
API_VERSION_2,
BATCH_BUFFER_SIZE,
BATCH_TIMEOUT,
CATCHING_UP_MESSAGE,
CLIENT_ERROR_V1,
CLIENT_ERROR_V2,
CODE_INVALID_INPUTS,
COMPONENT_CONFIG_SCHEMA_CONNECTION,
CONF_API_VERSION,
CONF_BUCKET,
CONF_COMPONENT_CONFIG,
CONF_COMPONENT_CONFIG_DOMAIN,
CONF_COMPONENT_CONFIG_GLOB,
CONF_DB_NAME,
CONF_DEFAULT_MEASUREMENT,
CONF_HOST,
CONF_IGNORE_ATTRIBUTES,
CONF_MEASUREMENT_ATTR,
CONF_ORG,
CONF_OVERRIDE_MEASUREMENT,
CONF_PASSWORD,
CONF_PATH,
CONF_PORT,
CONF_PRECISION,
CONF_RETRY_COUNT,
CONF_SSL,
CONF_SSL_CA_CERT,
CONF_TAGS,
CONF_TAGS_ATTRIBUTES,
CONF_TOKEN,
CONF_USERNAME,
CONF_VERIFY_SSL,
CONNECTION_ERROR,
DEFAULT_API_VERSION,
DEFAULT_HOST_V2,
DEFAULT_MEASUREMENT_ATTR,
DEFAULT_SSL_V2,
DOMAIN,
EVENT_NEW_STATE,
INFLUX_CONF_FIELDS,
INFLUX_CONF_MEASUREMENT,
INFLUX_CONF_ORG,
INFLUX_CONF_STATE,
INFLUX_CONF_TAGS,
INFLUX_CONF_TIME,
INFLUX_CONF_VALUE,
QUERY_ERROR,
QUEUE_BACKLOG_SECONDS,
RE_DECIMAL,
RE_DIGIT_TAIL,
RESUMED_MESSAGE,
RETRY_DELAY,
RETRY_INTERVAL,
RETRY_MESSAGE,
TEST_QUERY_V1,
TEST_QUERY_V2,
TIMEOUT,
WRITE_ERROR,
WROTE_MESSAGE,
)
_LOGGER = logging.getLogger(__name__)
def create_influx_url(conf: dict) -> dict:
    """Build URL used from config inputs and default when necessary."""
    if conf[CONF_API_VERSION] == API_VERSION_2:
        # Fill in V2 defaults only where the user supplied nothing.
        conf.setdefault(CONF_SSL, DEFAULT_SSL_V2)
        conf.setdefault(CONF_HOST, DEFAULT_HOST_V2)

        scheme = "https" if conf[CONF_SSL] else "http"
        url = f"{scheme}://{conf[CONF_HOST]}"
        if CONF_PORT in conf:
            url = f"{url}:{conf[CONF_PORT]}"
        if CONF_PATH in conf:
            url = f"{url}{conf[CONF_PATH]}"
        conf[CONF_URL] = url

    return conf
def validate_version_specific_config(conf: dict) -> dict:
    """Ensure correct config fields are provided based on API version used."""
    is_v2 = conf[CONF_API_VERSION] == API_VERSION_2

    # V2 requires a token; V1-only credentials are rejected under V2.
    if is_v2 and CONF_TOKEN not in conf:
        raise vol.Invalid(
            f"{CONF_TOKEN} and {CONF_BUCKET} are required when {CONF_API_VERSION} is {API_VERSION_2}"
        )
    if is_v2 and CONF_USERNAME in conf:
        raise vol.Invalid(
            f"{CONF_USERNAME} and {CONF_PASSWORD} are only allowed when {CONF_API_VERSION} is {DEFAULT_API_VERSION}"
        )
    # Conversely, V2-only credentials are rejected under V1.
    if not is_v2 and CONF_TOKEN in conf:
        raise vol.Invalid(
            f"{CONF_TOKEN} and {CONF_BUCKET} are only allowed when {CONF_API_VERSION} is {API_VERSION_2}"
        )
    return conf
# Per-entity customization: override the measurement name and/or ignore
# specific attributes for a single entity.
_CUSTOMIZE_ENTITY_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string,
        vol.Optional(CONF_IGNORE_ATTRIBUTES): vol.All(cv.ensure_list, [cv.string]),
    }
)
# Options shared by both API versions, layered on the include/exclude
# entity-filter schema.
_INFLUX_BASE_SCHEMA = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
    {
        vol.Optional(CONF_RETRY_COUNT, default=0): cv.positive_int,
        vol.Optional(CONF_DEFAULT_MEASUREMENT): cv.string,
        vol.Optional(CONF_MEASUREMENT_ATTR, default=DEFAULT_MEASUREMENT_ATTR): vol.In(
            ["unit_of_measurement", "domain__device_class", "entity_id"]
        ),
        vol.Optional(CONF_OVERRIDE_MEASUREMENT): cv.string,
        vol.Optional(CONF_TAGS, default={}): vol.Schema({cv.string: cv.string}),
        vol.Optional(CONF_TAGS_ATTRIBUTES, default=[]): vol.All(
            cv.ensure_list, [cv.string]
        ),
        vol.Optional(CONF_IGNORE_ATTRIBUTES, default=[]): vol.All(
            cv.ensure_list, [cv.string]
        ),
        vol.Optional(CONF_COMPONENT_CONFIG, default={}): vol.Schema(
            {cv.entity_id: _CUSTOMIZE_ENTITY_SCHEMA}
        ),
        vol.Optional(CONF_COMPONENT_CONFIG_GLOB, default={}): vol.Schema(
            {cv.string: _CUSTOMIZE_ENTITY_SCHEMA}
        ),
        vol.Optional(CONF_COMPONENT_CONFIG_DOMAIN, default={}): vol.Schema(
            {cv.string: _CUSTOMIZE_ENTITY_SCHEMA}
        ),
    }
)
# Full schema: connection options, then version-specific validation, then
# URL construction (the latter two are the validator functions above).
INFLUX_SCHEMA = vol.All(
    _INFLUX_BASE_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION),
    validate_version_specific_config,
    create_influx_url,
)
CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: INFLUX_SCHEMA},
    extra=vol.ALLOW_EXTRA,
)
def _generate_event_to_json(conf: dict) -> Callable[[dict], dict | None]:
    """Build event to json converter and add to config.

    Returns a closure that maps a state-changed event to the dict InfluxDB
    expects, or None when the event should be skipped.  (Annotation fixed:
    the converter returns a dict/None, not a str.)
    """
    entity_filter = convert_include_exclude_filter(conf)
    tags = conf.get(CONF_TAGS)
    tags_attributes = conf.get(CONF_TAGS_ATTRIBUTES)
    default_measurement = conf.get(CONF_DEFAULT_MEASUREMENT)
    measurement_attr = conf.get(CONF_MEASUREMENT_ATTR)
    override_measurement = conf.get(CONF_OVERRIDE_MEASUREMENT)
    global_ignore_attributes = set(conf[CONF_IGNORE_ATTRIBUTES])
    # Lookup of per-entity overrides by exact id, domain, and glob pattern.
    component_config = EntityValues(
        conf[CONF_COMPONENT_CONFIG],
        conf[CONF_COMPONENT_CONFIG_DOMAIN],
        conf[CONF_COMPONENT_CONFIG_GLOB],
    )
    def event_to_json(event: dict) -> dict | None:
        """Convert event into json in format Influx expects."""
        # Drop events without a usable state or filtered-out entities.
        state = event.data.get(EVENT_NEW_STATE)
        if (
            state is None
            or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
            or not entity_filter(state.entity_id)
        ):
            return
        try:
            # Directly numeric state -> float "value" field only.
            _include_state = _include_value = False
            _state_as_value = float(state.state)
            _include_value = True
        except ValueError:
            try:
                # Mappable state (e.g. on/off) -> keep both string and value.
                _state_as_value = float(state_helper.state_as_number(state))
                _include_state = _include_value = True
            except ValueError:
                # Not numeric at all -> keep only the raw state string.
                _include_state = True
        include_uom = True
        include_dc = True
        entity_config = component_config.get(state.entity_id)
        measurement = entity_config.get(CONF_OVERRIDE_MEASUREMENT)
        # Measurement precedence: per-entity override > global override >
        # configured measurement attribute > default > entity_id.
        if measurement in (None, ""):
            if override_measurement:
                measurement = override_measurement
            else:
                if measurement_attr == "entity_id":
                    measurement = state.entity_id
                elif measurement_attr == "domain__device_class":
                    device_class = state.attributes.get("device_class")
                    if device_class is None:
                        # This entity doesn't have a device_class set, use only domain
                        measurement = state.domain
                    else:
                        measurement = f"{state.domain}__{device_class}"
                        # device_class is encoded in the measurement name;
                        # don't duplicate it as a field below.
                        include_dc = False
                else:
                    measurement = state.attributes.get(measurement_attr)
                    if measurement in (None, ""):
                        if default_measurement:
                            measurement = default_measurement
                        else:
                            measurement = state.entity_id
                    else:
                        # The attribute became the measurement name; skip the
                        # unit_of_measurement field if that attribute was used.
                        include_uom = measurement_attr != "unit_of_measurement"
        # Note: `json` here is a plain dict (the stdlib json module is not
        # imported in this file).
        json = {
            INFLUX_CONF_MEASUREMENT: measurement,
            INFLUX_CONF_TAGS: {
                CONF_DOMAIN: state.domain,
                CONF_ENTITY_ID: state.object_id,
            },
            INFLUX_CONF_TIME: event.time_fired,
            INFLUX_CONF_FIELDS: {},
        }
        if _include_state:
            json[INFLUX_CONF_FIELDS][INFLUX_CONF_STATE] = state.state
        if _include_value:
            json[INFLUX_CONF_FIELDS][INFLUX_CONF_VALUE] = _state_as_value
        # Per-entity ignores extend (never replace) the global ignore set.
        ignore_attributes = set(entity_config.get(CONF_IGNORE_ATTRIBUTES, []))
        ignore_attributes.update(global_ignore_attributes)
        for key, value in state.attributes.items():
            if key in tags_attributes:
                json[INFLUX_CONF_TAGS][key] = value
            elif (
                (key != CONF_UNIT_OF_MEASUREMENT or include_uom)
                and (key != "device_class" or include_dc)
                and key not in ignore_attributes
            ):
                # If the key is already in fields
                if key in json[INFLUX_CONF_FIELDS]:
                    key = f"{key}_"
                # Prevent column data errors in influxDB.
                # For each value we try to cast it as float
                # But if we can not do it we store the value
                # as string add "_str" postfix to the field key
                try:
                    json[INFLUX_CONF_FIELDS][key] = float(value)
                except (ValueError, TypeError):
                    new_key = f"{key}_str"
                    new_value = str(value)
                    json[INFLUX_CONF_FIELDS][new_key] = new_value
                    if RE_DIGIT_TAIL.match(new_value):
                        json[INFLUX_CONF_FIELDS][key] = float(
                            RE_DECIMAL.sub("", new_value)
                        )
                # Infinity and NaN are not valid floats in InfluxDB
                with suppress(KeyError, TypeError):
                    if not math.isfinite(json[INFLUX_CONF_FIELDS][key]):
                        del json[INFLUX_CONF_FIELDS][key]
        json[INFLUX_CONF_TAGS].update(tags)
        return json
    return event_to_json
@dataclass
class InfluxClient:
    """An InfluxDB client wrapper for V1 or V2.

    Bundles the version-specific callables built in get_influx_connection()
    behind one uniform interface so callers don't care which API is in use.
    """

    # Databases (V1) or buckets (V2) discovered during the connection test.
    data_repositories: list[str]
    # Writes a batch of points; raises ConnectionError / ValueError on failure.
    write: Callable[[str], None]
    # Runs a query; the second argument is the database name (used by V1 only).
    query: Callable[[str, str], list[Any]]
    # Releases the underlying client's resources.
    close: Callable[[], None]
def get_influx_connection(conf, test_write=False, test_read=False):  # noqa: C901
    """Create the correct influx connection for the API version.

    Returns an InfluxClient wrapping either the V1 or the V2 client library.
    When test_write/test_read is set, the connection is exercised once so a
    broken configuration surfaces immediately (raising ConnectionError).
    """
    kwargs = {
        CONF_TIMEOUT: TIMEOUT,
    }
    precision = conf.get(CONF_PRECISION)

    if conf[CONF_API_VERSION] == API_VERSION_2:
        kwargs[CONF_URL] = conf[CONF_URL]
        kwargs[CONF_TOKEN] = conf[CONF_TOKEN]
        kwargs[INFLUX_CONF_ORG] = conf[CONF_ORG]
        kwargs[CONF_VERIFY_SSL] = conf[CONF_VERIFY_SSL]
        if CONF_SSL_CA_CERT in conf:
            kwargs[CONF_SSL_CA_CERT] = conf[CONF_SSL_CA_CERT]
        bucket = conf.get(CONF_BUCKET)
        influx = InfluxDBClientV2(**kwargs)
        query_api = influx.query_api()
        # Use synchronous writes while testing so failures raise in place.
        initial_write_mode = SYNCHRONOUS if test_write else ASYNCHRONOUS
        write_api = influx.write_api(write_options=initial_write_mode)

        def write_v2(json):
            """Write data to V2 influx."""
            data = {"bucket": bucket, "record": json}
            if precision is not None:
                data["write_precision"] = precision
            try:
                write_api.write(**data)
            except (urllib3.exceptions.HTTPError, OSError) as exc:
                raise ConnectionError(CONNECTION_ERROR % exc) from exc
            except ApiException as exc:
                # Invalid input is a data/config problem, not connectivity.
                if exc.status == CODE_INVALID_INPUTS:
                    raise ValueError(WRITE_ERROR % (json, exc)) from exc
                raise ConnectionError(CLIENT_ERROR_V2 % exc) from exc

        def query_v2(query, _=None):
            """Query V2 influx (second arg accepted for V1 signature parity)."""
            try:
                return query_api.query(query)
            except (urllib3.exceptions.HTTPError, OSError) as exc:
                raise ConnectionError(CONNECTION_ERROR % exc) from exc
            except ApiException as exc:
                if exc.status == CODE_INVALID_INPUTS:
                    raise ValueError(QUERY_ERROR % (query, exc)) from exc
                raise ConnectionError(CLIENT_ERROR_V2 % exc) from exc

        def close_v2():
            """Close V2 influx client."""
            influx.close()

        buckets = []
        if test_write:
            # Try to write b"" to influx. If we can connect and creds are valid
            # Then invalid inputs is returned. Anything else is a broken config
            with suppress(ValueError):
                write_v2(b"")
            # Switch to asynchronous writes for normal operation; write_v2
            # closes over `write_api`, so rebinding here updates it.
            write_api = influx.write_api(write_options=ASYNCHRONOUS)
        if test_read:
            tables = query_v2(TEST_QUERY_V2)
            if tables and tables[0].records:
                buckets = [bucket.values["name"] for bucket in tables[0].records]
            else:
                buckets = []
        return InfluxClient(buckets, write_v2, query_v2, close_v2)

    # Else it's a V1 client
    # When verification is on and a CA bundle is configured, pass the bundle
    # path as the verify_ssl value; otherwise pass the boolean through.
    if CONF_SSL_CA_CERT in conf and conf[CONF_VERIFY_SSL]:
        kwargs[CONF_VERIFY_SSL] = conf[CONF_SSL_CA_CERT]
    else:
        kwargs[CONF_VERIFY_SSL] = conf[CONF_VERIFY_SSL]
    if CONF_DB_NAME in conf:
        kwargs[CONF_DB_NAME] = conf[CONF_DB_NAME]
    if CONF_USERNAME in conf:
        kwargs[CONF_USERNAME] = conf[CONF_USERNAME]
    if CONF_PASSWORD in conf:
        kwargs[CONF_PASSWORD] = conf[CONF_PASSWORD]
    if CONF_HOST in conf:
        kwargs[CONF_HOST] = conf[CONF_HOST]
    if CONF_PATH in conf:
        kwargs[CONF_PATH] = conf[CONF_PATH]
    if CONF_PORT in conf:
        kwargs[CONF_PORT] = conf[CONF_PORT]
    if CONF_SSL in conf:
        kwargs[CONF_SSL] = conf[CONF_SSL]
    influx = InfluxDBClient(**kwargs)

    def write_v1(json):
        """Write data to V1 influx."""
        try:
            influx.write_points(json, time_precision=precision)
        except (
            requests.exceptions.RequestException,
            exceptions.InfluxDBServerError,
            OSError,
        ) as exc:
            raise ConnectionError(CONNECTION_ERROR % exc) from exc
        except exceptions.InfluxDBClientError as exc:
            if exc.code == CODE_INVALID_INPUTS:
                raise ValueError(WRITE_ERROR % (json, exc)) from exc
            raise ConnectionError(CLIENT_ERROR_V1 % exc) from exc

    def query_v1(query, database=None):
        """Query V1 influx."""
        try:
            return list(influx.query(query, database=database).get_points())
        except (
            requests.exceptions.RequestException,
            exceptions.InfluxDBServerError,
            OSError,
        ) as exc:
            raise ConnectionError(CONNECTION_ERROR % exc) from exc
        except exceptions.InfluxDBClientError as exc:
            if exc.code == CODE_INVALID_INPUTS:
                raise ValueError(QUERY_ERROR % (query, exc)) from exc
            raise ConnectionError(CLIENT_ERROR_V1 % exc) from exc

    def close_v1():
        """Close the V1 Influx client."""
        influx.close()

    databases = []
    if test_write:
        # Writing an empty point list verifies connectivity and credentials.
        write_v1([])
    if test_read:
        databases = [db["name"] for db in query_v1(TEST_QUERY_V1)]
    return InfluxClient(databases, write_v1, query_v1, close_v1)
def setup(hass, config):
    """Set up the InfluxDB component (V1 or V2 API)."""
    conf = config[DOMAIN]

    try:
        influx = get_influx_connection(conf, test_write=True)
    except ConnectionError as exc:
        # Could not reach Influx: log it, schedule another attempt, and
        # report success so Home Assistant does not mark the setup failed.
        _LOGGER.error(RETRY_MESSAGE, exc)
        event_helper.call_later(hass, RETRY_INTERVAL, lambda _: setup(hass, config))
        return True

    json_converter = _generate_event_to_json(conf)
    retry_limit = conf.get(CONF_RETRY_COUNT)
    worker = hass.data[DOMAIN] = InfluxThread(hass, influx, json_converter, retry_limit)
    worker.start()

    def shutdown(event):
        """Shut down the thread."""
        worker.queue.put(None)
        worker.join()
        influx.close()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)

    return True
class InfluxThread(threading.Thread):
    """A threaded event handler class.

    Consumes state-changed events from an internal queue, converts them to
    InfluxDB JSON points and writes them out in batches with retry.
    """

    def __init__(self, hass, influx, event_to_json, max_tries):
        """Initialize the listener."""
        threading.Thread.__init__(self, name=DOMAIN)
        self.queue = queue.Queue()          # (monotonic ts, event) items; None = stop signal
        self.influx = influx                # InfluxClient wrapper
        self.event_to_json = event_to_json  # event -> point dict, or falsy to skip
        self.max_tries = max_tries          # extra write attempts after the first
        self.write_errors = 0               # points lost since the last successful write
        self.shutdown = False
        hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener)

    @callback
    def _event_listener(self, event):
        """Listen for new messages on the bus and queue them for Influx."""
        item = (time.monotonic(), event)
        self.queue.put(item)

    @staticmethod
    def batch_timeout():
        """Return number of seconds to wait for more events."""
        return BATCH_TIMEOUT

    def get_events_json(self):
        """Return a batch of events formatted for writing.

        Blocks for the first item, then keeps gathering until the batch buffer
        is full, the batch timeout expires, or a shutdown sentinel arrives.
        Events older than the backlog window are dropped.  Returns a tuple of
        (number of queue items consumed, list of JSON points).
        """
        queue_seconds = QUEUE_BACKLOG_SECONDS + self.max_tries * RETRY_DELAY

        count = 0
        json = []

        dropped = 0

        with suppress(queue.Empty):
            while len(json) < BATCH_BUFFER_SIZE and not self.shutdown:
                # Wait indefinitely for the first item, then only briefly
                # so a partially-filled batch is still flushed promptly.
                timeout = None if count == 0 else self.batch_timeout()
                item = self.queue.get(timeout=timeout)
                count += 1

                if item is None:
                    self.shutdown = True
                else:
                    timestamp, event = item
                    age = time.monotonic() - timestamp

                    if age < queue_seconds:
                        event_json = self.event_to_json(event)
                        if event_json:
                            json.append(event_json)
                    else:
                        dropped += 1

        if dropped:
            _LOGGER.warning(CATCHING_UP_MESSAGE, dropped)

        return count, json

    def write_to_influxdb(self, json):
        """Write preprocessed events to influxdb, with retry."""
        for retry in range(self.max_tries + 1):
            try:
                self.influx.write(json)

                if self.write_errors:
                    _LOGGER.error(RESUMED_MESSAGE, self.write_errors)
                    self.write_errors = 0

                _LOGGER.debug(WROTE_MESSAGE, len(json))
                break
            except ValueError as err:
                # Bad data: retrying cannot help, drop the batch.
                _LOGGER.error(err)
                break
            except ConnectionError as err:
                if retry < self.max_tries:
                    time.sleep(RETRY_DELAY)
                else:
                    # Log only on the first failure to avoid log spam.
                    if not self.write_errors:
                        _LOGGER.error(err)
                    self.write_errors += len(json)

    def run(self):
        """Process incoming events."""
        while not self.shutdown:
            count, json = self.get_events_json()
            if json:
                self.write_to_influxdb(json)
            for _ in range(count):
                self.queue.task_done()

    def block_till_done(self):
        """Block till all events processed."""
        self.queue.join()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## Project: Simple4All - October 2014 - www.simple4all.org
## Contact: Oliver Watts - owatts@staffmail.ed.ac.uk
## This is a lightweight replacement for the script previously with this name and now
## called train_static_vsm_gensim.py. It is more easily broken by large corpora, but is
## fine for several millions of words and settings of w and rank in the range we generally
## use.
##
## Differences include:
## -- sklearn's or numpy's SVD used instead of Gensim's incremental SVD
## -- only unseen method called B (absolute count threshold) supported
## -- removed usewords option -- always use the whole corpus
## -- read_text_corpus rewritten as generator read_text_corpus_to_triplets
## -- handle lines independently with padding_token
## -- option to normalise left and right count vectors
from __future__ import print_function
import sys
import os
import codecs
from argparse import ArgumentParser
import numpy
from numpy.linalg import svd as numpy_svd
from sklearn.decomposition import TruncatedSVD
## A couple of constants:
## NOTE: the old module-scope ``global`` statement was a no-op (names bound at
## module level are global by definition) and has been removed.
padding_token = u'_END_'
unseen_token = u'_UNSEEN_'
mean_token = u'_MEAN_'
def main_work():
    """Parse command-line options and train the static VSM."""
    # ======== Get stuff from command line ==========
    a = ArgumentParser()
    a.add_argument('-corpus', required=True, help="UTF-8 text file with space-delimited tokens")
    a.add_argument('-output', required=True, help="A text file of output features")
    # BUGFIX: removed the leftover default="train" -- a string default on an
    # int-typed, required option could never be used and was misleading.
    a.add_argument('-w', required=True, type=int,
                   help="Number of feature words to use as context")
    a.add_argument('-rank', required=True, type=int,
                   help="Number of dimensions of transformed space")
    a.add_argument('-unseen', required=True, type=int,
                   help="Tokens with count <= unseen are used to build an _UNSEEN_ model")
    a.add_argument('-svd', default='randomized',
                   help="SVD algorithm to use: randomized, arpack or exact")
    a.add_argument('-norm_counts', default=False, action='store_true',
                   help="Normalise left and right cooc vectors")
    a.add_argument('-no_mean', dest='append_mean_vector', default=True, action='store_false',
                   help="Suppress appending of mean vector")
    opts = a.parse_args()

    train_static_vsm(opts.corpus, opts.output, opts.w, opts.rank, opts.unseen, opts.svd,
                     opts.norm_counts, append_mean_vector=opts.append_mean_vector)
    # ================================================
def read_text_corpus_to_triplets(fname, nwords=float('inf')):
    """Yield (left, word, right) triplets from a UTF-8 corpus file.

    Each line is handled independently: its first/last tokens are padded
    with padding_token so context never crosses line boundaries.

    fname:  path to a UTF-8 file of space-delimited tokens, one line per unit.
    nwords: stop after yielding this many triplets (previously this parameter
            was accepted but silently ignored; default keeps old behaviour).
    """
    yielded = 0
    with codecs.open(fname, 'r', encoding='utf-8') as f:
        for line in f:
            tokens = line.strip("\n ").split(" ")
            for triplet in zip([padding_token] + tokens[:-1],
                               tokens,
                               tokens[1:] + [padding_token]):
                if yielded >= nwords:
                    return
                yield triplet
                yielded += 1
def train_static_vsm(textfile_in, stored_model, w, rank, unseen_threshold, svd_type,
                     norm_counts, append_mean_vector=True):
    """Train a static vector-space model and write it as a word-feature table.

    textfile_in:        UTF-8 corpus, one space-delimited line per unit.
    stored_model:       output path ('.table' suffix is appended).
    w:                  number of feature (context) words.
    rank:               dimensionality of the reduced space.
    unseen_threshold:   tokens with count <= threshold are pooled as _UNSEEN_.
    svd_type:           'randomized' / 'arpack' (TruncatedSVD) or 'exact' (numpy).
    norm_counts:        normalise left/right co-occurrence blocks row-wise.
    append_mean_vector: append a _MEAN_ row holding the column means.
    """
    assert unseen_threshold > 0, 'Unseen method A no longer supported'

    ## ------------------------------------------------
    ## 0) CHECK OUTPUT LOCATION
    ## ------------------------------------------------
    ## Check we will be able to make output where specified:
    working, tail = os.path.split(stored_model)
    if not os.path.isdir(working):
        sys.exit('Path %s does not exist' % (working))

    ## ------------------------------------------------
    ## 1) FIRST PASS -- COUNTING
    ## ------------------------------------------------
    print('Count types...')
    ## get target words, sorted by descending frequency
    ## count wordtypes:
    counts = {}
    for (left_neighbour, word, right_neighbour) in read_text_corpus_to_triplets(textfile_in):
        if word not in counts:
            counts[word] = 0
        counts[word] += 1
    ## sort by freq:
    count_list = [(count, word) for (word, count) in counts.items()]
    count_list.sort()
    count_list.reverse()
    ## get the first w words we'll count coocurrances for ('feature words' in Biemann);
    ## two slots are reserved for the padding and unseen pseudo-tokens:
    if len(count_list) < w:
        w = len(count_list)
    feature_words = [word for (count, word) in count_list[:w - 2]] + [padding_token, unseen_token]
    target_words = [unseen_token] + [word for (count, word) in count_list if count > unseen_threshold]
    ## Always count cooc with padding_token, but never use it as target word:
    counts[padding_token] = unseen_threshold + 1

    ## ------------------------------------------------
    ## 2) SECOND PASS -- MAKE COOCC. MATRIX C
    ## ------------------------------------------------
    print('Assemble cooccurance matrix...')
    n = len(feature_words)
    m = len(target_words)
    ## Row per target word; left-context counts in columns [:n],
    ## right-context counts in columns [n:].
    C = numpy.zeros((m, n * 2))

    ## make maps from surface forms of token to indexes:
    left_feature_words_dict = dict(zip(feature_words, range(len(feature_words))))
    right_feature_words_dict = dict(zip(feature_words,
                                        [val + len(feature_words)
                                         for val in range(len(feature_words))]))
    target_words_dict = dict(zip(target_words, range(len(target_words))))

    for (left, centre, right) in read_text_corpus_to_triplets(textfile_in):
        ## Handle unseen token rewriting:
        if counts[centre] <= unseen_threshold:
            centre = unseen_token
        if counts[left] <= unseen_threshold:
            left = unseen_token
        if counts[right] <= unseen_threshold:
            right = unseen_token
        if left in left_feature_words_dict:
            C[target_words_dict[centre], left_feature_words_dict[left]] += 1
        if right in right_feature_words_dict:
            C[target_words_dict[centre], right_feature_words_dict[right]] += 1

    if norm_counts:
        ## threshold to avoid divide by zero errors (-> NaNs):
        left_sums = numpy.sum(C[:, :n], axis=1).reshape(m, 1)
        left_sums[left_sums < 1.0] = 1.0  ## use mask to threshold
        C[:, :n] /= left_sums
        right_sums = numpy.sum(C[:, n:], axis=1).reshape(m, 1)
        right_sums[right_sums < 1.0] = 1.0  ## use mask to threshold
        C[:, n:] /= right_sums

    ## ------------------------------------------------
    ## 3) FACTORISATION
    ## ------------------------------------------------
    print('Factorise cooccurance matrix...')
    if svd_type == 'exact':
        U, D, V = numpy_svd(C, full_matrices=False)
        D = numpy.diag(D[:rank])
        transformed_C = numpy.dot(U[:, :rank], D)
    elif svd_type in ['randomized', 'arpack']:
        ## BUGFIX: 'randomized' was previously hard-coded here, so requesting
        ## 'arpack' silently ran the randomized solver; pass the choice through.
        svd = TruncatedSVD(n_components=rank, algorithm=svd_type, random_state=999)
        svd.fit(C)
        transformed_C = svd.transform(C)
    else:
        sys.exit('Unknown SVD type: %s' % (svd_type))

    if append_mean_vector:
        mean_vec = numpy.mean(transformed_C, axis=0)
        transformed_C = numpy.vstack([transformed_C, mean_vec])
        target_words = target_words + [mean_token]

    ## ------------------------------------------------
    ## 4) WRITE OUTPUT:
    ## ------------------------------------------------
    stored_model += '.table'  ## TODO: historical -- clean this up.
    print('Write output to %s' % (stored_model))
    ## BUGFIX: write explicitly as UTF-8 (the corpus is read as UTF-8);
    ## codecs.open() without an encoding falls back to the locale default,
    ## which can fail on non-ASCII lemmas.
    with codecs.open(stored_model, 'w', encoding='utf-8') as f:
        for (lemma, feats) in zip(target_words, transformed_C):
            line = ' '.join([lemma] + [str(val) for val in feats]) + '\n'
            f.write(line)
if __name__=="__main__":
    # Entry point when run as a script.
    main_work()
|
#pragma once
#include "easlib.h"
#include "EASEmain.h"
#include "EASEents.h"
#include "EASElex.h"
#include "Synrule.h"
#include "Dict.h"
#include "EASEpars.h"
#include "EASEgen.h"
#define _SCRIPTELEM
/* One element of a script list.  Links a dictionary entry with the numeric
 * code/action/form/prefix fields consumed by the script routines declared
 * below (SCRIPT(), SCRIPTLOOP(), ...).
 * NOTE(review): individual field semantics are inferred from names only --
 * confirm against the implementation. */
struct SCRIPTELEM {
    struct eInstHdr hdr;                  /* common instance header */
    struct DICTENTRY* DICTREF;            /* referenced dictionary entry */
    int SCRIPTCODE;
    int SCRIPTACTION;
    int TKNFORM;
    int SCRIPTPREFIX;
    struct SCRIPTELEM* S_SCRIPTELEMS;     /* chained elements -- TODO confirm link direction */
};
#define Z_SCRIPTELEM sizeof(SCRIPTELEM)
void Script_sets();
void d_SCRIPTELEM(struct SCRIPTELEM*& EPTR);
struct SCRIPTELEM* c_SCRIPTELEM();
void SCRIPTREAD(struct STMT* ASTMT);
void SCRIPTWRITE(struct STMT* ASTMT);
void STMTSCRIPT(struct STMT* ASTMT);
void SCRIPTLOOP();
Etxt* SCRIPTLBL(Etxt* PREFIX, int NUM);
void SETNODEPTRS(struct NODE* ANODE);
void SCRIPT(int SCRIPTNO);
void READSCRIPT(int SCRIPTNO);
void READSCRIPTS(Etxt* FILENAME);
/* Global state of the script subsystem (a single instance is exposed as
 * `Script` below).  NOTE(review): field roles inferred from names only --
 * confirm against the implementation. */
struct Script_sys {
    struct eInstHdr hdr;                  /* common instance header */
    struct SCRIPTELEM* SCRIPTELEM;        /* current script element */
    struct SCRIPTELEM** SCRIPTS;          /* loaded scripts, indexed by number? TODO confirm */
    struct NODE** NODEPTRS;
    Etxt** STRPTRS;
    int* IPTRS;
    int LBL;
    int LV;
    int SCRIPTLEVEL;
    int CURRLOOPDEPTH;
    struct STMT* LASTCTLSTMT;
    struct STMT* LASTDOSTMT;
};
extern Script_sys* Script;
void Script_init();
|
# PyPoll -- tally election votes from a CSV and write a summary analysis.
#
# BUGFIX: removed unresolved git merge-conflict markers (<<<<<<< / ======= /
# >>>>>>>) that made this file a SyntaxError, fixed the doubled '%' in the
# percentage output, and replaced the substring test `row[2] in candidate`
# with an exact name comparison.
import os
import csv

# Resources live next to this script.
os.chdir('./PyPoll/Resources')

# Read all data rows (the header line is skipped).
poll_rows = []
with open('election_data.csv', encoding="ISO 8859-1") as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=',')
    next(csv_reader)  # skip header
    for row in csv_reader:
        poll_rows.append(row)

totalVotes = len(poll_rows)
all_votes = [row[2] for row in poll_rows]  # candidate name is column 3


def unique_function(names):
    """Return the distinct entries of `names`, preserving first-seen order."""
    unique_list = []
    for name in names:
        if name not in unique_list:
            unique_list.append(name)
    return unique_list


candidateList = unique_function(all_votes)

# Per-candidate tallies and percentages.
votesList = []
for candidate in candidateList:
    candidate_ctr = sum(1 for name in all_votes if name == candidate)
    percent = (candidate_ctr / totalVotes) * 100
    votesList.append({'Name': candidate,
                      'Votes': candidate_ctr,
                      'Percent': '{:.3f}%'.format(percent)})

# Highest vote count first; the winner ends up at index 0.
votesList.sort(reverse=True, key=lambda entry: entry['Votes'])

# ANALYSIS -- build the report for however many candidates there are
# (previously exactly four were hard-coded).
lines = ['Election Results\n',
         '---------------------------- \n',
         'Total Votes: ' + str(totalVotes) + '\n',
         '---------------------------- \n']
for entry in votesList:
    lines.append('{}: {} ({})\n'.format(entry['Name'], entry['Percent'], entry['Votes']))
lines.append('---------------------------- \n')
lines.append('Winner: ' + votesList[0]['Name'] + '\n')
lines.append('---------------------------- \n')

with open('../Analysis/Analysis2.txt', 'w') as out_file:
    out_file.writelines(lines)

# Echo the report to stdout (file handle closed properly this time).
with open('../Analysis/Analysis2.txt') as out_file:
    print(out_file.read())
# Backwards-compatibility shim: re-export everything from the module's new
# location and warn anyone importing the old path.
from artemis.experiments.experiment_record import *
import logging
logging.getLogger(__name__).warning('The module artemis.fileman.experiment_record is deprecated and will eventually be removed. Import artemis.experiments.experiment_record instead.')
|
/* eslint: off */
const fs = require('fs');
const parse = require('css').parse;

// Matches the flag sprite selectors of interest, e.g. `.flag.flag-us`.
const re = /\.flag.flag-/;
const IN_FILE = './flags.css';
const OUT_FILE = './flags.json';
// Keep only stylesheet nodes that are rules whose first selector is a
// flag sprite selector (`.flag.flag-XX`).
function filterRules(flag) {
  return (flag.type && flag.type === 'rule')
    && (flag.selectors && re.test(flag.selectors[0]));
}
// True-ish only for `background-position` declarations carrying a value.
// (When it matches, the result is the value string itself -- truthy is all
// Array#filter needs.)
function filterBackgroundPostion(declaration) {
  const { type, property, value } = declaration;
  return (type && type === 'declaration')
    && (property && property === 'background-position')
    && value;
}
// Map a matched rule to { code, x, y }: the country code comes from the
// selector, the x/y sprite offsets from its background-position declaration.
function mapCountryAndValues(d) {
  const code = d.selectors[0].replace(re, '');
  const offsetPairs = d.declarations
    .filter(filterBackgroundPostion)
    .map(decl => decl.value.split(' '));
  const [x, y] = [].concat(...offsetPairs);
  return { code, x, y };
}
// Write `str` to OUT_FILE and invoke the node-style callback when done.
function toFile(str, cb) {
  fs.writeFile(OUT_FILE, str, 'utf8', cb);
}
// Report a truthy error to stderr; passes falsy values straight through.
function onErr(err) {
  if (err) {
    return process.stderr.write(err);
  }
  return err;
}
// Read the sprite stylesheet, extract flag offsets and write them as JSON.
fs.readFile('./flags.css', 'utf8', (err, file) => {
  if (err) return onErr(err);
  const css = parse(file);
  const flags = css.stylesheet.rules.filter(filterRules).map(mapCountryAndValues);
  return toFile(JSON.stringify(flags), (e) => {
    // BUGFIX: previously this tested the outer `err` (always falsy here)
    // instead of the write callback's own error `e`, so write failures
    // were silently ignored.
    if (e) return onErr(e);
    return process.stdout.write(`converted ${IN_FILE}, to ${OUT_FILE}.`);
  });
});
|
/*******************************************************************************
*
* Copyright (c) 2013 - 2016, Freescale Semiconductor, Inc.
* Copyright 2016-2017 NXP
*
* SPDX-License-Identifier: BSD-3-Clause
*
*
****************************************************************************//*!
*
* @brief Parallel beta IPD controller with and without anti-wind-up protection
*
*******************************************************************************/
#ifndef _GFLIB_CTRLBETAIPDP_A32_H_
#define _GFLIB_CTRLBETAIPDP_A32_H_
#if defined(__cplusplus)
extern "C" {
#endif
/*******************************************************************************
* Includes
*******************************************************************************/
#include "mlib.h"
#include "gflib_types.h"
/*******************************************************************************
* Macros
*******************************************************************************/
#define GFLIB_CtrlBetaIPDpAW_F16_C(f16InReq, f16In, f16InErrD, pbStopIntegFlag, psParam) \
GFLIB_CtrlBetaIPDpAW_F16_FC(f16InReq, f16In, f16InErrD, pbStopIntegFlag, psParam)
#define GFLIB_CtrlBetaIPDpAWInit_F16_Ci(f16InitVal, psParam) \
GFLIB_CtrlBetaIPDpAWInit_F16_FCi(f16InitVal, psParam)
/*******************************************************************************
* Types
*******************************************************************************/
/* Gains, state and output limits of the parallel beta-IPD controller with
 * anti-wind-up (operated by GFLIB_CtrlBetaIPDpAW_F16_FC below). */
typedef struct
{
    acc32_t a32PGain;    /* Proportional Gain */
    acc32_t a32IGain;    /* Integral Gain */
    acc32_t a32DGain;    /* Derivative Gain */
    frac32_t f32IAccK_1; /* State variable output at step k-1 */
    frac16_t f16InErrK_1; /* State variable input error at step k-1 */
    frac16_t f16UpperLim; /* Upper Limit of the controller */
    frac16_t f16LowerLim; /* Lower Limit of the controller */
    frac16_t f16InErrDK_1; /* State variable input error at step k-1 */
    frac16_t f16BetaGain; /* Beta Gain for restricting the overshot */
    bool_t bLimFlag;      /* Limitation flag, if set to 1, the controller
                             output has reached either the UpperLimit or LowerLimit */
} GFLIB_CTRL_BETA_IPD_P_AW_T_A32;
/*******************************************************************************
* Exported function prototypes
*******************************************************************************/
extern frac16_t GFLIB_CtrlBetaIPDpAW_F16_FC(frac16_t f16InReq, frac16_t f16In, frac16_t f16InErrD,
const bool_t *pbStopIntegFlag,
GFLIB_CTRL_BETA_IPD_P_AW_T_A32 *psParam);
/***************************************************************************//*!
* @brief The function initializes the actual values of CtrlBetaIPDpAWInit controller.
*
* @params in frac16_t f16InitVal - Initial state value for Integrating part
* ptr GFLIB_CTRL_BETA_IPD_P_AW_T_A32 *psParam - Pointer to controller structure
*
* @return N/A
*
*******************************************************************************/
static inline void GFLIB_CtrlBetaIPDpAWInit_F16_FCi(frac16_t f16InitVal,
                                                    GFLIB_CTRL_BETA_IPD_P_AW_T_A32 *psParam)
{
    /* Widen the 16-bit fractional initial value into the upper half of the
     * 32-bit integrator accumulator.  BUGFIX: this previously used
     * `f16InitVal << 16`, which is undefined behaviour in C when the value
     * is negative (left shift of a negative signed integer); multiplying by
     * 65536 yields the identical result for the full frac16 range and is
     * well defined. */
    psParam->f32IAccK_1 = (frac32_t)f16InitVal * (frac32_t)65536;
    psParam->f16InErrK_1 = (frac16_t)0;
    psParam->f16InErrDK_1 = (frac16_t)0;
}
#if defined(__cplusplus)
}
#endif
#endif /* _GFLIB_CTRLBETAIPDP_A32_H_ */
|
/* Copyright Joyent, Inc. and other Node contributors. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
/* Caveat emptor: this file deviates from the libuv convention of returning
* negated errno codes. Most uv_fs_*() functions map directly to the system
* call of the same name. For more complex wrappers, it's easier to just
* return -1 with errno set. The dispatcher in uv__fs_work() takes care of
* getting the errno to the right place (req->result or as the return value.)
*/
#include "uv.h"
#include "internal.h"
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <pthread.h>
#include <unistd.h>
#include <fcntl.h>
#include <utime.h>
#include <poll.h>
#if defined(__DragonFly__) || \
defined(__FreeBSD__) || \
defined(__OpenBSD__) || \
defined(__NetBSD__)
# define HAVE_PREADV 1
#elif defined(__linux__)
# include <linux/version.h>
# if defined(__GLIBC_PREREQ)
# if LINUX_VERSION_CODE >= KERNEL_VERSION(2,6,30) && \
__GLIBC_PREREQ(2,10)
# define HAVE_PREADV 1
# else
# define HAVE_PREADV 0
# endif
# else
# define HAVE_PREADV 0
# endif
#else
# define HAVE_PREADV 0
#endif
#if defined(__linux__) || defined(__sun)
# include <sys/sendfile.h>
#elif defined(__APPLE__) || defined(__FreeBSD__)
# include <sys/socket.h>
#endif
#if HAVE_PREADV || defined(__APPLE__)
# include <sys/uio.h>
#endif
/* Initialize a uv_fs_t request for operation UV_FS_<type>; `loop`, `req`
 * and `cb` are expected in the caller's scope. */
#define INIT(type)                                                            \
  do {                                                                        \
    uv__req_init((loop), (req), UV_FS);                                       \
    (req)->fs_type = UV_FS_ ## type;                                          \
    (req)->result = 0;                                                        \
    (req)->ptr = NULL;                                                        \
    (req)->loop = loop;                                                       \
    (req)->path = NULL;                                                       \
    (req)->new_path = NULL;                                                   \
    (req)->cb = (cb);                                                         \
  }                                                                           \
  while (0)

/* Duplicate `path` into the request; fails the call with -ENOMEM on OOM. */
#define PATH                                                                  \
  do {                                                                        \
    (req)->path = strdup(path);                                               \
    if ((req)->path == NULL)                                                  \
      return -ENOMEM;                                                         \
  }                                                                           \
  while (0)

/* Copy `path` and `new_path` into a single allocation; new_path points
 * into the same buffer, so only req->path needs freeing later. */
#define PATH2                                                                 \
  do {                                                                        \
    size_t path_len;                                                          \
    size_t new_path_len;                                                      \
    path_len = strlen((path)) + 1;                                            \
    new_path_len = strlen((new_path)) + 1;                                    \
    (req)->path = malloc(path_len + new_path_len);                            \
    if ((req)->path == NULL)                                                  \
      return -ENOMEM;                                                         \
    (req)->new_path = (req)->path + path_len;                                 \
    memcpy((void*) (req)->path, (path), path_len);                            \
    memcpy((void*) (req)->new_path, (new_path), new_path_len);                \
  }                                                                           \
  while (0)

/* Dispatch the request: hand it to the thread pool when a callback was
 * given (async), otherwise run it synchronously and return its result. */
#define POST                                                                  \
  do {                                                                        \
    if ((cb) != NULL) {                                                       \
      uv__work_submit((loop), &(req)->work_req, uv__fs_work, uv__fs_done);    \
      return 0;                                                               \
    }                                                                         \
    else {                                                                    \
      uv__fs_work(&(req)->work_req);                                          \
      uv__fs_done(&(req)->work_req, 0);                                       \
      return (req)->result;                                                   \
    }                                                                         \
  }                                                                           \
  while (0)
/* Flush a file's data to disk using the cheapest primitive the platform
 * offers; falls back to a full fsync() where fdatasync() is unavailable. */
static ssize_t uv__fs_fdatasync(uv_fs_t* req) {
#if defined(__linux__) || defined(__sun) || defined(__NetBSD__)
  return fdatasync(req->file);
#elif defined(__APPLE__) && defined(F_FULLFSYNC)
  /* Per Apple's fcntl(2) docs, F_FULLFSYNC asks the drive to flush its
   * cache as well, which plain fsync() does not guarantee on macOS. */
  return fcntl(req->file, F_FULLFSYNC);
#else
  return fsync(req->file);
#endif
}
/* Set a file's access/modification times through its open descriptor
 * (req->file), with per-platform fallbacks. */
static ssize_t uv__fs_futime(uv_fs_t* req) {
#if defined(__linux__)
  /* utimesat() has nanosecond resolution but we stick to microseconds
   * for the sake of consistency with other platforms.
   */
  static int no_utimesat;
  struct timespec ts[2];
  struct timeval tv[2];
  char path[sizeof("/proc/self/fd/") + 3 * sizeof(int)];
  int r;

  if (no_utimesat)
    goto skip;

  ts[0].tv_sec = req->atime;
  ts[0].tv_nsec = (unsigned long)(req->atime * 1000000) % 1000000 * 1000;
  ts[1].tv_sec = req->mtime;
  ts[1].tv_nsec = (unsigned long)(req->mtime * 1000000) % 1000000 * 1000;

  r = uv__utimesat(req->file, NULL, ts, 0);
  if (r == 0)
    return r;

  if (errno != ENOSYS)
    return r;

  /* Kernel lacks the syscall; remember that and use the fallback below. */
  no_utimesat = 1;

skip:
  /* Fallback: utimes() on the /proc/self/fd/ symlink of the descriptor. */
  tv[0].tv_sec = req->atime;
  tv[0].tv_usec = (unsigned long)(req->atime * 1000000) % 1000000;
  tv[1].tv_sec = req->mtime;
  tv[1].tv_usec = (unsigned long)(req->mtime * 1000000) % 1000000;
  snprintf(path, sizeof(path), "/proc/self/fd/%d", (int) req->file);

  r = utimes(path, tv);
  if (r == 0)
    return r;

  switch (errno) {
  case ENOENT:
    /* ENOENT with a bad fd means the descriptor itself is invalid;
     * otherwise /proc is probably not mounted, report ENOSYS. */
    if (fcntl(req->file, F_GETFL) == -1 && errno == EBADF)
      break;
    /* Fall through. */

  case EACCES:
  case ENOTDIR:
    errno = ENOSYS;
    break;
  }

  return r;

#elif defined(__APPLE__)                                                      \
    || defined(__DragonFly__)                                                 \
    || defined(__FreeBSD__)                                                   \
    || defined(__NetBSD__)                                                    \
    || defined(__OpenBSD__)                                                   \
    || defined(__sun)
  struct timeval tv[2];
  tv[0].tv_sec = req->atime;
  tv[0].tv_usec = (unsigned long)(req->atime * 1000000) % 1000000;
  tv[1].tv_sec = req->mtime;
  tv[1].tv_usec = (unsigned long)(req->mtime * 1000000) % 1000000;
# if defined(__sun)
  return futimesat(req->file, NULL, tv);
# else
  return futimes(req->file, tv);
# endif
#else
  errno = ENOSYS;
  return -1;
#endif
}
/* Create a unique temporary directory; mkdtemp() rewrites the template in
 * req->path in place. Returns 0 on success, -1 with errno set on failure. */
static ssize_t uv__fs_mkdtemp(uv_fs_t* req) {
  if (mkdtemp((char*) req->path) == NULL)
    return -1;
  return 0;
}
/* Read into req->bufs, either at the current file position (req->off < 0)
 * or at absolute offset req->off, picking the best primitive available. */
static ssize_t uv__fs_read(uv_fs_t* req) {
  ssize_t result;

#if defined(_AIX)
  /* Reject directory descriptors up front with EISDIR -- presumably AIX's
   * read() would not; TODO confirm. */
  struct stat buf;
  if(fstat(req->file, &buf))
    return -1;
  if(S_ISDIR(buf.st_mode)) {
    errno = EISDIR;
    return -1;
  }
#endif /* defined(_AIX) */

  if (req->off < 0) {
    if (req->nbufs == 1)
      result = read(req->file, req->bufs[0].base, req->bufs[0].len);
    else
      result = readv(req->file, (struct iovec*) req->bufs, req->nbufs);
  } else {
    if (req->nbufs == 1) {
      result = pread(req->file, req->bufs[0].base, req->bufs[0].len, req->off);
      goto done;
    }

#if HAVE_PREADV
    result = preadv(req->file, (struct iovec*) req->bufs, req->nbufs, req->off);
#else
# if defined(__linux__)
    static int no_preadv;
    if (no_preadv)
# endif
    {
      /* Emulate preadv() with a sequence of pread() calls, one per buffer. */
      off_t nread;
      size_t index;

# if defined(__linux__)
    retry:
# endif
      nread = 0;
      index = 0;
      result = 1;
      do {
        if (req->bufs[index].len > 0) {
          result = pread(req->file,
                         req->bufs[index].base,
                         req->bufs[index].len,
                         req->off + nread);
          if (result > 0)
            nread += result;
        }
        index++;
      } while (index < req->nbufs && result > 0);
      if (nread > 0)
        result = nread;
    }
# if defined(__linux__)
    else {
      result = uv__preadv(req->file,
                          (struct iovec*)req->bufs,
                          req->nbufs,
                          req->off);
      if (result == -1 && errno == ENOSYS) {
        /* Kernel has no preadv(); remember that and use the emulation. */
        no_preadv = 1;
        goto retry;
      }
    }
# endif
#endif
  }

done:
  /* Free the buffer list if it was heap-allocated (grew past bufsml). */
  if (req->bufs != req->bufsml)
    free(req->bufs);

  return result;
}
/* scandir() filter that drops the "." and ".." entries.  Some platforms
 * declare the filter's argument without const, hence the dual prototype. */
#if defined(__OpenBSD__) || (defined(__APPLE__) && !defined(MAC_OS_X_VERSION_10_8))
static int uv__fs_scandir_filter(uv__dirent_t* dent) {
#else
static int uv__fs_scandir_filter(const uv__dirent_t* dent) {
#endif
  return strcmp(dent->d_name, ".") != 0 && strcmp(dent->d_name, "..") != 0;
}
/* List the directory req->path with scandir(); on success the entry array
 * is stored in req->ptr and the entry count is returned.  Returns -1 with
 * errno set on failure. */
static ssize_t uv__fs_scandir(uv_fs_t* req) {
  uv__dirent_t **dents;
  int saved_errno;
  int n;

  dents = NULL;
  n = scandir(req->path, &dents, uv__fs_scandir_filter, alphasort);

  /* NOTE: We will use nbufs as an index field */
  req->nbufs = 0;

  if (n == 0)
    goto out; /* osx still needs to deallocate some memory */
  else if (n == -1)
    return n;

  req->ptr = dents;

  return n;

out:
  /* Free whatever scandir() may have allocated despite finding nothing,
   * preserving errno across the free() calls. */
  saved_errno = errno;
  if (dents != NULL) {
    int i;

    for (i = 0; i < n; i++)
      free(dents[i]);
    free(dents);
  }
  errno = saved_errno;

  req->ptr = NULL;

  return n;
}
/* Read the target of the symlink req->path into a freshly malloc()ed,
 * NUL-terminated buffer stored in req->ptr.  Returns 0 or -1. */
static ssize_t uv__fs_readlink(uv_fs_t* req) {
  ssize_t len;
  char* buf;

  len = pathconf(req->path, _PC_PATH_MAX);

  if (len == -1) {
    /* No per-path limit available; fall back to a fixed size.
     * NOTE(review): readlink() silently truncates a longer target and that
     * case is not detected here -- confirm whether callers care. */
#if defined(PATH_MAX)
    len = PATH_MAX;
#else
    len = 4096;
#endif
  }

  buf = malloc(len + 1);

  if (buf == NULL) {
    errno = ENOMEM;
    return -1;
  }

  len = readlink(req->path, buf, len);

  if (len == -1) {
    free(buf);
    return -1;
  }

  buf[len] = '\0';
  req->ptr = buf;

  return 0;
}
/* Portable sendfile() emulation: copy up to `len` bytes from in_fd to
 * out_fd through a stack buffer, preferring positional reads.  Returns the
 * number of bytes sent (advancing req->off) or -1. */
static ssize_t uv__fs_sendfile_emul(uv_fs_t* req) {
  struct pollfd pfd;
  int use_pread;
  off_t offset;
  ssize_t nsent;
  ssize_t nread;
  ssize_t nwritten;
  size_t buflen;
  size_t len;
  ssize_t n;
  int in_fd;
  int out_fd;
  char buf[8192];

  len = req->bufsml[0].len;
  in_fd = req->flags;
  out_fd = req->file;
  offset = req->off;
  use_pread = 1;

  /* Here are the rules regarding errors:
   *
   * 1. Read errors are reported only if nsent==0, otherwise we return nsent.
   *    The user needs to know that some data has already been sent, to stop
   *    them from sending it twice.
   *
   * 2. Write errors are always reported. Write errors are bad because they
   *    mean data loss: we've read data but now we can't write it out.
   *
   * We try to use pread() and fall back to regular read() if the source fd
   * doesn't support positional reads, for example when it's a pipe fd.
   *
   * If we get EAGAIN when writing to the target fd, we poll() on it until
   * it becomes writable again.
   *
   * FIXME: If we get a write error when use_pread==1, it should be safe to
   *        return the number of sent bytes instead of an error because pread()
   *        is, in theory, idempotent. However, special files in /dev or /proc
   *        may support pread() but not necessarily return the same data on
   *        successive reads.
   *
   * FIXME: There is no way now to signal that we managed to send *some* data
   *        before a write error.
   */
  for (nsent = 0; (size_t) nsent < len; ) {
    buflen = len - nsent;

    if (buflen > sizeof(buf))
      buflen = sizeof(buf);

    do
      if (use_pread)
        nread = pread(in_fd, buf, buflen, offset);
      else
        nread = read(in_fd, buf, buflen);
    while (nread == -1 && errno == EINTR);

    if (nread == 0)
      goto out;

    if (nread == -1) {
      /* Source doesn't support positional reads; switch to plain read(). */
      if (use_pread && nsent == 0 && (errno == EIO || errno == ESPIPE)) {
        use_pread = 0;
        continue;
      }

      if (nsent == 0)
        nsent = -1;

      goto out;
    }

    /* Write out the chunk fully, polling through EAGAIN. */
    for (nwritten = 0; nwritten < nread; ) {
      do
        n = write(out_fd, buf + nwritten, nread - nwritten);
      while (n == -1 && errno == EINTR);

      if (n != -1) {
        nwritten += n;
        continue;
      }

      if (errno != EAGAIN && errno != EWOULDBLOCK) {
        nsent = -1;
        goto out;
      }

      pfd.fd = out_fd;
      pfd.events = POLLOUT;
      pfd.revents = 0;

      do
        n = poll(&pfd, 1, -1);
      while (n == -1 && errno == EINTR);

      if (n == -1 || (pfd.revents & ~POLLOUT) != 0) {
        errno = EIO;
        nsent = -1;
        goto out;
      }
    }

    offset += nread;
    nsent += nread;
  }

out:
  if (nsent != -1)
    req->off = offset;

  return nsent;
}
/* Worker for UV_FS_SENDFILE: copy req->bufsml[0].len bytes from the source
 * fd (stashed in req->flags) to the destination fd (req->file), starting at
 * offset req->off.  Uses the native sendfile(2) where the platform provides
 * one and falls back to the read()/write() emulation when the descriptor
 * pair is not supported.  Returns bytes sent, or -1 with errno set.
 */
static ssize_t uv__fs_sendfile(uv_fs_t* req) {
  int in_fd;
  int out_fd;

  in_fd = req->flags;  /* See uv_fs_sendfile(): flags doubles as the source fd. */
  out_fd = req->file;

#if defined(__linux__) || defined(__sun)
  {
    off_t off;
    ssize_t r;

    off = req->off;
    r = sendfile(out_fd, in_fd, &off, req->bufsml[0].len);

    /* sendfile() on SunOS returns EINVAL if the target fd is not a socket but
     * it still writes out data. Fortunately, we can detect it by checking if
     * the offset has been updated.
     */
    if (r != -1 || off > req->off) {
      r = off - req->off;
      req->off = off;
      return r;
    }

    /* These errnos indicate the fd pair is unsupported by sendfile();
     * fall back to the userspace copy loop. */
    if (errno == EINVAL ||
        errno == EIO ||
        errno == ENOTSOCK ||
        errno == EXDEV) {
      errno = 0;
      return uv__fs_sendfile_emul(req);
    }

    return -1;
  }
#elif defined(__FreeBSD__) || defined(__APPLE__)
  {
    off_t len;
    ssize_t r;

    /* sendfile() on FreeBSD and Darwin returns EAGAIN if the target fd is in
     * non-blocking mode and not all data could be written. If a non-zero
     * number of bytes have been sent, we don't consider it an error.
     */
#if defined(__FreeBSD__)
    len = 0;
    r = sendfile(in_fd, out_fd, req->off, req->bufsml[0].len, NULL, &len, 0);
#else
    /* The darwin sendfile takes len as an input for the length to send,
     * so make sure to initialize it with the caller's value. */
    len = req->bufsml[0].len;
    r = sendfile(in_fd, out_fd, req->off, &len, NULL, 0);
#endif

    if (r != -1 || len != 0) {
      req->off += len;
      return (ssize_t) len;
    }

    if (errno == EINVAL ||
        errno == EIO ||
        errno == ENOTSOCK ||
        errno == EXDEV) {
      errno = 0;
      return uv__fs_sendfile_emul(req);
    }

    return -1;
  }
#else
  /* Squelch compiler warnings. */
  (void) &in_fd;
  (void) &out_fd;
  return uv__fs_sendfile_emul(req);
#endif
}
/* Worker for UV_FS_UTIME: set the access and modification times of
 * req->path via utime(2).  Returns 0 on success, -1 with errno set. */
static ssize_t uv__fs_utime(uv_fs_t* req) {
  struct utimbuf times;

  times.actime = req->atime;
  times.modtime = req->mtime;

  return utime(req->path, &times); /* TODO use utimes() where available */
}
/* Worker for UV_FS_WRITE: write req->nbufs buffers to req->file, either at
 * the current file position (req->off < 0) or at the absolute offset
 * req->off.  Frees the heap-allocated buffer list, if any, before
 * returning.  Returns bytes written, or -1 with errno set.
 */
static ssize_t uv__fs_write(uv_fs_t* req) {
  ssize_t r;

  /* Serialize writes on OS X, concurrent write() and pwrite() calls result in
   * data loss. We can't use a per-file descriptor lock, the descriptor may be
   * a dup().
   */
#if defined(__APPLE__)
  static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

  pthread_mutex_lock(&lock);
#endif

  if (req->off < 0) {
    /* No positional offset requested: plain write()/writev(). */
    if (req->nbufs == 1)
      r = write(req->file, req->bufs[0].base, req->bufs[0].len);
    else
      r = writev(req->file, (struct iovec*) req->bufs, req->nbufs);
  } else {
    if (req->nbufs == 1) {
      r = pwrite(req->file, req->bufs[0].base, req->bufs[0].len, req->off);
      goto done;
    }
#if HAVE_PREADV
    r = pwritev(req->file, (struct iovec*) req->bufs, req->nbufs, req->off);
#else
# if defined(__linux__)
    static int no_pwritev;  /* Set once the pwritev syscall reports ENOSYS. */
    if (no_pwritev)
# endif
    {
      /* Emulate pwritev() with a series of pwrite() calls. */
      off_t written;
      size_t index;

# if defined(__linux__)
retry:
# endif
      written = 0;
      index = 0;
      r = 0;
      do {
        if (req->bufs[index].len > 0) {
          r = pwrite(req->file,
                     req->bufs[index].base,
                     req->bufs[index].len,
                     req->off + written);
          if (r > 0)
            written += r;
        }
        index++;
      } while (index < req->nbufs && r >= 0);
      /* Report total progress even if a later pwrite() in the series failed. */
      if (written > 0)
        r = written;
    }
# if defined(__linux__)
    else {
      r = uv__pwritev(req->file,
                      (struct iovec*) req->bufs,
                      req->nbufs,
                      req->off);
      if (r == -1 && errno == ENOSYS) {
        no_pwritev = 1;  /* Old kernel: remember and use the emulation. */
        goto retry;
      }
    }
# endif
#endif
  }

done:
#if defined(__APPLE__)
  pthread_mutex_unlock(&lock);
#endif

  /* A buffer list that did not fit in bufsml was heap-allocated by
   * uv_fs_write() and is owned by this worker. */
  if (req->bufs != req->bufsml)
    free(req->bufs);

  return r;
}
/* Copy a POSIX struct stat into libuv's portable uv_stat_t, filling in
 * nanosecond timestamps, birth time, flags and generation as precisely as
 * the platform allows (zeroes where the fields do not exist).
 */
static void uv__to_stat(struct stat* src, uv_stat_t* dst) {
  dst->st_dev = src->st_dev;
  dst->st_mode = src->st_mode;
  dst->st_nlink = src->st_nlink;
  dst->st_uid = src->st_uid;
  dst->st_gid = src->st_gid;
  dst->st_rdev = src->st_rdev;
  dst->st_ino = src->st_ino;
  dst->st_size = src->st_size;
  dst->st_blksize = src->st_blksize;
  dst->st_blocks = src->st_blocks;

#if defined(__APPLE__)
  /* Darwin exposes the timespec fields under the *timespec names. */
  dst->st_atim.tv_sec = src->st_atimespec.tv_sec;
  dst->st_atim.tv_nsec = src->st_atimespec.tv_nsec;
  dst->st_mtim.tv_sec = src->st_mtimespec.tv_sec;
  dst->st_mtim.tv_nsec = src->st_mtimespec.tv_nsec;
  dst->st_ctim.tv_sec = src->st_ctimespec.tv_sec;
  dst->st_ctim.tv_nsec = src->st_ctimespec.tv_nsec;
  dst->st_birthtim.tv_sec = src->st_birthtimespec.tv_sec;
  dst->st_birthtim.tv_nsec = src->st_birthtimespec.tv_nsec;
  dst->st_flags = src->st_flags;
  dst->st_gen = src->st_gen;
#elif !defined(_AIX) && \
    (defined(_BSD_SOURCE) || defined(_SVID_SOURCE) || defined(_XOPEN_SOURCE))
  /* Platforms with standard st_atim/st_mtim/st_ctim timespec members. */
  dst->st_atim.tv_sec = src->st_atim.tv_sec;
  dst->st_atim.tv_nsec = src->st_atim.tv_nsec;
  dst->st_mtim.tv_sec = src->st_mtim.tv_sec;
  dst->st_mtim.tv_nsec = src->st_mtim.tv_nsec;
  dst->st_ctim.tv_sec = src->st_ctim.tv_sec;
  dst->st_ctim.tv_nsec = src->st_ctim.tv_nsec;
# if defined(__DragonFly__) || \
    defined(__FreeBSD__) || \
    defined(__OpenBSD__) || \
    defined(__NetBSD__)
  /* These BSDs track a real birth time plus file flags/generation. */
  dst->st_birthtim.tv_sec = src->st_birthtim.tv_sec;
  dst->st_birthtim.tv_nsec = src->st_birthtim.tv_nsec;
  dst->st_flags = src->st_flags;
  dst->st_gen = src->st_gen;
# else
  /* No birth time on this platform; approximate it with ctime. */
  dst->st_birthtim.tv_sec = src->st_ctim.tv_sec;
  dst->st_birthtim.tv_nsec = src->st_ctim.tv_nsec;
  dst->st_flags = 0;
  dst->st_gen = 0;
# endif
#else
  /* Seconds-only fallback for platforms without timespec stat members. */
  dst->st_atim.tv_sec = src->st_atime;
  dst->st_atim.tv_nsec = 0;
  dst->st_mtim.tv_sec = src->st_mtime;
  dst->st_mtim.tv_nsec = 0;
  dst->st_ctim.tv_sec = src->st_ctime;
  dst->st_ctim.tv_nsec = 0;
  dst->st_birthtim.tv_sec = src->st_ctime;
  dst->st_birthtim.tv_nsec = 0;
  dst->st_flags = 0;
  dst->st_gen = 0;
#endif
}
/* stat(2) wrapper that fills in the portable uv_stat_t.  The conversion is
 * performed unconditionally; callers must check the return value before
 * trusting the contents of `buf`. */
static int uv__fs_stat(const char *path, uv_stat_t *buf) {
  struct stat native;
  int rc;

  rc = stat(path, &native);
  uv__to_stat(&native, buf);
  return rc;
}
/* lstat(2) wrapper (does not follow symlinks) that fills in the portable
 * uv_stat_t.  The conversion is performed unconditionally; callers must
 * check the return value before trusting `buf`. */
static int uv__fs_lstat(const char *path, uv_stat_t *buf) {
  struct stat native;
  int rc;

  rc = lstat(path, &native);
  uv__to_stat(&native, buf);
  return rc;
}
/* fstat(2) wrapper that fills in the portable uv_stat_t for an open fd.
 * The conversion is performed unconditionally; callers must check the
 * return value before trusting `buf`. */
static int uv__fs_fstat(int fd, uv_stat_t *buf) {
  struct stat native;
  int rc;

  rc = fstat(fd, &native);
  uv__to_stat(&native, buf);
  return rc;
}
/* Threadpool worker: executes the filesystem operation described by the
 * uv_fs_t embedded in `w`, retrying on EINTR where safe, and stores the
 * outcome (result value or negated errno) in req->result.
 */
static void uv__fs_work(struct uv__work* w) {
  int retry_on_eintr;
  uv_fs_t* req;
  ssize_t r;
#ifdef O_CLOEXEC
  static int no_cloexec_support;  /* Set when open() rejects O_CLOEXEC (old kernels). */
#endif  /* O_CLOEXEC */

  req = container_of(w, uv_fs_t, work_req);
  /* close() must not be retried on EINTR: the fd's state is unspecified
   * after an interrupted close and retrying could close a descriptor that
   * another thread has since reused. */
  retry_on_eintr = !(req->fs_type == UV_FS_CLOSE);

  do {
    errno = 0;

/* Expands to one switch case per operation, assigning the syscall's return
 * value to `r`. */
#define X(type, action) \
  case UV_FS_ ## type: \
    r = action; \
    break;

    switch (req->fs_type) {
    X(ACCESS, access(req->path, req->flags));
    X(CHMOD, chmod(req->path, req->mode));
    X(CHOWN, chown(req->path, req->uid, req->gid));
    X(CLOSE, close(req->file));
    X(FCHMOD, fchmod(req->file, req->mode));
    X(FCHOWN, fchown(req->file, req->uid, req->gid));
    X(FDATASYNC, uv__fs_fdatasync(req));
    X(FSTAT, uv__fs_fstat(req->file, &req->statbuf));
    X(FSYNC, fsync(req->file));
    X(FTRUNCATE, ftruncate(req->file, req->off));
    X(FUTIME, uv__fs_futime(req));
    X(LSTAT, uv__fs_lstat(req->path, &req->statbuf));
    X(LINK, link(req->path, req->new_path));
    X(MKDIR, mkdir(req->path, req->mode));
    X(MKDTEMP, uv__fs_mkdtemp(req));
    X(READ, uv__fs_read(req));
    X(SCANDIR, uv__fs_scandir(req));
    X(READLINK, uv__fs_readlink(req));
    X(RENAME, rename(req->path, req->new_path));
    X(RMDIR, rmdir(req->path));
    X(SENDFILE, uv__fs_sendfile(req));
    X(STAT, uv__fs_stat(req->path, &req->statbuf));
    X(SYMLINK, symlink(req->path, req->new_path));
    X(UNLINK, unlink(req->path));
    X(UTIME, uv__fs_utime(req));
    X(WRITE, uv__fs_write(req));
    case UV_FS_OPEN:
#ifdef O_CLOEXEC
      /* Try O_CLOEXEC before entering locks */
      if (!no_cloexec_support) {
        r = open(req->path, req->flags | O_CLOEXEC, req->mode);
        if (r >= 0)
          break;
        if (errno != EINVAL)
          break;
        /* EINVAL means the kernel predates O_CLOEXEC; remember that and
         * fall through to the lock-protected open + fcntl path. */
        no_cloexec_support = 1;
      }
#endif  /* O_CLOEXEC */
      /* The read lock prevents a concurrent fork() between open() and the
       * uv__cloexec() call below; sync calls (cb == NULL) skip it. */
      if (req->cb != NULL)
        uv_rwlock_rdlock(&req->loop->cloexec_lock);
      r = open(req->path, req->flags, req->mode);

      /*
       * In case of failure `uv__cloexec` will leave error in `errno`,
       * so it is enough to just set `r` to `-1`.
       */
      if (r >= 0 && uv__cloexec(r, 1) != 0) {
        r = uv__close(r);
        if (r != 0 && r != -EINPROGRESS)
          abort();
        r = -1;
      }

      if (req->cb != NULL)
        uv_rwlock_rdunlock(&req->loop->cloexec_lock);

      break;
    default: abort();
    }
#undef X
  }
  while (r == -1 && errno == EINTR && retry_on_eintr);

  /* libuv reports errors as negated errno values. */
  if (r == -1)
    req->result = -errno;
  else
    req->result = r;

  /* The stat family additionally publishes the result buffer via req->ptr. */
  if (r == 0 && (req->fs_type == UV_FS_STAT ||
                 req->fs_type == UV_FS_FSTAT ||
                 req->fs_type == UV_FS_LSTAT)) {
    req->ptr = &req->statbuf;
  }
}
/* Loop-thread completion callback: unregisters the request and invokes the
 * user callback, translating threadpool cancellation into -ECANCELED. */
static void uv__fs_done(struct uv__work* w, int status) {
  uv_fs_t* request = container_of(w, uv_fs_t, work_req);

  uv__req_unregister(request->loop, request);

  if (status == -ECANCELED) {
    assert(request->result == 0);
    request->result = -ECANCELED;
  }

  if (request->cb != NULL)
    request->cb(request);
}
/* NOTE(review): INIT/PATH/PATH2/POST used throughout the public uv_fs_*
 * wrappers are macros defined earlier in this file; they appear to
 * initialize the request, copy the path argument(s), and either queue the
 * work on the threadpool (cb != NULL) or run it synchronously -- confirm
 * against their definitions. */

/* Asynchronous access(2): check the caller's permissions on `path`. */
int uv_fs_access(uv_loop_t* loop,
                 uv_fs_t* req,
                 const char* path,
                 int flags,
                 uv_fs_cb cb) {
  INIT(ACCESS);
  PATH;
  req->flags = flags;  /* R_OK/W_OK/X_OK/F_OK mask, passed to access(). */
  POST;
}

/* Asynchronous chmod(2). */
int uv_fs_chmod(uv_loop_t* loop,
                uv_fs_t* req,
                const char* path,
                int mode,
                uv_fs_cb cb) {
  INIT(CHMOD);
  PATH;
  req->mode = mode;
  POST;
}

/* Asynchronous chown(2). */
int uv_fs_chown(uv_loop_t* loop,
                uv_fs_t* req,
                const char* path,
                uv_uid_t uid,
                uv_gid_t gid,
                uv_fs_cb cb) {
  INIT(CHOWN);
  PATH;
  req->uid = uid;
  req->gid = gid;
  POST;
}

/* Asynchronous close(2).  Note uv__fs_work() does not retry close on EINTR. */
int uv_fs_close(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb) {
  INIT(CLOSE);
  req->file = file;
  POST;
}
/* Asynchronous fchmod(2). */
int uv_fs_fchmod(uv_loop_t* loop,
                 uv_fs_t* req,
                 uv_file file,
                 int mode,
                 uv_fs_cb cb) {
  INIT(FCHMOD);
  req->file = file;
  req->mode = mode;
  POST;
}

/* Asynchronous fchown(2). */
int uv_fs_fchown(uv_loop_t* loop,
                 uv_fs_t* req,
                 uv_file file,
                 uv_uid_t uid,
                 uv_gid_t gid,
                 uv_fs_cb cb) {
  INIT(FCHOWN);
  req->file = file;
  req->uid = uid;
  req->gid = gid;
  POST;
}

/* Asynchronous fdatasync(2) (or platform equivalent, see uv__fs_fdatasync). */
int uv_fs_fdatasync(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb) {
  INIT(FDATASYNC);
  req->file = file;
  POST;
}

/* Asynchronous fstat(2); result is published in req->statbuf / req->ptr. */
int uv_fs_fstat(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb) {
  INIT(FSTAT);
  req->file = file;
  POST;
}

/* Asynchronous fsync(2). */
int uv_fs_fsync(uv_loop_t* loop, uv_fs_t* req, uv_file file, uv_fs_cb cb) {
  INIT(FSYNC);
  req->file = file;
  POST;
}

/* Asynchronous ftruncate(2). */
int uv_fs_ftruncate(uv_loop_t* loop,
                    uv_fs_t* req,
                    uv_file file,
                    int64_t off,
                    uv_fs_cb cb) {
  INIT(FTRUNCATE);
  req->file = file;
  req->off = off;  /* New file length, despite the field name. */
  POST;
}

/* Asynchronously set an open file's access/modification times (seconds,
 * possibly fractional, as doubles). */
int uv_fs_futime(uv_loop_t* loop,
                 uv_fs_t* req,
                 uv_file file,
                 double atime,
                 double mtime,
                 uv_fs_cb cb) {
  INIT(FUTIME);
  req->file = file;
  req->atime = atime;
  req->mtime = mtime;
  POST;
}
/* Asynchronous lstat(2); does not follow symlinks. */
int uv_fs_lstat(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb) {
  INIT(LSTAT);
  PATH;
  POST;
}

/* Asynchronous link(2): create a hard link new_path -> path. */
int uv_fs_link(uv_loop_t* loop,
               uv_fs_t* req,
               const char* path,
               const char* new_path,
               uv_fs_cb cb) {
  INIT(LINK);
  PATH2;
  POST;
}

/* Asynchronous mkdir(2). */
int uv_fs_mkdir(uv_loop_t* loop,
                uv_fs_t* req,
                const char* path,
                int mode,
                uv_fs_cb cb) {
  INIT(MKDIR);
  PATH;
  req->mode = mode;
  POST;
}

/* Asynchronous mkdtemp(3); `tpl` is the XXXXXX template, copied so the
 * worker can mutate its own buffer (req->path holds the result). */
int uv_fs_mkdtemp(uv_loop_t* loop,
                  uv_fs_t* req,
                  const char* tpl,
                  uv_fs_cb cb) {
  INIT(MKDTEMP);
  req->path = strdup(tpl);
  if (req->path == NULL)
    return -ENOMEM;
  POST;
}

/* Asynchronous open(2); uv__fs_work() sets close-on-exec on the result. */
int uv_fs_open(uv_loop_t* loop,
               uv_fs_t* req,
               const char* path,
               int flags,
               int mode,
               uv_fs_cb cb) {
  INIT(OPEN);
  PATH;
  req->flags = flags;
  req->mode = mode;
  POST;
}
/* Asynchronous read into `nbufs` caller-supplied buffers at offset `off`
 * (or the current position when off < 0).  The buffer descriptors are
 * copied; a heap copy is used when they do not fit in req->bufsml. */
int uv_fs_read(uv_loop_t* loop, uv_fs_t* req,
               uv_file file,
               const uv_buf_t bufs[],
               unsigned int nbufs,
               int64_t off,
               uv_fs_cb cb) {
  INIT(READ);
  req->file = file;

  req->nbufs = nbufs;
  req->bufs = req->bufsml;
  if (nbufs > ARRAY_SIZE(req->bufsml))
    req->bufs = malloc(nbufs * sizeof(*bufs));

  if (req->bufs == NULL)
    return -ENOMEM;

  memcpy(req->bufs, bufs, nbufs * sizeof(*bufs));

  req->off = off;
  POST;
}

/* Asynchronous directory listing; results are retrieved via uv_fs_scandir_next. */
int uv_fs_scandir(uv_loop_t* loop,
                  uv_fs_t* req,
                  const char* path,
                  int flags,
                  uv_fs_cb cb) {
  INIT(SCANDIR);
  PATH;
  req->flags = flags;
  POST;
}

/* Asynchronous readlink(2); the target string is published via req->ptr. */
int uv_fs_readlink(uv_loop_t* loop,
                   uv_fs_t* req,
                   const char* path,
                   uv_fs_cb cb) {
  INIT(READLINK);
  PATH;
  POST;
}

/* Asynchronous rename(2). */
int uv_fs_rename(uv_loop_t* loop,
                 uv_fs_t* req,
                 const char* path,
                 const char* new_path,
                 uv_fs_cb cb) {
  INIT(RENAME);
  PATH2;
  POST;
}

/* Asynchronous rmdir(2). */
int uv_fs_rmdir(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb) {
  INIT(RMDIR);
  PATH;
  POST;
}
/* Asynchronously copy `len` bytes from in_fd to out_fd starting at `off`.
 * See uv__fs_sendfile() for the worker-side implementation. */
int uv_fs_sendfile(uv_loop_t* loop,
                   uv_fs_t* req,
                   uv_file out_fd,
                   uv_file in_fd,
                   int64_t off,
                   size_t len,
                   uv_fs_cb cb) {
  INIT(SENDFILE);
  req->flags = in_fd; /* hack: the flags field is repurposed to carry the source fd */
  req->file = out_fd;
  req->off = off;
  req->bufsml[0].len = len;  /* Byte count is smuggled through the inline buffer. */
  POST;
}

/* Asynchronous stat(2); result is published in req->statbuf / req->ptr. */
int uv_fs_stat(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb) {
  INIT(STAT);
  PATH;
  POST;
}

/* Asynchronous symlink(2): create new_path pointing at path.  `flags` is
 * accepted for API symmetry with Windows; unused by the POSIX worker. */
int uv_fs_symlink(uv_loop_t* loop,
                  uv_fs_t* req,
                  const char* path,
                  const char* new_path,
                  int flags,
                  uv_fs_cb cb) {
  INIT(SYMLINK);
  PATH2;
  req->flags = flags;
  POST;
}

/* Asynchronous unlink(2). */
int uv_fs_unlink(uv_loop_t* loop, uv_fs_t* req, const char* path, uv_fs_cb cb) {
  INIT(UNLINK);
  PATH;
  POST;
}

/* Asynchronously set a path's access/modification times (seconds as doubles). */
int uv_fs_utime(uv_loop_t* loop,
                uv_fs_t* req,
                const char* path,
                double atime,
                double mtime,
                uv_fs_cb cb) {
  INIT(UTIME);
  PATH;
  req->atime = atime;
  req->mtime = mtime;
  POST;
}

/* Asynchronous write of `nbufs` buffers at offset `off` (current position
 * when off < 0).  Buffer descriptors are copied, heap-allocating when they
 * do not fit in req->bufsml; uv__fs_write() frees that copy. */
int uv_fs_write(uv_loop_t* loop,
                uv_fs_t* req,
                uv_file file,
                const uv_buf_t bufs[],
                unsigned int nbufs,
                int64_t off,
                uv_fs_cb cb) {
  INIT(WRITE);
  req->file = file;

  req->nbufs = nbufs;
  req->bufs = req->bufsml;
  if (nbufs > ARRAY_SIZE(req->bufsml))
    req->bufs = malloc(nbufs * sizeof(*bufs));

  if (req->bufs == NULL)
    return -ENOMEM;

  memcpy(req->bufs, bufs, nbufs * sizeof(*bufs));

  req->off = off;
  POST;
}
/* Release the resources owned by a completed (or failed) request: the
 * duplicated path string(s) and any heap-allocated result in req->ptr.
 * Safe to call multiple times; fields are NULLed after freeing. */
void uv_fs_req_cleanup(uv_fs_t* req) {
  free((void*) req->path);
  req->path = NULL;
  req->new_path = NULL;  /* NOTE(review): presumably points into the same allocation as path -- confirm against the PATH2 macro. */

  if (req->fs_type == UV_FS_SCANDIR && req->ptr != NULL)
    uv__fs_scandir_cleanup(req);

  /* req->ptr points at the embedded statbuf for the stat family; only
   * heap-allocated results are freed. */
  if (req->ptr != &req->statbuf)
    free(req->ptr);
  req->ptr = NULL;
}
|
// On DOM ready, initialize the simplegmaps plugin on the map container,
// with scroll-wheel zoom disabled and a custom grayscale-with-blue-water
// Google Maps style (POIs, transit and arterial-road icons hidden).
jQuery(document).ready(function(){ jQuery("#map1526815782435975717").simplegmaps({ MapOptions: { zoom: 13,scrollwheel: false,styles: [
{
"featureType": "administrative",
"elementType": "labels.text.fill",
"stylers": [
{
"color": "#444444"
}
]
},
{
"featureType": "landscape",
"elementType": "all",
"stylers": [
{
"color": "#f2f2f2"
}
]
},
{
"featureType": "poi",
"elementType": "all",
"stylers": [
{
"visibility": "off"
}
]
},
{
"featureType": "road",
"elementType": "all",
"stylers": [
{
"saturation": -100
},
{
"lightness": 45
}
]
},
{
"featureType": "road.highway",
"elementType": "all",
"stylers": [
{
"visibility": "simplified"
}
]
},
{
"featureType": "road.arterial",
"elementType": "labels.icon",
"stylers": [
{
"visibility": "off"
}
]
},
{
"featureType": "transit",
"elementType": "all",
"stylers": [
{
"visibility": "off"
}
]
},
{
"featureType": "water",
"elementType": "all",
"stylers": [
{
"color": "#46bcec"
},
{
"visibility": "on"
}
]
},
{
"featureType": "water",
"elementType": "geometry.fill",
"stylers": [
{
"color": "#2d488b"
}
]
}
], } }); });
|
// A script for correcting the indentation of HTML documents / fragments
code.style.html = hot.factory();
code.style.html.prototype.init = function(source)
{
var options = typeof arguments[1] !== 'undefined' ? arguments[1] : {};
options.lineLength = options.lineLength || 70;
options.softTabs = typeof options.softTabs !== 'undefined' ? options.softTabs : true;
options.tabWidth = options.tabWidth || 4;
options.compress = typeof options.compress !== 'undefined' ? options.compress : false;
if (isNaN(options.tabWidth))
{
options.tabWidth = 4;
}
// Returns true when the text at `counter` in `source` begins with an
// encoded HTML entity such as "&amp;" or "&#169;", false otherwise.
this.hasEncodedEntity = function(source, counter)
{
    var data = '';
    // Scan forward from `counter`, stopping at the end of the input so we
    // never accumulate empty charAt() results past the string (the previous
    // bound iterated source.length times regardless of the start position).
    for (var offset = 0; counter + offset < source.length; offset++)
    {
        var character = source.charAt(counter + offset);
        data += character;
        // A space directly after the '&' rules out an entity.
        if (character == ' ' && data.length == 2)
        {
            return false;
        }
        else if (character == ';')
        {
            // An entity is '&', an optional '#', one or more alphanumerics,
            // then ';'.  The previous character class, [A-Z|a-z|0-9],
            // accidentally accepted literal '|' characters as well.
            return /^&#?[A-Za-z0-9]+;/.test(data);
        }
    }
    return false;
};
// Hard-wraps `data` by replacing the space nearest each line-length
// boundary (optional second argument, default 70 characters) with a newline.
this.splitDataIntoLines = function(data)
{
    var maxCharacterLength = typeof arguments[1] != 'undefined' ? parseInt(arguments[1]) : 70;
    var lastSpaceOffset = 0;
    var spaceCounters = [];
    // First pass: record the offset of the last space seen before each
    // maxCharacterLength-sized run of characters.
    // NOTE(review): if a run contains no space, the previously recorded
    // offset is pushed again (possibly 0), producing repeated replacements
    // at the same position -- confirm whether unbroken words longer than
    // the limit are expected in practice.
    for (var counter = 0, characterCounter = 0; counter < data.length; counter++)
    {
        var character = data.charAt(counter);
        switch (character)
        {
            case ' ':
            {
                lastSpaceOffset = counter;
                break;
            }
        }
        if (characterCounter == maxCharacterLength)
        {
            spaceCounters.push(lastSpaceOffset);
            characterCounter = 0;
        }
        characterCounter++;
    }
    // Second pass: swap each recorded space for a newline.  A newline has
    // the same length as the space it replaces, so the saved offsets stay
    // valid as the string is rewritten.
    $(spaceCounters).each(
        function(key, spacePosition)
        {
            data = data.substring(0, spacePosition) + "\n" + data.substring(spacePosition + 1);
        }
    );
    return data;
};
var isTag = false,
tag = '',
isTagType = false,
tagType = '',
items = [],
isData = false,
data = '';
// This long nest of booleans isn't used yet, but might be used in the future,
// if I decide I want more fine-grained control over the markup styling of
// HTML documents.
var is = {
html : false,
head : false,
meta : false,
link : false,
script : false,
noscript : false,
style : false,
title : false,
base : false,
body : false,
p : false,
div : false,
section : false,
nav : false,
article : false,
aside : false,
address : false,
main : false,
span : false,
a : false,
b : false,
i : false,
u : false,
strong : false,
em : false,
font : false,
center : false,
small : false,
sub : false,
sup : false,
ins : false,
del : false,
mark : false,
ruby : false,
rt : false,
rp : false,
bdi : false,
bdo : false,
s : false,
hr : false,
br : false,
wbr : false,
pre : false,
code : false,
command : false,
$var : false,
blockquote : false,
cite : false,
q : false,
header : false,
hgroup : false,
h1 : false,
h2 : false,
h3 : false,
h4 : false,
h5 : false,
h6 : false,
footer : false,
figure : false,
figcaption : false,
ul : false,
ol : false,
li : false,
dl : false,
dd : false,
dt : false,
dfn : false,
abbr : false,
data : false,
time : false,
block : false,
inlineBlock : false,
inline : false,
table : false,
colgroup : false,
col : false,
thead : false,
tr : false,
th : false,
tbody : false,
td : false,
tfoot : false,
caption : false,
samp : false,
kdb : false,
img : false,
iframe : false,
embed : false,
object : false,
param : false,
video : false,
audio : false,
source : false,
track : false,
canvas : false,
map : false,
area : false,
svg : false,
math : false,
fieldset : false,
legend : false,
label : false,
input : false,
button : false,
select : false,
datalist : false,
optgroup : false,
option : false,
textarea : false,
keygen : false,
output : false,
progress : false,
meter : false,
details : false,
summary : false,
menuitem : false,
menu : false
};
var html = {
html : {
type : 'block',
indentation : 0
},
head : {
type : 'block',
indentation : 1
},
meta : {
type : 'block',
indentation : 2
},
link : {
type : 'block',
indentation : 2
},
script : {
type : 'block',
indentation : 2
},
noscript : {
type : 'block',
indentation : 2
},
style : {
type : 'block',
indentation : 2
},
title : {
type : 'block',
indentation : 2
},
base : {
type : 'block',
indentation : 2
},
body : {
type : 'block',
indentation : 1
},
p : {
type : 'block',
indentation : 'auto'
},
div : {
type : 'block',
indentation : 'auto'
},
section : {
type : 'block',
indentation : 'auto'
},
nav : {
type : 'block',
indentation : 'auto'
},
article : {
type : 'block',
indentation : 'auto'
},
aside : {
type : 'block',
indentation : 'auto'
},
address : {
type : 'inline',
indentation : 0
},
main : {
type : 'block',
indentation : 'auto'
},
span : {
type : 'inline',
indentation : 0
},
a : {
type : 'inline',
indentation : 0
},
b : {
type : 'inline',
indentation : 0
},
i : {
type : 'inline',
indentation : 0
},
u : {
type : 'inline',
indentation : 0
},
strong : {
type : 'inline',
indentation : 0
},
em : {
type : 'inline',
indentation : 0
},
font : {
type : 'inline',
indentation : 0,
remove : true
},
center : {
type : 'inline',
indentation : 0,
remove : true
},
small : {
type : 'inline',
indentation : 0
},
sub : {
type : 'inline',
indentation : 0
},
sup : {
type : 'inline',
indentation : 0
},
ins : {
type : 'inline',
indentation : 0
},
del : {
type : 'inline',
indentation : 0
},
mark : {
type : 'inline',
indentation : 0
},
ruby : {
type : 'inline',
indentation : 0
},
rt : {
type : 'inline',
indentation : 0
},
rp : {
type : 'inline',
indentation : 0
},
bdi : {
type : 'inline',
indentation : 0
},
bdo : {
type : 'inline',
indentation : 0
},
s : {
type : 'inline',
indentation : 0
},
hr : {
type : 'block',
indentation : 'auto'
},
br : {
type : 'block',
indentation : 'auto'
},
wbr : {
type : 'block',
indentation : 'auto'
},
pre : {
type : 'block',
indentation : 'auto'
},
code : {
type : 'block',
indentation : 'auto'
},
command : {
type : 'block',
indentation : 'auto'
},
$var : {
type : 'inline',
indentation : 0
},
blockquote : {
type : 'block',
indentation : 'auto'
},
cite : {
type : 'block',
indentation : 'auto'
},
q : {
type : 'inline',
indentation : 0
},
header : {
type : 'block',
indentation : 'auto'
},
hgroup : {
type : 'block',
indentation : 'auto'
},
h1 : {
type : 'block',
indentation : 'auto'
},
h2 : {
type : 'block',
indentation : 'auto'
},
h3 : {
type : 'block',
indentation : 'auto'
},
h4 : {
type : 'block',
indentation : 'auto'
},
h5 : {
type : 'block',
indentation : 'auto'
},
h6 : {
type : 'block',
indentation : 'auto'
},
footer : {
type : 'block',
indentation : 'auto'
},
figure : {
type : 'block',
indentation : 'auto'
},
figcaption : {
type : 'block',
indentation : 'auto'
},
ul : {
type : 'block',
indentation : 'auto'
},
ol : {
type : 'block',
indentation : 'auto'
},
li : {
type : 'block',
indentation : 'auto'
},
dl : {
type : 'block',
indentation : 'auto'
},
dd : {
type : 'block',
indentation : 'auto'
},
dt : {
type : 'block',
indentation : 'auto'
},
dfn : {
type : 'block',
indentation : 'auto'
},
abbr : {
type : 'inline',
indentation : 0
},
data : {
type : 'inline',
indentation : 0
},
time : {
type : 'inline',
indentation : 0
},
table : {
type : 'block',
indentation : 'auto'
},
colgroup : {
type : 'block',
indentation : 'auto'
},
col : {
type : 'block',
indentation : 'auto'
},
thead : {
type : 'block',
indentation : 'auto'
},
tr : {
type : 'block',
indentation : 'auto'
},
th : {
type : 'block',
indentation : 'auto'
},
tbody : {
type : 'block',
indentation : 'auto'
},
td : {
type : 'block',
indentation : 'auto'
},
tfoot : {
type : 'block',
indentation : 'auto'
},
caption : {
type : 'block',
indentation : 'auto'
},
samp : {
type : 'inline',
indentation : 0
},
kdb : {
type : 'inline',
indentation : 0
},
img : {
type : 'inline',
indentation : 0
},
iframe : {
type : 'block',
indentation : 'auto'
},
embed : {
type : 'block',
indentation : 'auto'
},
object : {
type : 'block',
indentation : 'auto'
},
param : {
type : 'block',
indentation : 'auto'
},
video : {
type : 'block',
indentation : 'auto'
},
audio : {
type : 'block',
indentation : 'auto'
},
source : {
type : 'block',
indentation : 'auto'
},
track : {
type : 'block',
indentation : 'auto'
},
canvas : {
type : 'block',
indentation : 'auto'
},
map : {
type : 'block',
indentation : 'auto'
},
area : {
type : 'block',
indentation : 'auto'
},
svg : {
type : 'block',
indentation : 'auto'
},
math : {
type : 'block',
indentation : 'auto'
},
fieldset : {
type : 'block',
indentation : 'auto'
},
legend : {
type : 'block',
indentation : 'auto'
},
label : {
type : 'block',
indentation : 'auto'
},
input : {
type : 'block',
indentation : 'auto'
},
button : {
type : 'block',
indentation : 'auto'
},
select : {
type : 'block',
indentation : 'auto'
},
datalist : {
type : 'block',
indentation : 'auto'
},
optgroup : {
type : 'block',
indentation : 'auto'
},
option : {
type : 'block',
indentation : 'auto'
},
textarea : {
type : 'block',
indentation : 'auto'
},
keygen : {
type : 'block',
indentation : 'auto'
},
output : {
type : 'block',
indentation : 'auto'
},
progress : {
type : 'block',
indentation : 'auto'
},
meter : {
type : 'block',
indentation : 'auto'
},
details : {
type : 'block',
indentation : 'auto'
},
summary : {
type : 'block',
indentation : 'auto'
},
menuitem : {
type : 'block',
indentation : 'auto'
},
menu : {
type : 'block',
indentation : 'auto'
}
};
//var characters = source.split('');
for (var counter = 0; counter < source.length; counter++)
{
var character = source.charAt(counter);
switch (character)
{
case '<':
{
if (isData)
{
data = $.trim(data);
if (data && data.length)
{
data = data.replace(/\s+/g, ' ');
items.push({
type : 'text',
data : options.compress? data : this.splitDataIntoLines(data)
});
}
isData = false;
data = '';
}
data = '<';
isTag = true;
isTagType = true;
break;
}
case '/':
{
if (isTag || isData)
{
data += character;
}
break;
}
case '>':
{
if (isTag)
{
data += '>';
data = data.replace(/\s+/g, ' ');
items.push({
type : tagType,
data : data,
endTag : data.indexOf('</') != -1
});
isTag = false;
isTagType = false;
data = '';
tagType = '';
}
break;
}
case ' ':
case "\t":
case "\n":
case "\r":
case "\s":
{
if (isTagType)
{
isTagType = false;
}
if (isData || isTag)
{
data += character;
}
break;
}
case '&':
{
if (!isTag)
{
if (!this.hasEncodedEntity(source, counter))
{
data += '&';
}
else
{
data += character;
}
}
else
{
data += character;
}
break;
}
default:
{
if (isTag)
{
data += character;
}
if (isTagType)
{
tagType += character;
}
if (!isTag && !isTagType)
{
isData = true;
data += character;
}
}
}
}
var source = '';
var inBlock = false;
var blockCounter = 0;
var inlineCounter = 0;
$(items).each(
function(key, value)
{
if (options.compress)
{
source += this.data;
}
else
{
var indentation = '';
var applyIndentation = true;
var addNewLine = true;
var type = typeof html[this.type] != 'undefined' ? html[this.type].type : 'text';
var lastItem = typeof items[key - 1] != 'undefined' ? items[key - 1] : {};
if (typeof lastItem.type != 'undefined')
{
var lastType = typeof html[lastItem.type] != 'undefined' ? html[lastItem.type].type : lastItem.type;
}
else
{
var lastType = '';
}
var nextItem = typeof items[key + 1] != 'undefined' ? items[key + 1] : {};
if (typeof nextItem.type != 'undefined')
{
var nextType = typeof html[nextItem.type] != 'undefined' ? html[nextItem.type].type : nextItem.type;
}
else
{
var nextType = '';
}
var isEndTag = false;
if (type == 'inline' && !this.endTag)
{
inlineCounter++;
}
if (type == 'inline' && this.endTag)
{
inlineCounter--;
if (inlineCounter < 0)
{
inlineCounter = 0;
}
if (!inlineCounter)
{
isEndTag = true;
}
}
if (type == 'inline' && inlineCounter == 1 && !isEndTag)
{
applyIndentation = true;
addNewLine = false;
}
else if (type == 'text' && inlineCounter == 1 && !isEndTag)
{
applyIndentation = false;
addNewLine = false;
}
else if (inlineCounter > 1)
{
applyIndentation = false;
addNewLine = false;
}
else if (isEndTag)
{
applyIndentation = false;
addNewLine = true;
}
if (applyIndentation)
{
if (this.endTag && type != 'inline')
{
blockCounter--;
if (blockCounter < 0)
{
blockCounter = 0;
}
}
for (tabCounter = 0; tabCounter < blockCounter; tabCounter++)
{
if (!options.softTabs)
{
indentation += "\t";
}
else
{
for (spaceCounter = 0; spaceCounter < options.tabWidth; spaceCounter++)
{
indentation += ' ';
}
}
}
}
if (this.type == 'text')
{
var lines = this.data.split("\n");
for (var lineCounter = 0; lineCounter < lines.length; lineCounter++)
{
lines[lineCounter] = (applyIndentation ? indentation : '') + lines[lineCounter];
}
source += lines.join("\n");
}
else
{
source += (applyIndentation ? indentation : '') + this.data;
}
if (addNewLine)
{
if (this.type == 'text' || type == 'block' || this.endTag && type == 'inline')
{
source += "\n";
}
}
if (typeof html[this.type] != 'undefined' && html[this.type].type == 'block')
{
if (!this.endTag)
{
blockCounter++;
}
}
}
//switch (this.type)
//{
// case 'data':
// {
// source += items;
// break;
// }
// default:
// {
// sources += this.data;
// }
//}
}
);
console.log(source);
// Accessor for the re-indented markup assembled by init().
this.get = function()
{
    return source;
};
};
|
# coding=utf-8
# Copyright 2022 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for power_table."""
from absl.testing import absltest
from absl.testing import parameterized
from balloon_learning_environment.env.balloon import power_table
class PowerTableTest(parameterized.TestCase):
  """Unit tests for power_table.lookup()."""

  @parameterized.named_parameters(
      dict(testcase_name='low_pressure_ratio', pressure_ratio=0.98),
      dict(testcase_name='high_pressure_ratio', pressure_ratio=5.01))
  def test_invalid_pressure_ratios(self, pressure_ratio: float):
    """Out-of-range pressure ratios must trip the lookup's assertion."""
    with self.assertRaises(AssertionError):
      power_table.lookup(pressure_ratio, 1.0)

  # Each tuple is (pressure_ratio, state_of_charge, expected_power_to_use).
  @parameterized.parameters(
      (1.0, 0.2, 0),
      (1.0, 0.3, 150),
      (1.0, 0.4, 175),
      (1.0, 0.5, 200),
      (1.0, 0.6, 200),
      (1.08, 0.2, 0),
      (1.08, 0.3, 200),
      (1.08, 0.4, 200),
      (1.08, 0.7, 225),
      (1.08, 0.8, 225),
      (1.11, 0.2, 0),
      (1.11, 0.3, 225),
      (1.11, 0.4, 225),
      (1.11, 0.6, 250),
      (1.11, 0.7, 250),
      (1.14, 0.2, 0),
      (1.14, 0.3, 200),
      (1.14, 0.4, 225),
      (1.14, 0.5, 250),
      (1.14, 0.6, 250),
      (1.17, 0.2, 0),
      (1.17, 0.3, 225),
      (1.17, 0.4, 250),
      (1.17, 0.5, 275),
      (1.17, 0.6, 275),
      (1.2, 0.3, 0),
      (1.2, 0.4, 275),
      (1.2, 0.5, 300),
      (1.2, 0.6, 300),
      (1.23, 0.4, 0),
      (1.23, 0.5, 300),
      (1.23, 0.6, 325),
      (1.23, 0.7, 325),
      (1.26, 0.4, 0),
      (1.26, 0.5, 325),
      (1.26, 0.6, 350),
      (1.26, 0.7, 350))
  def test_table_lookup(self, pressure_ratio, state_of_charge,
                        expected_power_to_use):
    """Spot-checks table entries across the supported pressure ratios."""
    self.assertEqual(power_table.lookup(pressure_ratio, state_of_charge),
                     expected_power_to_use)
if __name__ == '__main__':
absltest.main()
|
# -*- coding: utf-8 -*-
try:
from urllib.parse import urljoin
except Exception:
from urlparse import urljoin
import requests
from .exceptions import JawboneException
class JawboneClient(object):
    """Minimal read-only client for the Jawbone UP REST API.

    Every endpoint method delegates to the private ``__get`` helper, which
    sends an authenticated GET request and returns the decoded JSON body,
    raising ``JawboneException`` on any non-200 response.  ``None``-valued
    query parameters are passed through to ``requests``, which omits them.
    """

    BASE_URL = 'https://jawbone.com/'
    # OAuth bearer token; overwritten per instance in __init__.
    TOKEN = None

    def __init__(self, token):
        # Store the OAuth bearer token used to authenticate all requests.
        self.TOKEN = token

    def __get_headers(self):
        # Every endpoint uses bearer auth and expects a JSON response.
        return {
            'Authorization': 'Bearer {}'.format(self.TOKEN),
            'Accept': 'application/json'
        }

    def __get(self, url, **params):
        """GET ``url`` relative to BASE_URL; return parsed JSON or raise."""
        full_url = urljoin(self.BASE_URL, url)
        headers = self.__get_headers()
        r = requests.get(full_url, headers=headers, params=params)
        if r.status_code == 200:
            return r.json()
        raise JawboneException(r.content)

    # Body events (https://jawbone.com/up/developer/endpoints/body)
    def get_body_events(self, date=None, page_token=None, start_time=None,
                        end_time=None, updated_after=None, limit=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/body_events',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
            limit=limit
        )

    def get_body_event(self, xid):
        # Fetch a single body event by its unique xid.
        return self.__get('/nudge/api/v.1.1/users/@me/body_events/{}'.format(xid))

    # Band events (https://jawbone.com/up/developer/endpoints/bandevents)
    def get_band_events(self, date=None, start_time=None, end_time=None, created_after=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/bandevents',
            date=date,
            start_time=start_time,
            end_time=end_time,
            created_after=created_after
        )

    # Heart rates (https://jawbone.com/up/developer/endpoints/heartrate)
    def get_heart_rates(self, date=None, page_token=None, start_time=None,
                        end_time=None, updated_after=None, limit=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/heartrates',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
            limit=limit
        )

    # Custom events (https://jawbone.com/up/developer/endpoints/custom)
    def get_custom_events(self, date=None, page_token=None, start_time=None,
                          end_time=None, updated_after=None, limit=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/generic_events',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
            limit=limit
        )

    # Goals (https://jawbone.com/up/developer/endpoints/goals)
    def get_goals(self):
        return self.__get('/nudge/api/v.1.1/users/@me/goals')

    # Meals (https://jawbone.com/up/developer/endpoints/meals)
    def get_meals(self, date=None, page_token=None, start_time=None,
                  end_time=None, updated_after=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/meals',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
        )

    # Mood (https://jawbone.com/up/developer/endpoints/mood)
    def get_mood(self, date=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/mood',
            date=date
        )

    # Moves (/nudge/api/v.1.1/users/@me/moves)
    def get_moves(self, date=None, page_token=None, start_time=None,
                  end_time=None, updated_after=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/moves',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
        )

    # Sleeps (https://jawbone.com/up/developer/endpoints/sleeps)
    def get_sleeps(self, date=None, page_token=None, start_time=None,
                   end_time=None, updated_after=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/sleeps',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
        )

    # Timezone (https://jawbone.com/up/developer/endpoints/timezone)
    def get_timezone(self, date=None, start_time=None, end_time=None,
                     timestamp=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/timezone',
            date=date,
            start_time=start_time,
            end_time=end_time,
            timestamp=timestamp,
        )

    # Trends (https://jawbone.com/up/developer/endpoints/trends)
    def get_trends(self, end_date=None, bucket_size=None, num_buckets=100):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/trends',
            end_date=end_date,
            bucket_size=bucket_size,
            num_buckets=num_buckets
        )

    # User (https://jawbone.com/up/developer/endpoints/user)
    def get_user(self):
        return self.__get('/nudge/api/v.1.1/users/@me')

    def get_friends(self):
        # Friends list for the authenticated user.
        return self.__get('/nudge/api/v.1.1/users/@me/friends')

    # Workouts (https://jawbone.com/up/developer/endpoints/workouts)
    def get_workouts(self, date=None, page_token=None, start_time=None,
                     end_time=None, updated_after=None):
        return self.__get(
            '/nudge/api/v.1.1/users/@me/workouts',
            date=date,
            page_token=page_token,
            start_time=start_time,
            end_time=end_time,
            updated_after=updated_after,
        )
|
#ifndef MAGNET_RENDER_BACKEND_PROTOCOL_TEXT_H_
#define MAGNET_RENDER_BACKEND_PROTOCOL_TEXT_H_
#include <string>
#include "protocol.h"
#include "data.h"
namespace magent {
namespace render {
/**
 * Plain-text implementation of the render protocol.
 *
 * Serializes agent, event and bread data as well as whole frames and the
 * configuration into a text-based wire format, and decodes incoming text
 * commands back into a Result.  See protocol.h for the Base<> contract.
 */
class Text : public Base<std::string> {
private:
    // Per-entity encoders used while assembling a frame.
    std::string encode(const render::AgentData & /*unused*/) const override;
    std::string encode(const render::EventData & /*unused*/) const override;
    std::string encode(const render::BreadData & /*unused*/) const override;

public:
    // Encode the static configuration (second argument: frame count).
    std::string encode(const render::Config & /*unused*/, unsigned int /*unused*/) const override;
    // Encode one frame together with its config, buffer and viewing window.
    std::string encode(const render::Frame & /*unused*/,
                       const render::Config & /*unused*/,
                       const render::Buffer & /*unused*/,
                       const render::Window & /*unused*/) const override;
    // Encode an error message for transmission to the client.
    std::string encodeError(const std::string & /*unused*/) const override;
    // Parse a raw text command received from the client.
    Result decode(const std::string & /*unused*/) const override;
};
} // namespace render
} // namespace magent
#endif //MAGNET_RENDER_BACKEND_PROTOCOL_TEXT_H_
|
/*
* $NetBSD: xd.c,v 1.5 2002/01/26 13:18:58 aymeric Exp $
*
* Copyright (c) 1996 Ignatios Souvatzis.
* Copyright (c) 1995 Waldi Ravens.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by Waldi Ravens.
* 4. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <sys/types.h>
#include <stand.h>
#include <ufs.h>
#include <ustarfs.h>
#include "samachdep.h"
#include "amigaio.h"
#include "libstubs.h"
/* Device-switch entry points (defined below). */
static int xdstrategy(void *, int, daddr_t, size_t, void *, size_t *);
static int xdopenclose(struct open_file *);
static int xdioctl(struct open_file *, u_long, void *);

/* Byte offset of the boot partition on the underlying device. */
u_int32_t aio_base;
/* I/O request handle handed to us by configure via xdinit(). */
static struct AmigaIO *aio_save;

/* The single boot device "xd"; open and close share one handler. */
static struct devsw devsw[] = {
	{ "xd", xdstrategy, (void *)xdopenclose, (void *)xdopenclose, xdioctl }
};

/* File systems the loader can read the kernel from: FFS and ustar. */
struct fs_ops file_system[] = {
	{ ufs_open, ufs_close, ufs_read, ufs_write, ufs_seek, ufs_stat },
	{ ustarfs_open, ustarfs_close, ustarfs_read, ustarfs_write, ustarfs_seek,
	  ustarfs_stat },
};
int nfsys = sizeof(file_system)/sizeof(struct fs_ops);
/* called from configure */
void
xdinit(void *aio)
{
aio_save = aio;
aio_base = aio_save->offset;
}
/*
 * Kernel is loaded from the device and partition that the kickstart
 * menu or boot priority has chosen.
 *
 * Always opens the single "xd" device saved by xdinit(); the whole
 * fname is passed on unchanged as the file path.
 */
int
devopen(struct open_file *f, const char *fname, char **file)
{
	f->f_devdata = aio_save;
	f->f_dev = &devsw[0];
	*file = (char *)fname;		/* no device prefix to strip */
	return 0;
}
/*
 * Tell kickstart to do the real work: translate a block-level read into
 * an AmigaIO command and execute it synchronously via DoIO().
 *
 * Returns 0 on success with *rsize set to the byte count transferred,
 * or EIO (with *rsize = 0) on any failure.  Only reads are supported.
 */
static int
xdstrategy(void *devd, int flag, daddr_t dblk, size_t size, void *buf,
	size_t *rsize)
{
	struct AmigaIO *aio = (struct AmigaIO *)devd;

	if (flag != F_READ) {
		/* The boot device is read-only. */
		*rsize = 0;
		return EIO;
	}

	aio->cmd = Cmd_Rd;
	aio->length = size;
	aio->offset = aio_base + (dblk << 9);	/* dblk is in 512-byte blocks */
	aio->buf = buf;

#ifdef XDDEBUG
	printf("strategy called: %ld(%ld), %ld, 0x%lx\n",
	    (long)dblk, (long)aio->offset, (long)size, (unsigned long)buf);
#endif
	DoIO(aio);
#ifdef XDDEBUG
	printf("strategy got err %ld, rsize %ld\n", (long)aio->err, (long)aio->actual);
#endif

	if (aio->err) {
		*rsize = 0;
		return EIO;
	}
	*rsize = aio->actual;
	return 0;
}
/*
 * Shared open/close handler: there is no per-open state to manage, but
 * rewind the device to the partition start on every open and close.
 */
static int
xdopenclose(struct open_file *f)
{
	aio_save->offset = aio_base; /* Restore original offset */
	return 0;
}
/* No ioctls are supported on the boot device; always fail with EIO. */
static int
xdioctl(struct open_file *f, u_long cmd, void *data)
{
	return EIO;
}
#ifdef _PRIMARY_BOOT
/* Rewind the boot device before handing off (primary boot only). */
void
xdreset(void)
{
	aio_save->offset = aio_base; /* Restore original offset */
}
#endif
|
"""An AdaNet algorithm implementation in Tensorflow using a single graph.
Copyright 2018 The AdaNet Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from adanet.autoensemble import AutoEnsembleEstimator
from adanet.core import Ensemble
from adanet.core import Estimator
from adanet.core import Evaluator
from adanet.core import MixtureWeightType
from adanet.core import ReportMaterializer
from adanet.core import subnetwork
from adanet.core import Summary
from adanet.core import TPUEstimator
from adanet.core import WeightedSubnetwork
from adanet.core.subnetwork import Subnetwork
|
# coding: utf-8
from __future__ import unicode_literals
import asyncore
import email
from email.mime.text import MIMEText
import os
import shutil
import smtpd
import sys
import tempfile
import threading
from smtplib import SMTPException
from ssl import SSLError
from django.core import mail
from django.core.mail import (EmailMessage, mail_admins, mail_managers,
EmailMultiAlternatives, send_mail, send_mass_mail)
from django.core.mail.backends import console, dummy, locmem, filebased, smtp
from django.core.mail.message import BadHeaderError
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.utils.encoding import force_str, force_text
from django.utils.six import PY3, StringIO, string_types
from django.utils.translation import ugettext_lazy
class HeadersCheckMixin(object):

    def assertMessageHasHeaders(self, message, headers):
        """
        Check that :param message: has all :param headers: headers.

        :param message: can be an instance of an email.Message subclass or a
        string with the contents of an email message.
        :param headers: should be a set of (header-name, header-value) tuples.
        """
        if isinstance(message, string_types):
            # Only the part before the first blank line contains headers.
            header_block = message.split('\n\n', 1)[0]
            pairs = (line.split(':', 1) for line in header_block.split('\n'))
            msg_headers = {(name, value.lstrip()) for name, value in pairs}
        else:
            msg_headers = set(message.items())
        self.assertTrue(headers.issubset(msg_headers),
                        msg='Message is missing '
                            'the following headers: %s' % (headers - msg_headers),)
class MailTests(HeadersCheckMixin, SimpleTestCase):
    """
    Non-backend specific tests.
    """
    def test_ascii(self):
        """A plain ASCII message maps cleanly onto MIME headers and payload."""
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com'])
        message = email.message()
        self.assertEqual(message['Subject'], 'Subject')
        self.assertEqual(message.get_payload(), 'Content')
        self.assertEqual(message['From'], 'from@example.com')
        self.assertEqual(message['To'], 'to@example.com')
    def test_multiple_recipients(self):
        """Multiple `to` addresses are joined into a single To header."""
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com', 'other@example.com'])
        message = email.message()
        self.assertEqual(message['Subject'], 'Subject')
        self.assertEqual(message.get_payload(), 'Content')
        self.assertEqual(message['From'], 'from@example.com')
        self.assertEqual(message['To'], 'to@example.com, other@example.com')
    def test_cc(self):
        """Regression test for #7722"""
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com'], cc=['cc@example.com'])
        message = email.message()
        self.assertEqual(message['Cc'], 'cc@example.com')
        self.assertEqual(email.recipients(), ['to@example.com', 'cc@example.com'])
        # Test multiple CC with multiple To
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com', 'other@example.com'], cc=['cc@example.com', 'cc.other@example.com'])
        message = email.message()
        self.assertEqual(message['Cc'], 'cc@example.com, cc.other@example.com')
        self.assertEqual(email.recipients(), ['to@example.com', 'other@example.com', 'cc@example.com', 'cc.other@example.com'])
        # Testing with Bcc
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com', 'other@example.com'], cc=['cc@example.com', 'cc.other@example.com'], bcc=['bcc@example.com'])
        message = email.message()
        self.assertEqual(message['Cc'], 'cc@example.com, cc.other@example.com')
        self.assertEqual(email.recipients(), ['to@example.com', 'other@example.com', 'cc@example.com', 'cc.other@example.com', 'bcc@example.com'])
    def test_recipients_as_tuple(self):
        """to/cc/bcc may be passed as tuples; recipients() still returns a list."""
        email = EmailMessage('Subject', 'Content', 'from@example.com', ('to@example.com', 'other@example.com'), cc=('cc@example.com', 'cc.other@example.com'), bcc=('bcc@example.com',))
        message = email.message()
        self.assertEqual(message['Cc'], 'cc@example.com, cc.other@example.com')
        self.assertEqual(email.recipients(), ['to@example.com', 'other@example.com', 'cc@example.com', 'cc.other@example.com', 'bcc@example.com'])
    def test_header_injection(self):
        """Newlines in a subject must raise BadHeaderError (header injection)."""
        email = EmailMessage('Subject\nInjection Test', 'Content', 'from@example.com', ['to@example.com'])
        self.assertRaises(BadHeaderError, email.message)
        email = EmailMessage(ugettext_lazy('Subject\nInjection Test'), 'Content', 'from@example.com', ['to@example.com'])
        self.assertRaises(BadHeaderError, email.message)
    def test_space_continuation(self):
        """
        Test for space continuation character in long (ascii) subject headers (#7747)
        """
        email = EmailMessage('Long subject lines that get wrapped should contain a space continuation character to get expected behavior in Outlook and Thunderbird', 'Content', 'from@example.com', ['to@example.com'])
        message = email.message()
        # Note that in Python 3, maximum line length has increased from 76 to 78
        self.assertEqual(message['Subject'].encode(), b'Long subject lines that get wrapped should contain a space continuation\n character to get expected behavior in Outlook and Thunderbird')
    def test_message_header_overrides(self):
        """
        Specifying dates or message-ids in the extra headers overrides the
        default values (#9233)
        """
        headers = {"date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
        email = EmailMessage('subject', 'content', 'from@example.com', ['to@example.com'], headers=headers)
        self.assertMessageHasHeaders(email.message(), {
            ('Content-Transfer-Encoding', '7bit'),
            ('Content-Type', 'text/plain; charset="utf-8"'),
            ('From', 'from@example.com'),
            ('MIME-Version', '1.0'),
            ('Message-ID', 'foo'),
            ('Subject', 'subject'),
            ('To', 'to@example.com'),
            ('date', 'Fri, 09 Nov 2001 01:08:47 -0000'),
        })
    def test_from_header(self):
        """
        Make sure we can manually set the From header (#9214)
        """
        email = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        message = email.message()
        self.assertEqual(message['From'], 'from@example.com')
    def test_to_header(self):
        """
        Make sure we can manually set the To header (#17444)
        """
        email = EmailMessage('Subject', 'Content', 'bounce@example.com',
                             ['list-subscriber@example.com', 'list-subscriber2@example.com'],
                             headers={'To': 'mailing-list@example.com'})
        message = email.message()
        self.assertEqual(message['To'], 'mailing-list@example.com')
        self.assertEqual(email.to, ['list-subscriber@example.com', 'list-subscriber2@example.com'])
        # If we don't set the To header manually, it should default to the `to` argument to the constructor
        email = EmailMessage('Subject', 'Content', 'bounce@example.com',
                             ['list-subscriber@example.com', 'list-subscriber2@example.com'])
        message = email.message()
        self.assertEqual(message['To'], 'list-subscriber@example.com, list-subscriber2@example.com')
        self.assertEqual(email.to, ['list-subscriber@example.com', 'list-subscriber2@example.com'])
    def test_multiple_message_call(self):
        """
        Regression for #13259 - Make sure that headers are not changed when
        calling EmailMessage.message()
        """
        email = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        message = email.message()
        self.assertEqual(message['From'], 'from@example.com')
        message = email.message()
        self.assertEqual(message['From'], 'from@example.com')
    def test_unicode_address_header(self):
        """
        Regression for #11144 - When a to/from/cc header contains unicode,
        make sure the email addresses are parsed correctly (especially with
        regards to commas)
        """
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['"Firstname Sürname" <to@example.com>', 'other@example.com'])
        self.assertEqual(email.message()['To'], '=?utf-8?q?Firstname_S=C3=BCrname?= <to@example.com>, other@example.com')
        email = EmailMessage('Subject', 'Content', 'from@example.com', ['"Sürname, Firstname" <to@example.com>', 'other@example.com'])
        self.assertEqual(email.message()['To'], '=?utf-8?q?S=C3=BCrname=2C_Firstname?= <to@example.com>, other@example.com')
    def test_unicode_headers(self):
        """Non-ASCII subject and extra headers are RFC 2047 encoded."""
        email = EmailMessage("Gżegżółka", "Content", "from@example.com", ["to@example.com"],
                             headers={"Sender": '"Firstname Sürname" <sender@example.com>',
                                      "Comments": 'My Sürname is non-ASCII'})
        message = email.message()
        self.assertEqual(message['Subject'], '=?utf-8?b?R8W8ZWfFvMOzxYJrYQ==?=')
        self.assertEqual(message['Sender'], '=?utf-8?q?Firstname_S=C3=BCrname?= <sender@example.com>')
        self.assertEqual(message['Comments'], '=?utf-8?q?My_S=C3=BCrname_is_non-ASCII?=')
    def test_safe_mime_multipart(self):
        """
        Make sure headers can be set with a different encoding than utf-8 in
        SafeMIMEMultipart as well
        """
        headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
        subject, from_email, to = 'hello', 'from@example.com', '"Sürname, Firstname" <to@example.com>'
        text_content = 'This is an important message.'
        html_content = '<p>This is an <strong>important</strong> message.</p>'
        msg = EmailMultiAlternatives('Message from Firstname Sürname', text_content, from_email, [to], headers=headers)
        msg.attach_alternative(html_content, "text/html")
        msg.encoding = 'iso-8859-1'
        self.assertEqual(msg.message()['To'], '=?iso-8859-1?q?S=FCrname=2C_Firstname?= <to@example.com>')
        self.assertEqual(msg.message()['Subject'], '=?iso-8859-1?q?Message_from_Firstname_S=FCrname?=')
    def test_encoding(self):
        """
        Regression for #12791 - Encode body correctly with other encodings
        than utf-8
        """
        email = EmailMessage('Subject', 'Firstname Sürname is a great guy.', 'from@example.com', ['other@example.com'])
        email.encoding = 'iso-8859-1'
        message = email.message()
        self.assertMessageHasHeaders(message, {
            ('MIME-Version', '1.0'),
            ('Content-Type', 'text/plain; charset="iso-8859-1"'),
            ('Content-Transfer-Encoding', 'quoted-printable'),
            ('Subject', 'Subject'),
            ('From', 'from@example.com'),
            ('To', 'other@example.com')})
        self.assertEqual(message.get_payload(), 'Firstname S=FCrname is a great guy.')
        # Make sure MIME attachments also works correctly with other encodings than utf-8
        text_content = 'Firstname Sürname is a great guy.'
        html_content = '<p>Firstname Sürname is a <strong>great</strong> guy.</p>'
        msg = EmailMultiAlternatives('Subject', text_content, 'from@example.com', ['to@example.com'])
        msg.encoding = 'iso-8859-1'
        msg.attach_alternative(html_content, "text/html")
        payload0 = msg.message().get_payload(0)
        self.assertMessageHasHeaders(payload0, {
            ('MIME-Version', '1.0'),
            ('Content-Type', 'text/plain; charset="iso-8859-1"'),
            ('Content-Transfer-Encoding', 'quoted-printable')})
        self.assertTrue(payload0.as_string().endswith('\n\nFirstname S=FCrname is a great guy.'))
        payload1 = msg.message().get_payload(1)
        self.assertMessageHasHeaders(payload1, {
            ('MIME-Version', '1.0'),
            ('Content-Type', 'text/html; charset="iso-8859-1"'),
            ('Content-Transfer-Encoding', 'quoted-printable')})
        self.assertTrue(payload1.as_string().endswith('\n\n<p>Firstname S=FCrname is a <strong>great</strong> guy.</p>'))
    def test_attachments(self):
        """Regression test for #9367"""
        headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
        subject, from_email, to = 'hello', 'from@example.com', 'to@example.com'
        text_content = 'This is an important message.'
        html_content = '<p>This is an <strong>important</strong> message.</p>'
        msg = EmailMultiAlternatives(subject, text_content, from_email, [to], headers=headers)
        msg.attach_alternative(html_content, "text/html")
        msg.attach("an attachment.pdf", b"%PDF-1.4.%...", mimetype="application/pdf")
        msg_str = msg.message().as_string()
        message = email.message_from_string(msg_str)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_content_type(), 'multipart/mixed')
        self.assertEqual(message.get_default_type(), 'text/plain')
        payload = message.get_payload()
        self.assertEqual(payload[0].get_content_type(), 'multipart/alternative')
        self.assertEqual(payload[1].get_content_type(), 'application/pdf')
    def test_non_ascii_attachment_filename(self):
        """Regression test for #14964"""
        headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
        subject, from_email, to = 'hello', 'from@example.com', 'to@example.com'
        content = 'This is the message.'
        msg = EmailMessage(subject, content, from_email, [to], headers=headers)
        # Unicode in file name
        msg.attach("une pièce jointe.pdf", b"%PDF-1.4.%...", mimetype="application/pdf")
        msg_str = msg.message().as_string()
        message = email.message_from_string(msg_str)
        payload = message.get_payload()
        self.assertEqual(payload[1].get_filename(), 'une pièce jointe.pdf')
    def test_dummy_backend(self):
        """
        Make sure that dummy backends returns correct number of sent messages
        """
        connection = dummy.EmailBackend()
        email = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        self.assertEqual(connection.send_messages([email, email, email]), 3)
    def test_arbitrary_keyword(self):
        """
        Make sure that get_connection() accepts arbitrary keyword that might be
        used with custom backends.
        """
        c = mail.get_connection(fail_silently=True, foo='bar')
        self.assertTrue(c.fail_silently)
    def test_custom_backend(self):
        """Test custom backend defined in this suite."""
        conn = mail.get_connection('mail.custombackend.EmailBackend')
        self.assertTrue(hasattr(conn, 'test_outbox'))
        email = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        conn.send_messages([email])
        self.assertEqual(len(conn.test_outbox), 1)
    def test_backend_arg(self):
        """Test backend argument of mail.get_connection()"""
        self.assertIsInstance(mail.get_connection('django.core.mail.backends.smtp.EmailBackend'), smtp.EmailBackend)
        self.assertIsInstance(mail.get_connection('django.core.mail.backends.locmem.EmailBackend'), locmem.EmailBackend)
        self.assertIsInstance(mail.get_connection('django.core.mail.backends.dummy.EmailBackend'), dummy.EmailBackend)
        self.assertIsInstance(mail.get_connection('django.core.mail.backends.console.EmailBackend'), console.EmailBackend)
        tmp_dir = tempfile.mkdtemp()
        try:
            self.assertIsInstance(mail.get_connection('django.core.mail.backends.filebased.EmailBackend', file_path=tmp_dir), filebased.EmailBackend)
        finally:
            shutil.rmtree(tmp_dir)
        self.assertIsInstance(mail.get_connection(), locmem.EmailBackend)
    @override_settings(
        EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend',
        ADMINS=[('nobody', 'nobody@example.com')],
        MANAGERS=[('nobody', 'nobody@example.com')])
    def test_connection_arg(self):
        """Test connection argument to send_mail(), et. al."""
        mail.outbox = []
        # Send using non-default connection
        connection = mail.get_connection('mail.custombackend.EmailBackend')
        send_mail('Subject', 'Content', 'from@example.com', ['to@example.com'], connection=connection)
        self.assertEqual(mail.outbox, [])
        self.assertEqual(len(connection.test_outbox), 1)
        self.assertEqual(connection.test_outbox[0].subject, 'Subject')
        connection = mail.get_connection('mail.custombackend.EmailBackend')
        send_mass_mail([
            ('Subject1', 'Content1', 'from1@example.com', ['to1@example.com']),
            ('Subject2', 'Content2', 'from2@example.com', ['to2@example.com']),
        ], connection=connection)
        self.assertEqual(mail.outbox, [])
        self.assertEqual(len(connection.test_outbox), 2)
        self.assertEqual(connection.test_outbox[0].subject, 'Subject1')
        self.assertEqual(connection.test_outbox[1].subject, 'Subject2')
        connection = mail.get_connection('mail.custombackend.EmailBackend')
        mail_admins('Admin message', 'Content', connection=connection)
        self.assertEqual(mail.outbox, [])
        self.assertEqual(len(connection.test_outbox), 1)
        self.assertEqual(connection.test_outbox[0].subject, '[Django] Admin message')
        connection = mail.get_connection('mail.custombackend.EmailBackend')
        mail_managers('Manager message', 'Content', connection=connection)
        self.assertEqual(mail.outbox, [])
        self.assertEqual(len(connection.test_outbox), 1)
        self.assertEqual(connection.test_outbox[0].subject, '[Django] Manager message')
    def test_dont_mangle_from_in_body(self):
        # Regression for #13433 - Make sure that EmailMessage doesn't mangle
        # 'From ' in message body.
        email = EmailMessage('Subject', 'From the future', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        self.assertFalse('>From the future' in email.message().as_string())
    def test_dont_base64_encode(self):
        # Ticket #3472
        # Shouldn't use Base64 encoding at all
        msg = EmailMessage('Subject', 'UTF-8 encoded body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        self.assertFalse('Content-Transfer-Encoding: base64' in msg.message().as_string())
        # Ticket #11212
        # Shouldn't use quoted printable, should detect it can represent content with 7 bit data
        msg = EmailMessage('Subject', 'Body with only ASCII characters.', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        s = msg.message().as_string()
        self.assertFalse('Content-Transfer-Encoding: quoted-printable' in s)
        self.assertTrue('Content-Transfer-Encoding: 7bit' in s)
        # Shouldn't use quoted printable, should detect it can represent content with 8 bit data
        msg = EmailMessage('Subject', 'Body with latin characters: àáä.', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        s = msg.message().as_string()
        self.assertFalse(str('Content-Transfer-Encoding: quoted-printable') in s)
        self.assertTrue(str('Content-Transfer-Encoding: 8bit') in s)
        msg = EmailMessage('Subject', 'Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        s = msg.message().as_string()
        self.assertFalse(str('Content-Transfer-Encoding: quoted-printable') in s)
        self.assertTrue(str('Content-Transfer-Encoding: 8bit') in s)
    def test_dont_base64_encode_message_rfc822(self):
        # Ticket #18967
        # Shouldn't use base64 encoding for a child EmailMessage attachment.
        # Create a child message first
        child_msg = EmailMessage('Child Subject', 'Some body of child message', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        child_s = child_msg.message().as_string()
        # Now create a parent
        parent_msg = EmailMessage('Parent Subject', 'Some parent body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        # Attach to parent as a string
        parent_msg.attach(content=child_s, mimetype='message/rfc822')
        parent_s = parent_msg.message().as_string()
        # Verify that the child message header is not base64 encoded
        self.assertTrue(str('Child Subject') in parent_s)
        # Feature test: try attaching email.Message object directly to the mail.
        parent_msg = EmailMessage('Parent Subject', 'Some parent body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        parent_msg.attach(content=child_msg.message(), mimetype='message/rfc822')
        parent_s = parent_msg.message().as_string()
        # Verify that the child message header is not base64 encoded
        self.assertTrue(str('Child Subject') in parent_s)
        # Feature test: try attaching Django's EmailMessage object directly to the mail.
        parent_msg = EmailMessage('Parent Subject', 'Some parent body', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        parent_msg.attach(content=child_msg, mimetype='message/rfc822')
        parent_s = parent_msg.message().as_string()
        # Verify that the child message header is not base64 encoded
        self.assertTrue(str('Child Subject') in parent_s)
class PythonGlobalState(SimpleTestCase):
    """
    Tests for #12422 -- Django smarts (#2472/#11212) with charset of utf-8 text
    parts shouldn't pollute global email Python package charset registry when
    django.mail.message is imported.
    """
    def assertBase64Encoded(self, body):
        # A utf-8 MIMEText must keep the stock base64 transfer encoding,
        # proving Django didn't alter the global charset registry.
        txt = MIMEText(body, 'plain', 'utf-8')
        self.assertTrue(str('Content-Transfer-Encoding: base64') in txt.as_string())

    def test_utf8(self):
        self.assertBase64Encoded('UTF-8 encoded body')

    def test_7bit(self):
        self.assertBase64Encoded('Body with only ASCII characters.')

    def test_8bit_latin(self):
        self.assertBase64Encoded('Body with latin characters: àáä.')

    def test_8bit_non_latin(self):
        self.assertBase64Encoded('Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.')
class BaseEmailBackendTests(HeadersCheckMixin, object):
email_backend = None
def setUp(self):
self.settings_override = override_settings(EMAIL_BACKEND=self.email_backend)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def assertStartsWith(self, first, second):
if not first.startswith(second):
self.longMessage = True
self.assertEqual(first[:len(second)], second, "First string doesn't start with the second.")
def get_mailbox_content(self):
raise NotImplementedError
def flush_mailbox(self):
raise NotImplementedError
def get_the_message(self):
mailbox = self.get_mailbox_content()
self.assertEqual(len(mailbox), 1,
"Expected exactly one message, got %d.\n%r" % (len(mailbox), [
m.as_string() for m in mailbox]))
return mailbox[0]
def test_send(self):
email = EmailMessage('Subject', 'Content', 'from@example.com', ['to@example.com'])
num_sent = mail.get_connection().send_messages([email])
self.assertEqual(num_sent, 1)
message = self.get_the_message()
self.assertEqual(message["subject"], "Subject")
self.assertEqual(message.get_payload(), "Content")
self.assertEqual(message["from"], "from@example.com")
self.assertEqual(message.get_all("to"), ["to@example.com"])
def test_send_unicode(self):
email = EmailMessage('Chère maman', 'Je t\'aime très fort', 'from@example.com', ['to@example.com'])
num_sent = mail.get_connection().send_messages([email])
self.assertEqual(num_sent, 1)
message = self.get_the_message()
self.assertEqual(message["subject"], '=?utf-8?q?Ch=C3=A8re_maman?=')
self.assertEqual(force_text(message.get_payload()), 'Je t\'aime très fort')
def test_send_many(self):
email1 = EmailMessage('Subject', 'Content1', 'from@example.com', ['to@example.com'])
email2 = EmailMessage('Subject', 'Content2', 'from@example.com', ['to@example.com'])
num_sent = mail.get_connection().send_messages([email1, email2])
self.assertEqual(num_sent, 2)
messages = self.get_mailbox_content()
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].get_payload(), "Content1")
self.assertEqual(messages[1].get_payload(), "Content2")
def test_send_verbose_name(self):
email = EmailMessage("Subject", "Content", '"Firstname Sürname" <from@example.com>',
["to@example.com"])
email.send()
message = self.get_the_message()
self.assertEqual(message["subject"], "Subject")
self.assertEqual(message.get_payload(), "Content")
self.assertEqual(message["from"], "=?utf-8?q?Firstname_S=C3=BCrname?= <from@example.com>")
def test_plaintext_send_mail(self):
"""
Test send_mail without the html_message
regression test for adding html_message parameter to send_mail()
"""
send_mail('Subject', 'Content', 'sender@example.com', ['nobody@example.com'])
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get_all('to'), ['nobody@example.com'])
self.assertFalse(message.is_multipart())
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message.get_content_type(), 'text/plain')
def test_html_send_mail(self):
"""Test html_message argument to send_mail"""
send_mail('Subject', 'Content', 'sender@example.com', ['nobody@example.com'], html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get_all('to'), ['nobody@example.com'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(MANAGERS=[('nobody', 'nobody@example.com')])
def test_html_mail_managers(self):
"""Test html_message argument to mail_managers"""
mail_managers('Subject', 'Content', html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.assertEqual(message.get_all('to'), ['nobody@example.com'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(ADMINS=[('nobody', 'nobody@example.com')])
def test_html_mail_admins(self):
"""Test html_message argument to mail_admins """
mail_admins('Subject', 'Content', html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.assertEqual(message.get_all('to'), ['nobody@example.com'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(
    ADMINS=[('nobody', 'nobody+admin@example.com')],
    MANAGERS=[('nobody', 'nobody+manager@example.com')])
def test_manager_and_admin_mail_prefix(self):
    """
    A lazily-translated subject must still get the "[Django] " prefix
    prepended correctly. Regression for #13494.
    """
    mail_managers(ugettext_lazy('Subject'), 'Content')
    self.assertEqual(self.get_the_message().get('subject'), '[Django] Subject')

    self.flush_mailbox()
    mail_admins(ugettext_lazy('Subject'), 'Content')
    self.assertEqual(self.get_the_message().get('subject'), '[Django] Subject')
@override_settings(ADMINS=(), MANAGERS=())
def test_empty_admins(self):
    """
    mail_admins()/mail_managers() must not connect to the mail server
    when there are no recipients configured (#9383).
    """
    for mailer in (mail_admins, mail_managers):
        mailer('hi', 'there')
        self.assertEqual(self.get_mailbox_content(), [])
def test_message_cc_header(self):
    """A Cc header makes it into the sent message. Regression for #7722."""
    email = EmailMessage(
        'Subject', 'Content', 'from@example.com',
        ['to@example.com'], cc=['cc@example.com'])
    mail.get_connection().send_messages([email])

    message = self.get_the_message()
    expected_headers = {
        ('MIME-Version', '1.0'),
        ('Content-Type', 'text/plain; charset="utf-8"'),
        ('Content-Transfer-Encoding', '7bit'),
        ('Subject', 'Subject'),
        ('From', 'from@example.com'),
        ('To', 'to@example.com'),
        ('Cc', 'cc@example.com'),
    }
    self.assertMessageHasHeaders(message, expected_headers)
    self.assertIn('\nDate: ', message.as_string())
def test_idn_send(self):
    """Unicode (IDN) addresses are punycode-encoded when sending. Regression for #14301."""
    self.assertTrue(send_mail('Subject', 'Content', 'from@öäü.com', ['to@öäü.com']))
    message = self.get_the_message()
    self.assertEqual(message.get('subject'), 'Subject')
    self.assertEqual(message.get('from'), 'from@xn--4ca9at.com')
    self.assertEqual(message.get('to'), 'to@xn--4ca9at.com')

    # Same check for an explicitly constructed message with a Cc.
    self.flush_mailbox()
    m = EmailMessage(
        'Subject', 'Content', 'from@öäü.com',
        ['to@öäü.com'], cc=['cc@öäü.com'])
    m.send()
    message = self.get_the_message()
    self.assertEqual(message.get('subject'), 'Subject')
    for header, expected in (
            ('from', 'from@xn--4ca9at.com'),
            ('to', 'to@xn--4ca9at.com'),
            ('cc', 'cc@xn--4ca9at.com')):
        self.assertEqual(message.get(header), expected)
def test_recipient_without_domain(self):
    """Addresses without a domain part still send. Regression for #15042."""
    self.assertTrue(send_mail("Subject", "Content", "tester", ["django"]))
    message = self.get_the_message()
    for header, expected in (
            ('subject', 'Subject'),
            ('from', "tester"),
            ('to', "django")):
        self.assertEqual(message.get(header), expected)
def test_close_connection(self):
    """close() works even when the connection was never explicitly opened."""
    backend = mail.get_connection(username='', password='')
    try:
        backend.close()
    except Exception as e:
        self.fail("close() unexpectedly raised an exception: %s" % e)
class LocmemBackendTests(BaseEmailBackendTests, SimpleTestCase):
    email_backend = 'django.core.mail.backends.locmem.EmailBackend'

    def get_mailbox_content(self):
        # Each outbox entry is an EmailMessage; normalize to email.message.Message.
        return [m.message() for m in mail.outbox]

    def flush_mailbox(self):
        mail.outbox = []

    def tearDown(self):
        super(LocmemBackendTests, self).tearDown()
        mail.outbox = []

    def test_locmem_shared_messages(self):
        """Every locmem backend instance appends to the shared mail.outbox."""
        email = EmailMessage(
            'Subject', 'Content', 'bounce@example.com', ['to@example.com'],
            headers={'From': 'from@example.com'})
        for backend in (locmem.EmailBackend(), locmem.EmailBackend()):
            backend.send_messages([email])
        self.assertEqual(len(mail.outbox), 2)

    def test_validate_multiline_headers(self):
        # Ticket #18861 - Validate emails when using the locmem backend
        with self.assertRaises(BadHeaderError):
            send_mail('Subject\nMultiline', 'Content',
                      'from@example.com', ['to@example.com'])
class FileBackendTests(BaseEmailBackendTests, SimpleTestCase):
    # The file-based backend writes each SMTP "session" to a file
    # under EMAIL_FILE_PATH.
    email_backend = 'django.core.mail.backends.filebased.EmailBackend'

    def setUp(self):
        super(FileBackendTests, self).setUp()
        # Point EMAIL_FILE_PATH at a throwaway directory for each test;
        # the directory is removed again via addCleanup.
        self.tmp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmp_dir)
        self._settings_override = override_settings(EMAIL_FILE_PATH=self.tmp_dir)
        self._settings_override.enable()

    def tearDown(self):
        self._settings_override.disable()
        super(FileBackendTests, self).tearDown()

    def flush_mailbox(self):
        # Delete every message file written so far.
        for filename in os.listdir(self.tmp_dir):
            os.unlink(os.path.join(self.tmp_dir, filename))

    def get_mailbox_content(self):
        # Each file holds one session: messages separated by a line of 79 dashes.
        messages = []
        for filename in os.listdir(self.tmp_dir):
            with open(os.path.join(self.tmp_dir, filename), 'r') as fp:
                session = force_text(fp.read()).split('\n' + ('-' * 79) + '\n')
            messages.extend(email.message_from_string(force_str(m)) for m in session if m)
        return messages

    def test_file_sessions(self):
        """Make sure opening a connection creates a new file"""
        msg = EmailMessage('Subject', 'Content', 'bounce@example.com', ['to@example.com'], headers={'From': 'from@example.com'})
        connection = mail.get_connection()
        connection.send_messages([msg])

        self.assertEqual(len(os.listdir(self.tmp_dir)), 1)
        with open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0])) as fp:
            message = email.message_from_file(fp)
        self.assertEqual(message.get_content_type(), 'text/plain')
        self.assertEqual(message.get('subject'), 'Subject')
        self.assertEqual(message.get('from'), 'from@example.com')
        self.assertEqual(message.get('to'), 'to@example.com')

        # A second connection gets a file of its own...
        connection2 = mail.get_connection()
        connection2.send_messages([msg])
        self.assertEqual(len(os.listdir(self.tmp_dir)), 2)

        # ...while re-sending on the first connection reuses its file.
        connection.send_messages([msg])
        self.assertEqual(len(os.listdir(self.tmp_dir)), 2)

        # An explicitly opened connection is reused across msg.send() calls.
        msg.connection = mail.get_connection()
        self.assertTrue(connection.open())
        msg.send()
        self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
        msg.send()
        self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
        connection.close()
class ConsoleBackendTests(BaseEmailBackendTests, SimpleTestCase):
    email_backend = 'django.core.mail.backends.console.EmailBackend'

    def setUp(self):
        super(ConsoleBackendTests, self).setUp()
        # Capture stdout, where the console backend writes by default.
        self.__stdout = sys.stdout
        self.stream = sys.stdout = StringIO()

    def tearDown(self):
        del self.stream
        sys.stdout = self.__stdout
        del self.__stdout
        super(ConsoleBackendTests, self).tearDown()

    def flush_mailbox(self):
        self.stream = sys.stdout = StringIO()

    def get_mailbox_content(self):
        raw = force_text(self.stream.getvalue())
        divider = '\n' + ('-' * 79) + '\n'
        return [email.message_from_string(force_str(chunk))
                for chunk in raw.split(divider) if chunk]

    def test_console_stream_kwarg(self):
        """The console backend can be pointed at an arbitrary stream."""
        s = StringIO()
        connection = mail.get_connection(
            'django.core.mail.backends.console.EmailBackend', stream=s)
        send_mail('Subject', 'Content', 'from@example.com',
                  ['to@example.com'], connection=connection)
        output = s.getvalue()
        self.assertMessageHasHeaders(output, {
            ('MIME-Version', '1.0'),
            ('Content-Type', 'text/plain; charset="utf-8"'),
            ('Content-Transfer-Encoding', '7bit'),
            ('Subject', 'Subject'),
            ('From', 'from@example.com'),
            ('To', 'to@example.com')})
        self.assertIn('\nDate: ', output)
class FakeSMTPChannel(smtpd.SMTPChannel):
    # SMTP channel that tolerates non-text (SSL/TLS handshake) bytes.

    def collect_incoming_data(self, data):
        try:
            super(FakeSMTPChannel, self).collect_incoming_data(data)
        except UnicodeDecodeError:
            # ignore decode error in SSL/TLS connection tests as we only care
            # whether the connection attempt was made
            pass
class FakeSMTPServer(smtpd.SMTPServer, threading.Thread):
    """
    Asyncore SMTP server wrapped into a thread. Based on DummyFTPServer from:
    http://svn.python.org/view/python/branches/py3k/Lib/test/test_ftplib.py?revision=86061&view=markup
    """
    channel_class = FakeSMTPChannel

    def __init__(self, *args, **kwargs):
        threading.Thread.__init__(self)
        smtpd.SMTPServer.__init__(self, *args, **kwargs)
        # Messages received so far; guarded by sink_lock.
        self._sink = []
        self.active = False
        # Serializes the asyncore polling loop against callers of stop().
        self.active_lock = threading.Lock()
        self.sink_lock = threading.Lock()

    def process_message(self, peer, mailfrom, rcpttos, data):
        m = email.message_from_string(data)
        # The parseaddr helper lives in a differently-cased module on py2/py3.
        if PY3:
            maddr = email.utils.parseaddr(m.get('from'))[1]
        else:
            maddr = email.Utils.parseaddr(m.get('from'))[1]
        if mailfrom != maddr:
            # Reject when the envelope sender doesn't match the From header.
            return "553 '%s' != '%s'" % (mailfrom, maddr)
        with self.sink_lock:
            self._sink.append(m)

    def get_sink(self):
        # Return a snapshot (copy) of the received messages.
        with self.sink_lock:
            return self._sink[:]

    def flush_sink(self):
        with self.sink_lock:
            self._sink[:] = []

    def start(self):
        assert not self.active
        self.__flag = threading.Event()
        threading.Thread.start(self)
        # Block until run() signals that the loop is up.
        self.__flag.wait()

    def run(self):
        self.active = True
        self.__flag.set()
        while self.active and asyncore.socket_map:
            with self.active_lock:
                asyncore.loop(timeout=0.1, count=1)
        asyncore.close_all()

    def stop(self):
        if self.active:
            self.active = False
            # Wait for run() to drain and exit before returning.
            self.join()
class SMTPBackendTests(BaseEmailBackendTests, SimpleTestCase):
    email_backend = 'django.core.mail.backends.smtp.EmailBackend'

    @classmethod
    def setUpClass(cls):
        # Port 0 lets the OS pick a free port; it is read back below so the
        # overridden EMAIL_PORT matches where the fake server actually listens.
        cls.server = FakeSMTPServer(('127.0.0.1', 0), None)
        cls._settings_override = override_settings(
            EMAIL_HOST="127.0.0.1",
            EMAIL_PORT=cls.server.socket.getsockname()[1])
        cls._settings_override.enable()
        cls.server.start()

    @classmethod
    def tearDownClass(cls):
        cls._settings_override.disable()
        cls.server.stop()

    def setUp(self):
        super(SMTPBackendTests, self).setUp()
        self.server.flush_sink()

    def tearDown(self):
        self.server.flush_sink()
        super(SMTPBackendTests, self).tearDown()

    def flush_mailbox(self):
        self.server.flush_sink()

    def get_mailbox_content(self):
        return self.server.get_sink()

    @override_settings(EMAIL_HOST_USER="not empty username",
                       EMAIL_HOST_PASSWORD="not empty password")
    def test_email_authentication_use_settings(self):
        # With no explicit credentials, the backend picks them up from settings.
        backend = smtp.EmailBackend()
        self.assertEqual(backend.username, 'not empty username')
        self.assertEqual(backend.password, 'not empty password')

    @override_settings(EMAIL_HOST_USER="not empty username",
                       EMAIL_HOST_PASSWORD="not empty password")
    def test_email_authentication_override_settings(self):
        # Explicit constructor arguments win over the settings.
        backend = smtp.EmailBackend(username='username', password='password')
        self.assertEqual(backend.username, 'username')
        self.assertEqual(backend.password, 'password')

    @override_settings(EMAIL_HOST_USER="not empty username",
                       EMAIL_HOST_PASSWORD="not empty password")
    def test_email_disabled_authentication(self):
        # Passing empty strings disables authentication entirely.
        backend = smtp.EmailBackend(username='', password='')
        self.assertEqual(backend.username, '')
        self.assertEqual(backend.password, '')

    def test_server_stopped(self):
        """
        Test that closing the backend while the SMTP server is stopped doesn't
        raise an exception.
        """
        backend = smtp.EmailBackend(username='', password='')
        backend.open()
        self.server.stop()
        try:
            backend.close()
        except Exception as e:
            self.fail("close() unexpectedly raised an exception: %s" % e)

    @override_settings(EMAIL_USE_TLS=True)
    def test_email_tls_use_settings(self):
        backend = smtp.EmailBackend()
        self.assertTrue(backend.use_tls)

    @override_settings(EMAIL_USE_TLS=True)
    def test_email_tls_override_settings(self):
        backend = smtp.EmailBackend(use_tls=False)
        self.assertFalse(backend.use_tls)

    def test_email_tls_default_disabled(self):
        backend = smtp.EmailBackend()
        self.assertFalse(backend.use_tls)

    @override_settings(EMAIL_USE_SSL=True)
    def test_email_ssl_use_settings(self):
        backend = smtp.EmailBackend()
        self.assertTrue(backend.use_ssl)

    @override_settings(EMAIL_USE_SSL=True)
    def test_email_ssl_override_settings(self):
        backend = smtp.EmailBackend(use_ssl=False)
        self.assertFalse(backend.use_ssl)

    def test_email_ssl_default_disabled(self):
        backend = smtp.EmailBackend()
        self.assertFalse(backend.use_ssl)

    @override_settings(EMAIL_USE_TLS=True)
    def test_email_tls_attempts_starttls(self):
        # The fake server does not advertise STARTTLS, so open() must fail.
        backend = smtp.EmailBackend()
        self.assertTrue(backend.use_tls)
        self.assertRaisesMessage(SMTPException,
            'STARTTLS extension not supported by server.', backend.open)

    @override_settings(EMAIL_USE_SSL=True)
    def test_email_ssl_attempts_ssl_connection(self):
        # The fake server speaks plaintext, so the SSL handshake must fail.
        backend = smtp.EmailBackend()
        self.assertTrue(backend.use_ssl)
        self.assertRaises(SSLError, backend.open)
|
(window["webpackJsonp"]=window["webpackJsonp"]||[]).push([["chunk-73f6d91d","chunk-3a83423a","chunk-2d216621"],{2626:function(t,e,r){"use strict";var o=r("d066"),n=r("9bf2"),a=r("b622"),u=r("83ab"),c=a("species");t.exports=function(t){var e=o(t),r=n.f;u&&e&&!e[c]&&r(e,c,{configurable:!0,get:function(){return this}})}},"3bbe":function(t,e,r){var o=r("861d");t.exports=function(t){if(!o(t)&&null!==t)throw TypeError("Can't set "+String(t)+" as a prototype");return t}},4999:function(t,e,r){"use strict";r.r(e);var o=function(){var t=this,e=t.$createElement,r=t._self._c||e;return r("div",[r("el-table",{ref:"multipleTable",staticStyle:{width:"100%"},attrs:{data:t.tableData,border:"",stripe:"","tooltip-effect":"dark"}},[r("el-table-column",{attrs:{label:"ID",prop:"ID",width:"60"}}),r("el-table-column",{attrs:{label:"流程名称",prop:"workflowProcess.label",width:"150"}}),r("el-table-column",{attrs:{label:"发起人",prop:"promoter.nickName",width:"120"}}),r("el-table-column",{attrs:{label:"节点日期",width:"180"},scopedSlots:t._u([{key:"default",fn:function(e){return[t._v(t._s(t._f("formatDate")(e.row.CreatedAt)))]}}])}),r("el-table-column",{attrs:{label:"业务代码",prop:"businessType",width:"120"}}),r("el-table-column",{attrs:{label:"当前节点",prop:"workflowNode.label",width:"120"}}),r("el-table-column",{attrs:{label:"流程状态",width:"120"},scopedSlots:t._u([{key:"default",fn:function(e){return[r("div",[t._v(" "+t._s(e.row.isActive?"进行中":"已结束")+" ")])]}}])}),r("el-table-column",{attrs:{label:"详细介绍",prop:"workflowProcess.description","min-width":"200"}}),r("el-table-column",{attrs:{label:"按钮组"},scopedSlots:t._u([{key:"default",fn:function(e){return[r("el-button",{on:{click:function(r){return t.view(e.row)}}},[t._v("查看")])]}}])})],1)],1)},n=[],a=(r("96cf"),r("1da1")),u=r("c466"),c=r("c1da"),l={data:function(){return{tableData:[]}},methods:{view:function(t){this.$router.push({name:"workflowUse",query:{workflowMoveID:t.ID}})}},created:function(){var t=this;return 
Object(a["a"])(regeneratorRuntime.mark((function e(){var r;return regeneratorRuntime.wrap((function(e){while(1)switch(e.prev=e.next){case 0:return e.next=2,Object(c["getMyStated"])();case 2:r=e.sent,0==r.code&&(t.tableData=r.data.wfms);case 4:case"end":return e.stop()}}),e)})))()},filters:{formatDate:function(t){if(null!=t&&""!=t){var e=new Date(t);return Object(u["formatTimeToStr"])(e,"yyyy-MM-dd hh:mm:ss")}return""}}},s=l,i=r("2877"),f=Object(i["a"])(s,o,n,!1,null,null,null);e["default"]=f.exports},"4d63":function(t,e,r){var o=r("83ab"),n=r("da84"),a=r("94ca"),u=r("7156"),c=r("9bf2").f,l=r("241c").f,s=r("44e7"),i=r("ad6d"),f=r("9f7f"),d=r("6eeb"),p=r("d039"),w=r("69f3").set,h=r("2626"),b=r("b622"),v=b("match"),g=n.RegExp,m=g.prototype,k=/a/g,y=/a/g,P=new g(k)!==k,_=f.UNSUPPORTED_Y,S=o&&a("RegExp",!P||_||p((function(){return y[v]=!1,g(k)!=k||g(y)==y||"/a/i"!=g(k,"i")})));if(S){var E=function(t,e){var r,o=this instanceof E,n=s(t),a=void 0===e;if(!o&&n&&t.constructor===E&&a)return t;P?n&&!a&&(t=t.source):t instanceof E&&(a&&(e=i.call(t)),t=t.source),_&&(r=!!e&&e.indexOf("y")>-1,r&&(e=e.replace(/y/g,"")));var c=u(P?new g(t,e):g(t,e),o?this:m,E);return _&&r&&w(c,{sticky:r}),c},O=function(t){t in E||c(E,t,{configurable:!0,get:function(){return g[t]},set:function(e){g[t]=e}})},x=l(g),M=0;while(x.length>M)O(x[M++]);m.constructor=E,E.prototype=m,d(n,"RegExp",E)}h("RegExp")},5319:function(t,e,r){"use strict";var o=r("d784"),n=r("825a"),a=r("7b0b"),u=r("50c4"),c=r("a691"),l=r("1d80"),s=r("8aa5"),i=r("14c3"),f=Math.max,d=Math.min,p=Math.floor,w=/\$([$&'`]|\d\d?|<[^>]*>)/g,h=/\$([$&'`]|\d\d?)/g,b=function(t){return void 0===t?t:String(t)};o("replace",2,(function(t,e,r,o){var v=o.REGEXP_REPLACE_SUBSTITUTES_UNDEFINED_CAPTURE,g=o.REPLACE_KEEPS_$0,m=v?"$":"$0";return[function(r,o){var n=l(this),a=void 0==r?void 0:r[t];return void 0!==a?a.call(r,n,o):e.call(String(n),r,o)},function(t,o){if(!v&&g||"string"===typeof o&&-1===o.indexOf(m)){var a=r(e,t,this,o);if(a.done)return 
a.value}var l=n(t),p=String(this),w="function"===typeof o;w||(o=String(o));var h=l.global;if(h){var y=l.unicode;l.lastIndex=0}var P=[];while(1){var _=i(l,p);if(null===_)break;if(P.push(_),!h)break;var S=String(_[0]);""===S&&(l.lastIndex=s(p,u(l.lastIndex),y))}for(var E="",O=0,x=0;x<P.length;x++){_=P[x];for(var M=String(_[0]),D=f(d(c(_.index),p.length),0),W=[],j=1;j<_.length;j++)W.push(b(_[j]));var R=_.groups;if(w){var T=[M].concat(W,D,p);void 0!==R&&T.push(R);var $=String(o.apply(void 0,T))}else $=k(M,p,D,W,R,o);D>=O&&(E+=p.slice(O,D)+$,O=D+M.length)}return E+p.slice(O)}];function k(t,r,o,n,u,c){var l=o+t.length,s=n.length,i=h;return void 0!==u&&(u=a(u),i=w),e.call(c,i,(function(e,a){var c;switch(a.charAt(0)){case"$":return"$";case"&":return t;case"`":return r.slice(0,o);case"'":return r.slice(l);case"<":c=u[a.slice(1,-1)];break;default:var i=+a;if(0===i)return e;if(i>s){var f=p(i/10);return 0===f?e:f<=s?void 0===n[f-1]?a.charAt(1):n[f-1]+a.charAt(1):e}c=n[i-1]}return void 0===c?"":c}))}}))},7156:function(t,e,r){var o=r("861d"),n=r("d2bb");t.exports=function(t,e,r){var a,u;return n&&"function"==typeof(a=e.constructor)&&a!==r&&o(u=a.prototype)&&u!==r.prototype&&n(t,u),t}},c1da:function(t,e,r){"use strict";r.r(e),r.d(e,"createWorkflowProcess",(function(){return n})),r.d(e,"deleteWorkflowProcess",(function(){return a})),r.d(e,"deleteWorkflowProcessByIds",(function(){return u})),r.d(e,"updateWorkflowProcess",(function(){return c})),r.d(e,"findWorkflowProcess",(function(){return l})),r.d(e,"getWorkflowProcessList",(function(){return s})),r.d(e,"findWorkflowStep",(function(){return i})),r.d(e,"startWorkflow",(function(){return f})),r.d(e,"completeWorkflowMove",(function(){return d})),r.d(e,"getMyStated",(function(){return p})),r.d(e,"getMyNeed",(function(){return w})),r.d(e,"getWorkflowMoveByID",(function(){return h}));var o=r("b775"),n=function(t){return Object(o["default"])({url:"/workflowProcess/createWorkflowProcess",method:"post",data:t})},a=function(t){return 
Object(o["default"])({url:"/workflowProcess/deleteWorkflowProcess",method:"delete",data:t})},u=function(t){return Object(o["default"])({url:"/workflowProcess/deleteWorkflowProcessByIds",method:"delete",data:t})},c=function(t){return Object(o["default"])({url:"/workflowProcess/updateWorkflowProcess",method:"put",data:t})},l=function(t){return Object(o["default"])({url:"/workflowProcess/findWorkflowProcess",method:"get",params:t})},s=function(t){return Object(o["default"])({url:"/workflowProcess/getWorkflowProcessList",method:"get",params:t})},i=function(t){return Object(o["default"])({url:"/workflowProcess/findWorkflowStep",method:"get",params:t})},f=function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{businessType:t.wf.businessType};return Object(o["default"])({url:"/workflowProcess/startWorkflow",method:"post",data:t,params:e})},d=function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{businessType:t.wf.businessType};return Object(o["default"])({url:"/workflowProcess/completeWorkflowMove",method:"post",data:t,params:e})},p=function(){return Object(o["default"])({url:"/workflowProcess/getMyStated",method:"get"})},w=function(){return Object(o["default"])({url:"/workflowProcess/getMyNeed",method:"get"})},h=function(t){return Object(o["default"])({url:"/workflowProcess/getWorkflowMoveByID",method:"get",params:t})}},c466:function(t,e,r){"use strict";r.r(e),r.d(e,"formatTimeToStr",(function(){return o}));r("4d63"),r("ac1f"),r("25f0"),r("5319");function o(t,e){var r=new Date(t).Format("yyyy-MM-dd hh:mm:ss");return e&&(r=new Date(t).Format(e)),r.toLocaleString()}Date.prototype.Format=function(t){var e={"M+":this.getMonth()+1,"d+":this.getDate(),"h+":this.getHours(),"m+":this.getMinutes(),"s+":this.getSeconds(),"q+":Math.floor((this.getMonth()+3)/3),S:this.getMilliseconds()};for(var r in/(y+)/.test(t)&&(t=t.replace(RegExp.$1,(this.getFullYear()+"").substr(4-RegExp.$1.length))),e)new 
RegExp("("+r+")").test(t)&&(t=t.replace(RegExp.$1,1==RegExp.$1.length?e[r]:("00"+e[r]).substr((""+e[r]).length)));return t}},d2bb:function(t,e,r){var o=r("825a"),n=r("3bbe");t.exports=Object.setPrototypeOf||("__proto__"in{}?function(){var t,e=!1,r={};try{t=Object.getOwnPropertyDescriptor(Object.prototype,"__proto__").set,t.call(r,[]),e=r instanceof Array}catch(a){}return function(r,a){return o(r),n(a),e?t.call(r,a):r.__proto__=a,r}}():void 0)}}]);
|
import csv
import json
import logging
import os
from typing import List
from google.cloud import bigquery
from google.oauth2 import service_account
import google.api_core.exceptions
from gatherers.gathererabc import Gatherer
from utils import utils
# Options:
#
# --timeout: Override the 10 minute job timeout (specify in seconds).
# --cache: Use locally cached export data instead of hitting BigQuery.
# Gathers hostnames from Censys.io via the Google BigQuery API.
#
# Before using this, you need to:
#
# * create a Project in Google Cloud, and an associated service account
# with access to create new jobs/queries and get their results.
# * give Censys.io this Google Cloud service account to grant access to.
#
# For details on concepts, and how to test access in the web console:
#
# * https://support.censys.io/google-bigquery/bigquery-introduction
# * https://support.censys.io/google-bigquery/adding-censys-datasets-to-bigquery
#
# Note that the web console access is based on access given to a Google account,
# but BigQuery API access via this script depends on access given to
# Google Cloud *service account* credentials.
# Defaults to 10 minute timeout (in seconds), as documented for --timeout
# above. (Was 60 * 60 * 10, i.e. 10 hours, contradicting both comments.)
default_timeout = 60 * 10
class Gatherer(Gatherer):

    def gather(self):
        """Yield hostnames from Censys' certificate data via BigQuery.

        Runs (or reuses a cached export of) a SQL query against Censys'
        public certificates table for names ending in any of self.suffixes,
        caches the raw result as CSV, then yields each domain found
        (dupes included; de-duping happens in the central gathering script).
        """
        # Returns a parsed, processed Google service credentials object.
        credentials = load_credentials()

        if credentials is None:
            logging.warning("No BigQuery credentials provided.")
            logging.warning("Set BIGQUERY_CREDENTIALS or BIGQUERY_CREDENTIALS_PATH environment variables.")
            exit(1)

        # When using this form of instantiation, the client won't pull
        # the project_id out of the creds, has to be set explicitly.
        client = bigquery.Client(
            project=credentials.project_id,
            credentials=credentials
        )

        # Allow override of default timeout (in seconds).
        timeout = int(self.options.get("timeout", default_timeout))

        # Construct the query.
        query = query_for(self.suffixes)
        logging.debug("Censys query:\n%s\n" % query)

        # Plan to store in cache/censys/export.csv.
        download_path = utils.cache_path(
            "export", "censys", ext="csv", cache_dir=self.cache_dir)

        # Reuse of cached data can be turned on with --cache.
        cache = self.options.get("cache", False)
        if (cache is True) and os.path.exists(download_path):
            logging.warning("Using cached download data.")

        # But by default, fetch new data from the BigQuery API,
        # and write it to the expected download location.
        else:
            # Ensure cache destination exists.
            utils.mkdir_p(os.path.dirname(download_path))

            logging.warning("Kicking off SQL query job.")

            rows = None

            # Actually execute the query; loads all results into memory.
            try:
                query_job = client.query(query)
                iterator = query_job.result(timeout=timeout)
                rows = list(iterator)
            except google.api_core.exceptions.Forbidden:
                logging.warning("Access denied to Censys' BigQuery tables.")
            except Exception:
                logging.warning(utils.format_last_exception())
                logging.warning("Error talking to BigQuery, aborting.")

            # Bug fix: if the query failed, rows is still None -- abort
            # instead of crashing on the iteration below and leaving a
            # truncated cache file behind.
            if rows is None:
                exit(1)

            # Cache the results as CSV for later reuse.
            logging.warning("Caching results of SQL query.")
            with open(download_path, 'w', newline='') as download_file:
                download_writer = csv.writer(download_file)
                download_writer.writerow(["Domain"])  # will be skipped on read

                # Write out the rows as they were returned (dupes and all),
                # to be de-duped by the central gathering script.
                for row in rows:
                    for domain in (row['common_name'] + row['dns_names']):
                        download_writer.writerow([domain])

        # Whether we downloaded it fresh or not, read from the cached data.
        for domain in utils.load_domains(download_path):
            if domain:
                yield domain
# Constructs the query to run in BigQuery, against Censys'
# certificate datasets, for one or more suffixes.
#
# Example query:
#
# SELECT
# parsed.subject.common_name,
# parsed.extensions.subject_alt_name.dns_names
# FROM
# `censys-io.certificates_public.certificates`,
# UNNEST(parsed.subject.common_name) AS common_names,
# UNNEST(parsed.extensions.subject_alt_name.dns_names) AS sans
# WHERE
# (common_names LIKE "%.gov"
# OR sans LIKE "%.gov")
# OR (common_names LIKE "%.fed.us"
# OR sans LIKE "%.fed.us");
def query_for(suffixes: List[str]) -> str:
    """Build the BigQuery SQL that matches certificate names ending in any suffix.

    Checks both the subject common name and the subject-alternative-name
    entries of each certificate against each suffix via LIKE.
    """
    selected_fields = "\n".join((
        " parsed.subject.common_name,",
        " parsed.extensions.subject_alt_name.dns_names",
    ))

    sources = "\n".join((
        " `censys-io.certificates_public.certificates`,",
        " UNNEST(parsed.subject.common_name) AS common_names,",
        " UNNEST(parsed.extensions.subject_alt_name.dns_names) AS sans",
    ))

    def clause_for(suffix):
        # Match the suffix at the end of either the CN or any SAN.
        return '(common_names LIKE "%%%s"\n OR sans LIKE "%%%s")' % (suffix, suffix)

    # Join the individual suffix clauses into one WHERE clause.
    where = "\n OR ".join(clause_for(suffix) for suffix in suffixes)

    return "\n".join((
        "SELECT",
        selected_fields,
        "FROM",
        sources,
        "WHERE",
        " %s" % where,
    ))
def get_credentials_from_env_var_or_file(env_var: str="",
                                         env_file_var: str="") -> str:
    """Return credential text from env_var, else from the file env_file_var points at.

    Returns None when neither environment variable is set.
    """
    creds = os.environ.get(env_var, None)
    if creds is not None:
        return creds
    path = os.environ.get(env_file_var, None)
    if path is None:
        return None
    with open(path) as f:
        return f.read()
# Load BigQuery credentials from either a JSON string, or
# a JSON file. Passed in via environment variables either way.
def load_credentials():
    """Build Google service-account Credentials from env-provided JSON.

    Reads BIGQUERY_CREDENTIALS (inline JSON) or BIGQUERY_CREDENTIALS_PATH
    (path to a JSON file). Returns None when neither is set.
    """
    raw = get_credentials_from_env_var_or_file(
        env_var="BIGQUERY_CREDENTIALS",
        env_file_var="BIGQUERY_CREDENTIALS_PATH")
    if raw is None:
        return None
    return service_account.Credentials.from_service_account_info(json.loads(raw))
|
/*! jQuery UI - v1.12.1 - 2018-03-22
* http://jqueryui.com
* Includes: effect.js, effects/effect-blind.js, effects/effect-bounce.js, effects/effect-clip.js, effects/effect-drop.js, effects/effect-explode.js, effects/effect-fade.js, effects/effect-fold.js, effects/effect-highlight.js, effects/effect-puff.js, effects/effect-pulsate.js, effects/effect-scale.js, effects/effect-shake.js, effects/effect-size.js, effects/effect-slide.js, effects/effect-transfer.js
* Copyright jQuery Foundation and other contributors; Licensed MIT */
(function( factory ) {
if ( typeof define === "function" && define.amd ) {
// AMD. Register as an anonymous module.
define([ "jquery" ], factory );
} else {
// Browser globals
factory( jQuery );
}
}(function( $ ) {
// Shared jQuery UI namespace; reuse an existing $.ui if another copy created it.
$.ui = $.ui || {};

var version = $.ui.version = "1.12.1";
/*!
* jQuery UI Effects 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Effects Core
//>>group: Effects
// jscs:disable maximumLineLength
//>>description: Extends the internal jQuery effects. Includes morphing and easing. Required by all other effects.
// jscs:enable maximumLineLength
//>>docs: http://api.jqueryui.com/category/effects-core/
//>>demos: http://jqueryui.com/effect/
// Keys used to stash per-element effect bookkeeping via $.data().
var dataSpace = "ui-effects-",
	dataSpaceStyle = "ui-effects-style",
	dataSpaceAnimated = "ui-effects-animated",

	// Create a local jQuery because jQuery Color relies on it and the
	// global may not exist with AMD and a custom build (#10199)
	jQuery = $;

// Registry that the individual effect files (blind, bounce, ...) populate.
$.effects = {
	effect: {}
};
/*!
* jQuery Color Animations v2.1.2
* https://github.com/jquery/jquery-color
*
* Copyright 2014 jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*
* Date: Wed Jan 16 08:47:09 2013 -0600
*/
( function( jQuery, undefined ) {
// CSS properties whose animation steps jQuery Color hooks into.
var stepHooks = "backgroundColor borderBottomColor borderLeftColor borderRightColor " +
	"borderTopColor color columnRuleColor outlineColor textDecorationColor textEmphasisColor",

	// Plusequals test for += 100 -= 100
	rplusequals = /^([\-+])=\s*(\d+\.?\d*)/,

	// A set of RE's that can match strings and generate color tuples.
	stringParsers = [ {
		re: /rgba?\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,
		parse: function( execResult ) {
			return [
				execResult[ 1 ],
				execResult[ 2 ],
				execResult[ 3 ],
				execResult[ 4 ]
			];
		}
	}, {

		// Percentage rgb(a) values, e.g. rgb(100%, 0%, 0%); scaled to 0-255.
		re: /rgba?\(\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,
		parse: function( execResult ) {
			return [
				execResult[ 1 ] * 2.55,
				execResult[ 2 ] * 2.55,
				execResult[ 3 ] * 2.55,
				execResult[ 4 ]
			];
		}
	}, {

		// This regex ignores A-F because it's compared against an already lowercased string
		re: /#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})/,
		parse: function( execResult ) {
			return [
				parseInt( execResult[ 1 ], 16 ),
				parseInt( execResult[ 2 ], 16 ),
				parseInt( execResult[ 3 ], 16 )
			];
		}
	}, {

		// This regex ignores A-F because it's compared against an already lowercased string
		re: /#([a-f0-9])([a-f0-9])([a-f0-9])/,
		parse: function( execResult ) {
			return [
				parseInt( execResult[ 1 ] + execResult[ 1 ], 16 ),
				parseInt( execResult[ 2 ] + execResult[ 2 ], 16 ),
				parseInt( execResult[ 3 ] + execResult[ 3 ], 16 )
			];
		}
	}, {
		re: /hsla?\(\s*(\d+(?:\.\d+)?)\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,
		space: "hsla",
		parse: function( execResult ) {
			return [
				execResult[ 1 ],
				execResult[ 2 ] / 100,
				execResult[ 3 ] / 100,
				execResult[ 4 ]
			];
		}
	} ],

	// JQuery.Color( )
	color = jQuery.Color = function( color, green, blue, alpha ) {
		return new jQuery.Color.fn.parse( color, green, blue, alpha );
	},

	// Channel layout for each supported color space.
	spaces = {
		rgba: {
			props: {
				red: {
					idx: 0,
					type: "byte"
				},
				green: {
					idx: 1,
					type: "byte"
				},
				blue: {
					idx: 2,
					type: "byte"
				}
			}
		},

		hsla: {
			props: {
				hue: {
					idx: 0,
					type: "degrees"
				},
				saturation: {
					idx: 1,
					type: "percent"
				},
				lightness: {
					idx: 2,
					type: "percent"
				}
			}
		}
	},

	// Clamping/wrapping rules per channel type, applied by clamp() below.
	propTypes = {
		"byte": {
			floor: true,
			max: 255
		},
		"percent": {
			max: 1
		},
		"degrees": {
			mod: 360,
			floor: true
		}
	},

	support = color.support = {},

	// Element for support tests
	supportElem = jQuery( "<p>" )[ 0 ],

	// Colors = jQuery.Color.names
	colors,

	// Local aliases of functions called often
	each = jQuery.each;

// Determine rgba support immediately
supportElem.style.cssText = "background-color:rgba(1,1,1,.5)";
support.rgba = supportElem.style.backgroundColor.indexOf( "rgba" ) > -1;

// Define cache name and alpha properties
// for rgba and hsla spaces
each( spaces, function( spaceName, space ) {
	space.cache = "_" + spaceName;
	space.props.alpha = {
		idx: 3,
		type: "percent",
		def: 1
	};
} );
// Normalize a single channel value according to its property descriptor:
// floor/parse it, then wrap (mod) or clamp it into the legal range.
function clamp( value, prop, allowEmpty ) {
	var type = propTypes[ prop.type ] || {};

	// Missing value: honor allowEmpty, otherwise fall back to the default.
	if ( value == null ) {
		return ( allowEmpty || !prop.def ) ? null : prop.def;
	}

	// ~~ is a short way of doing floor for positive numbers
	value = type.floor ? ~~value : parseFloat( value );

	// IE will pass in empty strings as value for alpha,
	// which will hit this case
	if ( isNaN( value ) ) {
		return prop.def;
	}

	if ( type.mod ) {
		// We add mod before modding to make sure that negatives values
		// get converted properly: -10 -> 350
		return ( value + type.mod ) % type.mod;
	}

	// For now all property types without mod have min and max
	return value < 0 ? 0 : ( value > type.max ? type.max : value );
}
// Parse a CSS color string ("rgb(...)", "#hex", "hsla(...)", or a named
// color) into a jQuery.Color instance; returns undefined for unknown names.
function stringParse( string ) {
	var inst = color(),
		rgba = inst._rgba = [];

	string = string.toLowerCase();

	each( stringParsers, function( i, parser ) {
		var parsed,
			match = parser.re.exec( string ),
			values = match && parser.parse( match ),
			spaceName = parser.space || "rgba";

		if ( values ) {
			parsed = inst[ spaceName ]( values );

			// If this was an rgba parse the assignment might happen twice
			// oh well....
			inst[ spaces[ spaceName ].cache ] = parsed[ spaces[ spaceName ].cache ];
			rgba = inst._rgba = parsed._rgba;

			// Exit each( stringParsers ) here because we matched
			return false;
		}
	} );

	// Found a stringParser that handled it
	if ( rgba.length ) {

		// If this came from a parsed string, force "transparent" when alpha is 0
		// chrome, (and maybe others) return "transparent" as rgba(0,0,0,0)
		if ( rgba.join() === "0,0,0,0" ) {
			jQuery.extend( rgba, colors.transparent );
		}
		return inst;
	}

	// Named colors
	return colors[ string ];
}
color.fn = jQuery.extend( color.prototype, {
// Flexible initializer: accepts no args (empty color), another color
// instance, a CSS string, an array of channel values, a channel/value
// object, or a DOM element plus a css property name to read.
parse: function( red, green, blue, alpha ) {
if ( red === undefined ) {
// No arguments: start fully empty (all channels null)
this._rgba = [ null, null, null, null ];
return this;
}
if ( red.jquery || red.nodeType ) {
// ( element, cssProperty ): read the computed value off the element
red = jQuery( red ).css( green );
green = undefined;
}
var inst = this,
type = jQuery.type( red ),
rgba = this._rgba = [];
// More than 1 argument specified - assume ( red, green, blue, alpha )
if ( green !== undefined ) {
red = [ red, green, blue, alpha ];
type = "array";
}
if ( type === "string" ) {
return this.parse( stringParse( red ) || colors._default );
}
if ( type === "array" ) {
each( spaces.rgba.props, function( key, prop ) {
rgba[ prop.idx ] = clamp( red[ prop.idx ], prop );
} );
return this;
}
if ( type === "object" ) {
if ( red instanceof color ) {
// Copying another instance: clone every populated space cache
each( spaces, function( spaceName, space ) {
if ( red[ space.cache ] ) {
inst[ space.cache ] = red[ space.cache ].slice();
}
} );
} else {
// Plain object of channel names -> values
each( spaces, function( spaceName, space ) {
var cache = space.cache;
each( space.props, function( key, prop ) {
// If the cache doesn't exist, and we know how to convert
if ( !inst[ cache ] && space.to ) {
// If the value was null, we don't need to copy it
// if the key was alpha, we don't need to copy it either
if ( key === "alpha" || red[ key ] == null ) {
return;
}
inst[ cache ] = space.to( inst._rgba );
}
// This is the only case where we allow nulls for ALL properties.
// call clamp with alwaysAllowEmpty
inst[ cache ][ prop.idx ] = clamp( red[ key ], prop, true );
} );
// Everything defined but alpha?
if ( inst[ cache ] &&
jQuery.inArray( null, inst[ cache ].slice( 0, 3 ) ) < 0 ) {
// Use the default of 1
inst[ cache ][ 3 ] = 1;
if ( space.from ) {
inst._rgba = space.from( inst[ cache ] );
}
}
} );
}
return this;
}
},
// Compares this color to another (anything color() accepts); channels that
// are null on the compared color act as wildcards and always match.
is: function( compare ) {
var is = color( compare ),
same = true,
inst = this;
each( spaces, function( _, space ) {
var localCache,
isCache = is[ space.cache ];
if ( isCache ) {
localCache = inst[ space.cache ] || space.to && space.to( inst._rgba ) || [];
each( space.props, function( _, prop ) {
if ( isCache[ prop.idx ] != null ) {
same = ( isCache[ prop.idx ] === localCache[ prop.idx ] );
// Returning false here exits the inner each() early on a mismatch
return same;
}
} );
}
// Likewise, a mismatch stops iterating the remaining spaces
return same;
} );
return same;
},
_space: function() {
var used = [],
inst = this;
each( spaces, function( spaceName, space ) {
if ( inst[ space.cache ] ) {
used.push( spaceName );
}
} );
return used.pop();
},
// Returns a new color `distance` (0..1) of the way from this color toward
// `other`, interpolating in other's most specific color space.
transition: function( other, distance ) {
var end = color( other ),
spaceName = end._space(),
space = spaces[ spaceName ],
// A fully transparent start interpolates from rgba(0,0,0,0)
startColor = this.alpha() === 0 ? color( "transparent" ) : this,
start = startColor[ space.cache ] || space.to( startColor._rgba ),
result = start.slice();
end = end[ space.cache ];
each( space.props, function( key, prop ) {
var index = prop.idx,
startValue = start[ index ],
endValue = end[ index ],
type = propTypes[ prop.type ] || {};
// If null, don't override start value
if ( endValue === null ) {
return;
}
// If null - use end
if ( startValue === null ) {
result[ index ] = endValue;
} else {
if ( type.mod ) {
// Modular values (hue) take the shorter way around the wheel
if ( endValue - startValue > type.mod / 2 ) {
startValue += type.mod;
} else if ( startValue - endValue > type.mod / 2 ) {
startValue -= type.mod;
}
}
result[ index ] = clamp( ( endValue - startValue ) * distance + startValue, prop );
}
} );
return this[ spaceName ]( result );
},
blend: function( opaque ) {
// If we are already opaque - return ourself
if ( this._rgba[ 3 ] === 1 ) {
return this;
}
var rgb = this._rgba.slice(),
a = rgb.pop(),
blend = color( opaque )._rgba;
return color( jQuery.map( rgb, function( v, i ) {
return ( 1 - a ) * blend[ i ] + a * v;
} ) );
},
toRgbaString: function() {
var prefix = "rgba(",
rgba = jQuery.map( this._rgba, function( v, i ) {
return v == null ? ( i > 2 ? 1 : 0 ) : v;
} );
if ( rgba[ 3 ] === 1 ) {
rgba.pop();
prefix = "rgb(";
}
return prefix + rgba.join() + ")";
},
toHslaString: function() {
var prefix = "hsla(",
hsla = jQuery.map( this.hsla(), function( v, i ) {
if ( v == null ) {
v = i > 2 ? 1 : 0;
}
// Catch 1 and 2
if ( i && i < 3 ) {
v = Math.round( v * 100 ) + "%";
}
return v;
} );
if ( hsla[ 3 ] === 1 ) {
hsla.pop();
prefix = "hsl(";
}
return prefix + hsla.join() + ")";
},
toHexString: function( includeAlpha ) {
var rgba = this._rgba.slice(),
alpha = rgba.pop();
if ( includeAlpha ) {
rgba.push( ~~( alpha * 255 ) );
}
return "#" + jQuery.map( rgba, function( v ) {
// Default to 0 when nulls exist
v = ( v || 0 ).toString( 16 );
return v.length === 1 ? "0" + v : v;
} ).join( "" );
},
toString: function() {
return this._rgba[ 3 ] === 0 ? "transparent" : this.toRgbaString();
}
} );
// Allow color.fn.parse to be used as a constructor: its instances share color.fn
color.fn.parse.prototype = color.fn;
// Hsla conversions adapted from:
// https://code.google.com/p/maashaack/source/browse/packages/graphics/trunk/src/graphics/colors/HUE2RGB.as?r=5021
// Converts one hue-offset channel to an rgb component in [0, 1].
// p/q are the derived lightness bounds; h is the hue fraction (may be
// outside [0, 1) by up to 1/3 — it is wrapped first).
function hue2rgb( p, q, h ) {
	h = ( h + 1 ) % 1;

	// Piecewise-linear ramp: rise, plateau at q, fall, floor at p
	return h * 6 < 1 ? p + ( q - p ) * h * 6 :
		h * 2 < 1 ? q :
		h * 3 < 2 ? p + ( q - p ) * ( ( 2 / 3 ) - h ) * 6 :
		p;
}
// rgba -> hsla conversion. Null rgb channels yield null h/s/l; a null
// alpha converts to 1 (opaque).
spaces.hsla.to = function( rgba ) {
if ( rgba[ 0 ] == null || rgba[ 1 ] == null || rgba[ 2 ] == null ) {
return [ null, null, null, rgba[ 3 ] ];
}
var r = rgba[ 0 ] / 255,
g = rgba[ 1 ] / 255,
b = rgba[ 2 ] / 255,
a = rgba[ 3 ],
max = Math.max( r, g, b ),
min = Math.min( r, g, b ),
diff = max - min,
add = max + min,
l = add * 0.5,
h, s;
// Hue depends on which channel dominates; offsets keep h positive
if ( min === max ) {
h = 0;
} else if ( r === max ) {
h = ( 60 * ( g - b ) / diff ) + 360;
} else if ( g === max ) {
h = ( 60 * ( b - r ) / diff ) + 120;
} else {
h = ( 60 * ( r - g ) / diff ) + 240;
}
// Chroma (diff) == 0 means greyscale which, by definition, saturation = 0%
// otherwise, saturation is based on the ratio of chroma (diff) to lightness (add)
if ( diff === 0 ) {
s = 0;
} else if ( l <= 0.5 ) {
s = diff / add;
} else {
s = diff / ( 2 - add );
}
return [ Math.round( h ) % 360, s, l, a == null ? 1 : a ];
};
// hsla -> rgba conversion via hue2rgb. Null h/s/l channels yield null rgb.
spaces.hsla.from = function( hsla ) {
if ( hsla[ 0 ] == null || hsla[ 1 ] == null || hsla[ 2 ] == null ) {
return [ null, null, null, hsla[ 3 ] ];
}
var h = hsla[ 0 ] / 360,
s = hsla[ 1 ],
l = hsla[ 2 ],
a = hsla[ 3 ],
// q and p bound the channel values for this lightness/saturation
q = l <= 0.5 ? l * ( 1 + s ) : l + s - l * s,
p = 2 * l - q;
return [
Math.round( hue2rgb( p, q, h + ( 1 / 3 ) ) * 255 ),
Math.round( hue2rgb( p, q, h ) * 255 ),
Math.round( hue2rgb( p, q, h - ( 1 / 3 ) ) * 255 ),
a
];
};
// For every space, generate the whole-space accessor (rgba()/hsla()) and
// one getter/setter per channel (red(), hue(), alpha(), ...).
each( spaces, function( spaceName, space ) {
var props = space.props,
cache = space.cache,
to = space.to,
from = space.from;
// Makes rgba() and hsla()
color.fn[ spaceName ] = function( value ) {
// Generate a cache for this space if it doesn't exist
if ( to && !this[ cache ] ) {
this[ cache ] = to( this._rgba );
}
if ( value === undefined ) {
return this[ cache ].slice();
}
var ret,
type = jQuery.type( value ),
// Accept an array/object of channels, or positional arguments
arr = ( type === "array" || type === "object" ) ? value : arguments,
local = this[ cache ].slice();
each( props, function( key, prop ) {
var val = arr[ type === "object" ? key : prop.idx ];
if ( val == null ) {
// Missing channels keep their current value
val = local[ prop.idx ];
}
local[ prop.idx ] = clamp( val, prop );
} );
if ( from ) {
// Setters return a NEW color; the original is untouched
ret = color( from( local ) );
ret[ cache ] = local;
return ret;
} else {
return color( local );
}
};
// Makes red() green() blue() alpha() hue() saturation() lightness()
each( props, function( key, prop ) {
// Alpha is included in more than one space
if ( color.fn[ key ] ) {
return;
}
color.fn[ key ] = function( value ) {
var vtype = jQuery.type( value ),
fn = ( key === "alpha" ? ( this._hsla ? "hsla" : "rgba" ) : spaceName ),
local = this[ fn ](),
cur = local[ prop.idx ],
match;
if ( vtype === "undefined" ) {
return cur;
}
if ( vtype === "function" ) {
// Function setters receive the current value
value = value.call( this, cur );
vtype = jQuery.type( value );
}
if ( value == null && prop.empty ) {
return this;
}
if ( vtype === "string" ) {
// Support relative "+=N" / "-=N" string setters
match = rplusequals.exec( value );
if ( match ) {
value = cur + parseFloat( match[ 2 ] ) * ( match[ 1 ] === "+" ? 1 : -1 );
}
}
local[ prop.idx ] = value;
return this[ fn ]( local );
};
} );
} );
// Add cssHook and .fx.step function for each named hook.
// accept a space separated string of properties
color.hook = function( hook ) {
var hooks = hook.split( " " );
each( hooks, function( i, hook ) {
jQuery.cssHooks[ hook ] = {
set: function( elem, value ) {
var parsed, curElem,
backgroundColor = "";
if ( value !== "transparent" && ( jQuery.type( value ) !== "string" ||
( parsed = stringParse( value ) ) ) ) {
value = color( parsed || value );
if ( !support.rgba && value._rgba[ 3 ] !== 1 ) {
// No native rgba() support: walk up the tree for an opaque
// background to manually blend the translucent color against
curElem = hook === "backgroundColor" ? elem.parentNode : elem;
while (
( backgroundColor === "" || backgroundColor === "transparent" ) &&
curElem && curElem.style
) {
try {
backgroundColor = jQuery.css( curElem, "backgroundColor" );
curElem = curElem.parentNode;
} catch ( e ) {
}
}
value = value.blend( backgroundColor && backgroundColor !== "transparent" ?
backgroundColor :
"_default" );
}
value = value.toRgbaString();
}
try {
elem.style[ hook ] = value;
} catch ( e ) {
// Wrapped to prevent IE from throwing errors on "invalid" values like
// 'auto' or 'inherit'
}
}
};
jQuery.fx.step[ hook ] = function( fx ) {
// Resolve start/end colors once per animation
if ( !fx.colorInit ) {
fx.start = color( fx.elem, hook );
fx.end = color( fx.end );
fx.colorInit = true;
}
jQuery.cssHooks[ hook ].set( fx.elem, fx.start.transition( fx.end, fx.pos ) );
};
} );
};
color.hook( stepHooks );

// Expand the borderColor shorthand into its four longhand color properties
jQuery.cssHooks.borderColor = {
	expand: function( value ) {
		var sides = [ "Top", "Right", "Bottom", "Left" ],
			expanded = {},
			i;

		for ( i = 0; i < sides.length; i++ ) {
			expanded[ "border" + sides[ i ] + "Color" ] = value;
		}
		return expanded;
	}
};
// Basic color names only.
// Usage of any of the other color names requires adding yourself or including
// jquery.color.svg-names.js.
colors = jQuery.Color.names = {
// 4.1. Basic color keywords
aqua: "#00ffff",
black: "#000000",
blue: "#0000ff",
fuchsia: "#ff00ff",
gray: "#808080",
green: "#008000",
lime: "#00ff00",
maroon: "#800000",
navy: "#000080",
olive: "#808000",
purple: "#800080",
red: "#ff0000",
silver: "#c0c0c0",
teal: "#008080",
white: "#ffffff",
yellow: "#ffff00",
// 4.2.3. "transparent" color keyword
transparent: [ null, null, null, 0 ],
// Fallback used when string parsing fails (see color.fn.parse)
_default: "#ffffff"
};
} )( jQuery );
/******************************************************************************/
/****************************** CLASS ANIMATIONS ******************************/
/******************************************************************************/
( function() {
// Class actions that animateClass understands, plus shorthand CSS
// properties to skip when diffing styles (only longhands animate reliably).
var classAnimationActions = [ "add", "remove", "toggle" ],
shorthandStyles = {
border: 1,
borderBottom: 1,
borderColor: 1,
borderLeft: 1,
borderRight: 1,
borderTop: 1,
borderWidth: 1,
margin: 1,
padding: 1
};
// Border styles cannot tween: snap them to the final value as soon as the
// animation will show them (non-"none" target) or on the final frame.
$.each(
	[ "borderLeftStyle", "borderRightStyle", "borderBottomStyle", "borderTopStyle" ],
	function( _, prop ) {
		$.fx.step[ prop ] = function( fx ) {
			if ( ( fx.end !== "none" || fx.pos === 1 ) && !fx.setAttr ) {
				jQuery.style( fx.elem, prop, fx.end );
				fx.setAttr = true;
			}
		};
	}
);
// Snapshot an element's computed styles into a plain { camelCaseKey: value }
// object, coping with both modern CSSStyleDeclaration and legacy currentStyle.
function getElementStyles( elem ) {
	var key, i,
		view = elem.ownerDocument.defaultView,
		style = view ?
			view.getComputedStyle( elem, null ) :
			elem.currentStyle,
		styles = {};

	if ( style && style.length && style[ 0 ] && style[ style[ 0 ] ] ) {
		// Modern browsers: iterate the declaration by index
		for ( i = style.length - 1; i >= 0; i-- ) {
			key = style[ i ];
			if ( typeof style[ key ] === "string" ) {
				styles[ $.camelCase( key ) ] = style[ key ];
			}
		}
	} else {
		// Support: Opera, IE <9 - enumerate properties directly
		for ( key in style ) {
			if ( typeof style[ key ] === "string" ) {
				styles[ key ] = style[ key ];
			}
		}
	}

	return styles;
}
// Returns only the styles that changed between two snapshots, filtered to
// properties jQuery can actually animate (numeric or with a custom fx step).
function styleDifference( oldStyle, newStyle ) {
	var diff = {},
		name, value;

	for ( name in newStyle ) {
		value = newStyle[ name ];

		// Unchanged values and shorthand properties are skipped
		if ( oldStyle[ name ] === value || shorthandStyles[ name ] ) {
			continue;
		}

		if ( $.fx.step[ name ] || !isNaN( parseFloat( value ) ) ) {
			diff[ name ] = value;
		}
	}

	return diff;
}
// Support: jQuery <1.8
// Polyfill .addBack() (successor of the deprecated .andSelf()).
if ( !$.fn.addBack ) {
	$.fn.addBack = function( selector ) {
		var previous = this.prevObject;
		return this.add(
			selector == null ? previous : previous.filter( selector )
		);
	};
}
// Animates the style changes implied by adding/removing/toggling classes:
// snapshot styles, apply the class change, diff, revert, then animate each
// element to its diffed styles and re-apply the class when all complete.
$.effects.animateClass = function( value, duration, easing, callback ) {
var o = $.speed( duration, easing, callback );
return this.queue( function() {
var animated = $( this ),
baseClass = animated.attr( "class" ) || "",
applyClassChange,
allAnimations = o.children ? animated.find( "*" ).addBack() : animated;
// Map the animated objects to store the original styles.
allAnimations = allAnimations.map( function() {
var el = $( this );
return {
el: el,
start: getElementStyles( this )
};
} );
// Apply class change
applyClassChange = function() {
$.each( classAnimationActions, function( i, action ) {
if ( value[ action ] ) {
animated[ action + "Class" ]( value[ action ] );
}
} );
};
applyClassChange();
// Map all animated objects again - calculate new styles and diff
allAnimations = allAnimations.map( function() {
this.end = getElementStyles( this.el[ 0 ] );
this.diff = styleDifference( this.start, this.end );
return this;
} );
// Apply original class
animated.attr( "class", baseClass );
// Map all animated objects again - this time collecting a promise
allAnimations = allAnimations.map( function() {
var styleInfo = this,
dfd = $.Deferred(),
opts = $.extend( {}, o, {
queue: false,
complete: function() {
dfd.resolve( styleInfo );
}
} );
this.el.animate( this.diff, opts );
return dfd.promise();
} );
// Once all animations have completed:
$.when.apply( $, allAnimations.get() ).done( function() {
// Set the final class
applyClassChange();
// For each animated element,
// clear all css properties that were animated
$.each( arguments, function() {
var el = this.el;
$.each( this.diff, function( key ) {
el.css( key, "" );
} );
} );
// This is guaranteed to be there if you use jQuery.speed()
// it also handles dequeuing the next anim...
o.complete.call( animated[ 0 ] );
} );
} );
};
// Wrap the core class methods so that passing a speed/duration routes
// through $.effects.animateClass; plain calls fall through to the originals.
$.fn.extend( {
addClass: ( function( orig ) {
return function( classNames, speed, easing, callback ) {
return speed ?
$.effects.animateClass.call( this,
{ add: classNames }, speed, easing, callback ) :
orig.apply( this, arguments );
};
} )( $.fn.addClass ),
removeClass: ( function( orig ) {
return function( classNames, speed, easing, callback ) {
// More than one argument means an animated removal was requested
return arguments.length > 1 ?
$.effects.animateClass.call( this,
{ remove: classNames }, speed, easing, callback ) :
orig.apply( this, arguments );
};
} )( $.fn.removeClass ),
toggleClass: ( function( orig ) {
return function( classNames, force, speed, easing, callback ) {
if ( typeof force === "boolean" || force === undefined ) {
if ( !speed ) {
// Without speed parameter
return orig.apply( this, arguments );
} else {
// Forced toggle: behaves like an animated add or remove
return $.effects.animateClass.call( this,
( force ? { add: classNames } : { remove: classNames } ),
speed, easing, callback );
}
} else {
// Without force parameter
return $.effects.animateClass.call( this,
{ toggle: classNames }, force, speed, easing );
}
};
} )( $.fn.toggleClass ),
switchClass: function( remove, add, speed, easing, callback ) {
return $.effects.animateClass.call( this, {
add: add,
remove: remove
}, speed, easing, callback );
}
} );
} )();
/******************************************************************************/
/*********************************** EFFECTS **********************************/
/******************************************************************************/
( function() {
// Extend the :animated selector so elements mid-effect (flagged via
// dataSpaceAnimated) also match, in addition to jQuery's own notion.
if ( $.expr && $.expr.filters && $.expr.filters.animated ) {
	$.expr.filters.animated = ( function( orig ) {
		return function( elem ) {
			if ( $( elem ).data( dataSpaceAnimated ) ) {
				return true;
			}
			return orig( elem );
		};
	} )( $.expr.filters.animated );
}
if ( $.uiBackCompat !== false ) {
$.extend( $.effects, {
// Saves a set of properties in a data storage
save: function( element, set ) {
var i = 0, length = set.length;
for ( ; i < length; i++ ) {
if ( set[ i ] !== null ) {
element.data( dataSpace + set[ i ], element[ 0 ].style[ set[ i ] ] );
}
}
},
// Restores a set of previously saved properties from a data storage
restore: function( element, set ) {
var val, i = 0, length = set.length;
for ( ; i < length; i++ ) {
if ( set[ i ] !== null ) {
val = element.data( dataSpace + set[ i ] );
element.css( set[ i ], val );
}
}
},
setMode: function( el, mode ) {
if ( mode === "toggle" ) {
mode = el.is( ":hidden" ) ? "show" : "hide";
}
return mode;
},
// Wraps the element around a wrapper that copies position properties
createWrapper: function( element ) {
// If the element is already wrapped, return it
if ( element.parent().is( ".ui-effects-wrapper" ) ) {
return element.parent();
}
// Wrap the element
var props = {
width: element.outerWidth( true ),
height: element.outerHeight( true ),
"float": element.css( "float" )
},
wrapper = $( "<div></div>" )
.addClass( "ui-effects-wrapper" )
.css( {
fontSize: "100%",
background: "transparent",
border: "none",
margin: 0,
padding: 0
} ),
// Store the size in case width/height are defined in % - Fixes #5245
size = {
width: element.width(),
height: element.height()
},
active = document.activeElement;
// Support: Firefox
// Firefox incorrectly exposes anonymous content
// https://bugzilla.mozilla.org/show_bug.cgi?id=561664
try {
active.id;
} catch ( e ) {
active = document.body;
}
element.wrap( wrapper );
// Fixes #7595 - Elements lose focus when wrapped.
if ( element[ 0 ] === active || $.contains( element[ 0 ], active ) ) {
$( active ).trigger( "focus" );
}
// Hotfix for jQuery 1.4 since some change in wrap() seems to actually
// lose the reference to the wrapped element
wrapper = element.parent();
// Transfer positioning properties to the wrapper
if ( element.css( "position" ) === "static" ) {
wrapper.css( { position: "relative" } );
element.css( { position: "relative" } );
} else {
$.extend( props, {
position: element.css( "position" ),
zIndex: element.css( "z-index" )
} );
// Copy offsets onto the wrapper; non-numeric offsets become "auto"
$.each( [ "top", "left", "bottom", "right" ], function( i, pos ) {
props[ pos ] = element.css( pos );
if ( isNaN( parseInt( props[ pos ], 10 ) ) ) {
props[ pos ] = "auto";
}
} );
// The element itself is re-anchored at the wrapper's origin
element.css( {
position: "relative",
top: 0,
left: 0,
right: "auto",
bottom: "auto"
} );
}
element.css( size );
return wrapper.css( props ).show();
},
removeWrapper: function( element ) {
var active = document.activeElement;
if ( element.parent().is( ".ui-effects-wrapper" ) ) {
element.parent().replaceWith( element );
// Fixes #7595 - Elements lose focus when wrapped.
if ( element[ 0 ] === active || $.contains( element[ 0 ], active ) ) {
$( active ).trigger( "focus" );
}
}
return element;
}
} );
}
$.extend( $.effects, {
version: "1.12.1",
define: function( name, mode, effect ) {
if ( !effect ) {
effect = mode;
mode = "effect";
}
$.effects.effect[ name ] = effect;
$.effects.effect[ name ].mode = mode;
return effect;
},
scaledDimensions: function( element, percent, direction ) {
if ( percent === 0 ) {
return {
height: 0,
width: 0,
outerHeight: 0,
outerWidth: 0
};
}
var x = direction !== "horizontal" ? ( ( percent || 100 ) / 100 ) : 1,
y = direction !== "vertical" ? ( ( percent || 100 ) / 100 ) : 1;
return {
height: element.height() * y,
width: element.width() * x,
outerHeight: element.outerHeight() * y,
outerWidth: element.outerWidth() * x
};
},
clipToBox: function( animation ) {
return {
width: animation.clip.right - animation.clip.left,
height: animation.clip.bottom - animation.clip.top,
left: animation.clip.left,
top: animation.clip.top
};
},
// Injects recently queued functions to be first in line (after "inprogress")
unshift: function( element, queueLength, count ) {
var queue = element.queue();
if ( queueLength > 1 ) {
// Move `count` entries from position `queueLength` to position 1
queue.splice.apply( queue,
[ 1, 0 ].concat( queue.splice( queueLength, count ) ) );
}
element.dequeue();
},
saveStyle: function( element ) {
element.data( dataSpaceStyle, element[ 0 ].style.cssText );
},
restoreStyle: function( element ) {
element[ 0 ].style.cssText = element.data( dataSpaceStyle ) || "";
element.removeData( dataSpaceStyle );
},
mode: function( element, mode ) {
var hidden = element.is( ":hidden" );
if ( mode === "toggle" ) {
mode = hidden ? "show" : "hide";
}
if ( hidden ? mode === "hide" : mode === "show" ) {
mode = "none";
}
return mode;
},
// Translates a [top,left] array into a baseline value
getBaseline: function( origin, original ) {
var y, x;
switch ( origin[ 0 ] ) {
case "top":
y = 0;
break;
case "middle":
y = 0.5;
break;
case "bottom":
y = 1;
break;
default:
y = origin[ 0 ] / original.height;
}
switch ( origin[ 1 ] ) {
case "left":
x = 0;
break;
case "center":
x = 0.5;
break;
case "right":
x = 1;
break;
default:
x = origin[ 1 ] / original.width;
}
return {
x: x,
y: y
};
},
// Creates a placeholder element so that the original element can be made absolute
createPlaceholder: function( element ) {
var placeholder,
cssPosition = element.css( "position" ),
position = element.position();
// Lock in margins first to account for form elements, which
// will change margin if you explicitly set height
// see: http://jsfiddle.net/JZSMt/3/ https://bugs.webkit.org/show_bug.cgi?id=107380
// Support: Safari
element.css( {
marginTop: element.css( "marginTop" ),
marginBottom: element.css( "marginBottom" ),
marginLeft: element.css( "marginLeft" ),
marginRight: element.css( "marginRight" )
} )
.outerWidth( element.outerWidth() )
.outerHeight( element.outerHeight() );
// Only statically-flowed elements need a placeholder to hold their spot
if ( /^(static|relative)/.test( cssPosition ) ) {
cssPosition = "absolute";
placeholder = $( "<" + element[ 0 ].nodeName + ">" ).insertAfter( element ).css( {
// Convert inline to inline block to account for inline elements
// that turn to inline block based on content (like img)
display: /^(inline|ruby)/.test( element.css( "display" ) ) ?
"inline-block" :
"block",
visibility: "hidden",
// Margins need to be set to account for margin collapse
marginTop: element.css( "marginTop" ),
marginBottom: element.css( "marginBottom" ),
marginLeft: element.css( "marginLeft" ),
marginRight: element.css( "marginRight" ),
"float": element.css( "float" )
} )
.outerWidth( element.outerWidth() )
.outerHeight( element.outerHeight() )
.addClass( "ui-effects-placeholder" );
element.data( dataSpace + "placeholder", placeholder );
}
// Pin the element where it already renders
element.css( {
position: cssPosition,
left: position.left,
top: position.top
} );
// Note: undefined when the element was already absolute/fixed
return placeholder;
},
removePlaceholder: function( element ) {
var dataKey = dataSpace + "placeholder",
placeholder = element.data( dataKey );
if ( placeholder ) {
placeholder.remove();
element.removeData( dataKey );
}
},
// Removes a placeholder if it exists and restores
// properties that were modified during placeholder creation
cleanUp: function( element ) {
$.effects.restoreStyle( element );
$.effects.removePlaceholder( element );
},
setTransition: function( element, list, factor, value ) {
value = value || {};
$.each( list, function( i, x ) {
var unit = element.cssUnit( x );
if ( unit[ 0 ] > 0 ) {
value[ x ] = unit[ 0 ] * factor + unit[ 1 ];
}
} );
return value;
}
} );
// Return an effect options object for the given parameters:
// Normalizes every supported calling convention of .effect()/.show()/etc.
// into a single { effect, duration, complete, ...options } object.
// NOTE: the catch-cases below are order-dependent - do not reorder.
function _normalizeArguments( effect, options, speed, callback ) {
// Allow passing all options as the first parameter
if ( $.isPlainObject( effect ) ) {
options = effect;
effect = effect.effect;
}
// Convert to an object
effect = { effect: effect };
// Catch (effect, null, ...)
if ( options == null ) {
options = {};
}
// Catch (effect, callback)
if ( $.isFunction( options ) ) {
callback = options;
speed = null;
options = {};
}
// Catch (effect, speed, ?)
if ( typeof options === "number" || $.fx.speeds[ options ] ) {
callback = speed;
speed = options;
options = {};
}
// Catch (effect, options, callback)
if ( $.isFunction( speed ) ) {
callback = speed;
speed = null;
}
// Add options to effect
if ( options ) {
$.extend( effect, options );
}
speed = speed || options.duration;
// Resolve named speeds; $.fx.off forces an instant (0ms) duration
effect.duration = $.fx.off ? 0 :
typeof speed === "number" ? speed :
speed in $.fx.speeds ? $.fx.speeds[ speed ] :
$.fx.speeds._default;
effect.complete = callback || options.complete;
return effect;
}
// Decides whether an argument to .show()/.hide()/.toggle() belongs to the
// standard jQuery animation API (true) or names a UI effect (false).
function standardAnimationOption( option ) {
	// Nothing, a number, or a named speed
	if ( !option || typeof option === "number" || $.fx.speeds[ option ] ) {
		return true;
	}

	// A string is standard unless it names a registered effect
	if ( typeof option === "string" ) {
		return !$.effects.effect[ option ];
	}

	// A complete callback
	if ( $.isFunction( option ) ) {
		return true;
	}

	// An options hash is standard as long as it does not name an effect
	return typeof option === "object" && !option.effect;
}
$.fn.extend( {
// Runs a named UI effect on each element: a prefilter pass resolves modes
// and saves styles, then each effect runs (queued unless queue === false).
effect: function( /* effect, options, speed, callback */ ) {
var args = _normalizeArguments.apply( this, arguments ),
effectMethod = $.effects.effect[ args.effect ],
defaultMode = effectMethod.mode,
queue = args.queue,
queueName = queue || "fx",
complete = args.complete,
mode = args.mode,
modes = [],
prefilter = function( next ) {
var el = $( this ),
normalizedMode = $.effects.mode( el, mode ) || defaultMode;
// Sentinel for duck-punching the :animated pseudo-selector
el.data( dataSpaceAnimated, true );
// Save effect mode for later use,
// we can't just call $.effects.mode again later,
// as the .show() below destroys the initial state
modes.push( normalizedMode );
// See $.uiBackCompat inside of run() for removal of defaultMode in 1.13
if ( defaultMode && ( normalizedMode === "show" ||
( normalizedMode === defaultMode && normalizedMode === "hide" ) ) ) {
el.show();
}
if ( !defaultMode || normalizedMode !== "none" ) {
$.effects.saveStyle( el );
}
if ( $.isFunction( next ) ) {
next();
}
};
if ( $.fx.off || !effectMethod ) {
// Delegate to the original method (e.g., .show()) if possible
if ( mode ) {
return this[ mode ]( args.duration, complete );
} else {
return this.each( function() {
if ( complete ) {
complete.call( this );
}
} );
}
}
function run( next ) {
var elem = $( this );
// Full teardown: clear the animated flag, restore styles, hide if needed
function cleanup() {
elem.removeData( dataSpaceAnimated );
$.effects.cleanUp( elem );
if ( args.mode === "hide" ) {
elem.hide();
}
done();
}
function done() {
if ( $.isFunction( complete ) ) {
complete.call( elem[ 0 ] );
}
if ( $.isFunction( next ) ) {
next();
}
}
// Override mode option on a per element basis,
// as toggle can be either show or hide depending on element state
args.mode = modes.shift();
if ( $.uiBackCompat !== false && !defaultMode ) {
if ( elem.is( ":hidden" ) ? mode === "hide" : mode === "show" ) {
// Call the core method to track "olddisplay" properly
elem[ mode ]();
done();
} else {
effectMethod.call( elem[ 0 ], args, done );
}
} else {
if ( args.mode === "none" ) {
// Call the core method to track "olddisplay" properly
elem[ mode ]();
done();
} else {
effectMethod.call( elem[ 0 ], args, cleanup );
}
}
}
// Run prefilter on all elements first to ensure that
// any showing or hiding happens before placeholder creation,
// which ensures that any layout changes are correctly captured.
return queue === false ?
this.each( prefilter ).each( run ) :
this.queue( queueName, prefilter ).queue( queueName, run );
},
show: ( function( orig ) {
return function( option ) {
if ( standardAnimationOption( option ) ) {
return orig.apply( this, arguments );
} else {
var args = _normalizeArguments.apply( this, arguments );
args.mode = "show";
return this.effect.call( this, args );
}
};
} )( $.fn.show ),
hide: ( function( orig ) {
return function( option ) {
if ( standardAnimationOption( option ) ) {
return orig.apply( this, arguments );
} else {
var args = _normalizeArguments.apply( this, arguments );
args.mode = "hide";
return this.effect.call( this, args );
}
};
} )( $.fn.hide ),
toggle: ( function( orig ) {
return function( option ) {
if ( standardAnimationOption( option ) || typeof option === "boolean" ) {
return orig.apply( this, arguments );
} else {
var args = _normalizeArguments.apply( this, arguments );
args.mode = "toggle";
return this.effect.call( this, args );
}
};
} )( $.fn.toggle ),
cssUnit: function( key ) {
var style = this.css( key ),
val = [];
$.each( [ "em", "px", "%", "pt" ], function( i, unit ) {
if ( style.indexOf( unit ) > 0 ) {
val = [ parseFloat( style ), unit ];
}
} );
return val;
},
cssClip: function( clipObj ) {
if ( clipObj ) {
return this.css( "clip", "rect(" + clipObj.top + "px " + clipObj.right + "px " +
clipObj.bottom + "px " + clipObj.left + "px)" );
}
return parseClip( this.css( "clip" ), this );
},
// Animates a temporary outline element from this element's box to the
// target's box ("transfer" effect), then removes it and calls done.
transfer: function( options, done ) {
var element = $( this ),
target = $( options.to ),
targetFixed = target.css( "position" ) === "fixed",
body = $( "body" ),
// Fixed-position targets need scroll offsets subtracted
fixTop = targetFixed ? body.scrollTop() : 0,
fixLeft = targetFixed ? body.scrollLeft() : 0,
endPosition = target.offset(),
animation = {
top: endPosition.top - fixTop,
left: endPosition.left - fixLeft,
height: target.innerHeight(),
width: target.innerWidth()
},
startPosition = element.offset(),
transfer = $( "<div class='ui-effects-transfer'></div>" )
.appendTo( "body" )
.addClass( options.className )
.css( {
top: startPosition.top - fixTop,
left: startPosition.left - fixLeft,
height: element.innerHeight(),
width: element.innerWidth(),
position: targetFixed ? "fixed" : "absolute"
} )
.animate( animation, options.duration, options.easing, function() {
transfer.remove();
if ( $.isFunction( done ) ) {
done();
}
} );
}
} );
// Parses a css "rect(...)" clip string into a numeric box; "auto" edges
// resolve to the element's outer dimensions, and anything unparseable
// yields the element's full box.
function parseClip( str, element ) {
	var clipRegex = /^rect\((-?\d*\.?\d*px|-?\d+%|auto),?\s*(-?\d*\.?\d*px|-?\d+%|auto),?\s*(-?\d*\.?\d*px|-?\d+%|auto),?\s*(-?\d*\.?\d*px|-?\d+%|auto)\)$/,
		width = element.outerWidth(),
		height = element.outerHeight(),
		match = clipRegex.exec( str ) || [ "", 0, width, height, 0 ],
		side = function( raw, fallback ) {
			return raw === "auto" ? fallback : parseFloat( raw );
		};

	return {
		top: parseFloat( match[ 1 ] ) || 0,
		right: side( match[ 2 ], width ),
		bottom: side( match[ 3 ], height ),
		left: parseFloat( match[ 4 ] ) || 0
	};
}
// Tween step for the css clip rect: interpolates each edge independently.
$.fx.step.clip = function( fx ) {
	var tween = function( from, to ) {
		return fx.pos * ( to - from ) + from;
	};

	// Resolve start/end boxes once per animation
	if ( !fx.clipInit ) {
		fx.start = $( fx.elem ).cssClip();
		if ( typeof fx.end === "string" ) {
			fx.end = parseClip( fx.end, fx.elem );
		}
		fx.clipInit = true;
	}

	$( fx.elem ).cssClip( {
		top: tween( fx.start.top, fx.end.top ),
		right: tween( fx.start.right, fx.end.right ),
		bottom: tween( fx.start.bottom, fx.end.bottom ),
		left: tween( fx.start.left, fx.end.left )
	} );
};
} )();
/******************************************************************************/
/*********************************** EASING ***********************************/
/******************************************************************************/
( function() {
// Based on easing equations from Robert Penner (http://www.robertpenner.com/easing)
var baseEasings = {};
// Power easings: Quad = p^2, Cubic = p^3, ... Expo = p^6
$.each( [ "Quad", "Cubic", "Quart", "Quint", "Expo" ], function( i, name ) {
baseEasings[ name ] = function( p ) {
return Math.pow( p, i + 2 );
};
} );
$.extend( baseEasings, {
Sine: function( p ) {
return 1 - Math.cos( p * Math.PI / 2 );
},
Circ: function( p ) {
return 1 - Math.sqrt( 1 - p * p );
},
Elastic: function( p ) {
return p === 0 || p === 1 ? p :
-Math.pow( 2, 8 * ( p - 1 ) ) * Math.sin( ( ( p - 1 ) * 80 - 7.5 ) * Math.PI / 15 );
},
Back: function( p ) {
return p * p * ( 3 * p - 2 );
},
Bounce: function( p ) {
var pow2,
bounce = 4;
// Find which of the 4 bounce segments p falls in
while ( p < ( ( pow2 = Math.pow( 2, --bounce ) ) - 1 ) / 11 ) {}
return 1 / Math.pow( 4, 3 - bounce ) - 7.5625 * Math.pow( ( pow2 * 3 - 2 ) / 22 - p, 2 );
}
} );
// Derive the easeOut and easeInOut variants by reflecting easeIn
$.each( baseEasings, function( name, easeIn ) {
$.easing[ "easeIn" + name ] = easeIn;
$.easing[ "easeOut" + name ] = function( p ) {
return 1 - easeIn( 1 - p );
};
$.easing[ "easeInOut" + name ] = function( p ) {
return p < 0.5 ?
easeIn( p * 2 ) / 2 :
1 - easeIn( p * -2 + 2 ) / 2;
};
} );
} )();
// Module-export alias used by the per-effect definitions that follow
var effect = $.effects;
/*!
* jQuery UI Effects Blind 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Blind Effect
//>>group: Effects
//>>description: Blinds the element.
//>>docs: http://api.jqueryui.com/blind-effect/
//>>demos: http://jqueryui.com/effect/
var effectsEffectBlind = $.effects.define( "blind", "hide", function( options, done ) {

	// For each direction: [ edge to collapse, edge it collapses toward ]
	var edges = {
			up: [ "bottom", "top" ],
			vertical: [ "bottom", "top" ],
			down: [ "top", "bottom" ],
			left: [ "right", "left" ],
			horizontal: [ "right", "left" ],
			right: [ "left", "right" ]
		},
		el = $( this ),
		direction = options.direction || "up",
		startClip = el.cssClip(),
		animation = { clip: $.extend( {}, startClip ) },
		placeholder = $.effects.createPlaceholder( el ),
		pair = edges[ direction ];

	// Collapse the clip rect along the chosen edge
	animation.clip[ pair[ 0 ] ] = animation.clip[ pair[ 1 ] ];

	if ( options.mode === "show" ) {
		// Showing animates from collapsed to the original clip
		el.cssClip( animation.clip );
		if ( placeholder ) {
			placeholder.css( $.effects.clipToBox( animation ) );
		}
		animation.clip = startClip;
	}

	if ( placeholder ) {
		placeholder.animate( $.effects.clipToBox( animation ), options.duration, options.easing );
	}

	el.animate( animation, {
		queue: false,
		duration: options.duration,
		easing: options.easing,
		complete: done
	} );
} );
/*!
* jQuery UI Effects Bounce 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Bounce Effect
//>>group: Effects
//>>description: Bounces an element horizontally or vertically n times.
//>>docs: http://api.jqueryui.com/bounce-effect/
//>>demos: http://jqueryui.com/effect/
// Bounce effect: moves the element up/down (or left/right) `times` times,
// with the bounce height halving (showing) or doubling (hiding) each pass.
var effectsEffectBounce = $.effects.define( "bounce", function( options, done ) {
	var upAnim, downAnim, refValue,
		element = $( this ),

		// Defaults:
		mode = options.mode,
		hide = mode === "hide",
		show = mode === "show",
		direction = options.direction || "up",
		distance = options.distance,
		times = options.times || 5,

		// Number of internal animations
		anims = times * 2 + ( show || hide ? 1 : 0 ),
		speed = options.duration / anims,
		easing = options.easing,

		// Utility:
		ref = ( direction === "up" || direction === "down" ) ? "top" : "left",
		motion = ( direction === "up" || direction === "left" ),
		i = 0,

		// Current queue length, needed to reorder the queued animations below
		queuelen = element.queue().length;

	$.effects.createPlaceholder( element );

	refValue = element.css( ref );

	// Default distance for the BIGGEST bounce is the outer Distance / 3
	if ( !distance ) {
		distance = element[ ref === "top" ? "outerHeight" : "outerWidth" ]() / 3;
	}

	if ( show ) {
		downAnim = { opacity: 1 };
		downAnim[ ref ] = refValue;

		// If we are showing, force opacity 0 and set the initial position
		// then do the "first" animation
		element
			.css( "opacity", 0 )
			.css( ref, motion ? -distance * 2 : distance * 2 )
			.animate( downAnim, speed, easing );
	}

	// Start at the smallest distance if we are hiding
	if ( hide ) {
		distance = distance / Math.pow( 2, times - 1 );
	}

	downAnim = {};
	downAnim[ ref ] = refValue;

	// Bounces up/down/left/right then back to 0 -- times * 2 animations happen here
	for ( ; i < times; i++ ) {
		upAnim = {};
		upAnim[ ref ] = ( motion ? "-=" : "+=" ) + distance;

		element
			.animate( upAnim, speed, easing )
			.animate( downAnim, speed, easing );

		// Halve (or, when hiding, double) the bounce height each iteration
		distance = hide ? distance * 2 : distance / 2;
	}

	// Last Bounce when Hiding
	if ( hide ) {
		upAnim = { opacity: 0 };
		upAnim[ ref ] = ( motion ? "-=" : "+=" ) + distance;

		element.animate( upAnim, speed, easing );
	}

	element.queue( done );

	// Move the queued bounce animations ahead of anything queued later
	$.effects.unshift( element, queuelen, anims + 1 );
} );
/*!
* jQuery UI Effects Clip 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Clip Effect
//>>group: Effects
//>>description: Clips the element on and off like an old TV.
//>>docs: http://api.jqueryui.com/clip-effect/
//>>demos: http://jqueryui.com/effect/
// Clip effect: collapses the element's clip rect toward its center line,
// like an old TV switching off. Default mode is "hide".
var effectsEffectClip = $.effects.define( "clip", "hide", function( options, done ) {
	var start,
		animate = {},
		element = $( this ),
		direction = options.direction || "vertical",
		both = direction === "both",
		horizontal = both || direction === "horizontal",
		vertical = both || direction === "vertical";

	start = element.cssClip();

	// Collapse each clipped axis to a line through the middle of the element.
	animate.clip = {
		top: vertical ? ( start.bottom - start.top ) / 2 : start.top,
		right: horizontal ? ( start.right - start.left ) / 2 : start.right,
		bottom: vertical ? ( start.bottom - start.top ) / 2 : start.bottom,
		left: horizontal ? ( start.right - start.left ) / 2 : start.left
	};

	$.effects.createPlaceholder( element );

	// When showing, start from the collapsed clip and animate to the original.
	if ( options.mode === "show" ) {
		element.cssClip( animate.clip );
		animate.clip = start;
	}

	element.animate( animate, {
		queue: false,
		duration: options.duration,
		easing: options.easing,
		complete: done
	} );
} );
/*!
* jQuery UI Effects Drop 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Drop Effect
//>>group: Effects
//>>description: Moves an element in one direction and hides it at the same time.
//>>docs: http://api.jqueryui.com/drop-effect/
//>>demos: http://jqueryui.com/effect/
// Drop effect: slides the element in one direction while fading it out
// (or reverses both when showing). Default mode is "hide".
var effectsEffectDrop = $.effects.define( "drop", "hide", function( options, done ) {

	var distance,
		element = $( this ),
		mode = options.mode,
		show = mode === "show",
		direction = options.direction || "left",
		ref = ( direction === "up" || direction === "down" ) ? "top" : "left",

		// "up" and "left" travel toward negative coordinates
		motion = ( direction === "up" || direction === "left" ) ? "-=" : "+=",
		oppositeMotion = ( motion === "+=" ) ? "-=" : "+=",
		animation = {
			opacity: 0
		};

	$.effects.createPlaceholder( element );

	// Default travel distance is half the element's outer size on the axis
	distance = options.distance ||
		element[ ref === "top" ? "outerHeight" : "outerWidth" ]( true ) / 2;

	animation[ ref ] = motion + distance;

	// When showing, start offset and transparent, then animate back in
	if ( show ) {
		element.css( animation );

		animation[ ref ] = oppositeMotion + distance;
		animation.opacity = 1;
	}

	// Animate
	element.animate( animation, {
		queue: false,
		duration: options.duration,
		easing: options.easing,
		complete: done
	} );
} );
/*!
* jQuery UI Effects Explode 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Explode Effect
//>>group: Effects
// jscs:disable maximumLineLength
//>>description: Explodes an element in all directions into n pieces. Implodes an element to its original wholeness.
// jscs:enable maximumLineLength
//>>docs: http://api.jqueryui.com/explode-effect/
//>>demos: http://jqueryui.com/effect/
// Explode effect: slices the element into a rows x cells grid of clones and
// flies the pieces apart (hide) or back together (show). Default mode "hide".
var effectsEffectExplode = $.effects.define( "explode", "hide", function( options, done ) {

	var i, j, left, top, mx, my,

		// Default 3x3 grid; otherwise an approximately square grid of ~pieces cells
		rows = options.pieces ? Math.round( Math.sqrt( options.pieces ) ) : 3,
		cells = rows,
		element = $( this ),
		mode = options.mode,
		show = mode === "show",

		// Show and then visibility:hidden the element before calculating offset
		offset = element.show().css( "visibility", "hidden" ).offset(),

		// Width and height of a piece
		width = Math.ceil( element.outerWidth() / cells ),
		height = Math.ceil( element.outerHeight() / rows ),
		pieces = [];

	// Children animate complete: fires animComplete once every piece is done
	function childComplete() {
		pieces.push( this );
		if ( pieces.length === rows * cells ) {
			animComplete();
		}
	}

	// Clone the element for each row and cell.

	for ( i = 0; i < rows; i++ ) { // ===>
		top = offset.top + i * height;
		my = i - ( rows - 1 ) / 2;

		for ( j = 0; j < cells; j++ ) { // |||
			left = offset.left + j * width;
			mx = j - ( cells - 1 ) / 2;

			// Create a clone of the now hidden main element that will be absolute positioned
			// within a wrapper div off the -left and -top equal to size of our pieces
			element
				.clone()
				.appendTo( "body" )
				.wrap( "<div></div>" )
				.css( {
					position: "absolute",
					visibility: "visible",
					left: -j * width,
					top: -i * height
				} )

				// Select the wrapper - make it overflow: hidden and absolute positioned based on
				// where the original was located +left and +top equal to the size of pieces
				.parent()
					.addClass( "ui-effects-explode" )
					.css( {
						position: "absolute",
						overflow: "hidden",
						width: width,
						height: height,
						left: left + ( show ? mx * width : 0 ),
						top: top + ( show ? my * height : 0 ),
						opacity: show ? 0 : 1
					} )
					.animate( {
						left: left + ( show ? 0 : mx * width ),
						top: top + ( show ? 0 : my * height ),
						opacity: show ? 1 : 0
					}, options.duration || 500, options.easing, childComplete );
		}
	}

	// Restore the original element and remove the clones once all pieces finish
	function animComplete() {
		element.css( {
			visibility: "visible"
		} );
		$( pieces ).remove();
		done();
	}
} );
/*!
* jQuery UI Effects Fade 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Fade Effect
//>>group: Effects
//>>description: Fades the element.
//>>docs: http://api.jqueryui.com/fade-effect/
//>>demos: http://jqueryui.com/effect/
// Fade effect: animates opacity between 0 and 1; direction depends on mode.
var effectsEffectFade = $.effects.define( "fade", "toggle", function( options, done ) {

	var showing = options.mode === "show",
		startOpacity = showing ? 0 : 1,
		endOpacity = showing ? 1 : 0;

	$( this )
		.css( "opacity", startOpacity )
		.animate( { opacity: endOpacity }, {
			queue: false,
			duration: options.duration,
			easing: options.easing,
			complete: done
		} );
} );
/*!
* jQuery UI Effects Fold 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Fold Effect
//>>group: Effects
//>>description: Folds an element first horizontally and then vertically.
//>>docs: http://api.jqueryui.com/fold-effect/
//>>demos: http://jqueryui.com/effect/
// Fold effect: collapses the clip rect in two passes - first to a strip of
// `size` pixels (or percent), then fully closed. Default mode is "hide".
var effectsEffectFold = $.effects.define( "fold", "hide", function( options, done ) {

	// Create element
	var element = $( this ),
		mode = options.mode,
		show = mode === "show",
		hide = mode === "hide",
		size = options.size || 15,

		// size may be given as a percentage string, e.g. "20%"
		percent = /([0-9]+)%/.exec( size ),
		horizFirst = !!options.horizFirst,

		// Clip edges to fold in order: pass 1 collapses ref[0], pass 2 ref[1]
		ref = horizFirst ? [ "right", "bottom" ] : [ "bottom", "right" ],
		duration = options.duration / 2,

		placeholder = $.effects.createPlaceholder( element ),

		start = element.cssClip(),
		animation1 = { clip: $.extend( {}, start ) },
		animation2 = { clip: $.extend( {}, start ) },

		distance = [ start[ ref[ 0 ] ], start[ ref[ 1 ] ] ],

		queuelen = element.queue().length;

	if ( percent ) {
		size = parseInt( percent[ 1 ], 10 ) / 100 * distance[ hide ? 0 : 1 ];
	}

	// First fold leaves a strip of `size`; second fold closes it completely
	animation1.clip[ ref[ 0 ] ] = size;
	animation2.clip[ ref[ 0 ] ] = size;
	animation2.clip[ ref[ 1 ] ] = 0;

	// When showing, start fully folded and play the second pass in reverse
	if ( show ) {
		element.cssClip( animation2.clip );
		if ( placeholder ) {
			placeholder.css( $.effects.clipToBox( animation2 ) );
		}

		animation2.clip = start;
	}

	// Animate
	element
		.queue( function( next ) {
			if ( placeholder ) {
				placeholder
					.animate( $.effects.clipToBox( animation1 ), duration, options.easing )
					.animate( $.effects.clipToBox( animation2 ), duration, options.easing );
			}

			next();
		} )
		.animate( animation1, duration, options.easing )
		.animate( animation2, duration, options.easing )
		.queue( done );

	$.effects.unshift( element, queuelen, 4 );
} );
/*!
* jQuery UI Effects Highlight 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Highlight Effect
//>>group: Effects
//>>description: Highlights the background of an element in a defined color for a custom duration.
//>>docs: http://api.jqueryui.com/highlight-effect/
//>>demos: http://jqueryui.com/effect/
// Highlight effect: flashes the background to options.color (default #ffff99)
// and animates back to the original background. Default mode is "show".
var effectsEffectHighlight = $.effects.define( "highlight", "show", function( options, done ) {
	var element = $( this ),

		// Animate back to the element's current background color
		animation = {
			backgroundColor: element.css( "backgroundColor" )
		};

	if ( options.mode === "hide" ) {
		animation.opacity = 0;
	}

	// Save styles so the effects framework can restore them afterwards
	$.effects.saveStyle( element );

	element
		.css( {
			backgroundImage: "none",
			backgroundColor: options.color || "#ffff99"
		} )
		.animate( animation, {
			queue: false,
			duration: options.duration,
			easing: options.easing,
			complete: done
		} );
} );
/*!
* jQuery UI Effects Size 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Size Effect
//>>group: Effects
//>>description: Resize an element to a specified width and height.
//>>docs: http://api.jqueryui.com/size-effect/
//>>demos: http://jqueryui.com/effect/
// Size effect: animates the element (and optionally its box model and
// children) between `from` and `to` dimensions, anchored at `origin`.
// This is the workhorse behind the scale and puff effects.
var effectsEffectSize = $.effects.define( "size", function( options, done ) {

	// Create element
	var baseline, factor, temp,
		element = $( this ),

		// Copy for children
		cProps = [ "fontSize" ],
		vProps = [ "borderTopWidth", "borderBottomWidth", "paddingTop", "paddingBottom" ],
		hProps = [ "borderLeftWidth", "borderRightWidth", "paddingLeft", "paddingRight" ],

		// Set options
		mode = options.mode,
		restore = mode !== "effect",
		scale = options.scale || "both",
		origin = options.origin || [ "middle", "center" ],
		position = element.css( "position" ),
		pos = element.position(),
		original = $.effects.scaledDimensions( element ),
		from = options.from || original,
		to = options.to || $.effects.scaledDimensions( element, 0 );

	$.effects.createPlaceholder( element );

	// Showing plays the animation in reverse: small -> large
	if ( mode === "show" ) {
		temp = from;
		from = to;
		to = temp;
	}

	// Set scaling factor
	factor = {
		from: {
			y: from.height / original.height,
			x: from.width / original.width
		},
		to: {
			y: to.height / original.height,
			x: to.width / original.width
		}
	};

	// Scale the css box
	if ( scale === "box" || scale === "both" ) {

		// Vertical props scaling
		if ( factor.from.y !== factor.to.y ) {
			from = $.effects.setTransition( element, vProps, factor.from.y, from );
			to = $.effects.setTransition( element, vProps, factor.to.y, to );
		}

		// Horizontal props scaling
		if ( factor.from.x !== factor.to.x ) {
			from = $.effects.setTransition( element, hProps, factor.from.x, from );
			to = $.effects.setTransition( element, hProps, factor.to.x, to );
		}
	}

	// Scale the content
	if ( scale === "content" || scale === "both" ) {

		// Vertical props scaling
		if ( factor.from.y !== factor.to.y ) {
			from = $.effects.setTransition( element, cProps, factor.from.y, from );
			to = $.effects.setTransition( element, cProps, factor.to.y, to );
		}
	}

	// Adjust the position properties based on the provided origin points
	if ( origin ) {
		baseline = $.effects.getBaseline( origin, original );
		from.top = ( original.outerHeight - from.outerHeight ) * baseline.y + pos.top;
		from.left = ( original.outerWidth - from.outerWidth ) * baseline.x + pos.left;
		to.top = ( original.outerHeight - to.outerHeight ) * baseline.y + pos.top;
		to.left = ( original.outerWidth - to.outerWidth ) * baseline.x + pos.left;
	}
	element.css( from );

	// Animate the children if desired
	if ( scale === "content" || scale === "both" ) {

		vProps = vProps.concat( [ "marginTop", "marginBottom" ] ).concat( cProps );
		hProps = hProps.concat( [ "marginLeft", "marginRight" ] );

		// Only animate children with width attributes specified
		// TODO: is this right? should we include anything with css width specified as well
		element.find( "*[width]" ).each( function() {
			var child = $( this ),
				childOriginal = $.effects.scaledDimensions( child ),
				childFrom = {
					height: childOriginal.height * factor.from.y,
					width: childOriginal.width * factor.from.x,
					outerHeight: childOriginal.outerHeight * factor.from.y,
					outerWidth: childOriginal.outerWidth * factor.from.x
				},

				// NOTE(review): childTo derives outerHeight/outerWidth from
				// childOriginal.height/.width while childFrom uses the outer
				// dimensions - verify against upstream jQuery UI effect-size.js.
				childTo = {
					height: childOriginal.height * factor.to.y,
					width: childOriginal.width * factor.to.x,
					outerHeight: childOriginal.height * factor.to.y,
					outerWidth: childOriginal.width * factor.to.x
				};

			// Vertical props scaling
			if ( factor.from.y !== factor.to.y ) {
				childFrom = $.effects.setTransition( child, vProps, factor.from.y, childFrom );
				childTo = $.effects.setTransition( child, vProps, factor.to.y, childTo );
			}

			// Horizontal props scaling
			if ( factor.from.x !== factor.to.x ) {
				childFrom = $.effects.setTransition( child, hProps, factor.from.x, childFrom );
				childTo = $.effects.setTransition( child, hProps, factor.to.x, childTo );
			}

			if ( restore ) {
				$.effects.saveStyle( child );
			}

			// Animate children
			child.css( childFrom );
			child.animate( childTo, options.duration, options.easing, function() {

				// Restore children
				if ( restore ) {
					$.effects.restoreStyle( child );
				}
			} );
		} );
	}

	// Animate
	element.animate( to, {
		queue: false,
		duration: options.duration,
		easing: options.easing,
		complete: function() {

			var offset = element.offset();

			if ( to.opacity === 0 ) {
				element.css( "opacity", from.opacity );
			}

			if ( !restore ) {
				element
					.css( "position", position === "static" ? "relative" : position )
					.offset( offset );

				// Need to save style here so that automatic style restoration
				// doesn't restore to the original styles from before the animation.
				$.effects.saveStyle( element );
			}

			done();
		}
	} );
} );
/*!
* jQuery UI Effects Scale 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Scale Effect
//>>group: Effects
//>>description: Grows or shrinks an element and its content.
//>>docs: http://api.jqueryui.com/scale-effect/
//>>demos: http://jqueryui.com/effect/
// Scale effect: grows or shrinks the element to `percent` of its size by
// delegating to the size effect with computed from/to dimensions.
var effectsEffectScale = $.effects.define( "scale", function( options, done ) {

	// Create element
	var el = $( this ),
		mode = options.mode,

		// Percent defaults to 0 for show/hide modes and 100 for a plain
		// effect; an explicit options.percent of 0 is honored.
		percent = parseInt( options.percent, 10 ) ||
			( parseInt( options.percent, 10 ) === 0 ? 0 : ( mode !== "effect" ? 0 : 100 ) ),

		newOptions = $.extend( true, {
			from: $.effects.scaledDimensions( el ),
			to: $.effects.scaledDimensions( el, percent, options.direction || "both" ),
			origin: options.origin || [ "middle", "center" ]
		}, options );

	// Fade option to support puff
	if ( options.fade ) {
		newOptions.from.opacity = 1;
		newOptions.to.opacity = 0;
	}

	// Delegate the actual work to the size effect
	$.effects.effect.size.call( this, newOptions, done );
} );
/*!
* jQuery UI Effects Puff 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Puff Effect
//>>group: Effects
//>>description: Creates a puff effect by scaling the element up and hiding it at the same time.
//>>docs: http://api.jqueryui.com/puff-effect/
//>>demos: http://jqueryui.com/effect/
// Puff effect: scale to 150% (by default) combined with a fade-out.
// Default mode is "hide".
var effectsEffectPuff = $.effects.define( "puff", "hide", function( options, done ) {
	var newOptions = $.extend( true, {}, options, {
		fade: true,
		percent: parseInt( options.percent, 10 ) || 150
	} );

	// Delegate to the scale effect with fading enabled
	$.effects.effect.scale.call( this, newOptions, done );
} );
/*!
* jQuery UI Effects Pulsate 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Pulsate Effect
//>>group: Effects
//>>description: Pulsates an element n times by changing the opacity to zero and back.
//>>docs: http://api.jqueryui.com/pulsate-effect/
//>>demos: http://jqueryui.com/effect/
// Pulsate effect: toggles opacity between 0 and 1 `times` times.
// Default mode is "show".
var effectsEffectPulsate = $.effects.define( "pulsate", "show", function( options, done ) {
	var element = $( this ),
		mode = options.mode,
		show = mode === "show",
		hide = mode === "hide",
		showhide = show || hide,

		// Showing or hiding leaves off the "last" animation
		anims = ( ( options.times || 5 ) * 2 ) + ( showhide ? 1 : 0 ),
		duration = options.duration / anims,
		animateTo = 0,
		i = 1,

		// Current queue length, used to reorder the queued pulses
		queuelen = element.queue().length;

	// Start fully transparent when showing (or when currently hidden)
	if ( show || !element.is( ":visible" ) ) {
		element.css( "opacity", 0 ).show();
		animateTo = 1;
	}

	// Anims - 1 opacity "toggles"
	for ( ; i < anims; i++ ) {
		element.animate( { opacity: animateTo }, duration, options.easing );
		animateTo = 1 - animateTo;
	}

	element.animate( { opacity: animateTo }, duration, options.easing );

	element.queue( done );

	$.effects.unshift( element, queuelen, anims + 1 );
} );
/*!
* jQuery UI Effects Shake 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Shake Effect
//>>group: Effects
//>>description: Shakes an element horizontally or vertically n times.
//>>docs: http://api.jqueryui.com/shake-effect/
//>>demos: http://jqueryui.com/effect/
// Shake effect: swings the element back and forth `times` times around its
// starting position, then returns it there.
var effectsEffectShake = $.effects.define( "shake", function( options, done ) {

	var i = 1,
		element = $( this ),
		direction = options.direction || "left",
		distance = options.distance || 20,
		times = options.times || 3,
		anims = times * 2 + 1,
		speed = Math.round( options.duration / anims ),
		ref = ( direction === "up" || direction === "down" ) ? "top" : "left",
		positiveMotion = ( direction === "up" || direction === "left" ),
		animation = {},
		animation1 = {},
		animation2 = {},

		// Current queue length, used to reorder the queued shakes
		queuelen = element.queue().length;

	$.effects.createPlaceholder( element );

	// Animation: half-swing out, then full swings back and forth
	animation[ ref ] = ( positiveMotion ? "-=" : "+=" ) + distance;
	animation1[ ref ] = ( positiveMotion ? "+=" : "-=" ) + distance * 2;
	animation2[ ref ] = ( positiveMotion ? "-=" : "+=" ) + distance * 2;

	// Animate
	element.animate( animation, speed, options.easing );

	// Shakes
	for ( ; i < times; i++ ) {
		element
			.animate( animation1, speed, options.easing )
			.animate( animation2, speed, options.easing );
	}

	// Final half-swing returns the element to its starting position
	element
		.animate( animation1, speed, options.easing )
		.animate( animation, speed / 2, options.easing )
		.queue( done );

	$.effects.unshift( element, queuelen, anims + 1 );
} );
/*!
* jQuery UI Effects Slide 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Slide Effect
//>>group: Effects
//>>description: Slides an element in and out of the viewport.
//>>docs: http://api.jqueryui.com/slide-effect/
//>>demos: http://jqueryui.com/effect/
// Slide effect: moves the element in/out of view along one axis while
// clipping it at its original bounds. Default mode is "show".
var effectsEffectSlide = $.effects.define( "slide", "show", function( options, done ) {
	var startClip, startRef,
		element = $( this ),

		// Maps a direction to [ leading clip edge, trailing clip edge ]
		map = {
			up: [ "bottom", "top" ],
			down: [ "top", "bottom" ],
			left: [ "right", "left" ],
			right: [ "left", "right" ]
		},
		mode = options.mode,
		direction = options.direction || "left",
		ref = ( direction === "up" || direction === "down" ) ? "top" : "left",
		positiveMotion = ( direction === "up" || direction === "left" ),

		// Default travel distance is the element's full outer size on the axis
		distance = options.distance ||
			element[ ref === "top" ? "outerHeight" : "outerWidth" ]( true ),
		animation = {};

	$.effects.createPlaceholder( element );

	startClip = element.cssClip();
	startRef = element.position()[ ref ];

	// Define hide animation
	animation[ ref ] = ( positiveMotion ? -1 : 1 ) * distance + startRef;
	animation.clip = element.cssClip();
	animation.clip[ map[ direction ][ 1 ] ] = animation.clip[ map[ direction ][ 0 ] ];

	// Reverse the animation if we're showing
	if ( mode === "show" ) {
		element.cssClip( animation.clip );
		element.css( ref, animation[ ref ] );
		animation.clip = startClip;
		animation[ ref ] = startRef;
	}

	// Actually animate
	element.animate( animation, {
		queue: false,
		duration: options.duration,
		easing: options.easing,
		complete: done
	} );
} );
/*!
* jQuery UI Effects Transfer 1.12.1
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Transfer Effect
//>>group: Effects
//>>description: Displays a transfer effect from one element to another.
//>>docs: http://api.jqueryui.com/transfer-effect/
//>>demos: http://jqueryui.com/effect/
// The transfer effect lives on $.fn.transfer in 1.12; only the legacy
// $.effects.define wrapper is registered, and only when back-compat is on.
// NOTE(review): this re-declares `effect` from the flattened build above -
// harmless with var hoisting, but it overwrites the earlier module handle.
var effect;
if ( $.uiBackCompat !== false ) {
	effect = $.effects.define( "transfer", function( options, done ) {
		$( this ).transfer( options, done );
	} );
}
var effectsEffectTransfer = effect;
}));
|
# Generated by Django 2.0.5 on 2018-05-25 21:53
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Add a ``created`` timestamp field to the questions Profile model."""

    dependencies = [
        ('questions', '0003_profile_about'),
    ]

    operations = [
        migrations.AddField(
            model_name='profile',
            name='created',
            # auto_now_add fills the field on insert; the one-off default
            # (timezone.now) back-fills rows that already exist.
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
            # The default above is only used during this migration; it is
            # not kept on the model field afterwards.
            preserve_default=False,
        ),
    ]
|
const path = require('path');
IndexLoader = {
entry: './index.js',
output: {
filename: 'bundle.js',
path: path.resolve(__dirname, 'build'),
libraryTarget: 'var',
library: 'MinervaStory'
}
}
module.exports = [IndexLoader]
|
# Copyright (c) 2020 Cisco and/or its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.ansible.netcommon.plugins.action.network import (
ActionModule as ActionNetworkModule,
)
class ActionModule(ActionNetworkModule):
    """Action plugin that normalizes profile task arguments before delegating
    to the standard network action module.

    Two normalizations are applied to each entry of the ``config`` argument:
    nested switch lists are flattened, and any key merely *containing*
    'profile' (a likely typo) is renamed to exactly 'profile'. Each
    normalization emits a warning that is attached to the task result.
    """

    def run(self, tmp=None, task_vars=None):
        """Normalize ``config`` then run the inherited network action.

        Returns the result dict from the parent ``run``, with any
        normalization warnings added to ``result['warnings']``.
        """
        warnings = []
        config = self._task.args.get('config', None)
        if config is None:
            # Nothing to normalize; delegate directly.
            self.result = super(ActionModule, self).run(task_vars=task_vars)
            return self.result

        for cfg in config:
            self._flatten_switches(cfg, warnings)
            self._normalize_profile_key(cfg, warnings)

        self.result = super(ActionModule, self).run(task_vars=task_vars)
        if warnings:
            # Extend (not append) so the result carries a flat list of
            # warning strings; the original nested the whole list as one
            # element and clobbered any warnings set by the parent run.
            self.result.setdefault('warnings', []).extend(warnings)
        return self.result

    @staticmethod
    def _flatten_switches(cfg, warnings):
        """Flatten nested lists inside cfg['switch'] in place.

        The original implementation dropped scalar entries whenever any
        nested list was present; scalars are now preserved.
        """
        switches = cfg.get('switch', None)
        if switches is None:
            return
        flattened = False
        flat_sw_list = []
        for sw in switches:
            if isinstance(sw, list):
                msg = " !!! Switches included in playbook profiles must be individual items, but given switch element = {} is a list ".format(sw)
                warnings.append(msg)
                flattened = True
                flat_sw_list.extend(sw)
            else:
                flat_sw_list.append(sw)
        if flattened:
            cfg['switch'] = flat_sw_list

    @staticmethod
    def _normalize_profile_key(cfg, warnings):
        """Rename a misspelled '*profile*' key in cfg to exactly 'profile'."""
        pop_key = ''
        for k in list(cfg.keys()):
            if ('profile' in k) and (k != 'profile'):
                msg = " !!! Profile name included in playbook tasks must be 'profile', but given profile name = '{}' ".format(k)
                warnings.append(msg)
                pop_key = k
        if pop_key != '':
            cfg['profile'] = cfg[pop_key]
            cfg.pop(pop_key)
|
#!/usr/bin/env python
'''
calculate GPS CEP from DF or mavlink log for all present GPS modules
This assumes the GPS modules were not moving during the test
'''
from builtins import range
import os
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("logs", metavar="LOG", nargs="+")
args = parser.parse_args()
from pymavlink import mavutil
from pymavlink.mavextra import *
# Full set of collected samples, indexed by system id and then GPS unit:
# DATA[sysid][gps_id] -> list of Sample
DATA = {}
class Sample:
    """A single GPS fix: position (lat/lon/alt) plus the reported fix type."""

    def __init__(self, lat, lon, alt, fix_type):
        # Store the fields exactly as supplied by the log reader.
        self.fix_type = fix_type
        self.alt = alt
        self.lon = lon
        self.lat = lat
def add_data(sysid, gps_id, lat, lon, alt, fix_type):
    """Append one Sample to DATA under (sysid, gps_id), creating the
    nested dicts/list on first use."""
    # setdefault replaces the manual `if key not in dict` initialization;
    # no `global` statement is needed since DATA is only mutated, not rebound.
    DATA.setdefault(sysid, {}).setdefault(gps_id, []).append(
        Sample(lat, lon, alt, fix_type))
def process_log(logfile):
    '''Accumulate GPS positions (3D fixes only) from one DF or mavlink log
    into DATA via add_data().'''
    # Bug fix: the body previously referenced the global `filename` instead
    # of the `logfile` parameter - it only worked because the caller's loop
    # variable happened to be named `filename`.
    print("Processing log %s" % logfile)
    mlog = mavutil.mavlink_connection(logfile)
    while True:
        m = mlog.recv_match(type=['GPS', 'GPS2', 'GPS_RAW_INT', 'GPS2_RAW'])
        if m is None:
            break
        mtype = m.get_type()
        if mtype in ['GPS_RAW_INT', 'GPS2_RAW']:
            # mavlink messages scale lat/lon by 1e7 and alt by 1e3
            (lat, lon, alt, fix_type) = (m.lat*1.0e-7, m.lon*1.0e-7, m.alt*1.0e-3, m.fix_type)
        else:
            # DF log messages are already in degrees / meters
            (lat, lon, alt, fix_type) = (m.Lat, m.Lng, m.Alt, m.Status)
        if fix_type < 3:
            # ignore anything without a 3D fix
            continue
        # First GPS unit vs second GPS unit message types
        if mtype in ['GPS', 'GPS_RAW_INT']:
            gps_id = 1
        else:
            gps_id = 2
        add_data(m.get_srcSystem(), gps_id, lat, lon, alt, fix_type)
# Ingest every log given on the command line before computing statistics.
for filename in args.logs:
    process_log(filename)
def calc_cep(data, pct):
    '''Return (cep, hep): the horizontal radius and vertical (altitude)
    error containing pct percent of the samples, measured from the
    per-axis median position.

    data is a non-empty list of Sample; pct is a percentage (e.g. 50, 99).
    '''
    # get median pos (per-axis medians: lat, lon and alt sorted independently)
    count = len(data)
    mid = count//2
    mid2 = count*pct//100
    median_lat = sorted(data, key=lambda x: x.lat)[mid].lat
    median_lon = sorted(data, key=lambda x: x.lon)[mid].lon
    median_alt = sorted(data, key=lambda x: x.alt)[mid].alt
    # find the point pct% of the way through the list sorted by distance
    # from the median position (distance_lat_lon comes from the
    # pymavlink.mavextra star import above)
    d1 = sorted(data, key=lambda x: distance_lat_lon(x.lat, x.lon, median_lat, median_lon))[mid2]
    # find the point pct% of the way through the list sorted by altitude
    # error from the median altitude
    d2 = sorted(data, key=lambda x: abs(x.alt-median_alt))[mid2]
    # cep is the distance from the median point within which pct% of points lie
    cep = distance_lat_lon(d1.lat, d1.lon, median_lat, median_lon)
    # hep is the altitude error within which pct% of points lie
    hep = abs(d2.alt - median_alt)
    return (cep, hep)
def process_CEP(sysid, gps_id, data):
    """Print CEP50/CEP99 and HEP50/HEP99 statistics for one (sysid, gps_id).

    Samples are bucketed by fix quality: RTK-float (fix_type == 5),
    RTK-fixed (>= 6) and ordinary fixes (<= 4). Each bucket with more than
    100 points is reported on its own line; output is identical to the
    previous three copy-pasted report blocks.
    """
    rtk5 = []
    rtk6 = []
    non_rtk = []
    for d in data:
        if d.fix_type >= 6:
            rtk6.append(d)
        elif d.fix_type == 5:
            rtk5.append(d)
        elif d.fix_type <= 4:
            non_rtk.append(d)

    # Report each bucket in a fixed order; buckets with too few points are
    # skipped because the percentile statistics would be meaningless.
    for label, bucket in (("GPS-RTK5", rtk5), ("GPS-RTK6", rtk6), ("GPS-NORM", non_rtk)):
        if len(bucket) > 100:
            (cep50, hep50) = calc_cep(bucket, 50)
            (cep99, hep99) = calc_cep(bucket, 99)
            print("%s %s:%u CEP50:%.3fm CEP99:%.2fm HEP50:%.3fm HEP99:%.3fm (%u points)" % (
                label, sysid, gps_id, cep50, cep99, hep50, hep99, len(bucket)))
# Report statistics per system id and per GPS unit, in sorted order.
for sysid in sorted(DATA.keys()):
    for gps_id in sorted(DATA[sysid].keys()):
        process_CEP(sysid, gps_id, DATA[sysid][gps_id])
|
(function (lib, img, cjs, ss, an) {
var p; // shortcut to reference prototypes
lib.webFontTxtInst = {}; // text instances that use web fonts, keyed by family
var loadedTypekitCount = 0;
var loadedGoogleCount = 0;
var gFontsUpdateCacheList = [];
var tFontsUpdateCacheList = [];
var rect; // used to reference frame bounds
// Sprite-sheet metadata generated by Adobe Animate; each frames entry is
// [x, y, width, height] within the named atlas image.
lib.ssMetadata = [
{name:"OfficeSpaceAnimation_atlas_", frames: [[0,0,699,711],[701,0,698,603],[0,713,1024,236]]},
{name:"OfficeSpaceAnimation_atlas_2", frames: [[1858,313,51,21],[902,427,51,21],[975,0,329,705],[0,0,973,239],[0,495,560,120],[1306,0,189,497],[1752,313,51,25],[1497,347,205,311],[1752,0,157,311],[1704,347,202,311],[1497,0,253,345],[700,241,200,368],[1805,313,51,25],[1306,660,583,40],[0,617,585,51],[562,495,136,46],[902,241,22,184],[0,241,698,252]]}
];
// Re-render the cacheCanvas of every cached display object in the list;
// called after web fonts finish loading so cached text is redrawn correctly.
lib.updateListCache = function (cacheList) {
	for(var i = 0; i < cacheList.length; i++) {
		if(cacheList[i].cacheCanvas)
			cacheList[i].updateCache();
	}
};
// Insert textInst's ancestor chain into cacheList so caches are updated
// child-before-parent. NOTE(review): relies on the global `exportRoot`
// created by the Animate bootstrap - confirm it is set before any font
// callback fires.
lib.addElementsToCache = function (textInst, cacheList) {
	var cur = textInst;
	// Walk up until we hit an ancestor already in the list, or the root.
	while(cur != exportRoot) {
		if(cacheList.indexOf(cur) != -1)
			break;
		cur = cur.parent;
	}
	if(cur != exportRoot) {
		// An ancestor is already listed: splice the chain in before it.
		var cur2 = textInst;
		var index = cacheList.indexOf(cur);
		while(cur2 != cur) {
			cacheList.splice(index, 0, cur2);
			cur2 = cur2.parent;
			index++;
		}
	}
	else {
		// No ancestor cached yet: append the whole chain up to the root.
		cur = textInst;
		while(cur != exportRoot) {
			cacheList.push(cur);
			cur = cur.parent;
		}
	}
};
// Called by the font loader when a Google font family becomes available;
// queues the text instances that use it and refreshes all caches once every
// Google family has loaded.
lib.gfontAvailable = function(family, totalGoogleCount) {
	lib.properties.webfonts[family] = true;
	var txtInst = lib.webFontTxtInst && lib.webFontTxtInst[family] || [];
	for(var f = 0; f < txtInst.length; ++f)
		lib.addElementsToCache(txtInst[f], gFontsUpdateCacheList);

	loadedGoogleCount++;
	if(loadedGoogleCount == totalGoogleCount) {
		lib.updateListCache(gFontsUpdateCacheList);
	}
};
// Same as gfontAvailable, but for Typekit font families.
lib.tfontAvailable = function(family, totalTypekitCount) {
	lib.properties.webfonts[family] = true;
	var txtInst = lib.webFontTxtInst && lib.webFontTxtInst[family] || [];
	for(var f = 0; f < txtInst.length; ++f)
		lib.addElementsToCache(txtInst[f], tFontsUpdateCacheList);

	loadedTypekitCount++;
	if(loadedTypekitCount == totalTypekitCount) {
		lib.updateListCache(tFontsUpdateCacheList);
	}
};
// symbols:
// Generated sprite symbols: each constructor binds one frame of a sprite
// sheet ("OfficeSpaceAnimation_atlas_" or "..._atlas_2") and parks on it
// via gotoAndStop(frameIndex). The shared `p` variable is just the Animate
// exporter's scratch reference to the freshly assigned prototype.
(lib.BlackEyes = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(0);
}).prototype = p = new cjs.Sprite();
(lib.BlinkEyes = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(1);
}).prototype = p = new cjs.Sprite();
(lib.brick = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(2);
}).prototype = p = new cjs.Sprite();
(lib.clouds = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(3);
}).prototype = p = new cjs.Sprite();
(lib.Layer3 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(4);
}).prototype = p = new cjs.Sprite();
(lib.Layer4 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(5);
}).prototype = p = new cjs.Sprite();
(lib.NormalMouth = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(6);
}).prototype = p = new cjs.Sprite();
(lib.officechair1 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(7);
}).prototype = p = new cjs.Sprite();
(lib.officechair2 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(8);
}).prototype = p = new cjs.Sprite();
(lib.officechair3 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(9);
}).prototype = p = new cjs.Sprite();
(lib.officechair4 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(10);
}).prototype = p = new cjs.Sprite();
(lib.officechair5 = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(11);
}).prototype = p = new cjs.Sprite();
(lib.Smile = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(12);
}).prototype = p = new cjs.Sprite();
(lib.tableedge = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(13);
}).prototype = p = new cjs.Sprite();
(lib.tableface = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(14);
}).prototype = p = new cjs.Sprite();
(lib.tablefoot = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(15);
}).prototype = p = new cjs.Sprite();
(lib.tableleg = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(16);
}).prototype = p = new cjs.Sprite();
(lib.viewcropped = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_"];
	this.gotoAndStop(0);
}).prototype = p = new cjs.Sprite();
(lib.viewsky = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_2"];
	this.gotoAndStop(17);
}).prototype = p = new cjs.Sprite();
(lib.windowoutline = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_"];
	this.gotoAndStop(1);
}).prototype = p = new cjs.Sprite();
(lib.wood = function() {
	this.spriteSheet = ss["OfficeSpaceAnimation_atlas_"];
	this.gotoAndStop(2);
}).prototype = p = new cjs.Sprite();
// helper functions:
// Clone helper installed on every MovieClip symbol prototype: builds a new
// instance with the same constructor arguments, lets _cloneProps copy the
// display properties, then carries over the playback state.
function mc_symbol_clone() {
	var copy = new this.constructor(this.mode, this.startPosition, this.loop);
	copy = this._cloneProps(copy);
	copy.gotoAndStop(this.currentFrame);
	copy.paused = this.paused;
	copy.framerate = this.framerate;
	return copy;
}
// Builds the shared prototype for a MovieClip-based symbol: extends the
// constructor from cjs.MovieClip, records its bounds metadata, and installs
// the common clone helper. Returns the prototype for chained assignment.
function getMCSymbolPrototype(symbol, nominalBounds, frameBounds) {
	var proto = cjs.extend(symbol, cjs.MovieClip);
	proto.nominalBounds = nominalBounds;
	proto.frameBounds = frameBounds;
	proto.clone = mc_symbol_clone;
	return proto;
}
// Symbol2: a single-frame clip composing the sky sprite and the clouds
// sprite (one layer, one static tween state).
(lib.Symbol2 = function(mode,startPosition,loop) {
	this.initialize(mode,startPosition,loop,{});
	// Layer 1
	this.instance = new lib.viewsky();
	this.instance.parent = this;
	this.instance.setTransform(437,485,1.885,1.792);
	this.instance_1 = new lib.clouds();
	this.instance_1.parent = this;
	this.instance_1.setTransform(0,0,1.885,1.792);
	// Single-frame state: clouds drawn over the sky.
	this.timeline.addTween(cjs.Tween.get({}).to({state:[{t:this.instance_1},{t:this.instance}]}).wait(1));
}).prototype = getMCSymbolPrototype(lib.Symbol2, rect = new cjs.Rectangle(0,0,1834.4,936.5), [rect]);
// Symbol3: two Symbol2 instances side by side (one rotated 180°), forming a
// wide tileable cloud strip that the stage clip scrolls horizontally.
(lib.Symbol3 = function(mode,startPosition,loop) {
	this.initialize(mode,startPosition,loop,{});
	// Layer 1
	this.instance = new lib.Symbol2();
	this.instance.parent = this;
	this.instance.setTransform(917.2,468.3,1,1,180,0,0,917.2,468.2);
	this.instance_1 = new lib.Symbol2();
	this.instance_1.parent = this;
	this.instance_1.setTransform(2603.6,811.8,1,1,0,0,0,917.2,468.2);
	this.timeline.addTween(cjs.Tween.get({}).to({state:[{t:this.instance_1},{t:this.instance}]}).wait(1));
}).prototype = getMCSymbolPrototype(lib.Symbol3, rect = new cjs.Rectangle(0,0,3520.8,1280.1), [rect]);
// stage content:
// Stage content: 500-frame scene. Layers are added top-to-bottom (chairs,
// table parts, window, the character's eyes/mouth, body, walls, scrolling
// clouds, sky, floor). Most layers are static (.wait(500)); the eyes and
// mouth toggle _off to animate blinking and talking, and the cloud strip
// (instance_19, added below) is tweened across the frame range.
(lib.RECOVER_OfficeSpaceAnimation = function(mode,startPosition,loop) {
	this.initialize(mode,startPosition,loop,{});
	// chair2
	this.instance = new lib.officechair5();
	this.instance.parent = this;
	this.instance.setTransform(1020,491,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance).wait(500));
	// idk
	// NOTE(review): this layer duplicates chair2 — same symbol, same
	// transform — so it draws the identical sprite twice. Looks like
	// leftover authoring debris; confirm in the .fla before removing.
	this.instance_1 = new lib.officechair5();
	this.instance_1.parent = this;
	this.instance_1.setTransform(1020,491,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_1).wait(500));
	// chair4
	this.instance_2 = new lib.officechair4();
	this.instance_2.parent = this;
	this.instance_2.setTransform(1448,533,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_2).wait(500));
	// tableedge
	this.instance_3 = new lib.tableedge();
	this.instance_3.parent = this;
	this.instance_3.setTransform(826,655,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_3).wait(500));
	// tabletop
	this.instance_4 = new lib.tableface();
	this.instance_4.parent = this;
	this.instance_4.setTransform(822,614,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_4).wait(500));
	// tableleg
	this.instance_5 = new lib.tableleg();
	this.instance_5.parent = this;
	this.instance_5.setTransform(1296,671,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_5).wait(500));
	// tablebase
	this.instance_6 = new lib.tablefoot();
	this.instance_6.parent = this;
	this.instance_6.setTransform(1196,955,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_6).wait(500));
	// chair3
	this.instance_7 = new lib.officechair3();
	this.instance_7.parent = this;
	this.instance_7.setTransform(1196,399,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_7).wait(500));
	// chair5
	this.instance_8 = new lib.officechair2();
	this.instance_8.parent = this;
	this.instance_8.setTransform(1629,421,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_8).wait(500));
	// chair1
	this.instance_9 = new lib.officechair1();
	this.instance_9.parent = this;
	this.instance_9.setTransform(668,421,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_9).wait(500));
	// windowframe
	this.instance_10 = new lib.windowoutline();
	this.instance_10.parent = this;
	this.instance_10.setTransform(610,-225,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_10).wait(500));
	// openeyes
	// The _off toggles below hide the open eyes for a few frames at a time,
	// letting the BlinkEyes layer underneath show through (blink cycle).
	this.instance_11 = new lib.BlackEyes();
	this.instance_11.parent = this;
	this.instance_11.setTransform(319,252,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_11).wait(40).to({_off:true},1).wait(5).to({_off:false,x:320},0).wait(97).to({x:319},0).to({_off:true},1).wait(5).to({_off:false},0).to({_off:true},3).wait(5).to({_off:false},0).wait(115).to({_off:true},1).wait(6).to({_off:false},0).wait(109).to({_off:true},1).wait(4).to({_off:false},0).wait(107));
	// closedeyes
	this.instance_12 = new lib.BlinkEyes();
	this.instance_12.parent = this;
	this.instance_12.setTransform(319,252,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_12).wait(500));
	// openmouth
	// Smile hidden briefly around frame 210, revealing NormalMouth below.
	this.instance_13 = new lib.Smile();
	this.instance_13.parent = this;
	this.instance_13.setTransform(314,322,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_13).wait(209).to({_off:true},1).wait(7).to({_off:false},0).wait(283));
	// closedmouth
	this.instance_14 = new lib.NormalMouth();
	this.instance_14.parent = this;
	this.instance_14.setTransform(314,322,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_14).wait(500));
	// body
	this.instance_15 = new lib.Layer4();
	this.instance_15.parent = this;
	this.instance_15.setTransform(153,147,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_15).wait(500));
	// brick
	this.instance_16 = new lib.brick();
	this.instance_16.parent = this;
	this.instance_16.setTransform(-5,-225,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_16).wait(500));
	// city
	this.instance_17 = new lib.viewcropped();
	this.instance_17.parent = this;
	this.instance_17.setTransform(608,-225,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_17).wait(500));
	// ceiling
	this.instance_18 = new lib.Layer3();
	this.instance_18.parent = this;
	this.instance_18.setTransform(232,-225,1.885,1.792);
	this.timeline.addTween(cjs.Tween.get(this.instance_18).wait(500));
	// clouds3
	// Scrolling cloud strip; its per-frame tween data follows below
	// (machine-generated by Animate, one keyframe per frame).
	this.instance_19 = new lib.Symbol3();
	this.instance_19.parent = this;
	this.instance_19.setTransform(-1412.3,133,1,1,0,0,0,1760.4,640);
this.timeline.addTween(cjs.Tween.get(this.instance_19).wait(1).to({x:-1402.3,y:132.3},0).wait(1).to({x:-1392.2,y:131.6},0).wait(1).to({x:-1382.2,y:130.9},0).wait(1).to({x:-1372.1,y:130.2},0).wait(1).to({x:-1362.1,y:129.5},0).wait(1).to({x:-1352,y:128.8},0).wait(1).to({x:-1342,y:128.1},0).wait(1).to({x:-1332,y:127.5},0).wait(1).to({x:-1321.9,y:126.8},0).wait(1).to({x:-1311.9,y:126.1},0).wait(1).to({x:-1301.8,y:125.4},0).wait(1).to({x:-1291.8,y:124.7},0).wait(1).to({x:-1281.7,y:124},0).wait(1).to({x:-1271.7,y:123.3},0).wait(1).to({x:-1261.7,y:122.6},0).wait(1).to({x:-1251.6,y:121.9},0).wait(1).to({x:-1241.6,y:121.2},0).wait(1).to({x:-1231.5,y:120.5},0).wait(1).to({x:-1221.5,y:119.8},0).wait(1).to({x:-1211.4,y:119.1},0).wait(1).to({x:-1201.4,y:118.4},0).wait(1).to({x:-1191.4,y:117.7},0).wait(1).to({x:-1181.3,y:117.1},0).wait(1).to({x:-1171.3,y:116.4},0).wait(1).to({x:-1161.2,y:115.7},0).wait(1).to({x:-1151.2,y:115},0).wait(1).to({x:-1141.2,y:114.3},0).wait(1).to({x:-1131.1,y:113.6},0).wait(1).to({x:-1121.1,y:112.9},0).wait(1).to({x:-1111,y:112.2},0).wait(1).to({x:-1101,y:111.5},0).wait(1).to({x:-1090.9,y:110.8},0).wait(1).to({x:-1080.9,y:110.1},0).wait(1).to({x:-1070.9,y:109.4},0).wait(1).to({x:-1060.8,y:108.7},0).wait(1).to({x:-1050.8,y:108},0).wait(1).to({x:-1040.7,y:107.3},0).wait(1).to({x:-1030.7,y:106.6},0).wait(1).to({x:-1020.6,y:106},0).wait(1).to({x:-1010.6,y:105.3},0).wait(1).to({x:-1000.6,y:104.6},0).wait(1).to({x:-990.5,y:103.9},0).wait(1).to({x:-980.5,y:103.2},0).wait(1).to({x:-970.4,y:102.5},0).wait(1).to({x:-960.4,y:101.8},0).wait(1).to({x:-950.3,y:101.1},0).wait(1).to({x:-940.3,y:100.4},0).wait(1).to({x:-930.3,y:99.7},0).wait(1).to({x:-920.2,y:99},0).wait(1).to({x:-910.2,y:98.3},0).wait(1).to({x:-900.1,y:97.6},0).wait(1).to({x:-890.1,y:96.9},0).wait(1).to({x:-880,y:96.2},0).wait(1).to({x:-870,y:95.6},0).wait(1).to({x:-860,y:94.9},0).wait(1).to({x:-849.9,y:94.2},0).wait(1).to({x:-839.9,y:93.5},0).wait(1).to({x:-829.8,y:92.8},0).wait(1).to({x:-819.8,y:92.1
},0).wait(1).to({x:-809.7,y:91.4},0).wait(1).to({x:-799.7,y:90.7},0).wait(1).to({x:-789.7,y:90},0).wait(1).to({x:-779.6,y:89.3},0).wait(1).to({x:-769.6,y:88.6},0).wait(1).to({x:-759.5,y:87.9},0).wait(1).to({x:-749.5,y:87.2},0).wait(1).to({x:-739.4,y:86.5},0).wait(1).to({x:-729.4,y:85.8},0).wait(1).to({x:-719.4,y:85.2},0).wait(1).to({x:-709.3,y:84.5},0).wait(1).to({x:-699.3,y:83.8},0).wait(1).to({x:-689.2,y:83.1},0).wait(1).to({x:-679.2,y:82.4},0).wait(1).to({x:-669.1,y:81.7},0).wait(1).to({x:-659.1,y:81},0).wait(1).to({x:-649.1,y:80.3},0).wait(1).to({x:-639,y:79.6},0).wait(1).to({x:-629,y:78.9},0).wait(1).to({x:-618.9,y:78.2},0).wait(1).to({x:-608.9,y:77.5},0).wait(1).to({x:-598.9,y:76.8},0).wait(1).to({x:-588.8,y:76.1},0).wait(1).to({x:-578.8,y:75.4},0).wait(1).to({x:-568.7,y:74.7},0).wait(1).to({x:-558.7,y:74.1},0).wait(1).to({x:-548.6,y:73.4},0).wait(1).to({x:-538.6,y:72.7},0).wait(1).to({x:-528.6,y:72},0).wait(1).to({x:-518.5,y:71.3},0).wait(1).to({x:-508.5,y:70.6},0).wait(1).to({x:-498.4,y:69.9},0).wait(1).to({x:-488.4,y:69.2},0).wait(1).to({x:-478.3,y:68.5},0).wait(1).to({x:-468.3,y:67.8},0).wait(1).to({x:-458.3,y:67.1},0).wait(1).to({x:-448.2,y:66.4},0).wait(1).to({x:-438.2,y:65.7},0).wait(1).to({x:-428.1,y:65},0).wait(1).to({x:-418.1,y:64.3},0).wait(1).to({x:-408,y:63.7},0).wait(1).to({x:-398,y:63},0).wait(1).to({x:-388,y:62.3},0).wait(1).to({x:-377.9,y:61.6},0).wait(1).to({x:-367.9,y:60.9},0).wait(1).to({x:-357.8,y:60.2},0).wait(1).to({x:-347.8,y:59.5},0).wait(1).to({x:-337.7,y:58.8},0).wait(1).to({x:-327.7,y:58.1},0).wait(1).to({x:-317.7,y:57.4},0).wait(1).to({x:-307.6,y:56.7},0).wait(1).to({x:-297.6,y:56},0).wait(1).to({x:-287.5,y:55.3},0).wait(1).to({x:-277.5,y:54.6},0).wait(1).to({x:-267.4,y:53.9},0).wait(1).to({x:-257.4,y:53.3},0).wait(1).to({x:-247.4,y:52.6},0).wait(1).to({x:-237.3,y:51.9},0).wait(1).to({x:-227.3,y:51.2},0).wait(1).to({x:-217.2,y:50.5},0).wait(1).to({x:-207.2,y:49.8},0).wait(1).to({x:-197.1,y:49.1},0).wait(1).to({x:-187.1,y:48.4},0).w
ait(1).to({x:-177.1,y:47.7},0).wait(1).to({x:-167,y:47},0).wait(1).to({x:-157,y:46.3},0).wait(1).to({x:-146.9,y:45.6},0).wait(1).to({x:-136.9,y:44.9},0).wait(1).to({x:-126.8,y:44.2},0).wait(1).to({x:-116.8,y:43.5},0).wait(1).to({x:-106.8,y:42.9},0).wait(1).to({x:-96.7,y:42.2},0).wait(1).to({x:-86.7,y:41.5},0).wait(1).to({x:-76.6,y:40.8},0).wait(1).to({x:-66.6,y:40.1},0).wait(1).to({x:-56.6,y:39.4},0).wait(1).to({x:-46.5,y:38.7},0).wait(1).to({x:-36.5,y:38},0).wait(1).to({x:-26.4,y:37.3},0).wait(1).to({x:-16.4,y:36.6},0).wait(1).to({x:-6.3,y:35.9},0).wait(1).to({x:3.7,y:35.2},0).wait(1).to({x:13.7,y:34.5},0).wait(1).to({x:23.8,y:33.8},0).wait(1).to({x:33.8,y:33.1},0).wait(1).to({x:43.9,y:32.4},0).wait(1).to({x:53.9,y:31.8},0).wait(1).to({x:64,y:31.1},0).wait(1).to({x:74,y:30.4},0).wait(1).to({x:84,y:29.7},0).wait(1).to({x:94.1,y:29},0).wait(1).to({x:104.1,y:28.3},0).wait(1).to({x:114.2,y:27.6},0).wait(1).to({x:124.2,y:26.9},0).wait(1).to({x:134.3,y:26.2},0).wait(1).to({x:144.3,y:25.5},0).wait(1).to({x:154.3,y:24.8},0).wait(1).to({x:164.4,y:24.1},0).wait(1).to({x:174.4,y:23.4},0).wait(1).to({x:184.5,y:22.7},0).wait(1).to({x:194.5,y:22},0).wait(1).to({x:204.6,y:21.4},0).wait(1).to({x:214.6,y:20.7},0).wait(1).to({x:224.6,y:20},0).wait(1).to({x:234.7,y:19.3},0).wait(1).to({x:244.7,y:18.6},0).wait(1).to({x:254.8,y:17.9},0).wait(1).to({x:264.8,y:17.2},0).wait(1).to({x:274.9,y:16.5},0).wait(1).to({x:284.9,y:15.8},0).wait(1).to({x:294.9,y:15.1},0).wait(1).to({x:305,y:14.4},0).wait(1).to({x:315,y:13.7},0).wait(1).to({x:325.1,y:13},0).wait(1).to({x:335.1,y:12.3},0).wait(1).to({x:345.2,y:11.6},0).wait(1).to({x:355.2,y:11},0).wait(1).to({x:365.2,y:10.3},0).wait(1).to({x:375.3,y:9.6},0).wait(1).to({x:385.3,y:8.9},0).wait(1).to({x:395.4,y:8.2},0).wait(1).to({x:405.4,y:7.5},0).wait(1).to({x:415.5,y:6.8},0).wait(1).to({x:425.5,y:6.1},0).wait(1).to({x:435.5,y:5.4},0).wait(1).to({x:445.6,y:4.7},0).wait(1).to({x:455.6,y:4},0).wait(1).to({x:465.7,y:3.3},0).wait(1).to({x:475.7,y:2.6},0).
wait(1).to({x:485.7,y:1.9},0).wait(1).to({x:495.8,y:1.2},0).wait(1).to({x:505.8,y:0.6},0).wait(1).to({x:515.9,y:-0.1},0).wait(1).to({x:525.9,y:-0.8},0).wait(1).to({x:536,y:-1.5},0).wait(1).to({x:546,y:-2.2},0).wait(1).to({x:556,y:-2.9},0).wait(1).to({x:566.1,y:-3.6},0).wait(1).to({x:576.1,y:-4.3},0).wait(1).to({x:586.2,y:-5},0).wait(1).to({x:596.2,y:-5.7},0).wait(1).to({x:606.3,y:-6.4},0).wait(1).to({x:616.3,y:-7.1},0).wait(1).to({x:626.3,y:-7.8},0).wait(1).to({x:636.4,y:-8.5},0).wait(1).to({x:646.4,y:-9.2},0).wait(1).to({x:656.5,y:-9.9},0).wait(1).to({x:666.5,y:-10.5},0).wait(1).to({x:676.6,y:-11.2},0).wait(1).to({x:686.6,y:-11.9},0).wait(1).to({x:696.6,y:-12.6},0).wait(1).to({x:706.7,y:-13.3},0).wait(1).to({x:716.7,y:-14},0).wait(1).to({x:726.8,y:-14.7},0).wait(1).to({x:736.8,y:-15.4},0).wait(1).to({x:746.9,y:-16.1},0).wait(1).to({x:756.9,y:-16.8},0).wait(1).to({x:766.9,y:-17.5},0).wait(1).to({x:777,y:-18.2},0).wait(1).to({x:787,y:-18.9},0).wait(1).to({x:797.1,y:-19.6},0).wait(1).to({x:807.1,y:-20.3},0).wait(1).to({x:817.2,y:-20.9},0).wait(1).to({x:827.2,y:-21.6},0).wait(1).to({x:837.2,y:-22.3},0).wait(1).to({x:847.3,y:-23},0).wait(1).to({x:857.3,y:-23.7},0).wait(1).to({x:867.4,y:-24.4},0).wait(1).to({x:877.4,y:-25.1},0).wait(1).to({x:887.5,y:-25.8},0).wait(1).to({x:897.5,y:-26.5},0).wait(1).to({x:907.5,y:-27.2},0).wait(1).to({x:917.6,y:-27.9},0).wait(1).to({x:927.6,y:-28.6},0).wait(1).to({x:937.7,y:-29.3},0).wait(1).to({x:947.7,y:-30},0).wait(1).to({x:957.8,y:-30.7},0).wait(1).to({x:967.8,y:-31.3},0).wait(1).to({x:977.8,y:-32},0).wait(1).to({x:987.9,y:-32.7},0).wait(1).to({x:997.9,y:-33.4},0).wait(1).to({x:1008,y:-34.1},0).wait(1).to({x:1018,y:-34.8},0).wait(1).to({x:1028,y:-35.5},0).wait(1).to({x:1038.1,y:-36.2},0).wait(1).to({x:1048.1,y:-36.9},0).wait(1).to({x:1058.2,y:-37.6},0).wait(1).to({x:1068.2,y:-38.3},0).wait(1).to({x:1078.3,y:-39},0).wait(1).to({x:1088.3,y:-39.7},0).wait(1).to({x:1098.3,y:-40.4},0).wait(1).to({x:1108.4,y:-41.1},0).wait(1).to({x:1118.4,y
:-41.8},0).wait(1).to({x:1128.5,y:-42.4},0).wait(1).to({x:1138.5,y:-43.1},0).wait(1).to({x:1148.6,y:-43.8},0).wait(1).to({x:1158.6,y:-44.5},0).wait(1).to({x:1168.6,y:-45.2},0).wait(1).to({x:1178.7,y:-45.9},0).wait(1).to({x:1188.7,y:-46.6},0).wait(1).to({x:1198.8,y:-47.3},0).wait(1).to({x:1208.8,y:-48},0).wait(1).to({x:1218.9,y:-48.7},0).wait(1).to({x:1228.9,y:-49.4},0).wait(1).to({x:1238.9,y:-50.1},0).wait(1).to({x:1249,y:-50.8},0).wait(1).to({x:1259,y:-51.5},0).wait(1).to({x:1269.1,y:-52.2},0).wait(1).to({x:1279.1,y:-52.8},0).wait(1).to({x:1289.2,y:-53.5},0).wait(1).to({x:1299.2,y:-54.2},0).wait(1).to({x:1309.2,y:-54.9},0).wait(1).to({x:1319.3,y:-55.6},0).wait(1).to({x:1329.3,y:-56.3},0).wait(1).to({x:1339.4,y:-57},0).wait(1).to({x:1349.4,y:-57.7},0).wait(1).to({x:1359.5,y:-58.4},0).wait(1).to({x:1369.5,y:-59.1},0).wait(1).to({x:1379.5,y:-59.8},0).wait(1).to({x:1389.6,y:-60.5},0).wait(1).to({x:1399.6,y:-61.2},0).wait(1).to({x:1409.7,y:-61.9},0).wait(1).to({x:1419.7,y:-62.6},0).wait(1).to({x:1429.8,y:-63.2},0).wait(1).to({x:1439.8,y:-63.9},0).wait(1).to({x:1449.8,y:-64.6},0).wait(1).to({x:1459.9,y:-65.3},0).wait(1).to({x:1469.9,y:-66},0).wait(1).to({x:1480,y:-66.7},0).wait(1).to({x:1490,y:-67.4},0).wait(1).to({x:1500,y:-68.1},0).wait(1).to({x:1510.1,y:-68.8},0).wait(1).to({x:1520.1,y:-69.5},0).wait(1).to({x:1530.2,y:-70.2},0).wait(1).to({x:1540.2,y:-70.9},0).wait(1).to({x:1550.3,y:-71.6},0).wait(1).to({x:1560.3,y:-72.3},0).wait(1).to({x:1570.3,y:-73},0).wait(1).to({x:1580.4,y:-73.6},0).wait(1).to({x:1590.4,y:-74.3},0).wait(1).to({x:1600.5,y:-75},0).wait(1).to({x:1610.5,y:-75.7},0).wait(1).to({x:1620.6,y:-76.4},0).wait(1).to({x:1630.6,y:-77.1},0).wait(1).to({x:1640.6,y:-77.8},0).wait(1).to({x:1650.7,y:-78.5},0).wait(1).to({x:1660.7,y:-79.2},0).wait(1).to({x:1670.8,y:-79.9},0).wait(1).to({x:1680.8,y:-80.6},0).wait(1).to({x:1690.9,y:-81.3},0).wait(1).to({x:1700.9,y:-82},0).wait(1).to({x:1710.9,y:-82.7},0).wait(1).to({x:1721,y:-83.4},0).wait(1).to({x:1731,y:-84.1},0).wa
it(1).to({x:1741.1,y:-84.7},0).wait(1).to({x:1751.1,y:-85.4},0).wait(1).to({x:1761.1,y:-86.1},0).wait(1).to({x:1771.1,y:-86.8},0).wait(1).to({x:1781.2,y:-87.5},0).wait(1).to({x:1791.2,y:-88.2},0).wait(1).to({x:1801.3,y:-88.9},0).wait(1).to({x:1811.3,y:-89.6},0).wait(1).to({x:1821.4,y:-90.3},0).wait(1).to({x:1831.4,y:-91},0).wait(1).to({x:1841.4,y:-91.7},0).wait(1).to({x:1851.5,y:-92.4},0).wait(1).to({x:1861.5,y:-93.1},0).wait(1).to({x:1871.6,y:-93.8},0).wait(1).to({x:1881.6,y:-94.5},0).wait(1).to({x:1891.7,y:-95.1},0).wait(1).to({x:1901.7,y:-95.8},0).wait(1).to({x:1911.7,y:-96.5},0).wait(1).to({x:1921.8,y:-97.2},0).wait(1).to({x:1931.8,y:-97.9},0).wait(1).to({x:1941.9,y:-98.6},0).wait(1).to({x:1951.9,y:-99.3},0).wait(1).to({x:1962,y:-100},0).wait(1).to({x:1972,y:-100.7},0).wait(1).to({x:1982,y:-101.4},0).wait(1).to({x:1992.1,y:-102.1},0).wait(1).to({x:2002.1,y:-102.8},0).wait(1).to({x:2012.2,y:-103.5},0).wait(1).to({x:2022.2,y:-104.2},0).wait(1).to({x:2032.3,y:-104.9},0).wait(1).to({x:2042.3,y:-105.5},0).wait(1).to({x:2052.3,y:-106.2},0).wait(1).to({x:2062.4,y:-106.9},0).wait(1).to({x:2072.4,y:-107.6},0).wait(1).to({x:2082.5,y:-108.3},0).wait(1).to({x:2092.5,y:-109},0).wait(1).to({x:2102.6,y:-109.7},0).wait(1).to({x:2112.6,y:-110.4},0).wait(1).to({x:2122.6,y:-111.1},0).wait(1).to({x:2132.7,y:-111.8},0).wait(1).to({x:2142.7,y:-112.5},0).wait(1).to({x:2152.8,y:-113.2},0).wait(1).to({x:2162.8,y:-113.9},0).wait(1).to({x:2172.9,y:-114.6},0).wait(1).to({x:2182.9,y:-115.3},0).wait(1).to({x:2192.9,y:-116},0).wait(1).to({x:2203,y:-116.6},0).wait(1).to({x:2213,y:-117.3},0).wait(1).to({x:2223.1,y:-118},0).wait(1).to({x:2233.1,y:-118.7},0).wait(1).to({x:2243.2,y:-119.4},0).wait(1).to({x:2253.2,y:-120.1},0).wait(1).to({x:2263.2,y:-120.8},0).wait(1).to({x:2273.3,y:-121.5},0).wait(1).to({x:2283.3,y:-122.2},0).wait(1).to({x:2293.4,y:-122.9},0).wait(1).to({x:2303.4,y:-123.6},0).wait(1).to({x:2313.4,y:-124.3},0).wait(1).to({x:2323.5,y:-125},0).wait(1).to({x:2333.5,y:-125.7},0).wait(1
).to({x:2343.6,y:-126.4},0).wait(1).to({x:2353.6,y:-127},0).wait(1).to({x:2363.7,y:-127.7},0).wait(1).to({x:2373.7,y:-128.4},0).wait(1).to({x:2383.7,y:-129.1},0).wait(1).to({x:2393.8,y:-129.8},0).wait(1).to({x:2403.8,y:-130.5},0).wait(1).to({x:2413.9,y:-131.2},0).wait(1).to({x:2423.9,y:-131.9},0).wait(1).to({x:2434,y:-132.6},0).wait(1).to({x:2444,y:-133.3},0).wait(1).to({x:2454,y:-134},0).wait(1).to({x:2464.1,y:-134.7},0).wait(1).to({x:2474.1,y:-135.4},0).wait(1).to({x:2484.2,y:-136.1},0).wait(1).to({x:2494.2,y:-136.8},0).wait(1).to({x:2504.3,y:-137.4},0).wait(1).to({x:2514.3,y:-138.1},0).wait(1).to({x:2524.3,y:-138.8},0).wait(1).to({x:2534.4,y:-139.5},0).wait(1).to({x:2544.4,y:-140.2},0).wait(1).to({x:2554.5,y:-140.9},0).wait(1).to({x:2564.5,y:-141.6},0).wait(1).to({x:2574.6,y:-142.3},0).wait(1).to({x:2584.6,y:-143},0).wait(1).to({x:2594.6,y:-143.7},0).wait(1).to({x:2604.7,y:-144.4},0).wait(1).to({x:2614.7,y:-145.1},0).wait(1).to({x:2624.8,y:-145.8},0).wait(1).to({x:2634.8,y:-146.5},0).wait(1).to({x:2644.9,y:-147.2},0).wait(1).to({x:2654.9,y:-147.8},0).wait(1).to({x:2664.9,y:-148.5},0).wait(1).to({x:2675,y:-149.2},0).wait(1).to({x:2685,y:-149.9},0).wait(1).to({x:2695.1,y:-150.6},0).wait(1).to({x:2705.1,y:-151.3},0).wait(1).to({x:2715.2,y:-152},0).wait(1).to({x:2725.2,y:-152.7},0).wait(1).to({x:2735.2,y:-153.4},0).wait(1).to({x:2745.3,y:-154.1},0).wait(1).to({x:2755.3,y:-154.8},0).wait(1).to({x:2765.4,y:-155.5},0).wait(1).to({x:2775.4,y:-156.2},0).wait(1).to({x:2785.5,y:-156.9},0).wait(1).to({x:2795.5,y:-157.6},0).wait(1).to({x:2805.5,y:-158.3},0).wait(1).to({x:2815.6,y:-158.9},0).wait(1).to({x:2825.6,y:-159.6},0).wait(1).to({x:2835.7,y:-160.3},0).wait(1).to({x:2845.7,y:-161},0).wait(1).to({x:2855.7,y:-161.7},0).wait(1).to({x:2865.8,y:-162.4},0).wait(1).to({x:2875.8,y:-163.1},0).wait(1).to({x:2885.9,y:-163.8},0).wait(1).to({x:2895.9,y:-164.5},0).wait(1).to({x:2906,y:-165.2},0).wait(1).to({x:2916,y:-165.9},0).wait(1).to({x:2926,y:-166.6},0).wait(1).to({x:2936.1,y:-16
7.3},0).wait(1).to({x:2946.1,y:-168},0).wait(1).to({x:2956.2,y:-168.7},0).wait(1).to({x:2966.2,y:-169.3},0).wait(1).to({x:2976.3,y:-170},0).wait(1).to({x:2986.3,y:-170.7},0).wait(1).to({x:2996.3,y:-171.4},0).wait(1).to({x:3006.4,y:-172.1},0).wait(1).to({x:3016.4,y:-172.8},0).wait(1).to({x:3026.5,y:-173.5},0).wait(1).to({x:3036.5,y:-174.2},0).wait(1).to({x:3046.6,y:-174.9},0).wait(1).to({x:3056.6,y:-175.6},0).wait(1).to({x:3066.6,y:-176.3},0).wait(1).to({x:3076.7,y:-177},0).wait(1).to({x:3086.7,y:-177.7},0).wait(1).to({x:3096.8,y:-178.4},0).wait(1).to({x:3106.8,y:-179.1},0).wait(1).to({x:3116.9,y:-179.7},0).wait(1).to({x:3126.9,y:-180.4},0).wait(1).to({x:3136.9,y:-181.1},0).wait(1).to({x:3147,y:-181.8},0).wait(1).to({x:3157,y:-182.5},0).wait(1).to({x:3167.1,y:-183.2},0).wait(1).to({x:3177.1,y:-183.9},0).wait(1).to({x:3187.2,y:-184.6},0).wait(1).to({x:3197.2,y:-185.3},0).wait(1).to({x:3207.2,y:-186},0).wait(1).to({x:3217.3,y:-186.7},0).wait(1).to({x:3227.3,y:-187.4},0).wait(1).to({x:3237.4,y:-188.1},0).wait(1).to({x:3247.4,y:-188.8},0).wait(1).to({x:3257.5,y:-189.5},0).wait(1).to({x:3267.5,y:-190.1},0).wait(1).to({x:3277.5,y:-190.8},0).wait(1).to({x:3287.6,y:-191.5},0).wait(1).to({x:3297.6,y:-192.2},0).wait(1).to({x:3307.7,y:-192.9},0).wait(1).to({x:3317.7,y:-193.6},0).wait(1).to({x:3327.8,y:-194.3},0).wait(1).to({x:3337.8,y:-195},0).wait(1).to({x:3347.8,y:-195.7},0).wait(1).to({x:3357.9,y:-196.4},0).wait(1).to({x:3367.9,y:-197.1},0).wait(1).to({x:3378,y:-197.8},0).wait(1).to({x:3388,y:-198.5},0).wait(1).to({x:3398,y:-199.2},0).wait(1).to({x:3408.1,y:-199.9},0).wait(1).to({x:3418.1,y:-200.6},0).wait(1).to({x:3428.2,y:-201.2},0).wait(1).to({x:3438.2,y:-201.9},0).wait(1).to({x:3448.3,y:-202.6},0).wait(1).to({x:3458.3,y:-203.3},0).wait(1).to({x:3468.3,y:-204},0).wait(1).to({x:3478.4,y:-204.7},0).wait(1).to({x:3488.4,y:-205.4},0).wait(1).to({x:3498.5,y:-206.1},0).wait(1).to({x:3508.5,y:-206.8},0).wait(1).to({x:3518.6,y:-207.5},0).wait(1).to({x:3528.6,y:-208.2},0).wait(1).
to({x:3538.6,y:-208.9},0).wait(1).to({x:3548.7,y:-209.6},0).wait(1).to({x:3558.7,y:-210.3},0).wait(1).to({x:3568.8,y:-211},0).wait(1).to({x:3578.8,y:-211.6},0).wait(1).to({x:3588.9,y:-212.3},0).wait(1).to({x:3598.9,y:-213},0).wait(1));
// bluesky
this.instance_20 = new lib.viewsky();
this.instance_20.parent = this;
this.instance_20.setTransform(610,-225,1.885,1.792);
this.timeline.addTween(cjs.Tween.get(this.instance_20).wait(500));
// floor
this.instance_21 = new lib.wood();
this.instance_21.parent = this;
this.instance_21.setTransform(-5,729,1.885,1.792);
this.timeline.addTween(cjs.Tween.get(this.instance_21).wait(500));
}).prototype = p = new cjs.MovieClip();
p.nominalBounds = rect = new cjs.Rectangle(-2212.7,32.9,5098.7,1658.9);
p.frameBounds = [rect, new cjs.Rectangle(-2202.7,32.3,5088.7,1659.5), new cjs.Rectangle(-2192.7,31.6,5078.7,1660.2), new cjs.Rectangle(-2182.6,30.9,5068.6,1660.9), new cjs.Rectangle(-2172.6,30.2,5058.6,1661.6), new cjs.Rectangle(-2162.5,29.5,5048.5,1662.3), new cjs.Rectangle(-2152.5,28.8,5038.5,1663), new cjs.Rectangle(-2142.5,28.1,5028.5,1663.7), new cjs.Rectangle(-2132.4,27.5,5018.4,1664.4), new cjs.Rectangle(-2122.4,26.8,5008.4,1665.1), new cjs.Rectangle(-2112.3,26,4998.3,1665.8), new cjs.Rectangle(-2102.3,25.4,4988.3,1666.5), new cjs.Rectangle(-2092.2,24.6,4978.2,1667.2), new cjs.Rectangle(-2082.2,24,4968.2,1667.9), new cjs.Rectangle(-2072.2,23.3,4958.2,1668.6), new cjs.Rectangle(-2062.1,22.5,4948.1,1669.3), new cjs.Rectangle(-2052.1,21.9,4938.1,1669.9), new cjs.Rectangle(-2042,21.2,4928,1670.6), new cjs.Rectangle(-2032,20.5,4918,1671.3), new cjs.Rectangle(-2021.9,19.8,4907.9,1672), new cjs.Rectangle(-2011.9,19.1,4897.9,1672.7), new cjs.Rectangle(-2001.9,18.4,4887.9,1673.4), new cjs.Rectangle(-1991.8,17.7,4877.8,1674.1), new cjs.Rectangle(-1981.8,17,4867.8,1674.8), new cjs.Rectangle(-1971.7,16.4,4857.7,1675.5), new cjs.Rectangle(-1961.7,15.6,4847.7,1676.2), new cjs.Rectangle(-1951.6,15,4837.6,1676.9), new cjs.Rectangle(-1941.6,14.3,4827.6,1677.6), new cjs.Rectangle(-1931.6,13.5,4817.6,1678.3), new cjs.Rectangle(-1921.5,12.9,4807.5,1679), new cjs.Rectangle(-1911.5,12.1,4797.5,1679.7), new cjs.Rectangle(-1901.4,11.5,4787.4,1680.3), new cjs.Rectangle(-1891.4,10.8,4777.4,1681), new cjs.Rectangle(-1881.3,10.1,4767.3,1681.7), new cjs.Rectangle(-1871.3,9.4,4757.3,1682.4), new cjs.Rectangle(-1861.3,8.7,4747.3,1683.1), new cjs.Rectangle(-1851.2,8,4737.2,1683.8), new cjs.Rectangle(-1841.2,7.3,4727.2,1684.5), new cjs.Rectangle(-1831.1,6.6,4717.1,1685.2), new cjs.Rectangle(-1821.1,6,4707.1,1685.9), new cjs.Rectangle(-1811,5.3,4697,1686.6), new cjs.Rectangle(-1801,4.5,4687,1687.3), new cjs.Rectangle(-1791,3.9,4677,1688), new cjs.Rectangle(-1780.9,3.1,4666.9,1688.7), new 
cjs.Rectangle(-1770.9,2.5,4656.9,1689.4), new cjs.Rectangle(-1760.8,1.8,4646.8,1690.1), new cjs.Rectangle(-1750.8,1.1,4636.8,1690.7), new cjs.Rectangle(-1740.7,0.4,4626.7,1691.4), new cjs.Rectangle(-1730.7,-0.3,4616.7,1692.1), new cjs.Rectangle(-1720.7,-1,4606.7,1692.8), new cjs.Rectangle(-1710.6,-1.7,4596.6,1693.5), new cjs.Rectangle(-1700.6,-2.4,4586.6,1694.2), new cjs.Rectangle(-1690.5,-3.1,4576.5,1694.9), new cjs.Rectangle(-1680.5,-3.8,4566.5,1695.6), new cjs.Rectangle(-1670.5,-4.5,4556.5,1696.3), new cjs.Rectangle(-1660.4,-5.1,4546.4,1697), new cjs.Rectangle(-1650.4,-5.9,4536.4,1697.7), new cjs.Rectangle(-1640.3,-6.5,4526.3,1698.4), new cjs.Rectangle(-1630.3,-7.2,4516.3,1699.1), new cjs.Rectangle(-1620.2,-8,4506.2,1699.8), new cjs.Rectangle(-1610.2,-8.6,4496.2,1700.5), new cjs.Rectangle(-1600.2,-9.4,4486.2,1701.2), new cjs.Rectangle(-1590.1,-10,4476.1,1701.8), new cjs.Rectangle(-1580.1,-10.7,4466.1,1702.5), new cjs.Rectangle(-1570.1,-11.4,4456,1703.2), new cjs.Rectangle(-1560,-12.1,4446,1703.9), new cjs.Rectangle(-1549.9,-12.8,4435.9,1704.6), new cjs.Rectangle(-1539.9,-13.5,4425.9,1705.3), new cjs.Rectangle(-1529.9,-14.2,4415.9,1706), new cjs.Rectangle(-1519.8,-14.9,4405.8,1706.7), new cjs.Rectangle(-1509.8,-15.5,4395.8,1707.4), new cjs.Rectangle(-1499.7,-16.2,4385.7,1708.1), new cjs.Rectangle(-1489.7,-17,4375.7,1708.8), new cjs.Rectangle(-1479.6,-17.6,4365.6,1709.5), new cjs.Rectangle(-1469.6,-18.4,4355.6,1710.2), new cjs.Rectangle(-1459.6,-19,4345.6,1710.9), new cjs.Rectangle(-1449.6,-19.7,4335.5,1711.6), new cjs.Rectangle(-1439.5,-20.4,4325.5,1712.2), new cjs.Rectangle(-1429.4,-21.1,4315.4,1712.9), new cjs.Rectangle(-1419.4,-21.8,4305.4,1713.6), new cjs.Rectangle(-1409.3,-22.5,4295.3,1714.3), new cjs.Rectangle(-1399.3,-23.2,4285.3,1715), new cjs.Rectangle(-1389.3,-23.9,4275.3,1715.7), new cjs.Rectangle(-1379.2,-24.6,4265.2,1716.4), new cjs.Rectangle(-1369.2,-25.3,4255.2,1717.1), new cjs.Rectangle(-1359.1,-26,4245.1,1717.8), new 
cjs.Rectangle(-1349.1,-26.6,4235.1,1718.5), new cjs.Rectangle(-1339.1,-27.4,4225,1719.2), new cjs.Rectangle(-1329.1,-28,4215,1719.9), new cjs.Rectangle(-1319,-28.7,4205,1720.6), new cjs.Rectangle(-1308.9,-29.5,4194.9,1721.3), new cjs.Rectangle(-1298.9,-30.1,4184.9,1722), new cjs.Rectangle(-1288.8,-30.8,4174.8,1722.6), new cjs.Rectangle(-1278.8,-31.5,4164.8,1723.3), new cjs.Rectangle(-1268.7,-32.2,4154.7,1724), new cjs.Rectangle(-1258.7,-32.9,4144.7,1724.7), new cjs.Rectangle(-1248.7,-33.6,4134.7,1725.4), new cjs.Rectangle(-1238.6,-34.3,4124.6,1726.1), new cjs.Rectangle(-1228.6,-35,4114.6,1726.8), new cjs.Rectangle(-1218.6,-35.7,4104.5,1727.5), new cjs.Rectangle(-1208.5,-36.4,4094.5,1728.2), new cjs.Rectangle(-1198.4,-37,4084.4,1728.9), new cjs.Rectangle(-1188.4,-37.7,4074.4,1729.6), new cjs.Rectangle(-1178.4,-38.5,4064.4,1730.3), new cjs.Rectangle(-1168.3,-39.1,4054.3,1731), new cjs.Rectangle(-1158.3,-39.9,4044.3,1731.7), new cjs.Rectangle(-1148.2,-40.5,4034.2,1732.4), new cjs.Rectangle(-1138.2,-41.2,4024.2,1733), new cjs.Rectangle(-1128.2,-41.9,4014.2,1733.7), new cjs.Rectangle(-1118.1,-42.6,4004.1,1734.4), new cjs.Rectangle(-1108.1,-43.3,3994.1,1735.1), new cjs.Rectangle(-1098.1,-44,3984,1735.8), new cjs.Rectangle(-1088,-44.7,3974,1736.5), new cjs.Rectangle(-1077.9,-45.4,3963.9,1737.2), new cjs.Rectangle(-1067.9,-46.1,3953.9,1737.9), new cjs.Rectangle(-1057.9,-46.7,3943.9,1738.6), new cjs.Rectangle(-1047.8,-47.5,3933.8,1739.3), new cjs.Rectangle(-1037.8,-48.1,3923.8,1740), new cjs.Rectangle(-1027.7,-48.9,3913.7,1740.7), new cjs.Rectangle(-1017.7,-49.5,3903.7,1741.4), new cjs.Rectangle(-1007.6,-50.2,3893.6,1742.1), new cjs.Rectangle(-997.6,-51,3883.6,1742.8), new cjs.Rectangle(-987.6,-51.6,3873.6,1743.5), new cjs.Rectangle(-977.5,-52.3,3863.5,1744.1), new cjs.Rectangle(-967.5,-53,3853.5,1744.8), new cjs.Rectangle(-957.4,-53.7,3843.4,1745.5), new cjs.Rectangle(-947.4,-54.4,3833.4,1746.2), new cjs.Rectangle(-937.3,-55.1,3823.3,1746.9), new 
cjs.Rectangle(-927.3,-55.8,3813.3,1747.6), new cjs.Rectangle(-917.3,-56.5,3803.3,1748.3), new cjs.Rectangle(-907.2,-57.1,3793.2,1749), new cjs.Rectangle(-897.2,-57.9,3783.2,1749.7), new cjs.Rectangle(-887.1,-58.5,3773.1,1750.4), new cjs.Rectangle(-877.1,-59.2,3763.1,1751.1), new cjs.Rectangle(-867,-60,3753,1751.8), new cjs.Rectangle(-857,-60.6,3743,1752.5), new cjs.Rectangle(-847,-61.4,3733,1753.2), new cjs.Rectangle(-836.9,-62,3722.9,1753.9), new cjs.Rectangle(-826.9,-62.7,3712.9,1754.5), new cjs.Rectangle(-816.8,-63.4,3702.8,1755.2), new cjs.Rectangle(-806.8,-64.1,3692.8,1755.9), new cjs.Rectangle(-796.7,-64.8,3682.7,1756.6), new cjs.Rectangle(-786.7,-65.5,3672.7,1757.3), new cjs.Rectangle(-776.7,-66.2,3662.7,1758), new cjs.Rectangle(-766.7,-66.9,3652.6,1758.7), new cjs.Rectangle(-756.6,-67.6,3642.6,1759.4), new cjs.Rectangle(-746.5,-68.2,3632.5,1760.1), new cjs.Rectangle(-736.5,-69,3622.5,1760.8), new cjs.Rectangle(-726.4,-69.6,3612.4,1761.5), new cjs.Rectangle(-716.4,-70.4,3602.4,1762.2), new cjs.Rectangle(-706.4,-71,3592.4,1762.9), new cjs.Rectangle(-696.3,-71.7,3582.3,1763.6), new cjs.Rectangle(-686.3,-72.5,3572.3,1764.3), new cjs.Rectangle(-676.2,-73.1,3562.2,1764.9), new cjs.Rectangle(-666.2,-73.8,3552.2,1765.6), new cjs.Rectangle(-656.2,-74.5,3542.1,1766.3), new cjs.Rectangle(-646.2,-75.2,3532.1,1767), new cjs.Rectangle(-636.1,-75.9,3522.1,1767.7), new cjs.Rectangle(-626,-76.6,3520.8,1768.4), new cjs.Rectangle(-616,-77.3,3520.8,1769.1), new cjs.Rectangle(-605.9,-78,3520.8,1769.8), new cjs.Rectangle(-595.9,-78.6,3520.8,1770.5), new cjs.Rectangle(-585.9,-79.4,3520.8,1771.2), new cjs.Rectangle(-575.8,-80,3520.8,1771.9), new cjs.Rectangle(-565.8,-80.7,3520.8,1772.6), new cjs.Rectangle(-555.7,-81.5,3520.8,1773.3), new cjs.Rectangle(-545.7,-82.1,3520.8,1774), new cjs.Rectangle(-535.7,-82.9,3520.8,1774.7), new cjs.Rectangle(-525.6,-83.5,3520.8,1775.4), new cjs.Rectangle(-515.6,-84.2,3520.8,1776), new cjs.Rectangle(-505.5,-84.9,3520.8,1776.7), new 
cjs.Rectangle(-495.5,-85.6,3520.8,1777.4), new cjs.Rectangle(-485.4,-86.3,3520.8,1778.1), new cjs.Rectangle(-475.4,-87,3520.8,1778.8), new cjs.Rectangle(-465.3,-87.7,3520.8,1779.5), new cjs.Rectangle(-455.3,-88.4,3520.8,1780.2), new cjs.Rectangle(-445.3,-89,3520.8,1780.9), new cjs.Rectangle(-435.2,-89.7,3520.8,1781.6), new cjs.Rectangle(-425.2,-90.5,3520.8,1782.3), new cjs.Rectangle(-415.2,-91.1,3520.8,1783), new cjs.Rectangle(-405.1,-91.9,3520.8,1783.7), new cjs.Rectangle(-395,-92.5,3520.8,1784.4), new cjs.Rectangle(-385,-93.2,3520.8,1785.1), new cjs.Rectangle(-375,-94,3520.8,1785.8), new cjs.Rectangle(-365,-94.6,3520.8,1786.4), new cjs.Rectangle(-354.9,-95.3,3520.8,1787.1), new cjs.Rectangle(-344.8,-96,3520.8,1787.8), new cjs.Rectangle(-334.8,-96.7,3520.8,1788.5), new cjs.Rectangle(-324.7,-97.4,3520.8,1789.2), new cjs.Rectangle(-314.7,-98.1,3520.8,1789.9), new cjs.Rectangle(-304.7,-98.8,3520.8,1790.6), new cjs.Rectangle(-294.7,-99.5,3520.8,1791.3), new cjs.Rectangle(-284.6,-100.1,3520.8,1792), new cjs.Rectangle(-274.5,-100.9,3520.8,1792.7), new cjs.Rectangle(-264.5,-101.5,3520.8,1793.4), new cjs.Rectangle(-254.5,-102.2,3520.8,1794.1), new cjs.Rectangle(-244.5,-103,3520.8,1794.8), new cjs.Rectangle(-234.4,-103.6,3520.8,1795.5), new cjs.Rectangle(-224.3,-104.4,3520.8,1796.2), new cjs.Rectangle(-214.3,-105,3520.8,1796.8), new cjs.Rectangle(-204.2,-105.7,3520.8,1797.5), new cjs.Rectangle(-194.2,-106.4,3520.8,1798.2), new cjs.Rectangle(-184.2,-107.1,3520.8,1798.9), new cjs.Rectangle(-174.2,-107.8,3520.8,1799.6), new cjs.Rectangle(-164.1,-108.5,3520.8,1800.3), new cjs.Rectangle(-154,-109.2,3520.8,1801), new cjs.Rectangle(-144,-109.9,3520.8,1801.7), new cjs.Rectangle(-134,-110.5,3520.8,1802.4), new cjs.Rectangle(-123.9,-111.2,3520.8,1803.1), new cjs.Rectangle(-113.8,-112,3520.8,1803.8), new cjs.Rectangle(-103.8,-112.6,3520.8,1804.5), new cjs.Rectangle(-93.8,-113.4,3520.8,1805.2), new cjs.Rectangle(-83.7,-114,3520.8,1805.9), new cjs.Rectangle(-73.7,-114.7,3520.8,1806.6), 
new cjs.Rectangle(-63.6,-115.4,3520.8,1807.2), new cjs.Rectangle(-53.6,-116.1,3520.8,1807.9), new cjs.Rectangle(-43.6,-116.8,3520.8,1808.6), new cjs.Rectangle(-33.5,-117.5,3520.8,1809.3), new cjs.Rectangle(-23.5,-118.2,3520.8,1810), new cjs.Rectangle(-13.5,-118.9,3520.8,1810.7), new cjs.Rectangle(-3.4,-119.6,3520.8,1811.4), new cjs.Rectangle(6.6,-120.3,3520.8,1812.1), new cjs.Rectangle(16.7,-121,3520.8,1812.8), new cjs.Rectangle(26.7,-121.6,3520.8,1813.5), new cjs.Rectangle(36.8,-122.4,3520.8,1814.2), new cjs.Rectangle(46.8,-123,3520.8,1814.9), new cjs.Rectangle(56.9,-123.7,3520.8,1815.6), new cjs.Rectangle(66.9,-124.5,3520.8,1816.3), new cjs.Rectangle(77,-125.1,3520.8,1817), new cjs.Rectangle(87,-125.9,3520.8,1817.7), new cjs.Rectangle(97,-126.5,3520.8,1818.3), new cjs.Rectangle(107,-127.2,3520.8,1819), new cjs.Rectangle(117.1,-127.9,3520.8,1819.7), new cjs.Rectangle(127.1,-128.6,3520.8,1820.4), new cjs.Rectangle(137.2,-129.3,3520.8,1821.1), new cjs.Rectangle(147.3,-130,3520.8,1821.8), new cjs.Rectangle(157.3,-130.7,3520.8,1822.5), new cjs.Rectangle(167.3,-131.4,3520.8,1823.2), new cjs.Rectangle(177.4,-132,3520.8,1823.9), new cjs.Rectangle(187.4,-132.7,3520.8,1824.6), new cjs.Rectangle(197.5,-133.5,3520.8,1825.3), new cjs.Rectangle(207.5,-134.1,3520.8,1826), new cjs.Rectangle(217.5,-134.9,3520.8,1826.7), new cjs.Rectangle(227.5,-135.5,3520.8,1827.4), new cjs.Rectangle(237.6,-136.2,3520.8,1828.1), new cjs.Rectangle(247.7,-136.9,3520.8,1828.7), new cjs.Rectangle(257.7,-137.6,3520.8,1829.4), new cjs.Rectangle(267.8,-138.3,3520.8,1830.1), new cjs.Rectangle(277.8,-139,3520.8,1830.8), new cjs.Rectangle(287.9,-139.7,3520.8,1831.5), new cjs.Rectangle(297.9,-140.4,3520.8,1832.2), new cjs.Rectangle(307.9,-141.1,3520.8,1832.9), new cjs.Rectangle(318,-141.8,3520.8,1833.6), new cjs.Rectangle(328,-142.5,3520.8,1834.3), new cjs.Rectangle(338,-143.1,3520.8,1835), new cjs.Rectangle(348.1,-143.9,3520.8,1835.7), new cjs.Rectangle(358.2,-144.5,3520.8,1836.4), new 
cjs.Rectangle(368.2,-145.2,3520.8,1837.1), new cjs.Rectangle(378.2,-146,3520.8,1837.8), new cjs.Rectangle(388.3,-146.6,3520.8,1838.5), new cjs.Rectangle(398.3,-147.3,3520.8,1839.1), new cjs.Rectangle(408.4,-148,3520.8,1839.8), new cjs.Rectangle(418.4,-148.7,3520.8,1840.5), new cjs.Rectangle(428.4,-149.4,3520.8,1841.2), new cjs.Rectangle(438.5,-150.1,3520.8,1841.9), new cjs.Rectangle(448.5,-150.8,3520.8,1842.6), new cjs.Rectangle(458.6,-151.5,3520.8,1843.3), new cjs.Rectangle(468.6,-152.2,3520.8,1844), new cjs.Rectangle(478.7,-152.9,3520.8,1844.7), new cjs.Rectangle(488.7,-153.5,3520.8,1845.4), new cjs.Rectangle(498.7,-154.2,3520.8,1846.1), new cjs.Rectangle(508.8,-155,3520.8,1846.8), new cjs.Rectangle(518.8,-155.6,3520.8,1847.5), new cjs.Rectangle(528.9,-156.4,3520.8,1848.2), new cjs.Rectangle(538.9,-157,3520.8,1848.9), new cjs.Rectangle(549,-157.7,3520.8,1849.6), new cjs.Rectangle(559,-158.4,3520.8,1850.2), new cjs.Rectangle(569,-159.1,3520.8,1850.9), new cjs.Rectangle(579.1,-159.8,3520.8,1851.6), new cjs.Rectangle(589.1,-160.5,3520.8,1852.3), new cjs.Rectangle(599.2,-161.2,3520.8,1853), new cjs.Rectangle(609.2,-161.9,3520.8,1853.7), new cjs.Rectangle(619.3,-162.6,3520.8,1854.4), new cjs.Rectangle(629.3,-163.2,3520.8,1855.1), new cjs.Rectangle(639.3,-164,3520.8,1855.8), new cjs.Rectangle(649.4,-164.6,3520.8,1856.5), new cjs.Rectangle(659.4,-165.4,3520.8,1857.2), new cjs.Rectangle(669.5,-166,3520.8,1857.9), new cjs.Rectangle(679.5,-166.7,3520.8,1858.6), new cjs.Rectangle(689.6,-167.5,3520.8,1859.3), new cjs.Rectangle(699.6,-168.1,3520.8,1860), new cjs.Rectangle(709.6,-168.8,3520.8,1860.6), new cjs.Rectangle(719.7,-169.5,3520.8,1861.3), new cjs.Rectangle(729.7,-170.2,3520.8,1862), new cjs.Rectangle(739.8,-170.9,3520.8,1862.7), new cjs.Rectangle(749.8,-171.6,3520.8,1863.4), new cjs.Rectangle(759.9,-172.3,3520.8,1864.1), new cjs.Rectangle(769.9,-173,3520.8,1864.8), new cjs.Rectangle(779.9,-173.6,3520.8,1865.5), new cjs.Rectangle(790,-174.4,3520.8,1866.2), new 
cjs.Rectangle(800,-175,3520.8,1866.9), new cjs.Rectangle(810.1,-175.7,3520.8,1867.6), new cjs.Rectangle(820.1,-176.5,3520.8,1868.3), new cjs.Rectangle(830.2,-177.1,3520.8,1869), new cjs.Rectangle(840.2,-177.9,3520.8,1869.7), new cjs.Rectangle(850.2,-178.5,3520.8,1870.4), new cjs.Rectangle(860.3,-179.2,3520.8,1871), new cjs.Rectangle(870.3,-179.9,3520.8,1871.7), new cjs.Rectangle(880.4,-180.6,3520.8,1872.4), new cjs.Rectangle(890.4,-181.3,3520.8,1873.1), new cjs.Rectangle(900.5,-182,3520.8,1873.8), new cjs.Rectangle(910.5,-182.7,3520.8,1874.5), new cjs.Rectangle(920.5,-183.4,3520.8,1875.2), new cjs.Rectangle(930.6,-184.1,3520.8,1875.9), new cjs.Rectangle(940.6,-184.7,3520.8,1876.6), new cjs.Rectangle(950.7,-185.5,3520.8,1877.3), new cjs.Rectangle(955,-186.1,3526.5,1878), new cjs.Rectangle(955,-186.9,3536.5,1878.7), new cjs.Rectangle(955,-187.5,3546.5,1879.4), new cjs.Rectangle(955,-188.2,3556.6,1880.1), new cjs.Rectangle(955,-189,3566.6,1880.8), new cjs.Rectangle(955,-189.6,3576.7,1881.4), new cjs.Rectangle(955,-190.3,3586.7,1882.1), new cjs.Rectangle(955,-191,3596.8,1882.8), new cjs.Rectangle(955,-191.7,3606.8,1883.5), new cjs.Rectangle(955,-192.4,3616.8,1884.2), new cjs.Rectangle(955,-193.1,3626.9,1884.9), new cjs.Rectangle(955,-193.8,3636.9,1885.6), new cjs.Rectangle(955,-194.5,3647,1886.3), new cjs.Rectangle(955,-195.1,3657,1887), new cjs.Rectangle(955,-195.9,3667.1,1887.7), new cjs.Rectangle(955,-196.5,3677.1,1888.4), new cjs.Rectangle(955,-197.2,3687.1,1889.1), new cjs.Rectangle(955,-198,3697.2,1889.8), new cjs.Rectangle(955,-198.6,3707.2,1890.5), new cjs.Rectangle(955,-199.4,3717.3,1891.2), new cjs.Rectangle(955,-200,3727.3,1891.9), new cjs.Rectangle(955,-200.7,3737.4,1892.5), new cjs.Rectangle(955,-201.4,3747.4,1893.2), new cjs.Rectangle(955,-202.1,3757.4,1893.9), new cjs.Rectangle(955,-202.8,3767.5,1894.6), new cjs.Rectangle(955,-203.5,3777.5,1895.3), new cjs.Rectangle(955,-204.2,3787.6,1896), new cjs.Rectangle(955,-204.9,3797.6,1896.7), new 
cjs.Rectangle(955,-205.5,3807.6,1897.4), new cjs.Rectangle(955,-206.2,3817.7,1898.1), new cjs.Rectangle(955,-207,3827.7,1898.8), new cjs.Rectangle(955,-207.6,3837.8,1899.5), new cjs.Rectangle(955,-208.3,3847.8,1900.2), new cjs.Rectangle(955,-209,3857.9,1900.9), new cjs.Rectangle(955,-209.7,3867.9,1901.6), new cjs.Rectangle(955,-210.5,3877.9,1902.3), new cjs.Rectangle(955,-211.1,3888,1902.9), new cjs.Rectangle(955,-211.8,3898,1903.6), new cjs.Rectangle(955,-212.5,3908.1,1904.3), new cjs.Rectangle(955,-213.2,3918.1,1905), new cjs.Rectangle(955,-213.9,3928.2,1905.7), new cjs.Rectangle(955,-214.6,3938.2,1906.4), new cjs.Rectangle(955,-215.3,3948.2,1907.1), new cjs.Rectangle(955,-216,3958.3,1907.8), new cjs.Rectangle(955,-216.6,3968.3,1908.5), new cjs.Rectangle(955,-217.3,3978.4,1909.2), new cjs.Rectangle(955,-218,3988.4,1909.9), new cjs.Rectangle(955,-218.7,3998.5,1910.6), new cjs.Rectangle(955,-219.5,4008.5,1911.3), new cjs.Rectangle(955,-220.1,4018.5,1912), new cjs.Rectangle(955,-220.8,4028.6,1912.7), new cjs.Rectangle(955,-221.5,4038.6,1913.3), new cjs.Rectangle(955,-222.2,4048.7,1914), new cjs.Rectangle(955,-222.9,4058.7,1914.7), new cjs.Rectangle(955,-223.6,4068.8,1915.4), new cjs.Rectangle(955,-224.3,4078.8,1916.1), new cjs.Rectangle(955,-225,4088.8,1916.8), new cjs.Rectangle(955,-225.7,4098.9,1917.5), new cjs.Rectangle(955,-226.4,4108.9,1918.2), new cjs.Rectangle(955,-227,4119,1918.9), new cjs.Rectangle(955,-227.7,4129,1919.6), new cjs.Rectangle(955,-228.5,4139.1,1920.3), new cjs.Rectangle(955,-229.1,4149.1,1921), new cjs.Rectangle(955,-229.8,4159.1,1921.7), new cjs.Rectangle(955,-230.5,4169.2,1922.4), new cjs.Rectangle(955,-231.2,4179.2,1923.1), new cjs.Rectangle(955,-231.9,4189.3,1923.7), new cjs.Rectangle(955,-232.6,4199.3,1924.4), new cjs.Rectangle(955,-233.3,4209.4,1925.1), new cjs.Rectangle(955,-234,4219.4,1925.8), new cjs.Rectangle(955,-234.7,4229.4,1926.5), new cjs.Rectangle(955,-235.4,4239.5,1927.2), new cjs.Rectangle(955,-236.1,4249.5,1927.9), new 
cjs.Rectangle(955,-236.8,4259.6,1928.6), new cjs.Rectangle(955,-237.5,4269.6,1929.3), new cjs.Rectangle(955,-238.1,4279.7,1930), new cjs.Rectangle(955,-238.8,4289.7,1930.7), new cjs.Rectangle(955,-239.5,4299.7,1931.4), new cjs.Rectangle(955,-240.2,4309.8,1932.1), new cjs.Rectangle(955,-241,4319.8,1932.8), new cjs.Rectangle(955,-241.6,4329.9,1933.5), new cjs.Rectangle(955,-242.3,4339.9,1934.2), new cjs.Rectangle(955,-243,4349.9,1934.8), new cjs.Rectangle(955,-243.7,4360,1935.5), new cjs.Rectangle(955,-244.4,4370,1936.2), new cjs.Rectangle(955,-245.1,4380.1,1936.9), new cjs.Rectangle(955,-245.8,4390.1,1937.6), new cjs.Rectangle(955,-246.5,4400.2,1938.3), new cjs.Rectangle(955,-247.2,4410.2,1939), new cjs.Rectangle(955,-247.8,4420.2,1939.7), new cjs.Rectangle(955,-248.5,4430.3,1940.4), new cjs.Rectangle(955,-249.2,4440.3,1941.1), new cjs.Rectangle(955,-250,4450.4,1941.8), new cjs.Rectangle(955,-250.6,4460.4,1942.5), new cjs.Rectangle(955,-251.3,4470.5,1943.2), new cjs.Rectangle(955,-252,4480.5,1943.9), new cjs.Rectangle(955,-252.7,4490.5,1944.6), new cjs.Rectangle(955,-253.4,4500.6,1945.2), new cjs.Rectangle(955,-254.1,4510.6,1945.9), new cjs.Rectangle(955,-254.8,4520.7,1946.6), new cjs.Rectangle(955,-255.5,4530.7,1947.3), new cjs.Rectangle(955,-256.2,4540.8,1948), new cjs.Rectangle(955,-256.9,4550.8,1948.7), new cjs.Rectangle(955,-257.6,4560.8,1949.4), new cjs.Rectangle(955,-258.3,4570.9,1950.1), new cjs.Rectangle(955,-259,4580.9,1950.8), new cjs.Rectangle(955,-259.6,4591,1951.5), new cjs.Rectangle(955,-260.3,4601,1952.2), new cjs.Rectangle(955,-261,4611.1,1952.9), new cjs.Rectangle(955,-261.7,4621.1,1953.6), new cjs.Rectangle(955,-262.5,4631.1,1954.3), new cjs.Rectangle(955,-263.1,4641.2,1955), new cjs.Rectangle(955,-263.8,4651.2,1955.6), new cjs.Rectangle(955,-264.5,4661.3,1956.3), new cjs.Rectangle(955,-265.2,4671.3,1957), new cjs.Rectangle(955,-265.9,4681.4,1957.7), new cjs.Rectangle(955,-266.6,4691.4,1958.4), new cjs.Rectangle(955,-267.3,4701.4,1959.1), new 
cjs.Rectangle(955,-268,4711.5,1959.8), new cjs.Rectangle(955,-268.7,4721.5,1960.5), new cjs.Rectangle(955,-269.3,4731.6,1961.2), new cjs.Rectangle(955,-270,4741.6,1961.9), new cjs.Rectangle(955,-270.7,4751.7,1962.6), new cjs.Rectangle(955,-271.5,4761.7,1963.3), new cjs.Rectangle(955,-272.1,4771.7,1964), new cjs.Rectangle(955,-272.8,4781.8,1964.7), new cjs.Rectangle(955,-273.5,4791.8,1965.4), new cjs.Rectangle(955,-274.2,4801.9,1966.1), new cjs.Rectangle(955,-274.9,4811.9,1966.7), new cjs.Rectangle(955,-275.6,4822,1967.4), new cjs.Rectangle(955,-276.3,4832,1968.1), new cjs.Rectangle(955,-277,4842,1968.8), new cjs.Rectangle(955,-277.7,4852.1,1969.5), new cjs.Rectangle(955,-278.4,4862.1,1970.2), new cjs.Rectangle(955,-279.1,4872.2,1970.9), new cjs.Rectangle(955,-279.7,4882.2,1971.6), new cjs.Rectangle(955,-280.5,4892.2,1972.3), new cjs.Rectangle(955,-281.1,4902.3,1973), new cjs.Rectangle(955,-281.8,4912.3,1973.7), new cjs.Rectangle(955,-282.5,4922.4,1974.4), new cjs.Rectangle(955,-283.2,4932.4,1975.1), new cjs.Rectangle(955,-284,4942.5,1975.8), new cjs.Rectangle(955,-284.6,4952.5,1976.5), new cjs.Rectangle(955,-285.3,4962.5,1977.1), new cjs.Rectangle(955,-286,4972.6,1977.8), new cjs.Rectangle(955,-286.7,4982.6,1978.5), new cjs.Rectangle(955,-287.4,4992.7,1979.2), new cjs.Rectangle(955,-288.1,5002.7,1979.9), new cjs.Rectangle(955,-288.8,5012.8,1980.6), new cjs.Rectangle(955,-289.5,5022.8,1981.3), new cjs.Rectangle(955,-290.1,5032.8,1982), new cjs.Rectangle(955,-290.8,5042.9,1982.7), new cjs.Rectangle(955,-291.5,5052.9,1983.4), new cjs.Rectangle(955,-292.2,5063,1984.1), new cjs.Rectangle(955,-293,5073,1984.8), new cjs.Rectangle(955,-293.6,5083.1,1985.5), new cjs.Rectangle(955,-294.3,5093.1,1986.2), new cjs.Rectangle(955,-295,5103.1,1986.9), new cjs.Rectangle(955,-295.7,5113.2,1987.5), new cjs.Rectangle(955,-296.4,5123.2,1988.2), new cjs.Rectangle(955,-297.1,5133.3,1988.9), new cjs.Rectangle(955,-297.8,5143.3,1989.6), new cjs.Rectangle(955,-298.5,5153.4,1990.3), new 
cjs.Rectangle(955,-299.2,5163.4,1991), new cjs.Rectangle(955,-299.9,5173.4,1991.7), new cjs.Rectangle(955,-300.6,5183.5,1992.4), new cjs.Rectangle(955,-301.2,5193.5,1993.1), new cjs.Rectangle(955,-302,5203.6,1993.8), new cjs.Rectangle(955,-302.6,5213.6,1994.5), new cjs.Rectangle(955,-303.3,5223.7,1995.2), new cjs.Rectangle(955,-304,5233.7,1995.9), new cjs.Rectangle(955,-304.7,5243.7,1996.6), new cjs.Rectangle(955,-305.5,5253.8,1997.3), new cjs.Rectangle(955,-306.1,5263.8,1997.9), new cjs.Rectangle(955,-306.8,5273.9,1998.6), new cjs.Rectangle(955,-307.5,5283.9,1999.3), new cjs.Rectangle(955,-308.2,5294,2000), new cjs.Rectangle(955,-308.9,5304,2000.7), new cjs.Rectangle(955,-309.6,5314,2001.4), new cjs.Rectangle(955,-310.3,5324.1,2002.1), new cjs.Rectangle(955,-311,5334.1,2002.8), new cjs.Rectangle(955,-311.6,5344.2,2003.5), new cjs.Rectangle(955,-312.3,5354.2,2004.2), new cjs.Rectangle(955,-313,5364.3,2004.9)];
// library properties:
lib.properties = {
width: 1920,
height: 1080,
fps: 24,
color: "#FFFFFF",
opacity: 1.00,
webfonts: {},
manifest: [
{src:"aa_images/OfficeSpaceAnimation_atlas_.png", id:"OfficeSpaceAnimation_atlas_"},
{src:"aa_images/OfficeSpaceAnimation_atlas_2.png", id:"OfficeSpaceAnimation_atlas_2"}
],
preloads: []
};
})(lib = lib||{}, images = images||{}, createjs = createjs||{}, ss = ss||{}, AdobeAn = AdobeAn||{});
var lib, images, createjs, ss, AdobeAn;
|
'use strict';

exports.__esModule = true;

/**
 * Copyright (c) 2016-present, Nicolas Gallagher.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

// Babel interop shim: wrap CommonJS exports so they look like ES modules.
function _interopRequireDefault(obj) {
  return obj && obj.__esModule ? obj : { default: obj };
}

var _ReactNativeStyleResolver = _interopRequireDefault(
  require('./ReactNativeStyleResolver')
);

// Module-level singleton: one style resolver shared by the whole app.
var styleResolver = new _ReactNativeStyleResolver.default();

exports.default = styleResolver;
module.exports = exports['default'];
|
'use strict';
describe('basic mark with custom element and class', function() {
  var $ctx1, $ctx2;
  beforeEach(function(done) {
    loadFixtures('basic/custom-element-class.html');

    $ctx1 = $('.basic-custom-element-class > div:first-child');
    $ctx2 = $('.basic-custom-element-class > div:last-child');
    // First context: custom element only. Second context: custom element
    // plus a custom class. The second mark runs inside the first one's
    // `done` callback so `done` fires only after both have finished.
    new Mark($ctx1[0]).mark('lorem ipsum', {
      'diacritics': false,
      'separateWordSearch': false,
      'element': 'i',
      'done': function() {
        new Mark($ctx2[0]).mark('lorem ipsum', {
          'diacritics': false,
          'separateWordSearch': false,
          'element': 'i',
          'className': 'custom',
          'done': done
        });
      }
    });
  });

  // Title fixed: this context passes NO className, so the point is that no
  // class gets added; the assertion on 'i.custom' actually verifies that.
  it('should not add a class to matched elements if none is specified', function() {
    expect($ctx1.find('i')).toHaveLength(4);
    expect($ctx1.find('i.custom')).toHaveLength(0);
  });
  it('should wrap matches with specified element and class', function() {
    expect($ctx2.find('i.custom')).toHaveLength(4);
  });
});
|
const mongoose = require("mongoose");
module.exports = () =>{
mongoose.connect("mongodb+srv://admin:369erdal456@movie-api-nodejs-tgvir.mongodb.net/test?retryWrites=true&w=majority");
mongoose.connection.on("open", ()=>{
console.log("MongoDB: Connected");
})
mongoose.connection.on("error", (err)=>{
console.log("MongoDB: Err", err);
});
mongoose.Promise = global.Promise;
}
|
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
class LoginForm(forms.Form):
    """Username/password form that validates credentials via the auth backend."""

    username = forms.CharField(max_length=100, label='Kullanıcı Adı')
    password = forms.CharField(max_length=100, label='Parola', widget=forms.PasswordInput)

    def clean(self):
        """Reject the form when authentication fails for the given credentials."""
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')
        # Only attempt authentication when both fields passed field-level validation.
        if username and password and not authenticate(username=username, password=password):
            raise forms.ValidationError('Kullanıcı adını ya da parolayı yanlış girdiniz')
        return super(LoginForm, self).clean()
class RegisterForm(forms.ModelForm):
    """Registration form for the built-in User model with password confirmation."""

    username = forms.CharField(max_length=100, label='Kullanıcı Adı')
    password1 = forms.CharField(max_length=100, label='Parola', widget=forms.PasswordInput)
    password2 = forms.CharField(max_length=100, label='Parola Doğrulama', widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ['username', 'password1', 'password2']

    def clean_password2(self):
        """Validate that the confirmation password matches the first entry."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        both_present = password1 and password2
        if both_present and password1 != password2:
            raise forms.ValidationError('Parolalar eşleşmiyor')
        return password2
|
# Copyright 2020 Joseph T. Iosue
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Contains tests for functions in the _binary_helpers.py file.
"""
from qubovert.utils import is_solution_spin, num_bits
from qubovert.utils import sum as qvsum
from qubovert import boolean_var, spin_var, PCBO, PCSO
from numpy.testing import assert_raises
def test_is_solution_spin():
    """Exercise is_solution_spin on both tuple and dict solution mappings."""
    # Each case is checked once as a tuple and once as an index -> value dict.
    for wrap in (lambda sol: sol, lambda sol: dict(enumerate(sol))):
        assert not is_solution_spin(wrap((0, 1, 1, 0)))
        assert is_solution_spin(wrap((1, -1, -1, 1)))
        # All-ones is ambiguous; the explicit default argument keeps it so.
        assert is_solution_spin(wrap((1, 1, 1, 1)), None) is None
        assert not is_solution_spin(wrap((1, 1, 1, 1)))
        assert is_solution_spin(wrap((1, 1, 1, 1)), True)
def test_num_bits():
    """Check num_bits in logarithmic (default) and linear modes, plus errors."""
    assert num_bits(7) == 3
    assert num_bits(8) == 4
    assert num_bits(7, False) == 7
    assert num_bits(8, False) == 8
    # Negative inputs must raise in both modes.
    for log in (True, False):
        with assert_raises(ValueError):
            num_bits(-1, log)
def test_sum():
    """qvsum must agree with the builtin sum and preserve the variable family."""
    # Run the identical checks for boolean and spin variables.
    for make_var, expected_type in ((boolean_var, PCBO), (spin_var, PCSO)):
        variables = [make_var(i) for i in range(100)]
        assert sum(variables) == qvsum(variables) == qvsum(
            variables[i] for i in range(100)
        )
        assert sum(variables, 2) == qvsum(variables, 2)
        assert isinstance(qvsum(variables), expected_type)
|
"use strict";
// TypeScript's compiled async/await helper: drives a generator function,
// wrapping every yielded value in a Promise so the generator resumes only
// after that value resolves.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the resolved value of the previous step.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Feed a rejected await back into the generator as a thrown error.
        function rejected(value) { try { step(generator.throw(value)); } catch (e) { reject(e); } }
        // Either settle the outer promise (generator done) or await the yielded value.
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments)).next());
    });
};
const webdriver_commands_1 = require('./webdriver_commands');
// Commands that interact with the page and therefore get highlighted before
// being forwarded: element click, send keys, and clear.
const HIGHLIGHT_COMMAND = [webdriver_commands_1.CommandName.ElementClick, webdriver_commands_1.CommandName.ElementSendKeys, webdriver_commands_1.CommandName.ElementClear];
// Browser-side scripts that draw and remove the highlight overlay.
let clientScripts = require('./client_scripts/highlight.js');
/**
* A barrier that delays forwarding WebDriver commands that can affect the app (ie, clicks or
* sending text) for a fixed amount of time. During the delay, the element that's the target
* of the command will be highlighted by drawing a transparent div on top of it.
*/
class HighlightDelayBarrier {
    /**
     * @param client WebDriver client used to query element geometry and run
     *     scripts in the browser.
     * @param delay Milliseconds to hold the highlight before the command is
     *     forwarded; a falsy value disables the barrier entirely.
     */
    constructor(client, delay) {
        this.client = client;
        this.delay = delay;
    }
    // True when the command is one of the app-affecting commands listed in
    // HIGHLIGHT_COMMAND.
    isHighlightCommand(command) {
        return HIGHLIGHT_COMMAND.indexOf(command.commandName) !== -1;
    }
    // Execute-script payload (as JSON) that draws the highlight overlay over
    // the given viewport rectangle.
    highlightData(top, left, width, height) {
        return JSON.stringify({
            script: 'return (' + clientScripts.HIGHLIGHT_FN + ').apply(null, arguments);',
            args: [top, left, width, height]
        });
    }
    // Execute-script payload (as JSON) that removes the highlight overlay.
    removeHighlightData() {
        return JSON.stringify({
            script: 'return (' + clientScripts.REMOVE_HIGHLIGHT_FN + ').apply(null, arguments);',
            args: []
        });
    }
    // Simple promise-based sleep so we can use async/await
    sleep(delay) {
        return new Promise((resolve) => {
            setTimeout(() => {
                resolve();
            }, delay);
        });
    }
    // Intercepts a command before forwarding: for app-affecting commands,
    // draws a highlight over the target element, waits `delay` ms, then
    // clears the highlight. All other commands pass through untouched.
    onCommand(command) {
        return __awaiter(this, void 0, void 0, function* () {
            if (!this.isHighlightCommand(command) || !this.delay) {
                return;
            }
            const sessId = command.sessionId;
            const el = command.getParam('elementId');
            // The W3C spec does have a 'getRect', but the standalone server doesn't support it yet.
            const loc = yield this.client.getLocation(sessId, el);
            const size = yield this.client.getSize(sessId, el);
            // Set the highlight
            yield this.client.execute(sessId, this.highlightData(loc['y'], loc['x'], size['width'], size['height']));
            // Wait
            yield this.sleep(this.delay);
            // Clear the highlight
            yield this.client.execute(sessId, this.removeHighlightData());
        });
    }
}
exports.HighlightDelayBarrier = HighlightDelayBarrier;
//# sourceMappingURL=highlight_delay_barrier.js.map
|
"""Matern kernel."""
from typing import Optional
import numpy as np
import scipy.spatial.distance
import scipy.special
import probnum.utils as _utils
from probnum.typing import IntArgType, ScalarArgType
from ._kernel import IsotropicMixin, Kernel
class Matern(Kernel, IsotropicMixin):
    r"""Matern kernel.

    Covariance function defined by

    .. math::
        :nowrap:

        \begin{equation}
        k(x_0, x_1)
        =
        \frac{1}{\Gamma(\nu) 2^{\nu - 1}}
        \left( \frac{\sqrt{2 \nu}}{l} \lVert x_0 - x_1 \rVert_2 \right)^\nu
        K_\nu \left( \frac{\sqrt{2 \nu}}{l} \lVert x_0 - x_1 \rVert_2 \right),
        \end{equation}

    where :math:`K_\nu` is a modified Bessel function. The Matern kernel generalizes
    the :class:`~probnum.kernels.ExpQuad` kernel via its additional parameter
    :math:`\nu` controlling the smoothness of the function. For
    :math:`\nu \rightarrow \infty` the Matern kernel converges to the
    :class:`~probnum.kernels.ExpQuad` kernel. A Gaussian process with Matern
    covariance function is :math:`\lceil \nu \rceil - 1` times differentiable.

    Parameters
    ----------
    input_dim :
        Input dimension of the kernel.
    lengthscale :
        Lengthscale :math:`l` of the kernel. Describes the input scale on which the
        process varies.
    nu :
        Hyperparameter :math:`\nu` controlling differentiability.

    Raises
    ------
    ValueError
        If ``lengthscale`` or ``nu`` is not positive.

    See Also
    --------
    ExpQuad : Exponentiated Quadratic / RBF kernel.

    Examples
    --------
    >>> import numpy as np
    >>> from probnum.kernels import Matern
    >>> K = Matern(input_dim=1, lengthscale=0.1, nu=2.5)
    >>> xs = np.linspace(0, 1, 3)[:, None]
    >>> K.matrix(xs)
    array([[1.00000000e+00, 7.50933789e-04, 3.69569622e-08],
           [7.50933789e-04, 1.00000000e+00, 7.50933789e-04],
           [3.69569622e-08, 7.50933789e-04, 1.00000000e+00]])
    """

    def __init__(
        self,
        input_dim: IntArgType,
        lengthscale: ScalarArgType = 1.0,
        nu: ScalarArgType = 1.5,
    ):
        self.lengthscale = _utils.as_numpy_scalar(lengthscale)
        if not self.lengthscale > 0:
            raise ValueError(f"Lengthscale l={self.lengthscale} must be positive.")
        self.nu = _utils.as_numpy_scalar(nu)
        if not self.nu > 0:
            raise ValueError(f"Hyperparameter nu={self.nu} must be positive.")
        super().__init__(input_dim=input_dim)

    def _evaluate(self, x0: np.ndarray, x1: Optional[np.ndarray] = None) -> np.ndarray:
        distances = self._euclidean_distances(x0, x1)

        # Use the simplified closed forms for the common half-integer values of
        # nu; fall back to the general Bessel-function expression otherwise.
        if self.nu == 0.5:
            return np.exp(-1.0 / self.lengthscale * distances)
        if self.nu == 1.5:
            # BUGFIX: the scaled distance was previously negated, turning the
            # result into (1 - s) * exp(s), which grows without bound. The
            # correct Matern-3/2 closed form is (1 + s) * exp(-s) with
            # s = sqrt(3) * d / l.
            scaled_distances = np.sqrt(3) / self.lengthscale * distances
            return (1.0 + scaled_distances) * np.exp(-scaled_distances)
        if self.nu == 2.5:
            scaled_distances = np.sqrt(5) / self.lengthscale * distances
            return (1.0 + scaled_distances + scaled_distances ** 2 / 3.0) * np.exp(
                -scaled_distances
            )
        if self.nu == np.inf:
            # Limit case: the exponentiated quadratic (RBF) kernel.
            return np.exp(-1.0 / (2.0 * self.lengthscale ** 2) * distances ** 2)

        # The modified Bessel function K_nu is not defined for z=0
        distances = np.maximum(distances, np.finfo(distances.dtype).eps)

        scaled_distances = np.sqrt(2 * self.nu) / self.lengthscale * distances
        return (
            2 ** (1.0 - self.nu)
            / scipy.special.gamma(self.nu)
            * scaled_distances ** self.nu
            * scipy.special.kv(self.nu, scaled_distances)
        )
|
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
from nose_parameterized import parameterized
from six.moves import range
from unittest import TestCase
from zipline import TradingAlgorithm
from zipline.test_algorithms import NoopAlgorithm
from zipline.utils import factory
class BeforeTradingAlgorithm(TradingAlgorithm):
    """TradingAlgorithm that records the timestamp of every
    before_trading_start call, in invocation order."""

    def __init__(self, *args, **kwargs):
        # Collected simulation datetimes, one per before_trading_start call.
        self.before_trading_at = list()
        super(BeforeTradingAlgorithm, self).__init__(*args, **kwargs)

    def before_trading_start(self, data):
        """Record the current simulation datetime."""
        self.before_trading_at.append(self.datetime)
FREQUENCIES = {'daily': 0, 'minute': 1} # daily is less frequent than minute
class TestTradeSimulation(TestCase):
    # Tests for the interaction between data frequency, emission rate, and
    # before_trading_start scheduling.

    def test_minutely_emissions_generate_performance_stats_for_last_day(self):
        # One day in minute mode must still yield exactly one day of
        # performance stats.
        params = factory.create_simulation_parameters(num_days=1,
                                                      data_frequency='minute',
                                                      emission_rate='minute')
        algo = NoopAlgorithm(sim_params=params)
        algo.run(source=[], overwrite_sim_params=False)
        self.assertEqual(algo.perf_tracker.day_count, 1.0)

    # Expand into one named case per (num_days, data frequency, emission rate)
    # combination, keeping only cases where the emission rate is no more
    # frequent than the data frequency.
    @parameterized.expand([('%s_%s_%s' % (num_days, freq, emission_rate),
                            num_days, freq, emission_rate)
                           for freq in FREQUENCIES
                           for emission_rate in FREQUENCIES
                           for num_days in range(1, 4)
                           if FREQUENCIES[emission_rate] <= FREQUENCIES[freq]])
    def test_before_trading_start(self, test_name, num_days, freq,
                                  emission_rate):
        params = factory.create_simulation_parameters(
            num_days=num_days, data_frequency=freq,
            emission_rate=emission_rate)
        algo = BeforeTradingAlgorithm(sim_params=params)
        algo.run(source=[], overwrite_sim_params=False)
        # before_trading_start must fire exactly once per trading day, and
        # the recorded timestamps must match the trading calendar.
        self.assertEqual(algo.perf_tracker.day_count, num_days)
        self.assertTrue(params.trading_days.equals(
            pd.DatetimeIndex(algo.before_trading_at)),
            "Expected %s but was %s."
            % (params.trading_days, algo.before_trading_at))
|
const getWindowPosition = () => {
return {
windowPosition: { x: 0, y: 0 },
};
};
export default getWindowPosition;
|
/**
* Given a linked list, rotate the list to the right by k places, where k is non-negative.
* @author zheng
* @date 2020/10/13 13:22:28
*/
/**
 * Rotate the linked list to the right.
 * Return early on the boundary conditions (empty list, k === 0, or k a
 * multiple of the list length).
 * Locate the split point, detach the list there, rewire the corresponding
 * pointers, and remember to set the new tail's next to null.
 */
const { ListNode, createList } = require('./ListNode');
/**
* Time complexity: O(N)
* Space complexity: O(1)
* @param {ListNode} head
* @param {number} k
* @return {ListNode}
*/
const rotateRight = function (head, k) {
if (!head || k === 0) return head;
let count = 0;
let curr = head;
while (curr) {
count++;
curr = curr.next;
}
let newHead = new ListNode(0);
newHead.next = head;
k = k % count;
if (k === 0) return head;
let splitNode = head;
let reamin = count - k;
while (splitNode) {
if (--reamin === 0) {
break;
}
splitNode = splitNode.next;
}
let lastNode = splitNode;
newHead.next = lastNode.next;
while (lastNode.next) {
lastNode = lastNode.next;
}
lastNode.next = head;
splitNode.next = null;
return newHead.next;
};
// Manual smoke test: rotate [1, 2, 3, 4, 5] right by 4 places and print the
// resulting list object followed by each node value.
const values = [1, 2, 3, 4, 5];
const rotated = rotateRight(createList(values).next, 4);
console.log(rotated);
for (let node = rotated; node; node = node.next) {
  console.log(node.val);
}
|
'use strict';
// Icon definition for the "cart" glyph in the "bi" (Bootstrap Icons) prefix,
// packaged in the Font Awesome icon-definition shape.
Object.defineProperty(exports, '__esModule', { value: true });
var prefix = 'bi';
var iconName = 'cart';
// SVG viewBox dimensions.
var width = 512;
var height = 512;
var ligatures = [];
var unicode = null;
// Outline path for the cart body plus the two wheels.
var svgPathData = 'M 16 32 A 16 16 0 0 0 0 48 A 16 16 0 0 0 16 64 L 51.519531 64 L 64.320312 115.42383 L 112.28711 370.94336 A 16 16 0 0 0 128 384 L 160 384 L 384 384 L 416 384 A 16 16 0 0 0 431.71289 370.94336 L 479.71289 114.94336 A 16 16 0 0 0 464 96 L 92.480469 96 L 79.519531 44.128906 A 16 16 0 0 0 64 32 L 16 32 z M 384 384 A 64 64 0 0 0 384 512 A 64 64 0 0 0 384 384 z M 160 384 A 64 64 0 0 0 160 512 A 64 64 0 0 0 160 384 z M 99.263672 128 L 444.73633 128 L 402.7207 352 L 141.2793 352 L 99.263672 128 z M 159.83789 416 A 32 32 0 0 1 160 416 A 32 32 0 0 1 160 480 A 32 32 0 0 1 159.83789 416 z M 383.83789 416 A 32 32 0 0 1 384 416 A 32 32 0 0 1 384 480 A 32 32 0 0 1 383.83789 416 z ';
// Bundle the pieces into the standard icon-definition object.
exports.definition = {
  prefix: prefix,
  iconName: iconName,
  icon: [
    width,
    height,
    ligatures,
    unicode,
    svgPathData
  ]};
exports.biCart = exports.definition;
// Also re-export each component individually.
exports.prefix = prefix;
exports.iconName = iconName;
exports.width = width;
exports.height = height;
exports.ligatures = ligatures;
exports.unicode = unicode;
exports.svgPathData = svgPathData;
|
import pytest
import os
@pytest.fixture(autouse=True)
def set_env(monkeypatch):
    """Autouse fixture: run every test with TESTING=TRUE in the environment."""
    monkeypatch.setenv("TESTING", "TRUE")
|
#!/usr/bin/env python3
#import roslib; roslib.load_manifest('BINCADDY')
import rospy
#import roslib
import tf.transformations
import tf_conversions
import tf2_ros
import std_msgs.msg
from std_msgs.msg import Float64, Int32
from geometry_msgs.msg import Twist, TransformStamped
from nav_msgs.msg import Odometry
from sensor_msgs.msg import JointState
import std_srvs.srv
#roslib.load_manifest('diagnostic_updater')
import diagnostic_updater, diagnostic_msgs.msg
import time
import math
import traceback
import queue
from .odrive_interface import ODriveInterfaceAPI, ODriveFailure
from .odrive_interface import ChannelBrokenException, ChannelDamagedException
from .odrive_simulator import ODriveInterfaceSimulator
class ROSLogger(object):
    """Adapter that exposes the standard Python logger interface while
    forwarding every message to the matching rospy logging call."""

    def debug(self, msg):
        rospy.logdebug(msg)

    def info(self, msg):
        rospy.loginfo(msg)

    def warn(self, msg):
        rospy.logwarn(msg)

    def error(self, msg):
        rospy.logerr(msg)

    def critical(self, msg):
        rospy.logfatal(msg)
# use_index = False (bool)
# offset_float = 0.590887010098 (float)
# calib_range = 0.019999999553 (float)
# mode = 0 (int)
# offset = 1809 (int)
# cpr = 4096 (int)
# idx_search_speed = 10.0 (float)
# pre_calibrated = False (bool)
#m_s_to_rpm = 60.0/tyre_circumference
#m_s_to_erpm = 10 * m_s_to_rpm
# 4096 counts / rev, so 4096 == 1 rev/s
# 1 m/s = 3.6 km/hr
def get_param(name, default):
    """Fetch a ROS parameter (falling back to `default`) and log the resolved value."""
    value = rospy.get_param(name, default)
    rospy.loginfo(' %s: %s', name, str(value))
    return value
class ODriveNode(object):
    """ROS wrapper for an ODrive dual-motor controller on a differential-drive base.

    Subscribes to /cmd_vel, drives the motors through ODriveInterfaceAPI (or the
    simulator when `simulation_mode` is set), and publishes odometry, TF, raw
    encoder values, motor currents / i2t accumulators, FET temperatures, joint
    states and diagnostics. A slow main loop handles (re)connection and error
    recovery; a fast rospy.Timer handles comms and publishing.
    """
    last_speed = 0.0
    driver = None
    prerolling = False

    # Robot wheel_track params for velocity -> motor speed conversion
    wheel_track = None
    tyre_circumference = None
    encoder_counts_per_rev = None
    m_s_to_value = 1.0
    axis_for_right = 0
    encoder_cpr = 4096

    # i2t accumulators. Defined at class level so pub_diagnostics can read them
    # even if the diagnostics updater fires before pub_current has run once
    # (previously an AttributeError in that window).
    left_energy_acc = 0.0
    right_energy_acc = 0.0

    # Startup parameters
    connect_on_startup = False
    calibrate_on_startup = False
    engage_on_startup = False

    publish_joint_angles = True
    # Simulation mode
    # When enabled, output simulated odometry and joint angles (TODO: do joint angles anyway from ?)
    sim_mode = False

    def __init__(self):
        """Read parameters, then create services, publishers and the /cmd_vel subscriber."""
        self.sim_mode = get_param('simulation_mode', False)
        self.publish_joint_angles = get_param('publish_joint_angles', True)  # if self.sim_mode else False
        self.publish_temperatures = get_param('publish_temperatures', True)

        self.axis_for_right = float(get_param('~axis_for_right', 0))  # if right calibrates first, this should be 0, else 1
        self.wheel_track = float(get_param('~wheel_track', 0.285))  # m, distance between wheel centres
        self.tyre_circumference = float(get_param('~tyre_circumference', 0.341))  # used to translate velocity commands in m/s into motor rpm

        self.connect_on_startup = get_param('~connect_on_startup', False)
        # self.calibrate_on_startup = get_param('~calibrate_on_startup', False)
        # self.engage_on_startup = get_param('~engage_on_startup', False)

        self.has_preroll = get_param('~use_preroll', True)

        self.publish_current = get_param('~publish_current', True)
        self.publish_raw_odom = get_param('~publish_raw_odom', True)

        self.publish_odom = get_param('~publish_odom', True)
        self.publish_tf = get_param('~publish_odom_tf', False)
        self.odom_topic = get_param('~odom_topic', "odom")
        self.odom_frame = get_param('~odom_frame', "odom")
        self.base_frame = get_param('~base_frame', "base_link")
        self.odom_calc_hz = get_param('~odom_calc_hz', 10)

        rospy.on_shutdown(self.terminate)

        rospy.Service('connect_driver', std_srvs.srv.Trigger, self.connect_driver)
        rospy.Service('disconnect_driver', std_srvs.srv.Trigger, self.disconnect_driver)

        rospy.Service('calibrate_motors', std_srvs.srv.Trigger, self.calibrate_motor)
        rospy.Service('engage_motors', std_srvs.srv.Trigger, self.engage_motor)
        rospy.Service('release_motors', std_srvs.srv.Trigger, self.release_motor)

        # odometry update, disable during preroll, whenever wheels off ground
        self.odometry_update_enabled = True
        rospy.Service('enable_odometry_updates', std_srvs.srv.SetBool, self.enable_odometry_update_service)

        self.status_pub = rospy.Publisher('status', std_msgs.msg.String, latch=True, queue_size=2)
        self.status = "disconnected"
        self.status_pub.publish(self.status)

        self.command_queue = queue.Queue(maxsize=5)
        self.vel_subscribe = rospy.Subscriber("/cmd_vel", Twist, self.cmd_vel_callback, queue_size=2)

        self.publish_diagnostics = True
        if self.publish_diagnostics:
            self.diagnostic_updater = diagnostic_updater.Updater()
            self.diagnostic_updater.setHardwareID("Not connected, unknown")
            self.diagnostic_updater.add("ODrive Diagnostics", self.pub_diagnostics)

        if self.publish_temperatures:
            self.temperature_publisher_left = rospy.Publisher('left/temperature', Float64, queue_size=2)
            self.temperature_publisher_right = rospy.Publisher('right/temperature', Float64, queue_size=2)

        self.i2t_error_latch = False
        if self.publish_current:
            self.current_publisher_left = rospy.Publisher('left/current', Float64, queue_size=2)
            self.current_publisher_right = rospy.Publisher('right/current', Float64, queue_size=2)
            self.i2t_publisher_left = rospy.Publisher('left/i2t', Float64, queue_size=2)
            self.i2t_publisher_right = rospy.Publisher('right/i2t', Float64, queue_size=2)
            rospy.logdebug("ODrive will publish motor currents.")
            # NOTE(review): these thresholds are only defined when
            # publish_current is enabled, but pub_diagnostics reads them
            # unconditionally when connected — confirm publish_current is
            # always True in deployments, or move these out of the if.
            self.i2t_resume_threshold = get_param('~i2t_resume_threshold', 222)
            self.i2t_warning_threshold = get_param('~i2t_warning_threshold', 333)
            self.i2t_error_threshold = get_param('~i2t_error_threshold', 666)

        self.last_cmd_vel_time = rospy.Time.now()

        if self.publish_raw_odom:
            self.raw_odom_publisher_encoder_left = rospy.Publisher('left/raw_odom/encoder', Int32, queue_size=2) if self.publish_raw_odom else None
            self.raw_odom_publisher_encoder_right = rospy.Publisher('right/raw_odom/encoder', Int32, queue_size=2) if self.publish_raw_odom else None
            self.raw_odom_publisher_vel_left = rospy.Publisher('left/raw_odom/velocity', Int32, queue_size=2) if self.publish_raw_odom else None
            self.raw_odom_publisher_vel_right = rospy.Publisher('right/raw_odom/velocity', Int32, queue_size=2) if self.publish_raw_odom else None

        if self.publish_odom:
            rospy.Service('reset_odometry', std_srvs.srv.Trigger, self.reset_odometry)
            self.old_pos_l = 0
            self.old_pos_r = 0

            self.odom_publisher = rospy.Publisher(self.odom_topic, Odometry, tcp_nodelay=True, queue_size=2)
            # setup message
            self.odom_msg = Odometry()
            self.odom_msg.header.frame_id = self.odom_frame
            self.odom_msg.child_frame_id = self.base_frame
            self.odom_msg.pose.pose.position.x = 0.0
            self.odom_msg.pose.pose.position.y = 0.0
            self.odom_msg.pose.pose.position.z = 0.0    # always on the ground, we hope
            self.odom_msg.pose.pose.orientation.x = 0.0  # always vertical
            self.odom_msg.pose.pose.orientation.y = 0.0  # always vertical
            self.odom_msg.pose.pose.orientation.z = 0.0
            self.odom_msg.pose.pose.orientation.w = 1.0
            self.odom_msg.twist.twist.linear.x = 0.0
            self.odom_msg.twist.twist.linear.y = 0.0   # no sideways
            self.odom_msg.twist.twist.linear.z = 0.0   # or upwards... only forward
            self.odom_msg.twist.twist.angular.x = 0.0  # or roll
            self.odom_msg.twist.twist.angular.y = 0.0  # or pitch... only yaw
            self.odom_msg.twist.twist.angular.z = 0.0

            # store current location to be updated.
            self.x = 0.0
            self.y = 0.0
            self.theta = 0.0

            # setup transform
            self.tf_publisher = tf2_ros.TransformBroadcaster()
            self.tf_msg = TransformStamped()
            self.tf_msg.header.frame_id = self.odom_frame
            self.tf_msg.child_frame_id = self.base_frame
            self.tf_msg.transform.translation.x = 0.0
            self.tf_msg.transform.translation.y = 0.0
            self.tf_msg.transform.translation.z = 0.0
            self.tf_msg.transform.rotation.x = 0.0
            self.tf_msg.transform.rotation.y = 0.0
            # BUGFIX: identity quaternion is (x, y, z, w) = (0, 0, 0, 1).
            # Previously z=1, w=0 was set, i.e. a 180-degree yaw, inconsistent
            # with odom_msg's identity orientation above.
            self.tf_msg.transform.rotation.z = 0.0
            self.tf_msg.transform.rotation.w = 1.0

        if self.publish_joint_angles:
            self.joint_state_publisher = rospy.Publisher('/odrive/joint_states', JointState, queue_size=2)

            jsm = JointState()
            self.joint_state_msg = jsm
            jsm.name = ['joint_left_wheel', 'joint_right_wheel']
            jsm.position = [0.0, 0.0]

    def main_loop(self):
        """Slow (1 Hz) supervision loop: connect, watch for errors, reconnect.

        The high-rate comms + odometry work happens in the fast_timer callback.
        """
        main_rate = rospy.Rate(1)  # hz
        # Start timer to run high-rate comms. Note: this rebinds the
        # `fast_timer` attribute from the bound method to the Timer handle;
        # the callback reference was captured before the assignment.
        self.fast_timer = rospy.Timer(rospy.Duration(1 / float(self.odom_calc_hz)), self.fast_timer)

        self.fast_timer_comms_active = False

        while not rospy.is_shutdown():
            try:
                main_rate.sleep()
            except rospy.ROSInterruptException:  # shutdown / stop ODrive??
                break

            # fast timer running, so do nothing and wait for any errors
            if self.fast_timer_comms_active:
                continue

            # check for errors
            if self.driver:
                try:
                    # driver connected, but fast_comms not active -> must be an error?
                    # TODO: try resetting errors and recalibrating, not just a full disconnection
                    error_string = self.driver.get_errors(clear=True)
                    if error_string:
                        rospy.logerr("Had errors, disconnecting and retrying connection.")
                        rospy.logerr(error_string)
                        self.driver.disconnect()
                        self.status = "disconnected"
                        self.status_pub.publish(self.status)
                        self.driver = None
                    else:
                        # must have called connect service from another node
                        self.fast_timer_comms_active = True
                except (ChannelBrokenException, ChannelDamagedException, AttributeError):
                    rospy.logerr("ODrive USB connection failure in main_loop.")
                    self.status = "disconnected"
                    self.status_pub.publish(self.status)
                    self.driver = None
                except:
                    rospy.logerr("Unknown errors accessing ODrive:" + traceback.format_exc())
                    self.status = "disconnected"
                    self.status_pub.publish(self.status)
                    self.driver = None

            if not self.driver:
                if not self.connect_on_startup:
                    continue

                if not self.connect_driver(None)[0]:
                    rospy.logerr("Failed to connect.")  # TODO: can we check for timeout here?
                    continue

                if self.publish_diagnostics:
                    self.diagnostic_updater.setHardwareID(self.driver.get_version_string())
            else:
                pass  # loop around and try again

    def fast_timer(self, timer_event):
        """High-rate callback: read ODrive state, publish outputs, dispatch drive commands."""
        time_now = rospy.Time.now()
        # in case of failure, assume some values are zero
        self.vel_l = 0
        self.vel_r = 0
        self.new_pos_l = 0
        self.new_pos_r = 0
        self.current_l = 0
        self.current_r = 0
        self.temp_v_l = 0.
        self.temp_v_r = 0.
        self.motor_state_l = "not connected"  # undefined
        self.motor_state_r = "not connected"
        self.bus_voltage = 0.

        # Handle reading from Odrive and sending odometry
        if self.fast_timer_comms_active:
            try:
                # check errors
                error_string = self.driver.get_errors()
                if error_string:
                    self.fast_timer_comms_active = False
                else:
                    # reset watchdog
                    self.driver.feed_watchdog()
                    # read all required values from ODrive for odometry
                    self.motor_state_l = self.driver.left_state()
                    self.motor_state_r = self.driver.right_state()

                    self.encoder_cpr = self.driver.encoder_cpr
                    self.m_s_to_value = self.encoder_cpr / self.tyre_circumference  # calculated

                    self.driver.update_time(time_now.to_sec())
                    self.vel_l = self.driver.left_vel_estimate()    # units: encoder counts/s
                    self.vel_r = -self.driver.right_vel_estimate()  # neg is forward for right
                    self.new_pos_l = self.driver.left_pos()         # units: encoder counts
                    self.new_pos_r = -self.driver.right_pos()       # sign!

                    # for temperatures
                    self.temp_v_l = self.driver.left_temperature()
                    self.temp_v_r = self.driver.right_temperature()
                    # for current
                    self.current_l = self.driver.left_current()
                    self.current_r = self.driver.right_current()
                    # voltage
                    self.bus_voltage = self.driver.bus_voltage()

            except (ChannelBrokenException, ChannelDamagedException):
                rospy.logerr("ODrive USB connection failure in fast_timer." + traceback.format_exc(1))
                self.fast_timer_comms_active = False
                self.status = "disconnected"
                self.status_pub.publish(self.status)
                self.driver = None
            except:
                rospy.logerr("Fast timer ODrive failure:" + traceback.format_exc())
                self.fast_timer_comms_active = False

        # odometry is published regardless of ODrive connection or failure (but assumed zero for those)
        # as required by SLAM
        if self.publish_odom:
            self.pub_odometry(time_now)
        if self.publish_temperatures:
            self.pub_temperatures()
        if self.publish_current:
            self.pub_current()
        if self.publish_joint_angles:
            self.pub_joint_angles(time_now)
        if self.publish_diagnostics:
            self.diagnostic_updater.update()

        try:
            # check and stop motor if no vel command has been received in > 1s
            if self.driver:
                if (time_now - self.last_cmd_vel_time).to_sec() > 0.5 and self.last_speed > 0:
                    self.driver.drive(0, 0)
                    self.last_speed = 0
                    self.last_cmd_vel_time = time_now
                # release motor after 10s stopped
                if (time_now - self.last_cmd_vel_time).to_sec() > 10.0 and self.driver.engaged():
                    self.driver.release()  # and release
        except (ChannelBrokenException, ChannelDamagedException):
            rospy.logerr("ODrive USB connection failure in cmd_vel timeout." + traceback.format_exc(1))
            self.fast_timer_comms_active = False
            self.driver = None
        except:
            rospy.logerr("cmd_vel timeout unknown failure:" + traceback.format_exc())
            self.fast_timer_comms_active = False

        # handle sending drive commands.
        # from here, any errors return to get out
        if self.fast_timer_comms_active and not self.command_queue.empty():
            # check to see if we're initialised and engaged motor
            try:
                if not self.driver.has_prerolled():
                    rospy.logwarn_throttle(5.0, "ODrive has not been prerolled, ignoring drive command.")
                    # discard the pending command so the queue does not back up
                    motor_command = self.command_queue.get_nowait()
                    return
            except:
                rospy.logerr("Fast timer exception on preroll." + traceback.format_exc())
                self.fast_timer_comms_active = False

            try:
                motor_command = self.command_queue.get_nowait()
            except queue.Empty:
                rospy.logerr("Queue was empty??" + traceback.format_exc())
                return

            if motor_command[0] == 'drive':
                try:
                    if self.publish_current and self.i2t_error_latch:
                        # have exceeded i2t bounds
                        return

                    if not self.driver.engaged():
                        self.driver.engage()
                        self.status = "engaged"

                    left_linear_val, right_linear_val = motor_command[1]
                    self.driver.drive(left_linear_val, right_linear_val)
                    self.last_speed = max(abs(left_linear_val), abs(right_linear_val))
                    self.last_cmd_vel_time = time_now
                except (ChannelBrokenException, ChannelDamagedException):
                    rospy.logerr("ODrive USB connection failure in drive_cmd." + traceback.format_exc(1))
                    self.fast_timer_comms_active = False
                    self.driver = None
                except:
                    rospy.logerr("motor drive unknown failure:" + traceback.format_exc())
                    self.fast_timer_comms_active = False

            elif motor_command[0] == 'release':
                pass
            # ?
            else:
                pass

    def terminate(self):
        """Shutdown hook: stop the fast timer and release the motors."""
        self.fast_timer.shutdown()
        if self.driver:
            self.driver.release()

    # ROS services
    def connect_driver(self, request):
        """Trigger service: connect to the ODrive (or the simulator in sim mode)."""
        if self.driver:
            return (False, "Already connected.")

        ODriveClass = ODriveInterfaceAPI if not self.sim_mode else ODriveInterfaceSimulator
        # BUGFIX: instantiate the selected class. Previously ODriveInterfaceAPI
        # was always constructed, so simulation_mode was silently ignored.
        self.driver = ODriveClass(logger=ROSLogger())
        rospy.loginfo("Connecting to ODrive...")
        if not self.driver.connect(right_axis=self.axis_for_right):
            self.driver = None
            return (False, "Failed to connect.")

        # okay, connected,
        self.m_s_to_value = self.driver.encoder_cpr / self.tyre_circumference

        if self.publish_odom:
            self.old_pos_l = self.driver.left_axis.encoder.pos_cpr
            self.old_pos_r = self.driver.right_axis.encoder.pos_cpr

        self.fast_timer_comms_active = True

        self.status = "connected"
        self.status_pub.publish(self.status)
        return (True, "ODrive connected successfully")

    def disconnect_driver(self, request):
        """Trigger service: disconnect from the ODrive."""
        if not self.driver:
            rospy.logerr("Not connected.")
            return (False, "Not connected.")
        try:
            if not self.driver.disconnect():
                return (False, "Failed disconnection, but try reconnecting.")
        except:
            rospy.logerr('Error while disconnecting: {}'.format(traceback.format_exc()))
        finally:
            self.status = "disconnected"
            # BUGFIX: publish the status string; previously the Publisher
            # object itself was passed to publish().
            self.status_pub.publish(self.status)
            self.driver = None
        return (True, "Disconnection success.")

    def calibrate_motor(self, request):
        """Trigger service: preroll (index search) or full calibration."""
        if not self.driver:
            rospy.logerr("Not connected.")
            return (False, "Not connected.")

        if self.has_preroll:
            self.odometry_update_enabled = False  # disable odometry updates while we preroll
            if not self.driver.preroll(wait=True):
                self.status = "preroll_fail"
                self.status_pub.publish(self.status)
                return (False, "Failed preroll.")

            self.status_pub.publish("ready")
            rospy.sleep(1)
            self.odometry_update_enabled = True
        else:
            if not self.driver.calibrate():
                return (False, "Failed calibration.")

        return (True, "Calibration success.")

    def engage_motor(self, request):
        """Trigger service: put the axes into closed-loop control."""
        if not self.driver:
            rospy.logerr("Not connected.")
            return (False, "Not connected.")
        if not self.driver.has_prerolled():
            return (False, "Not prerolled.")
        if not self.driver.engage():
            return (False, "Failed to engage motor.")
        return (True, "Engage motor success.")

    def release_motor(self, request):
        """Trigger service: put the axes into idle (freewheel)."""
        if not self.driver:
            rospy.logerr("Not connected.")
            return (False, "Not connected.")
        if not self.driver.release():
            return (False, "Failed to release motor.")
        return (True, "Release motor success.")

    def enable_odometry_update_service(self, request):
        """SetBool service: pause/resume odometry integration (e.g. wheels off ground)."""
        enable = request.data
        if enable:
            self.odometry_update_enabled = True
            return (True, "Odometry enabled.")
        else:
            self.odometry_update_enabled = False
            return (True, "Odometry disabled.")

    def reset_odometry(self, request):
        """Trigger service: zero the integrated pose."""
        self.x = 0.0
        self.y = 0.0
        self.theta = 0.0
        return (True, "Odometry reset.")

    # Helpers and callbacks
    def convert(self, forward, ccw):
        """Map (linear m/s, angular rad/s) to per-wheel encoder counts/s."""
        angular_to_linear = ccw * (self.wheel_track / 2.0)
        left_linear_val = int((forward - angular_to_linear) * self.m_s_to_value)
        right_linear_val = int((forward + angular_to_linear) * self.m_s_to_value)
        return left_linear_val, right_linear_val

    def cmd_vel_callback(self, msg):
        """Queue a drive command from an incoming Twist; drop it if the queue is full."""
        left_linear_val, right_linear_val = self.convert(msg.linear.x, msg.angular.z)
        # if wheel speed = 0, stop publishing after sending 0 once. #TODO add error term, work out why VESC turns on for 0 rpm
        try:
            drive_command = ('drive', (left_linear_val, right_linear_val))
            self.command_queue.put_nowait(drive_command)
        except queue.Full:
            # fast_timer is not draining fast enough; newest command is dropped
            pass

        self.last_cmd_vel_time = rospy.Time.now()

    def pub_diagnostics(self, stat):
        """diagnostic_updater callback: report status, temperatures, currents and i2t."""
        stat.add("Status", self.status)
        stat.add("Motor state L", self.motor_state_l)
        stat.add("Motor state R", self.motor_state_r)
        stat.add("FET temp L (C)", round(self.temp_v_l, 1))
        stat.add("FET temp R (C)", round(self.temp_v_r, 1))
        stat.add("Motor temp L (C)", "unimplemented")
        stat.add("Motor temp R (C)", "unimplemented")
        stat.add("Motor current L (A)", round(self.current_l, 1))
        stat.add("Motor current R (A)", round(self.current_r, 1))
        stat.add("Voltage (V)", round(self.bus_voltage, 2))
        stat.add("Motor i2t L", round(self.left_energy_acc, 1))
        stat.add("Motor i2t R", round(self.right_energy_acc, 1))

        # https://github.com/ros/common_msgs/blob/jade-devel/diagnostic_msgs/msg/DiagnosticStatus.msg
        if self.status == "disconnected":
            stat.summary(diagnostic_msgs.msg.DiagnosticStatus.WARN, "Not connected")
        else:
            if self.i2t_error_latch:
                stat.summary(diagnostic_msgs.msg.DiagnosticStatus.ERROR, "i2t overheated, drive ignored until cool")
            # BUGFIX: check the (higher) error thresholds before the warning
            # thresholds; previously the ERROR branches were unreachable
            # because the warning comparison always matched first.
            elif self.left_energy_acc > self.i2t_error_threshold:
                stat.summary(diagnostic_msgs.msg.DiagnosticStatus.ERROR, "Left motor over i2t error threshold")
            elif self.left_energy_acc > self.i2t_warning_threshold:
                stat.summary(diagnostic_msgs.msg.DiagnosticStatus.WARN, "Left motor over i2t warning threshold")
            elif self.right_energy_acc > self.i2t_error_threshold:
                stat.summary(diagnostic_msgs.msg.DiagnosticStatus.ERROR, "Right motor over i2t error threshold")
            elif self.right_energy_acc > self.i2t_warning_threshold:
                stat.summary(diagnostic_msgs.msg.DiagnosticStatus.WARN, "Right motor over i2t warning threshold")
            # Everything is okay:
            else:
                stat.summary(diagnostic_msgs.msg.DiagnosticStatus.OK, "Running")

    def pub_temperatures(self):
        """Publish raw FET thermistor values (see ODrive forum threads for conversion)."""
        # https://discourse.odriverobotics.com/t/odrive-mosfet-temperature-rise-measurements-using-the-onboard-thermistor/972
        # https://discourse.odriverobotics.com/t/thermistors-on-the-odrive/813/7
        # https://www.digikey.com/product-detail/en/murata-electronics-north-america/NCP15XH103F03RC/490-4801-1-ND/1644682
        self.temperature_publisher_left.publish(self.temp_v_l)
        self.temperature_publisher_right.publish(self.temp_v_r)

    # Current publishing and i2t calculation
    i2t_current_nominal = 2.0  # A; current below this does not accumulate heat
    i2t_update_rate = 0.01     # per-second decay factor of the i2t accumulators

    def pub_current(self):
        """Publish motor currents, integrate i2t, and latch/unlatch the overheat state."""
        self.current_publisher_left.publish(float(self.current_l))
        self.current_publisher_right.publish(float(self.current_r))

        now = time.time()

        if not hasattr(self, 'last_pub_current_time'):
            self.last_pub_current_time = now
            self.left_energy_acc = 0
            self.right_energy_acc = 0
            return

        # calculate and publish i2t
        dt = now - self.last_pub_current_time

        power = max(0, self.current_l ** 2 - self.i2t_current_nominal ** 2)
        energy = power * dt
        self.left_energy_acc *= 1 - self.i2t_update_rate * dt
        self.left_energy_acc += energy

        power = max(0, self.current_r ** 2 - self.i2t_current_nominal ** 2)
        energy = power * dt
        self.right_energy_acc *= 1 - self.i2t_update_rate * dt
        self.right_energy_acc += energy

        self.last_pub_current_time = now

        self.i2t_publisher_left.publish(float(self.left_energy_acc))
        self.i2t_publisher_right.publish(float(self.right_energy_acc))

        # stop odrive if overheated
        if self.left_energy_acc > self.i2t_error_threshold or self.right_energy_acc > self.i2t_error_threshold:
            if not self.i2t_error_latch:
                self.driver.release()
                self.status = "overheated"
                self.i2t_error_latch = True
                rospy.logerr("ODrive has exceeded i2t error threshold, ignoring drive commands. Waiting to cool down.")
        elif self.i2t_error_latch:
            if self.left_energy_acc < self.i2t_resume_threshold and self.right_energy_acc < self.i2t_resume_threshold:
                # have cooled enough now
                self.status = "ready"
                self.i2t_error_latch = False
                # BUGFIX: this is the recovery path; the old message wrongly
                # repeated the "ignoring drive commands" overheat text.
                rospy.logerr("ODrive has cooled below i2t resume threshold, resuming drive commands.")

    def pub_odometry(self, time_now):
        """Integrate wheel encoder deltas into pose and publish Odometry (and optionally TF)."""
        now = time_now
        self.odom_msg.header.stamp = now
        self.tf_msg.header.stamp = now

        wheel_track = self.wheel_track  # check these. Values in m
        tyre_circumference = self.tyre_circumference
        # self.m_s_to_value = encoder_cpr/tyre_circumference set earlier

        # if odometry updates disabled, just return the old position and zero twist.
        if not self.odometry_update_enabled:
            self.odom_msg.twist.twist.linear.x = 0.
            self.odom_msg.twist.twist.angular.z = 0.
            # but update the old encoder positions, so when we restart updates
            # it will start by giving zero change from the old position.
            self.old_pos_l = self.new_pos_l
            self.old_pos_r = self.new_pos_r

            self.odom_publisher.publish(self.odom_msg)
            if self.publish_tf:
                self.tf_publisher.sendTransform(self.tf_msg)
            return

        # Twist/velocity: calculated from motor values only
        s = tyre_circumference * (self.vel_l + self.vel_r) / (2.0 * self.encoder_cpr)
        w = tyre_circumference * (self.vel_r - self.vel_l) / (wheel_track * self.encoder_cpr)  # angle: vel_r*tyre_radius - vel_l*tyre_radius
        self.odom_msg.twist.twist.linear.x = s
        self.odom_msg.twist.twist.angular.z = w

        # Position
        delta_pos_l = self.new_pos_l - self.old_pos_l
        delta_pos_r = self.new_pos_r - self.old_pos_r

        self.old_pos_l = self.new_pos_l
        self.old_pos_r = self.new_pos_r

        # Check for overflow. Assume we can't move more than half a circumference in a single timestep.
        half_cpr = self.encoder_cpr / 2.0
        if delta_pos_l > half_cpr:
            delta_pos_l = delta_pos_l - self.encoder_cpr
        elif delta_pos_l < -half_cpr:
            delta_pos_l = delta_pos_l + self.encoder_cpr
        if delta_pos_r > half_cpr:
            delta_pos_r = delta_pos_r - self.encoder_cpr
        elif delta_pos_r < -half_cpr:
            delta_pos_r = delta_pos_r + self.encoder_cpr

        # counts to metres
        delta_pos_l_m = delta_pos_l / self.m_s_to_value
        delta_pos_r_m = delta_pos_r / self.m_s_to_value

        # Distance travelled
        d = (delta_pos_l_m + delta_pos_r_m) / 2.0  # delta_ps
        th = (delta_pos_r_m - delta_pos_l_m) / wheel_track  # works for small angles

        xd = math.cos(th) * d
        yd = -math.sin(th) * d

        # Pose: updated from previous pose + position delta
        self.x += math.cos(self.theta) * xd - math.sin(self.theta) * yd
        self.y += math.sin(self.theta) * xd + math.cos(self.theta) * yd
        self.theta = (self.theta + th) % (2 * math.pi)

        # fill odom message and publish
        self.odom_msg.pose.pose.position.x = self.x
        self.odom_msg.pose.pose.position.y = self.y
        q = tf_conversions.transformations.quaternion_from_euler(0.0, 0.0, self.theta)
        self.odom_msg.pose.pose.orientation.z = q[2]  # math.sin(self.theta)/2
        self.odom_msg.pose.pose.orientation.w = q[3]  # math.cos(self.theta)/2

        self.tf_msg.transform.translation.x = self.x
        self.tf_msg.transform.translation.y = self.y
        self.tf_msg.transform.rotation.z = q[2]
        self.tf_msg.transform.rotation.w = q[3]

        if self.publish_raw_odom:
            self.raw_odom_publisher_encoder_left.publish(self.new_pos_l)
            self.raw_odom_publisher_encoder_right.publish(self.new_pos_r)
            self.raw_odom_publisher_vel_left.publish(self.vel_l)
            self.raw_odom_publisher_vel_right.publish(self.vel_r)

        # ... and publish!
        self.odom_publisher.publish(self.odom_msg)
        if self.publish_tf:
            self.tf_publisher.sendTransform(self.tf_msg)

    def pub_joint_angles(self, time_now):
        """Publish wheel joint angles (rad) derived from the latest encoder counts."""
        jsm = self.joint_state_msg
        jsm.header.stamp = time_now
        if self.driver:
            jsm.position[0] = 2 * math.pi * self.new_pos_l / self.encoder_cpr
            jsm.position[1] = 2 * math.pi * self.new_pos_r / self.encoder_cpr

        self.joint_state_publisher.publish(jsm)
def start_odrive():
    """Initialise the ROS node and run the ODrive supervision loop until shutdown."""
    rospy.init_node('odrive')
    node = ODriveNode()
    node.main_loop()
if __name__ == '__main__':
    try:
        start_odrive()
    except rospy.ROSInterruptException:
        # Normal shutdown path (Ctrl-C / node kill); exit quietly.
        pass
|
# qubit number=4
# total number=38
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit() -> Program:
    """Build the fixed 4-qubit program (gate order is significant; total number=38)."""
    # Gates listed in application order; the trailing comment numbers are the
    # generator's original gate ids.
    gate_sequence = [
        H(3),                          # number=15
        CZ(0, 3),                      # number=16
        H(3),                          # number=17
        CNOT(0, 3),                    # number=35
        X(3),                          # number=36
        CNOT(0, 3),                    # number=37
        H(3),                          # number=20
        CZ(0, 3),                      # number=21
        H(3),                          # number=22
        H(1),                          # number=2
        H(2),                          # number=3
        H(3),                          # number=4
        Z(3),                          # number=33
        H(0),                          # number=5
        X(3),                          # number=32
        H(1),                          # number=6
        H(1),                          # number=29
        H(2),                          # number=7
        H(3),                          # number=8
        H(0),                          # number=9
        H(0),                          # number=23
        RX(0.6063273821428302, 3),     # number=34
        CZ(2, 0),                      # number=24
        H(0),                          # number=25
        Y(2),                          # number=30
        CNOT(2, 0),                    # number=11
        CNOT(2, 0),                    # number=18
        H(0),                          # number=26
        X(2),                          # number=31
        CZ(2, 0),                      # number=27
        H(0),                          # number=28
    ]
    prog = Program()
    for gate in gate_sequence:
        prog += gate
    return prog
def summrise_results(bitstrings) -> dict:
    """Count occurrences of each measured bitstring.

    Args:
        bitstrings: iterable of hashable measurement results (e.g. strings).

    Returns:
        dict mapping each distinct value to the number of times it appears.
    """
    d = {}
    for bits in bitstrings:
        # dict.get with a default replaces the manual "is None" check.
        d[bits] = d.get(bits, 0) + 1
    return d
if __name__ == '__main__':
    # Build the circuit, simulate 1024 shots on a 4-qubit QVM, and write the
    # measurement histogram to disk.
    prog = make_circuit()
    qvm = get_qc('4q-qvm')

    results = qvm.run_and_measure(prog, 1024)
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, bits)) for bits in bitstrings]
    # `with` guarantees the file is closed even if printing fails.
    with open("../data/startPyquil2867.csv", "w") as writefile:
        print(summrise_results(bitstrings), file=writefile)
|
import logging
import os
import angr
import pyvex
import archinfo
import ailment
import ailment.analyses
def test_block_simplifier():
    """Lift a known x86-64 block to AIL and run the AIL block simplifier over it."""
    arch = archinfo.arch_from_id('AMD64')
    ail_manager = ailment.Manager(arch=arch)

    # Raw bytes of a basic block at 0x4006c6 in the 'all' test binary.
    block_bytes = bytes.fromhex("554889E54883EC40897DCC488975C048C745F89508400048C745F0B6064000488B45C04883C008488B00BEA70840004889C7E883FEFFFF")
    irsb = pyvex.IRSB(block_bytes, 0x4006c6, arch, opt_level=0)
    ail_block = ailment.IRSBConverter.convert(irsb, ail_manager)

    # A project is required to reach the analyses registry.
    binary_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        '..', '..', 'binaries', 'tests', 'x86_64', 'all',
    )
    project = angr.Project(binary_path, auto_load_libs=False)
    project.analyses.AILBlockSimplifier(ail_block)
if __name__ == "__main__":
    # Allow running this test directly, outside a test runner.
    test_block_simplifier()
|
# coding=utf-8
# Copyright 2019-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import subprocess
import sys
import warnings
from io import BufferedIOBase, RawIOBase
from os.path import expanduser
from typing import IO, Dict, Iterable, List, Optional, Tuple, Union
import requests
from requests.exceptions import HTTPError
from .constants import (
ENDPOINT,
REPO_TYPES,
REPO_TYPES_MAPPING,
REPO_TYPES_URL_PREFIXES,
SPACES_SDK_TYPES,
)
from .utils import logging
from .utils._deprecation import _deprecate_positional_args
from .utils._fixes import JSONDecodeError
from .utils.endpoint_helpers import (
AttributeDictionary,
DatasetFilter,
DatasetTags,
ModelFilter,
ModelTags,
_filter_emissions,
)
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
# Placeholder username used where a real authenticated username is not available.
USERNAME_PLACEHOLDER = "hf_user"

logger = logging.get_logger(__name__)
# TODO: remove after deprecation period is over (v0.7)
def _validate_repo_id_deprecation(repo_id, name, organization):
"""Returns (name, organization) from the input."""
if repo_id and not name and organization:
# this means the user had passed name as positional, now mapped to
# repo_id and is passing organization as well. This wouldn't be an
# issue if they pass everything as kwarg. So we switch the parameters
# here:
repo_id, name = name, repo_id
if not (repo_id or name):
raise ValueError(
"No name provided. Please pass `repo_id` with a valid repository name."
)
if repo_id and (name or organization):
raise ValueError(
"Only pass `repo_id` and leave deprecated `name` and "
"`organization` to be None."
)
elif name or organization:
warnings.warn(
"`name` and `organization` input arguments are deprecated and "
"will be removed in v0.7. Pass `repo_id` instead.",
FutureWarning,
)
else:
if "/" in repo_id:
organization, name = repo_id.split("/")
else:
organization, name = None, repo_id
return name, organization
def repo_type_and_id_from_hf_id(hf_id: str, hub_url: Optional[str] = None):
    """
    Returns the repo type and ID from a huggingface.co URL linking to a
    repository

    Args:
        hf_id (`str`):
            An URL or ID of a repository on the HF hub. Accepted values are:
            - https://huggingface.co/<repo_type>/<namespace>/<repo_id>
            - https://huggingface.co/<namespace>/<repo_id>
            - <repo_type>/<namespace>/<repo_id>
            - <namespace>/<repo_id>
            - <repo_id>
        hub_url (`str`, *optional*):
            The URL of the HuggingFace Hub, defaults to https://huggingface.co

    Returns:
        A `(repo_type, namespace, repo_id)` tuple; `repo_type` and
        `namespace` may be `None` when they cannot be determined from
        `hf_id`.

    Raises:
        `ValueError`: if `hf_id` has more than three `/`-separated segments
            and does not contain the hub host.
    """
    # Drop the scheme so the hub host can be matched as a substring of hf_id.
    hub_url = re.sub(r"https?://", "", hub_url if hub_url is not None else ENDPOINT)
    # NOTE(review): an "@" presumably marks a git-style URL (user@host) rather
    # than a plain hub URL, so such ids fall through to the non-URL paths —
    # confirm against callers.
    is_hf_url = hub_url in hf_id and "@" not in hf_id
    url_segments = hf_id.split("/")
    is_hf_id = len(url_segments) <= 3
    if is_hf_url:
        # Full hub URL: the last two segments are <namespace>/<repo_id>.
        namespace, repo_id = url_segments[-2:]
        if namespace == hub_url:
            # URL of the form <hub>/<repo_id>: there is no namespace.
            namespace = None
        if len(url_segments) > 2 and hub_url not in url_segments[-3]:
            # URL of the form <hub>/<repo_type>/<namespace>/<repo_id>.
            repo_type = url_segments[-3]
        else:
            repo_type = None
    elif is_hf_id:
        if len(url_segments) == 3:
            # Passed <repo_type>/<user>/<model_id> or <repo_type>/<org>/<model_id>
            repo_type, namespace, repo_id = url_segments[-3:]
        elif len(url_segments) == 2:
            # Passed <user>/<model_id> or <org>/<model_id>
            namespace, repo_id = hf_id.split("/")[-2:]
            repo_type = None
        else:
            # Passed <model_id>
            repo_id = url_segments[0]
            namespace, repo_type = None, None
    else:
        raise ValueError(
            f"Unable to retrieve user and repo ID from the passed HF ID: {hf_id}"
        )
    # Normalize aliases (e.g. plural forms) onto canonical repo types; an
    # unrecognized type maps to None.
    repo_type = (
        repo_type if repo_type in REPO_TYPES else REPO_TYPES_MAPPING.get(repo_type)
    )
    return repo_type, namespace, repo_id
class RepoObj:
    """
    HuggingFace git-based system, data structure that represents a file
    belonging to the current user.
    """

    def __init__(self, **kwargs):
        # Mirror every keyword argument as an instance attribute.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)

    def __repr__(self):
        attrs = ", ".join(f"{k}='{v}'" for k, v in self.__dict__.items())
        return f"{self.__class__.__name__}({attrs})"
class ModelFile:
    """
    Data structure that represents a public file inside a model, accessible from
    huggingface.co
    """

    def __init__(self, rfilename: str, **kwargs):
        # Path of the file relative to the model repository root.
        self.rfilename = rfilename
        # Keep any extra metadata returned by the API as attributes.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)

    def __repr__(self):
        attrs = ", ".join(f"{k}='{v}'" for k, v in self.__dict__.items())
        return f"{self.__class__.__name__}({attrs})"
class DatasetFile:
    """
    Data structure that represents a public file inside a dataset, accessible
    from huggingface.co
    """

    def __init__(self, rfilename: str, **kwargs):
        # Path of the file relative to the dataset repository root.
        self.rfilename = rfilename
        # Keep any extra metadata returned by the API as attributes.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)

    def __repr__(self):
        attrs = ", ".join(f"{k}='{v}'" for k, v in self.__dict__.items())
        return f"{self.__class__.__name__}({attrs})"
class ModelInfo:
    """
    Info about a public model accessible from huggingface.co

    Any extra field returned by the API is stored as an attribute as-is.
    """

    @_deprecate_positional_args
    def __init__(
        self,
        *,
        modelId: Optional[str] = None,  # id of model
        sha: Optional[str] = None,  # commit sha at the specified revision
        lastModified: Optional[str] = None,  # date of last commit to repo
        tags: Optional[List[str]] = None,  # tags of the model
        pipeline_tag: Optional[str] = None,
        siblings: Optional[
            List[Dict]
        ] = None,  # list of files that constitute the model
        config: Optional[Dict] = None,  # information about model configuration
        **kwargs,
    ):
        self.modelId = modelId
        self.sha = sha
        self.lastModified = lastModified
        # Fix: `tags` previously used a mutable default argument (`[]`), a
        # single list object shared across every call; default to None and
        # allocate a fresh list per instance instead.
        self.tags = tags if tags is not None else []
        self.pipeline_tag = pipeline_tag
        self.siblings = (
            [ModelFile(**x) for x in siblings] if siblings is not None else None
        )
        self.config = config
        # Store all the other fields returned by the API.
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __repr__(self):
        s = f"{self.__class__.__name__}:" + " {"
        for key, val in self.__dict__.items():
            s += f"\n\t{key}: {val}"
        return s + "\n}"

    def __str__(self):
        r = f"Model Name: {self.modelId}, Tags: {self.tags}"
        if self.pipeline_tag:
            r += f", Task: {self.pipeline_tag}"
        return r
class DatasetInfo:
    """
    Info about a public dataset accessible from huggingface.co

    Any extra field returned by the API is stored as an attribute as-is.
    """

    @_deprecate_positional_args
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # id of dataset
        lastModified: Optional[str] = None,  # date of last commit to repo
        tags: Optional[List[str]] = None,  # tags of the dataset
        siblings: Optional[
            List[Dict]
        ] = None,  # list of files that constitute the dataset
        private: Optional[bool] = None,  # community datasets only
        author: Optional[str] = None,  # community datasets only
        description: Optional[str] = None,
        citation: Optional[str] = None,
        cardData: Optional[dict] = None,
        **kwargs,
    ):
        self.id = id
        self.lastModified = lastModified
        # Fix: `tags` previously used a mutable default argument (`[]`), a
        # single list object shared across every call; default to None and
        # allocate a fresh list per instance instead.
        self.tags = tags if tags is not None else []
        self.private = private
        self.author = author
        self.description = description
        self.citation = citation
        self.cardData = cardData
        self.siblings = (
            [DatasetFile(**x) for x in siblings] if siblings is not None else None
        )
        # Legacy stuff, "key" is always returned with an empty string
        # because of old versions of the datasets lib that need this field
        kwargs.pop("key", None)
        # Store all the other fields returned by the API
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __repr__(self):
        s = f"{self.__class__.__name__}:" + " {"
        for key, val in self.__dict__.items():
            s += f"\n\t{key}: {val}"
        return s + "\n}"

    def __str__(self):
        r = f"Dataset Name: {self.id}, Tags: {self.tags}"
        return r
class MetricInfo:
    """
    Info about a public metric accessible from huggingface.co
    """

    @_deprecate_positional_args
    def __init__(
        self,
        *,
        id: Optional[str] = None,  # id of metric
        description: Optional[str] = None,
        citation: Optional[str] = None,
        **kwargs,
    ):
        self.id = id
        self.description = description
        self.citation = citation
        # Drop the legacy "key" field, always returned as an empty string for
        # old versions of the datasets lib that need it.
        kwargs.pop("key", None)
        # Keep every other field returned by the API as an attribute.
        for extra_name, extra_value in kwargs.items():
            setattr(self, extra_name, extra_value)

    def __repr__(self):
        body = "".join(f"\n\t{k}: {v}" for k, v in self.__dict__.items())
        return f"{self.__class__.__name__}:" + " {" + body + "\n}"

    def __str__(self):
        return f"Metric Name: {self.id}"
class ModelSearchArguments(AttributeDictionary):
    """
    A nested namespace object holding all possible values for properties of
    models currently hosted in the Hub with tab-completion. If a value starts
    with a number, it will only exist in the dictionary
    Example:
    ```python
    >>> args = ModelSearchArguments()
    >>> args.author_or_organization.huggingface
    >>> args.language.en
    ```
    """

    def __init__(self):
        self._api = HfApi()
        super().__init__(self._api.get_model_tags())
        self._process_models()

    def _process_models(self):
        # Turn a raw tag into a valid attribute identifier.
        def clean(s: str):
            return s.replace(" ", "").replace("-", "_").replace(".", "_")

        authors, model_names = AttributeDictionary(), AttributeDictionary()
        for model in self._api.list_models():
            if "/" in model.modelId:
                author, name = model.modelId.split("/")
                authors[author] = clean(author)
            else:
                name = model.modelId
            model_names[name] = clean(name)
        self["model_name"] = model_names
        self["author"] = authors
class DatasetSearchArguments(AttributeDictionary):
    """
    A nested namespace object holding all possible values for properties of
    datasets currently hosted in the Hub with tab-completion. If a value starts
    with a number, it will only exist in the dictionary
    Example:
    ```python
    >>> args = DatasetSearchArguments()
    >>> args.author_or_organization.huggingface
    >>> args.language.en
    ```
    """

    def __init__(self):
        self._api = HfApi()
        super().__init__(self._api.get_dataset_tags())
        self._process_models()

    def _process_models(self):
        # Turn a raw tag into a valid attribute identifier.
        def clean(s: str):
            return s.replace(" ", "").replace("-", "_").replace(".", "_")

        authors, dataset_names = AttributeDictionary(), AttributeDictionary()
        for dataset in self._api.list_datasets():
            if "/" in dataset.id:
                author, name = dataset.id.split("/")
                authors[author] = clean(author)
            else:
                name = dataset.id
            dataset_names[name] = clean(name)
        self["dataset_name"] = dataset_names
        self["author"] = authors
def write_to_credential_store(username: str, password: str):
    """
    Stores a username/password pair for huggingface.co in the git credential
    store. The username is lowercased before being written.
    """
    with subprocess.Popen(
        ["git", "credential-store", "store"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    ) as process:
        # git credential input format: key=value lines ending with a blank line.
        payload = (
            f"url={ENDPOINT}\n"
            f"username={username.lower()}\n"
            f"password={password}\n"
            "\n"
        )
        process.stdin.write(payload.encode("utf-8"))
        process.stdin.flush()
def read_from_credential_store(
    username=None,
) -> Tuple[Union[str, None], Union[str, None]]:
    """
    Reads the credential store relative to huggingface.co. If no `username` is
    specified, will read the first entry for huggingface.co, otherwise will read
    the entry corresponding to the username specified.
    The username returned will be all lowercase.
    """
    with subprocess.Popen(
        ["git", "credential-store", "get"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    ) as process:
        # git credential query format: key=value lines plus a blank line.
        query = f"url={ENDPOINT}\n"
        if username is not None:
            query += f"username={username.lower()}\n"
        query += "\n"
        process.stdin.write(query.encode("utf-8"))
        process.stdin.flush()
        raw_output = process.stdout.read().decode("utf-8")
        if len(raw_output) == 0:
            # No matching entry in the store.
            return None, None
        # Expected output: a "username=..." line and a "password=..." line.
        username_line, password_line = [
            line for line in raw_output.split("\n") if len(line) != 0
        ]
        return username_line.split("=")[1], password_line.split("=")[1]
def erase_from_credential_store(username=None):
    """
    Erases the credential store relative to huggingface.co. If no `username` is
    specified, will erase the first entry for huggingface.co, otherwise will
    erase the entry corresponding to the username specified.
    """
    with subprocess.Popen(
        ["git", "credential-store", "erase"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    ) as process:
        # git credential query format: key=value lines plus a blank line.
        query = f"url={ENDPOINT}\n"
        if username is not None:
            query += f"username={username.lower()}\n"
        query += "\n"
        process.stdin.write(query.encode("utf-8"))
        process.stdin.flush()
class HfApi:
def __init__(self, endpoint=None):
self.endpoint = endpoint if endpoint is not None else ENDPOINT
def login(self, username: str, password: str) -> str:
"""
Call HF API to sign in a user and get a token if credentials are valid.
<Tip>
Warning: Deprecated, will be removed in v0.7. Please use
[`HfApi.set_access_token`] instead.
</Tip>
Args:
username (`str`):
The username of the account with which to login.
password (`str`):
The password of the account with which to login.
Returns:
`str`: token if credentials are valid
<Tip>
Raises the following errors:
- [`HTTPError`](https://2.python-requests.org/en/master/api/#requests.HTTPError)
if credentials are invalid
</Tip>
"""
warnings.warn(
"HfApi.login: This method is deprecated in favor of `set_access_token`"
" and will be removed in v0.7.",
FutureWarning,
)
path = f"{self.endpoint}/api/login"
r = requests.post(path, json={"username": username, "password": password})
r.raise_for_status()
d = r.json()
write_to_credential_store(username, password)
return d["token"]
def whoami(self, token: Optional[str] = None) -> Dict:
"""
Call HF API to know "whoami".
Args:
token (`str`, *optional*):
Hugging Face token. Will default to the locally saved token if
not provided.
"""
if token is None:
token = HfFolder.get_token()
if token is None:
raise ValueError(
"You need to pass a valid `token` or login by using `huggingface-cli "
"login`"
)
path = f"{self.endpoint}/api/whoami-v2"
r = requests.get(path, headers={"authorization": f"Bearer {token}"})
try:
r.raise_for_status()
except HTTPError as e:
raise HTTPError(
"Invalid user token. If you didn't pass a user token, make sure you "
"are properly logged in by executing `huggingface-cli login`, and "
"if you did pass a user token, double-check it's correct."
) from e
return r.json()
def _is_valid_token(self, token: str):
"""
Determines whether `token` is a valid token or not.
Args:
token (`str`):
The token to check for validity.
Returns:
`bool`: `True` if valid, `False` otherwise.
"""
try:
self.whoami(token=token)
return True
except HTTPError:
return False
    def _validate_or_retrieve_token(
        self,
        token: Optional[str] = None,
        name: Optional[str] = None,
        function_name: Optional[str] = None,
    ):
        """
        Retrieves and validates stored token or validates passed token.

        Args:
            token (``str``, `optional`):
                Hugging Face token. Will default to the locally saved token if not provided.
            name (``str``, `optional`):
                Name of the repository. This is deprecated in favor of repo_id and will be removed in v0.7.
            function_name (``str``, `optional`):
                If _validate_or_retrieve_token is called from a function, name of that function to be passed inside deprecation warning.

        Returns:
            Validated token and the name of the repository.

        Raises:
            :class:`EnvironmentError`: If the token is not passed and there's no token saved locally.
            :class:`ValueError`: If organization token or invalid token is passed.
        """
        # `token is True` covers the `use_auth_token=True` convention: use the
        # token stored locally by `huggingface-cli login`.
        if token is None or token is True:
            token = HfFolder.get_token()
            if token is None:
                raise EnvironmentError(
                    "You need to provide a `token` or be logged in to Hugging "
                    "Face with `huggingface-cli login`."
                )
        if name is not None:
            # Legacy call sites passed (name, token) in the old positional
            # order; if `name` validates as a token, the caller used the old
            # order, so warn and swap the two values.
            if self._is_valid_token(name):
                # TODO(0.6) REMOVE
                warnings.warn(
                    f"`{function_name}` now takes `token` as an optional positional argument. "
                    "Be sure to adapt your code!",
                    FutureWarning,
                )
                token, name = name, token
        if isinstance(token, str):
            # Organization tokens (prefix "api_org") cannot be used here.
            if token.startswith("api_org"):
                raise ValueError("You must use your personal account token.")
            if not self._is_valid_token(token):
                raise ValueError("Invalid token passed!")
        return token, name
def logout(self, token: Optional[str] = None) -> None:
"""
Call HF API to log out.
<Tip>
Warning: Deprecated, will be removed in v0.7. Please use
[`HfApi.unset_access_token`] instead.
</Tip>
Args:
token (`str`, *optional*):
Hugging Face token. Will default to the locally saved token if
not provided.
"""
warnings.warn(
"HfApi.logout: This method is deprecated in favor of `unset_access_token` "
"and will be removed in v0.7.",
FutureWarning,
)
if token is None:
token = HfFolder.get_token()
if token is None:
raise ValueError(
"You need to pass a valid `token` or login by using `huggingface-cli "
"login`"
)
username = self.whoami(token)["name"]
erase_from_credential_store(username)
path = f"{self.endpoint}/api/logout"
r = requests.post(path, headers={"authorization": f"Bearer {token}"})
r.raise_for_status()
@staticmethod
def set_access_token(access_token: str):
"""
Saves the passed access token so git can correctly authenticate the
user.
Args:
access_token (`str`):
The access token to save.
"""
write_to_credential_store(USERNAME_PLACEHOLDER, access_token)
@staticmethod
def unset_access_token():
"""
Resets the user's access token.
"""
erase_from_credential_store(USERNAME_PLACEHOLDER)
def get_model_tags(self) -> ModelTags:
"Gets all valid model tags as a nested namespace object"
path = f"{self.endpoint}/api/models-tags-by-type"
r = requests.get(path)
r.raise_for_status()
d = r.json()
return ModelTags(d)
def get_dataset_tags(self) -> DatasetTags:
"""
Gets all valid dataset tags as a nested namespace object.
"""
path = f"{self.endpoint}/api/datasets-tags-by-type"
r = requests.get(path)
r.raise_for_status()
d = r.json()
return DatasetTags(d)
    @_deprecate_positional_args
    def list_models(
        self,
        *,
        filter: Union[ModelFilter, str, Iterable[str], None] = None,
        author: Optional[str] = None,
        search: Optional[str] = None,
        emissions_thresholds: Optional[Tuple[float, float]] = None,
        sort: Union[Literal["lastModified"], str, None] = None,
        direction: Optional[Literal[-1]] = None,
        limit: Optional[int] = None,
        full: Optional[bool] = None,
        cardData: Optional[bool] = None,
        fetch_config: Optional[bool] = None,
        use_auth_token: Optional[Union[bool, str]] = None,
    ) -> List[ModelInfo]:
        """
        Get the public list of all the models on huggingface.co

        Args:
            filter ([`ModelFilter`] or `str` or `Iterable`, *optional*):
                A string or [`ModelFilter`] which can be used to identify models
                on the Hub.
            author (`str`, *optional*):
                A string which identify the author (user or organization) of the
                returned models
            search (`str`, *optional*):
                A string that will be contained in the returned models Example
                usage:
            emissions_thresholds (`Tuple`, *optional*):
                A tuple of two ints or floats representing a minimum and maximum
                carbon footprint to filter the resulting models with in grams.
            sort (`Literal["lastModified"]` or `str`, *optional*):
                The key with which to sort the resulting models. Possible values
                are the properties of the `ModelInfo` class.
            direction (`Literal[-1]` or `int`, *optional*):
                Direction in which to sort. The value `-1` sorts by descending
                order while all other values sort by ascending order.
            limit (`int`, *optional*):
                The limit on the number of models fetched. Leaving this option
                to `None` fetches all models.
            full (`bool`, *optional*):
                Whether to fetch all model data, including the `lastModified`,
                the `sha`, the files and the `tags`. This is set to `True` by
                default when using a filter.
            cardData (`bool`, *optional*):
                Whether to grab the metadata for the model as well. Can contain
                useful information such as carbon emissions, metrics, and
                datasets trained on.
            fetch_config (`bool`, *optional*):
                Whether to fetch the model configs as well. This is not included
                in `full` due to its size.
            use_auth_token (`bool` or `str`, *optional*):
                Whether to use the `auth_token` provided from the
                `huggingface_hub` cli. If not logged in, a valid `auth_token`
                can be passed in as a string.

        Example usage with the `filter` argument:
        ```python
        >>> from huggingface_hub import HfApi

        >>> api = HfApi()
        >>> # List all models
        >>> api.list_models()
        >>> # Get all valid search arguments
        >>> args = ModelSearchArguments()
        >>> # List only the text classification models
        >>> api.list_models(filter="text-classification")
        >>> # Using the `ModelFilter`
        >>> filt = ModelFilter(task="text-classification")
        >>> # With `ModelSearchArguments`
        >>> filt = ModelFilter(task=args.pipeline_tags.TextClassification)
        >>> api.list_models(filter=filt)
        >>> # Using `ModelFilter` and `ModelSearchArguments` to find text classification in both PyTorch and TensorFlow
        >>> filt = ModelFilter(
        ...     task=args.pipeline_tags.TextClassification,
        ...     library=[args.library.PyTorch, args.library.TensorFlow],
        ... )
        >>> api.list_models(filter=filt)
        >>> # List only models from the AllenNLP library
        >>> api.list_models(filter="allennlp")
        >>> # Using `ModelFilter` and `ModelSearchArguments`
        >>> filt = ModelFilter(library=args.library.allennlp)
        ```

        Example usage with the `search` argument:
        ```python
        >>> from huggingface_hub import HfApi

        >>> api = HfApi()
        >>> # List all models with "bert" in their name
        >>> api.list_models(search="bert")
        >>> # List all models with "bert" in their name made by google
        >>> api.list_models(search="bert", author="google")
        ```
        """
        path = f"{self.endpoint}/api/models"
        if use_auth_token:
            # Resolves/validates the token (handles `use_auth_token=True` by
            # reading the locally stored token).
            token, name = self._validate_or_retrieve_token(use_auth_token)
        headers = {"authorization": f"Bearer {token}"} if use_auth_token else None
        params = {}
        if filter is not None:
            if isinstance(filter, ModelFilter):
                params = self._unpack_model_filter(filter)
            else:
                params.update({"filter": filter})
            # Filtering implies fetching full model data.
            params.update({"full": True})
        if author is not None:
            params.update({"author": author})
        if search is not None:
            params.update({"search": search})
        if sort is not None:
            params.update({"sort": sort})
        if direction is not None:
            params.update({"direction": direction})
        if limit is not None:
            params.update({"limit": limit})
        if full is not None:
            if full:
                params.update({"full": True})
            elif "full" in params:
                # An explicit `full=False` overrides the default set when a
                # filter was given.
                del params["full"]
        if fetch_config is not None:
            params.update({"config": fetch_config})
        if cardData is not None:
            params.update({"cardData": cardData})
        r = requests.get(path, params=params, headers=headers)
        r.raise_for_status()
        d = r.json()
        res = [ModelInfo(**x) for x in d]
        if emissions_thresholds is not None:
            if cardData is None:
                raise ValueError(
                    "`emissions_thresholds` were passed without setting `cardData=True`."
                )
            else:
                # Keep only models whose carbon footprint lies in the range.
                return _filter_emissions(res, *emissions_thresholds)
        return res
def _unpack_model_filter(self, model_filter: ModelFilter):
"""
Unpacks a [`ModelFilter`] into something readable for `list_models`
"""
model_str = ""
tags = []
# Handling author
if model_filter.author is not None:
model_str = f"{model_filter.author}/"
# Handling model_name
if model_filter.model_name is not None:
model_str += model_filter.model_name
filter_tuple = []
# Handling tasks
if model_filter.task is not None:
filter_tuple.extend(
[model_filter.task]
if isinstance(model_filter.task, str)
else model_filter.task
)
# Handling dataset
if model_filter.trained_dataset is not None:
if not isinstance(model_filter.trained_dataset, (list, tuple)):
model_filter.trained_dataset = [model_filter.trained_dataset]
for dataset in model_filter.trained_dataset:
if "dataset:" not in dataset:
dataset = f"dataset:{dataset}"
filter_tuple.append(dataset)
# Handling library
if model_filter.library:
filter_tuple.extend(
[model_filter.library]
if isinstance(model_filter.library, str)
else model_filter.library
)
# Handling tags
if model_filter.tags:
tags.extend(
[model_filter.tags]
if isinstance(model_filter.tags, str)
else model_filter.tags
)
query_dict = {}
if model_str is not None:
query_dict["search"] = model_str
if len(tags) > 0:
query_dict["tags"] = tags
if model_filter.language is not None:
filter_tuple.append(model_filter.language)
query_dict["filter"] = tuple(filter_tuple)
return query_dict
    @_deprecate_positional_args
    def list_datasets(
        self,
        *,
        filter: Union[DatasetFilter, str, Iterable[str], None] = None,
        author: Optional[str] = None,
        search: Optional[str] = None,
        sort: Union[Literal["lastModified"], str, None] = None,
        direction: Optional[Literal[-1]] = None,
        limit: Optional[int] = None,
        cardData: Optional[bool] = None,
        full: Optional[bool] = None,
        use_auth_token: Optional[str] = None,
    ) -> List[DatasetInfo]:
        """
        Get the public list of all the datasets on huggingface.co

        Args:
            filter ([`DatasetFilter`] or `str` or `Iterable`, *optional*):
                A string or [`DatasetFilter`] which can be used to identify
                datasets on the hub.
            author (`str`, *optional*):
                A string which identify the author of the returned models
            search (`str`, *optional*):
                A string that will be contained in the returned models.
            sort (`Literal["lastModified"]` or `str`, *optional*):
                The key with which to sort the resulting datasets. Possible
                values are the properties of the `DatasetInfo` class.
            direction (`Literal[-1]` or `int`, *optional*):
                Direction in which to sort. The value `-1` sorts by descending
                order while all other values sort by ascending order.
            limit (`int`, *optional*):
                The limit on the number of datasets fetched. Leaving this option
                to `None` fetches all datasets.
            cardData (`bool`, *optional*):
                Whether to grab the metadata for the dataset as well. Can
                contain useful information such as the PapersWithCode ID.
            full (`bool`, *optional*):
                Whether to fetch all dataset data, including the `lastModified`
                and the `cardData`.
            use_auth_token (`bool` or `str`, *optional*):
                Whether to use the `auth_token` provided from the
                `huggingface_hub` cli. If not logged in, a valid `auth_token`
                can be passed in as a string.

        Example usage with the `filter` argument:
        ```python
        >>> from huggingface_hub import HfApi

        >>> api = HfApi()
        >>> # List all datasets
        >>> api.list_datasets()
        >>> # Get all valid search arguments
        >>> args = DatasetSearchArguments()
        >>> # List only the text classification datasets
        >>> api.list_datasets(filter="task_categories:text-classification")
        >>> # Using the `DatasetFilter`
        >>> filt = DatasetFilter(task_categories="text-classification")
        >>> # With `DatasetSearchArguments`
        >>> filt = DatasetFilter(task=args.task_categories.text_classification)
        >>> api.list_models(filter=filt)
        >>> # List only the datasets in russian for language modeling
        >>> api.list_datasets(
        ...     filter=("languages:ru", "task_ids:language-modeling")
        ... )
        >>> # Using the `DatasetFilter`
        >>> filt = DatasetFilter(languages="ru", task_ids="language-modeling")
        >>> # With `DatasetSearchArguments`
        >>> filt = DatasetFilter(
        ...     languages=args.languages.ru,
        ...     task_ids=args.task_ids.language_modeling,
        ... )
        >>> api.list_datasets(filter=filt)
        ```

        Example usage with the `search` argument:
        ```python
        >>> from huggingface_hub import HfApi

        >>> api = HfApi()
        >>> # List all datasets with "text" in their name
        >>> api.list_datasets(search="text")
        >>> # List all datasets with "text" in their name made by google
        >>> api.list_datasets(search="text", author="google")
        ```
        """
        path = f"{self.endpoint}/api/datasets"
        if use_auth_token:
            # Resolves/validates the token (handles `use_auth_token=True` by
            # reading the locally stored token).
            token, name = self._validate_or_retrieve_token(use_auth_token)
        headers = {"authorization": f"Bearer {token}"} if use_auth_token else None
        params = {}
        if filter is not None:
            if isinstance(filter, DatasetFilter):
                params = self._unpack_dataset_filter(filter)
            else:
                params.update({"filter": filter})
        if author is not None:
            params.update({"author": author})
        if search is not None:
            params.update({"search": search})
        if sort is not None:
            params.update({"sort": sort})
        if direction is not None:
            params.update({"direction": direction})
        if limit is not None:
            params.update({"limit": limit})
        if full is not None:
            if full:
                params.update({"full": True})
        if cardData is not None:
            # `cardData` is only returned as part of the full payload.
            if cardData:
                params.update({"full": True})
        r = requests.get(path, params=params, headers=headers)
        r.raise_for_status()
        d = r.json()
        return [DatasetInfo(**x) for x in d]
def _unpack_dataset_filter(self, dataset_filter: DatasetFilter):
"""
Unpacks a [`DatasetFilter`] into something readable for `list_datasets`
"""
dataset_str = ""
# Handling author
if dataset_filter.author is not None:
dataset_str = f"{dataset_filter.author}/"
# Handling dataset_name
if dataset_filter.dataset_name is not None:
dataset_str += dataset_filter.dataset_name
filter_tuple = []
data_attributes = [
"benchmark",
"language_creators",
"languages",
"multilinguality",
"size_categories",
"task_categories",
"task_ids",
]
for attr in data_attributes:
curr_attr = getattr(dataset_filter, attr)
if curr_attr is not None:
if not isinstance(curr_attr, (list, tuple)):
curr_attr = [curr_attr]
for data in curr_attr:
if f"{attr}:" not in data:
data = f"{attr}:{data}"
filter_tuple.append(data)
query_dict = {}
if dataset_str is not None:
query_dict["search"] = dataset_str
query_dict["filter"] = tuple(filter_tuple)
return query_dict
def list_metrics(self) -> List[MetricInfo]:
"""
Get the public list of all the metrics on huggingface.co
Returns:
`List[MetricInfo]`: a list of [`MetricInfo`] objects which.
"""
path = f"{self.endpoint}/api/metrics"
params = {}
r = requests.get(path, params=params)
r.raise_for_status()
d = r.json()
return [MetricInfo(**x) for x in d]
@_deprecate_positional_args
def model_info(
self,
repo_id: str,
*,
revision: Optional[str] = None,
token: Optional[str] = None,
timeout: Optional[float] = None,
securityStatus: Optional[bool] = None,
) -> ModelInfo:
"""
Get info on one specific model on huggingface.co
Model can be private if you pass an acceptable token or are logged in.
Args:
repo_id (`str`):
A namespace (user or an organization) and a repo name separated
by a `/`.
revision (`str`, *optional*):
The revision of the model repository from which to get the
information.
token (`str`, *optional*):
An authentication token [1]_.
timeout (`float`, *optional*):
Whether to set a timeout for the request to the Hub.
securityStatus (`bool`, *optional*):
Whether to retrieve the security status from the model
repository as well.
Returns:
[`ModelInfo`]: The model repository information.
References:
- [1] https://huggingface.co/settings/tokens
"""
if token is None:
token = HfFolder.get_token()
path = (
f"{self.endpoint}/api/models/{repo_id}"
if revision is None
else f"{self.endpoint}/api/models/{repo_id}/revision/{revision}"
)
headers = {"authorization": f"Bearer {token}"} if token is not None else None
status_query_param = {"securityStatus": True} if securityStatus else None
r = requests.get(
path, headers=headers, timeout=timeout, params=status_query_param
)
r.raise_for_status()
d = r.json()
return ModelInfo(**d)
@_deprecate_positional_args
def list_repo_files(
self,
repo_id: str,
*,
revision: Optional[str] = None,
repo_type: Optional[str] = None,
token: Optional[str] = None,
timeout: Optional[float] = None,
) -> List[str]:
"""
Get the list of files in a given repo.
Args:
repo_id (`str`):
A namespace (user or an organization) and a repo name separated
by a `/`.
revision (`str`, *optional*):
The revision of the model repository from which to get the
information.
repo_type (`str`, *optional*):
Set to `"dataset"` or `"space"` if uploading to a dataset or
space, `None` or `"model"` if uploading to a model. Default is
`None`.
token (`str`, *optional*):
An authentication token [1]_.
timeout (`float`, *optional*):
Whether to set a timeout for the request to the Hub.
Returns:
`List[str]`: the list of files in a given repository.
References:
- [1] https://huggingface.co/settings/tokens
"""
if repo_type is None or repo_type == "model":
info = self.model_info(
repo_id=repo_id, revision=revision, token=token, timeout=timeout
)
elif repo_type == "dataset":
info = self.dataset_info(
repo_id=repo_id, revision=revision, token=token, timeout=timeout
)
else:
raise ValueError("Spaces are not available yet.")
return [f.rfilename for f in info.siblings]
@_deprecate_positional_args
def dataset_info(
self,
repo_id: str,
*,
revision: Optional[str] = None,
token: Optional[str] = None,
timeout: Optional[float] = None,
) -> DatasetInfo:
"""
Get info on one specific dataset on huggingface.co
Dataset can be private if you pass an acceptable token.
Args:
repo_id (`str`):
A namespace (user or an organization) and a repo name separated
by a `/`.
revision (`str`, *optional*):
The revision of the dataset repository from which to get the
information.
token (`str`, *optional*):
An authentication token [1]_.
timeout (`float`, *optional*):
Whether to set a timeout for the request to the Hub.
Returns:
[`DatasetInfo`]: The dataset repository information.
References:
- [1] https://huggingface.co/settings/tokens
"""
if token is None:
token = HfFolder.get_token()
path = (
f"{self.endpoint}/api/datasets/{repo_id}"
if revision is None
else f"{self.endpoint}/api/datasets/{repo_id}/revision/{revision}"
)
headers = {"authorization": f"Bearer {token}"} if token is not None else None
params = {"full": "true"}
r = requests.get(path, headers=headers, params=params, timeout=timeout)
r.raise_for_status()
d = r.json()
return DatasetInfo(**d)
@_deprecate_positional_args
def create_repo(
    self,
    repo_id: str = None,
    *,
    token: Optional[str] = None,
    organization: Optional[str] = None,
    private: Optional[bool] = None,
    repo_type: Optional[str] = None,
    exist_ok: Optional[bool] = False,
    space_sdk: Optional[str] = None,
    name: Optional[str] = None,
) -> str:
    """Create an empty repo on the HuggingFace Hub.

    Args:
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.

            <Tip>

            Version added: 0.5

            </Tip>

        token (`str`, *optional*):
            An authentication token [1]_.
        organization (`str`, *optional*):
            Deprecated: pass the organization as part of `repo_id` instead.
        private (`bool`, *optional*):
            Whether the model repo should be private.
        repo_type (`str`, *optional*):
            Set to `"dataset"` or `"space"` if uploading to a dataset or
            space, `None` or `"model"` if uploading to a model. Default is
            `None`.
        exist_ok (`bool`, *optional*, defaults to `False`):
            If `True`, do not raise an error if repo already exists.
        space_sdk (`str`, *optional*):
            Choice of SDK to use if repo_type is "space". Can be
            "streamlit", "gradio", or "static".
        name (`str`, *optional*):
            Deprecated: use `repo_id` instead.

    Returns:
        `str`: URL to the newly created repo.

    References:

    - [1] https://huggingface.co/settings/tokens
    """
    # Translate the deprecated (name, organization) pair into the new
    # repo_id form, then resolve the token (which may also yield the name).
    name, organization = _validate_repo_id_deprecation(repo_id, name, organization)
    path = f"{self.endpoint}/api/repos/create"
    token, name = self._validate_or_retrieve_token(
        token, name, function_name="create_repo"
    )
    # `name` itself may encode a repo type and namespace
    # (e.g. "datasets/org/repo"); cross-check against explicit kwargs.
    checked_name = repo_type_and_id_from_hf_id(name)
    if (
        repo_type is not None
        and checked_name[0] is not None
        and repo_type != checked_name[0]
    ):
        raise ValueError(
            f"""Passed `repo_type` and found `repo_type` are not the same ({repo_type},
{checked_name[0]}).
Please make sure you are expecting the right type of repository to
exist."""
        )
    if (
        organization is not None
        and checked_name[1] is not None
        and organization != checked_name[1]
    ):
        raise ValueError(
            f"""Passed `organization` and `name` organization are not the same ({organization},
{checked_name[1]}).
Please either include the organization in only `name` or the
`organization` parameter, such as
`api.create_repo({checked_name[0]}, organization={organization})` or
`api.create_repo({checked_name[1]}/{checked_name[2]})`"""
        )
    # Explicit kwargs win; fall back to whatever was embedded in `name`.
    repo_type = repo_type or checked_name[0]
    organization = organization or checked_name[1]
    name = checked_name[2]
    if repo_type not in REPO_TYPES:
        raise ValueError("Invalid repo type")
    # NOTE: `json` shadows the stdlib module name; it is only the request payload.
    json = {"name": name, "organization": organization, "private": private}
    if repo_type is not None:
        json["type"] = repo_type
    # Space repos additionally require a valid SDK choice.
    if repo_type == "space":
        if space_sdk is None:
            raise ValueError(
                "No space_sdk provided. `create_repo` expects space_sdk to be one of "
                f"{SPACES_SDK_TYPES} when repo_type is 'space'`"
            )
        if space_sdk not in SPACES_SDK_TYPES:
            raise ValueError(
                f"Invalid space_sdk. Please choose one of {SPACES_SDK_TYPES}."
            )
        json["sdk"] = space_sdk
    if space_sdk is not None and repo_type != "space":
        warnings.warn(
            "Ignoring provided space_sdk because repo_type is not 'space'."
        )
    # Test-only knob: forwarded so the server lowers the LFS multipart threshold.
    if getattr(self, "_lfsmultipartthresh", None):
        json["lfsmultipartthresh"] = self._lfsmultipartthresh
    r = requests.post(
        path,
        headers={"authorization": f"Bearer {token}"},
        json=json,
    )
    try:
        r.raise_for_status()
    except HTTPError as err:
        # 409 = repo already exists; tolerated when exist_ok is set.
        if not (exist_ok and err.response.status_code == 409):
            try:
                # Enrich the raised error with the server-provided message.
                additional_info = r.json().get("error", None)
                if additional_info:
                    new_err = f"{err.args[0]} - {additional_info}"
                    err.args = (new_err,) + err.args[1:]
            except ValueError:
                # Response body was not JSON; re-raise the bare HTTP error.
                pass
            raise err
    d = r.json()
    return d["url"]
@_deprecate_positional_args
def delete_repo(
    self,
    repo_id: str = None,
    *,
    token: Optional[str] = None,
    organization: Optional[str] = None,
    repo_type: Optional[str] = None,
    name: str = None,
):
    """
    Delete a repo from the HuggingFace Hub. CAUTION: this is irreversible.

    Args:
        repo_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`.

            <Tip>

            Version added: 0.5

            </Tip>

        token (`str`, *optional*):
            An authentication token [1]_.
        organization (`str`, *optional*):
            Deprecated: pass the organization as part of `repo_id` instead.
        repo_type (`str`, *optional*):
            Set to `"dataset"` or `"space"` if deleting a dataset or
            space, `None` or `"model"` if deleting a model.
        name (`str`, *optional*):
            Deprecated: use `repo_id` instead.

    References:

    - [1] https://huggingface.co/settings/tokens
    """
    # Translate deprecated (name, organization) into repo_id form, then
    # resolve the token (which may also yield the name).
    name, organization = _validate_repo_id_deprecation(repo_id, name, organization)
    path = f"{self.endpoint}/api/repos/delete"
    token, name = self._validate_or_retrieve_token(
        token, name, function_name="delete_repo"
    )
    # `name` may itself encode a repo type and namespace; cross-check it
    # against the explicit keyword arguments.
    checked_name = repo_type_and_id_from_hf_id(name)
    if (
        repo_type is not None
        and checked_name[0] is not None
        and repo_type != checked_name[0]
    ):
        raise ValueError(
            f"""Passed `repo_type` and found `repo_type` are not the same ({repo_type},
{checked_name[0]}).
Please make sure you are expecting the right type of repository to
exist."""
        )
    if (
        organization is not None
        and checked_name[1] is not None
        and organization != checked_name[1]
    ):
        # Fixed copy-paste bug: this message previously suggested calling
        # `api.create_repo(...)` even though the user is deleting a repo.
        raise ValueError(
            "Passed `organization` and `name` organization are not the same"
            f" ({organization}, {checked_name[1]})."
            "\nPlease either include the organization in only `name` or the"
            " `organization` parameter, such as "
            f"`api.delete_repo({checked_name[0]}, organization={organization})` "
            f"or `api.delete_repo({checked_name[1]}/{checked_name[2]})`"
        )
    # Explicit kwargs win; fall back to whatever was embedded in `name`.
    repo_type = repo_type or checked_name[0]
    organization = organization or checked_name[1]
    name = checked_name[2]
    if repo_type not in REPO_TYPES:
        raise ValueError("Invalid repo type")
    # NOTE: `json` shadows the stdlib module name; it is only the payload.
    json = {"name": name, "organization": organization}
    if repo_type is not None:
        json["type"] = repo_type
    r = requests.delete(
        path,
        headers={"authorization": f"Bearer {token}"},
        json=json,
    )
    try:
        r.raise_for_status()
    except requests.exceptions.RequestException as e:
        # Surface the server-side error message (JSON "error" field when
        # available, raw body otherwise) in the re-raised exception.
        try:
            message = e.response.json()["error"]
        except JSONDecodeError:
            message = e.response.text
        raise type(e)(message) from e
@_deprecate_positional_args
def update_repo_visibility(
    self,
    repo_id: str = None,
    private: bool = False,
    *,
    token: Optional[str] = None,
    organization: Optional[str] = None,
    repo_type: Optional[str] = None,
    name: str = None,
) -> Dict[str, bool]:
    """Update the visibility setting of a repository.

    Args:
        repo_id (`str`, *optional*):
            A namespace (user or an organization) and a repo name separated
            by a `/`.

            <Tip>

            Version added: 0.5

            </Tip>

        private (`bool`, *optional*, defaults to `False`):
            Whether the model repo should be private.
        token (`str`, *optional*):
            An authentication token [1]_.
        repo_type (`str`, *optional*):
            Set to `"dataset"` or `"space"` if uploading to a dataset or
            space, `None` or `"model"` if uploading to a model. Default is
            `None`.

    Returns:
        The HTTP response in json.

    References:

    - [1] https://huggingface.co/settings/tokens
    """
    if repo_type not in REPO_TYPES:
        raise ValueError("Invalid repo type")

    # Resolve deprecated arguments and the authentication token.
    name, organization = _validate_repo_id_deprecation(repo_id, name, organization)
    token, name = self._validate_or_retrieve_token(
        token, name, function_name="update_repo_visibility"
    )

    # Repos without an explicit organization live in the caller's namespace.
    if organization is None:
        namespace = self.whoami(token)["name"]
    else:
        namespace = organization

    # Datasets and spaces carry a URL prefix; models do not.
    prefix = f"{self.endpoint}/api/" + REPO_TYPES_URL_PREFIXES.get(repo_type, "")
    settings_url = f"{prefix}{namespace}/{name}/settings"

    response = requests.put(
        settings_url,
        headers={"authorization": f"Bearer {token}"},
        json={"private": private},
    )
    response.raise_for_status()
    return response.json()
@_deprecate_positional_args
def move_repo(
    self,
    from_id: str,
    to_id: str,
    *,
    repo_type: Optional[str] = None,
    token: Optional[str] = None,
):
    """
    Moving a repository from namespace1/repo_name1 to namespace2/repo_name2

    Note there are certain limitations. For more information about moving
    repositories, please see
    https://hf.co/docs/hub/main#how-can-i-rename-or-transfer-a-repo.

    Args:
        from_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`. Original repository identifier.
        to_id (`str`):
            A namespace (user or an organization) and a repo name separated
            by a `/`. Final repository identifier.
        repo_type (`str`, *optional*):
            Set to `"dataset"` or `"space"` if uploading to a dataset or
            space, `None` or `"model"` if uploading to a model. Default is
            `None`.
        token (`str`, *optional*):
            An authentication token [1]_.

    References:

    - [1] https://huggingface.co/settings/tokens
    """
    token, name = self._validate_or_retrieve_token(token)

    # Both identifiers must be fully qualified ("namespace/repo_name").
    for repo_identifier in (from_id, to_id):
        if len(repo_identifier.split("/")) != 2:
            raise ValueError(
                f"Invalid repo_id: {repo_identifier}. It should have a namespace (:namespace:/:repo_name:)"
            )

    response = requests.post(
        f"{self.endpoint}/api/repos/move",
        headers={"authorization": f"Bearer {token}"},
        json={"fromRepo": from_id, "toRepo": to_id, "type": repo_type},
    )
    try:
        response.raise_for_status()
    except HTTPError as e:
        # Re-raise with the server's message when a body is available.
        if not response.text:
            raise e
        raise HTTPError(
            f"{response.status_code} Error Message: {response.text}. For additional documentation "
            "please see https://hf.co/docs/hub/main#how-can-i-rename-or-transfer-a-repo."
        ) from e
    logger.info(
        "Accepted transfer request. You will get an email once this is successfully completed."
    )
@_deprecate_positional_args
def upload_file(
    self,
    *,
    path_or_fileobj: Union[str, bytes, IO],
    path_in_repo: str,
    repo_id: str,
    token: Optional[str] = None,
    repo_type: Optional[str] = None,
    revision: Optional[str] = None,
    identical_ok: bool = True,
) -> str:
    """
    Upload a local file (up to 5GB) to the given repo. The upload is done
    through a HTTP post request, and doesn't require git or git-lfs to be
    installed.

    Args:
        path_or_fileobj (`str`, `bytes`, or `IO`):
            Path to a file on the local machine or binary data stream /
            fileobj / buffer.
        path_in_repo (`str`):
            Relative filepath in the repo, for example:
            `"checkpoints/1fec34a/weights.bin"`
        repo_id (`str`):
            The repository to which the file will be uploaded, for example:
            `"username/custom_transformers"`
        token (`str`, *optional*):
            Authentication token, obtained with `HfApi.login` method. Will
            default to the stored token.
        repo_type (`str`, *optional*):
            Set to `"dataset"` or `"space"` if uploading to a dataset or
            space, `None` or `"model"` if uploading to a model. Default is
            `None`.
        revision (`str`, *optional*):
            The git revision to commit from. Defaults to the head of the
            `"main"` branch.
        identical_ok (`bool`, *optional*, defaults to `True`):
            When set to false, will raise an [HTTPError](
            https://2.python-requests.org/en/master/api/#requests.HTTPError)
            when the file you're trying to upload already exists on the hub
            and its content did not change.

    Returns:
        `str`: The URL to visualize the uploaded file on the hub

    <Tip>

    Raises the following errors:

    - [`HTTPError`](https://2.python-requests.org/en/master/api/#requests.HTTPError)
      if the HuggingFace API returned an error
    - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
      if some parameter value is invalid

    </Tip>

    Example usage:

    ```python
    >>> with open("./local/filepath", "rb") as fobj:
    ...     upload_file(
    ...         path_or_fileobj=fobj,
    ...         path_in_repo="remote/file/path.h5",
    ...         repo_id="username/my-dataset",
    ...         repo_type="dataset",
    ...         token="my_token",
    ...     )
    "https://huggingface.co/datasets/username/my-dataset/blob/main/remote/file/path.h5"

    >>> upload_file(
    ...     path_or_fileobj=".\\\\local\\\\file\\\\path",
    ...     path_in_repo="remote/file/path.h5",
    ...     repo_id="username/my-model",
    ...     token="my_token",
    ... )
    "https://huggingface.co/username/my-model/blob/main/remote/file/path.h5"
    ```
    """
    if repo_type not in REPO_TYPES:
        raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")
    # Backward-compatibility shim: `upload_file` used to take the token as
    # the FIRST positional argument. If token validation fails but the
    # value passed as `path_or_fileobj` looks like a valid token, assume the
    # caller used the old ordering and rotate the arguments back into place.
    try:
        token, name = self._validate_or_retrieve_token(
            token, function_name="upload_file"
        )
    except ValueError:  # if token is invalid or organization token
        if self._is_valid_token(path_or_fileobj):
            warnings.warn(
                "`upload_file` now takes `token` as an optional positional argument. "
                "Be sure to adapt your code!",
                FutureWarning,
            )
            token, path_or_fileobj, path_in_repo, repo_id = (
                path_or_fileobj,
                path_in_repo,
                repo_id,
                token,
            )
        else:
            raise ValueError("Invalid token passed!")
    # Validate path_or_fileobj: either a path to an existing file, or a
    # binary buffer / stream (text-mode file objects are rejected).
    if isinstance(path_or_fileobj, str):
        path_or_fileobj = os.path.normpath(os.path.expanduser(path_or_fileobj))
        if not os.path.isfile(path_or_fileobj):
            raise ValueError(f"Provided path: '{path_or_fileobj}' is not a file")
    elif not isinstance(path_or_fileobj, (RawIOBase, BufferedIOBase, bytes)):
        # ^^ Test from: https://stackoverflow.com/questions/44584829/how-to-determine-if-file-is-opened-in-binary-or-text-mode
        raise ValueError(
            "path_or_fileobj must be either an instance of str or BinaryIO. "
            "If you passed a fileobj, make sure you've opened the file in binary mode."
        )
    # Datasets and spaces carry a URL prefix in the upload endpoint.
    if repo_type in REPO_TYPES_URL_PREFIXES:
        repo_id = REPO_TYPES_URL_PREFIXES[repo_type] + repo_id
    revision = revision if revision is not None else "main"
    path = f"{self.endpoint}/api/{repo_id}/upload/{revision}/{path_in_repo}"
    headers = {"authorization": f"Bearer {token}"} if token is not None else None
    # Stream the file from disk when given a path; otherwise post the
    # buffer/stream directly.
    if isinstance(path_or_fileobj, str):
        with open(path_or_fileobj, "rb") as bytestream:
            r = requests.post(path, headers=headers, data=bytestream)
    else:
        r = requests.post(path, headers=headers, data=path_or_fileobj)
    try:
        r.raise_for_status()
    except HTTPError as err:
        # 409 = identical file already present; treated as success unless
        # the caller opted out via identical_ok=False.
        if identical_ok and err.response.status_code == 409:
            from .file_download import hf_hub_url

            return hf_hub_url(
                repo_id, path_in_repo, revision=revision, repo_type=repo_type
            )
        else:
            raise err
    d = r.json()
    return d["url"]
@_deprecate_positional_args
def delete_file(
    self,
    path_in_repo: str,
    repo_id: str,
    *,
    token: Optional[str] = None,
    repo_type: Optional[str] = None,
    revision: Optional[str] = None,
):
    """
    Deletes a file in the given repo.

    Args:
        path_in_repo (`str`):
            Relative filepath in the repo, for example:
            `"checkpoints/1fec34a/weights.bin"`
        repo_id (`str`):
            The repository from which the file will be deleted, for example:
            `"username/custom_transformers"`
        token (`str`, *optional*):
            Authentication token, obtained with `HfApi.login` method. Will
            default to the stored token.
        repo_type (`str`, *optional*):
            Set to `"dataset"` or `"space"` if the file is in a dataset or
            space, `None` or `"model"` if in a model. Default is `None`.
        revision (`str`, *optional*):
            The git revision to commit from. Defaults to the head of the
            `"main"` branch.

    <Tip>

    Raises the following errors:

    - [`HTTPError`](https://2.python-requests.org/en/master/api/#requests.HTTPError)
      if the HuggingFace API returned an error
    - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
      if some parameter value is invalid

    </Tip>
    """
    if repo_type not in REPO_TYPES:
        raise ValueError(f"Invalid repo type, must be one of {REPO_TYPES}")

    token, _ = self._validate_or_retrieve_token(token)

    # Datasets and spaces carry a URL prefix in the delete endpoint.
    prefixed_repo_id = repo_id
    if repo_type in REPO_TYPES_URL_PREFIXES:
        prefixed_repo_id = REPO_TYPES_URL_PREFIXES[repo_type] + repo_id

    target_revision = "main" if revision is None else revision
    delete_url = (
        f"{self.endpoint}/api/{prefixed_repo_id}/delete/"
        f"{target_revision}/{path_in_repo}"
    )

    response = requests.delete(
        delete_url, headers={"authorization": f"Bearer {token}"}
    )
    response.raise_for_status()
@_deprecate_positional_args
def get_full_repo_name(
    self,
    model_id: str,
    *,
    organization: Optional[str] = None,
    token: Optional[str] = None,
):
    """
    Returns the repository name for a given model ID and optional
    organization.

    Args:
        model_id (`str`):
            The name of the model. May already be namespaced
            ("namespace/model"), in which case it is returned unchanged.
        organization (`str`, *optional*):
            If passed, the repository name will be in the organization
            namespace instead of the user namespace.
        token (`str`, *optional*):
            The Hugging Face authentication token

    Returns:
        `str`: The repository name in the user's namespace
        ({username}/{model_id}) if no organization is passed, and under the
        organization namespace ({organization}/{model_id}) otherwise.
    """
    if organization is not None:
        return f"{organization}/{model_id}"
    if "/" in model_id:
        # `model_id` already carries its namespace. Fixed: the previous code
        # re-prefixed the namespace, yielding e.g. "user/user/model".
        return model_id
    # Otherwise resolve the caller's username from the token.
    username = self.whoami(token=token)["name"]
    return f"{username}/{model_id}"
class HfFolder:
    """Helper for persisting the Hugging Face authentication token on disk."""

    # Location where the user token is cached.
    path_token = expanduser("~/.huggingface/token")

    @classmethod
    def save_token(cls, token):
        """
        Save token, creating folder as needed.

        Args:
            token (`str`):
                The token to save to the [`HfFolder`]
        """
        parent_dir = os.path.dirname(cls.path_token)
        os.makedirs(parent_dir, exist_ok=True)
        with open(cls.path_token, "w+") as token_file:
            token_file.write(token)

    @classmethod
    def get_token(cls) -> Optional[str]:
        """
        Get token or None if not existent.

        Note that a token can be also provided using the
        `HUGGING_FACE_HUB_TOKEN` environment variable, which takes
        precedence over the on-disk token.

        Returns:
            `str` or `None`: The token, `None` if it doesn't exist.
        """
        env_token = os.environ.get("HUGGING_FACE_HUB_TOKEN")
        if env_token is not None:
            return env_token
        try:
            with open(cls.path_token, "r") as token_file:
                return token_file.read()
        except FileNotFoundError:
            return None

    @classmethod
    def delete_token(cls):
        """
        Deletes the token from storage. Does not fail if token does not exist.
        """
        try:
            os.remove(cls.path_token)
        except FileNotFoundError:
            pass
# Default module-level client: a shared `HfApi` instance whose bound methods
# are re-exported below, so callers can use e.g. `huggingface_hub.whoami(...)`
# without instantiating `HfApi` themselves.
api = HfApi()

# Session / account helpers.
login = api.login
logout = api.logout
set_access_token = api.set_access_token
unset_access_token = api.unset_access_token
whoami = api.whoami

# Discovery / metadata helpers.
list_models = api.list_models
model_info = api.model_info
list_repo_files = api.list_repo_files

list_datasets = api.list_datasets
dataset_info = api.dataset_info

list_metrics = api.list_metrics

get_model_tags = api.get_model_tags
get_dataset_tags = api.get_dataset_tags

# Repository management helpers.
create_repo = api.create_repo
delete_repo = api.delete_repo
update_repo_visibility = api.update_repo_visibility
move_repo = api.move_repo
upload_file = api.upload_file
delete_file = api.delete_file
get_full_repo_name = api.get_full_repo_name
|
/* No-op handler for user interrupts. Presumably a default stub meant to be
 * overridden or replaced at link time — TODO confirm against the build setup.
 * Declared with (void) so the prototype explicitly takes no arguments
 * (empty parentheses mean "unspecified parameters" in pre-C23 C). */
void user_interrupt(void) {}
|
from django.core.mail import send_mail
from django.conf import settings
from django.contrib.auth.models import Group,User
from django.shortcuts import render,get_object_or_404
from django.utils import timezone
import httplib, urllib, base64
from json import JSONEncoder
from json import JSONDecoder
from time import time
import math
import datetime
now = datetime.datetime.now(timezone.utc)
def get_rank(t,likes,Comment_mean=0,tags=0,gender='female'):
    """Compute a feed-ranking score for a post (higher = ranked higher).

    t: timezone-aware datetime associated with the post — presumably its
       creation time; TODO confirm against callers.
    likes: like count; contributes log2(likes) when positive.
    Comment_mean: mean comment score, weighted x2.
    tags: tag count/score, weighted x2.
    gender: posts with gender 'female' get a flat +2.6 boost.
    Returns a float score.
    """
    kk=0.0
    # Age of the post in seconds, negative for past timestamps, so newer
    # posts score higher after the /10000 scaling below.
    # NOTE(review): this local shadows the `time` function imported at
    # module level (`from time import time`) — consider renaming.
    time = (t-datetime.datetime.now(timezone.utc)).total_seconds()
    # print time
    # time = (t-datetime.datetime(1970,1,1)).total_seconds()
    # time = t.total_seconds()
    if(likes>0):
        kk=kk+math.log(likes,2)
    kk=kk+(time/10000.0)
    # Comments and tags share the same weight; +10000 keeps scores positive.
    kk=kk+Comment_mean*2 + tags*2 +10000
    if gender=='female':
        kk = kk + 2.6;
    print kk
    return kk
def send_email(user, password):
    """Send the Colog welcome/verification email to a newly created user.

    Returns True when Django's send_mail reports the message as sent,
    False otherwise.
    NOTE(review): the email includes the plaintext password — a security
    concern worth revisiting.
    """
    print "Sending Email:"
    mail_title = 'Welcome to Colog!'
    message = 'Hi '+user.first_name+'. Kindly verify by logging in using your username '+user.username+' and password '+password+'.\n\nRegards\nColog Team\n'
    print message
    # fail_silently=False: SMTP errors propagate as exceptions instead of
    # being swallowed.
    if send_mail(mail_title, message, settings.EMAIL_HOST_USER, [user.email], fail_silently=False):
        print "Email Sent"
        return True
        # get_object_or_404(Group,name='Company To Verify').user_set.remove(user)
        # get_object_or_404(Group,name='Company Head').user_set.add(user)
    else:
        print "Error Sending Email"
        return False
def get_experience(text="My name is Robin Chawla. I study in Computer Science and Engineering in Banaras Hindu Unversity. I cracked IIT-JEE with rank 912 in year 2014. I was very happy at that time"):
    """Classify the sentiment of `text` into a coarse experience label.

    Posts the text to the Microsoft Cognitive Services Text Analytics v2.0
    sentiment endpoint and maps the returned 0..1 score onto
    Worst / Bad / Average / Good / Wonderful.
    Returns "Internet Problem." when the request fails for any reason.
    """
    # Request body in the Text Analytics "documents" format; the id is
    # arbitrary since only one document is submitted.
    body={
        "documents": [
            {
                "id": "abcd",
                "text": text
            }
        ]
    }
    headers = {
        # Request headers
        'Content-Type': 'application/json',
        # SECURITY(review): hard-coded subscription key checked into source;
        # should be moved to settings or an environment variable.
        'Ocp-Apim-Subscription-Key': '183f71b817464e95a22f669bcfad216e',
    }
    strng=JSONEncoder().encode(body)
    params = urllib.urlencode({
    })
    experience="Internet Problem."
    try:
        conn = httplib.HTTPSConnection('westus.api.cognitive.microsoft.com')
        conn.request("POST", "/text/analytics/v2.0/sentiment?%s" % params, strng, headers)
        response = conn.getresponse()
        data = response.read()
        newdict=JSONDecoder().decode(data)
        # Sentiment score in [0, 1]; bucket boundaries chosen by the author.
        L=newdict["documents"][0]["score"]
        if(L<=0.15):
            experience="Worst"
        elif(L<=0.3):
            experience="Bad"
        elif(L<=0.5):
            experience="Average"
        elif(L<=0.75):
            experience="Good"
        else:
            experience="Wonderful"
        conn.close()
    except Exception as e:
        # Any failure (network, JSON shape, missing key) falls back to the
        # default "Internet Problem." label.
        print("Internet Connection Error")
    return experience
def get_tags(text="My name is Robin Chawla. I study in Computer Scince and Engineering in Banaras Hindu Unversity. I cracked IIT-JEE with rank 912 in year 2014. I was very happy at that time"):
    """Extract key phrases from `text` as a list of strings.

    Posts the text to the Microsoft Cognitive Services Text Analytics v2.0
    keyPhrases endpoint. Returns an empty list when the request fails.
    """
    # Request body in the Text Analytics "documents" format; the id is
    # arbitrary since only one document is submitted.
    body={
        "documents": [
            {
                "id": "abcd",
                "text": text
            }
        ]
    }
    headers = {
        # Request headers
        'Content-Type': 'application/json',
        # SECURITY(review): hard-coded subscription key checked into source;
        # should be moved to settings or an environment variable.
        'Ocp-Apim-Subscription-Key': '183f71b817464e95a22f669bcfad216e',
    }
    strng=JSONEncoder().encode(body)
    params = urllib.urlencode({
    })
    L = []
    try:
        conn = httplib.HTTPSConnection('westus.api.cognitive.microsoft.com')
        conn.request("POST", "/text/analytics/v2.0/keyPhrases?%s" % params, strng, headers)
        response = conn.getresponse()
        data = response.read()
        # print(data)
        newdict=JSONDecoder().decode(data)
        # print(newdict)
        L=newdict["documents"][0]["keyPhrases"]
        conn.close()
    except Exception as e:
        # Any failure (network, JSON shape, missing key) yields an empty list.
        print("Internet Connection Error")
    return L
|
from google import api_core
from google.cloud import storage
def main(args, settings):
    """Upload the timestamped backup archive to the configured GCS bucket.

    Reads GCS_BUCKET_NAME, BACKUP_DIR and TIMESTAMP from `settings`, then
    uploads "<BACKUP_DIR>/<TIMESTAMP>.tar.gz" as "<TIMESTAMP>.tar.gz".
    Returns True on success, False on any failure (missing archive, missing
    bucket, insufficient permissions, or any other error).
    """
    bucket_name = settings.get('GCS_BUCKET_NAME')
    gcs_file_name = '{0}.tar.gz'.format(settings.get('TIMESTAMP'))
    archive_file = '{0}/{1}'.format(settings.get('BACKUP_DIR'), gcs_file_name)

    client = storage.Client()
    try:
        target_bucket = client.get_bucket(bucket_name)
        target_bucket.blob(gcs_file_name).upload_from_filename(archive_file)
    except FileNotFoundError:  # noqa: F821
        # Local archive is missing.
        print("The file: {0} was not found".format(gcs_file_name))
        return False
    except api_core.exceptions.Forbidden as e:
        # Service account lacks the required role.
        print("Permission denied: {0}, please grant `Storage Admin` to service account you used".format(str(e)))
        return False
    except api_core.exceptions.NotFound:
        print("The gcs bucket: {0} doesn't exist".format(bucket_name))
        return False
    except Exception as e:
        # Catch-all so the caller only ever sees a boolean result.
        print("Exception: {0}".format(str(e)))
        return False

    print("Upload to gcs: was successful")
    return True
|
'use strict'

// Mock the real filesystem so the module under test goes through jest's
// auto-mocked fs.promises API instead of touching disk.
jest.mock('fs')
const { readFile, writeFile } = require('fs').promises
const { readServerProperties, writeServerProperties } = require('./serverproperties')

// Parsing expectations: '#'-prefixed comment lines are dropped, and only the
// first '=' separates key from value, so values may themselves contain '='.
test('readServerProperties', async () => {
  readFile.mockResolvedValue('#notaproperty\nproperty1=value\nproperty2=value=value')
  await expect(readServerProperties()).resolves.toStrictEqual({
    property1: 'value',
    property2: 'value=value'
  })
  // The module should read the shared server.properties file as UTF-8.
  expect(readFile.mock.calls[0][0]).toBe('../test/server/common/server.properties')
  expect(readFile.mock.calls[0][1]).toBe('utf8')
})

// Serialization expectations: keys are written one 'key=value' per line,
// in sorted key order (property1 before property2 despite input order).
test('writeServerProperties', async () => {
  await writeServerProperties({
    property2: 'value=value',
    property1: 'value'
  })
  expect(writeFile).toHaveBeenCalledWith('../test/server/common/server.properties', 'property1=value\nproperty2=value=value', 'utf8')
})
|
from aiidalab_widgets_base.utils import string_range_to_list, list_to_string_range
from aiida.orm import StructureData , SinglefileData, Str
from aiida.engine import calcfunction
from ase import Atoms
import numpy as np
@calcfunction
def make_geom_file(structure, filename,
                   selection=None,
                   spin_u=lambda: Str(''),
                   spin_d=lambda: Str(''),
                   ic_plane_z=None):
    """Build an .xyz geometry file (as an AiiDA SinglefileData) from a structure.

    structure: AiiDA StructureData; converted to ASE atoms.
    filename: AiiDA Str with the desired file name.
    selection: optional index selection restricting which atoms are written.
    spin_u / spin_d: AiiDA Str range strings (e.g. "1..3 5") naming atom
        indices whose element symbol gets a "1" (spin up) or "2" (spin down)
        suffix appended for the spin guess.
    ic_plane_z: optional AiiDA Float/Int; when given, a mirror image of all
        atoms reflected about z = ic_plane_z is appended as ghost atoms
        (symbol suffixed with "G") for image-charge calculations.
    Returns: SinglefileData wrapping the generated file.
    """
    import tempfile
    import shutil
    from io import StringIO

    filename = filename.value
    ### the two ways of defining spin seem not to be compatible
    ###spin from ase struct
    #spin_guess = extract_spin_guess(structure)
    ###spin_from widgets
    # Two index lists: [spin-up indices, spin-down indices].
    spin_guess = [string_range_to_list(spin_u.value)[0],string_range_to_list(spin_d.value)[0]]
    # Zero out ASE tags so they don't leak into the written file.
    if selection is None:
        atoms = structure.get_ase()
        tags=np.zeros(len(atoms))
        atoms.set_tags(tags)
    else:
        atoms = structure.get_ase()[selection]
        tags=np.zeros(len(atoms))
        atoms.set_tags(tags)
    n_atoms = len(atoms)
    # Write into a scratch directory; the file is wrapped into SinglefileData
    # below and the directory removed afterwards.
    tmpdir = tempfile.mkdtemp()
    file_path = tmpdir + "/" + filename
    # Render the structure to xyz in memory so individual lines can be edited.
    orig_file = StringIO()
    atoms.write(orig_file, format='xyz')
    orig_file.seek(0)
    all_lines = orig_file.readlines()
    # xyz layout: line 0 = atom count, line 1 = comment, rest = atom lines.
    comment = all_lines[1].strip()
    orig_lines = all_lines[2:]
    #### modify specie of atoms for spin guess
    modif_lines = []
    for i_line, line in enumerate(orig_lines):
        new_line = line
        lsp = line.split()
        #if spin_guess is not None:
        # Suffix the element symbol with "1"/"2" for spin-up/spin-down atoms.
        if i_line in spin_guess[0]:
            new_line = lsp[0]+"1 " + " ".join(lsp[1:])+"\n"
        if i_line in spin_guess[1]:
            new_line = lsp[0]+"2 " + " ".join(lsp[1:])+"\n"
        modif_lines.append(new_line)
    #### adding ghost atoms for image charge calculations
    #### chemical symbol will have a G at the end. No spin guess
    imag_lines = []
    if ic_plane_z is not None:
        # Mirror all atoms about the plane z = ic_plane_z.
        image = atoms.copy()
        image.positions[:, 2] = 2*ic_plane_z.value - atoms.positions[:, 2]

        imag_file = StringIO()
        image.write(imag_file, format='xyz')
        imag_file.seek(0)
        imag_lines = imag_file.readlines()[2:]
        imag_lines = [r.split()[0]+"G "+" ".join(r.split()[1:])+"\n" for r in imag_lines]

        n_atoms = 2*len(atoms)

    final_str = "%d\n%s\n" % (n_atoms, comment) + "".join(modif_lines+imag_lines)

    with open(file_path, 'w') as f:
        f.write(final_str)
    aiida_f = SinglefileData(file=file_path)
    shutil.rmtree(tmpdir)
    return aiida_f
def extract_spin_guess(struct_node):
    """Read spin-up/spin-down site indices from a structure node.

    Sites whose kind name ends in '1' are counted as spin up, those ending
    in '2' as spin down; all other sites are ignored.

    Returns:
        [spin_up_indices, spin_down_indices] as two lists of ints.
    """
    spin_up = []
    spin_down = []
    for index, site in enumerate(struct_node.attributes['sites']):
        suffix = site['kind_name'][-1]
        if suffix == '1':
            spin_up.append(index)
        elif suffix == '2':
            spin_down.append(index)
    return [spin_up, spin_down]
# ==========================================================================
|
import { appSchema, tableSchema } from '@nozbe/watermelondb';

// WatermelonDB schema for the app's local database.
// Bump `version` (and ship a migration) whenever tables/columns change.
export default appSchema({
	version: 6,
	tables: [
		// Accounts known to the app (one row per logged-in user).
		tableSchema({
			name: 'users',
			columns: [
				{ name: 'token', type: 'string', isOptional: true },
				{ name: 'username', type: 'string', isOptional: true },
				{ name: 'name', type: 'string', isOptional: true },
				{ name: 'language', type: 'string', isOptional: true },
				{ name: 'status', type: 'string', isOptional: true },
				{ name: 'statusText', type: 'string', isOptional: true },
				{ name: 'roles', type: 'string', isOptional: true }
			]
		}),
		// Servers the app can connect to, plus cached per-server settings
		// (upload limits, lock-screen preferences, licensing modules, ...).
		tableSchema({
			name: 'servers',
			columns: [
				{ name: 'name', type: 'string', isOptional: true },
				{ name: 'icon_url', type: 'string', isOptional: true },
				{ name: 'use_real_name', type: 'boolean', isOptional: true },
				{ name: 'file_upload_media_type_white_list', type: 'string', isOptional: true },
				{ name: 'file_upload_max_file_size', type: 'number', isOptional: true },
				{ name: 'rooms_updated_at', type: 'number', isOptional: true },
				{ name: 'version', type: 'string', isOptional: true },
				{ name: 'last_local_authenticated_session', type: 'number', isOptional: true },
				{ name: 'auto_lock', type: 'boolean', isOptional: true },
				{ name: 'auto_lock_time', type: 'number', isOptional: true },
				{ name: 'biometry', type: 'boolean', isOptional: true },
				{ name: 'unique_id', type: 'string', isOptional: true },
				{ name: 'enterprise_modules', type: 'string', isOptional: true }
			]
		})
	]
});
|
// Auto-generated Doxygen search index fragment: each entry maps a
// lowercased symbol name to its display name and documentation anchor(s).
// Do not edit by hand — regenerate with Doxygen instead.
var searchData=
[
  ['path',['path',['../structs___p_w_m.html#a44196e6a5696d10442c29e639437196e',1,'s_PWM']]],
  ['pwm',['pwm',['../structs___p_i_n.html#a6208909b76c84a75b931c78e24a0cc42',1,'s_PIN']]],
  ['pwm_5fpresent',['pwm_present',['../structs___p_i_n.html#ad9d4ffb703dd541884e772f4490febd9',1,'s_PIN']]]
];
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 7
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from setuptools import setup, find_packages # noqa: H301
NAME = "isi-sdk-8-2-0"
VERSION = "0.2.11"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"]
setup(
name=NAME,
version=VERSION,
description="Isilon SDK",
author_email="sdk@isilon.com",
url="",
keywords=["Swagger", "Isilon SDK"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
About
-----
This package is part of the Isilon SDK. It includes language bindings
for easier programmatic access to the OneFS API for cluster
configuration (on your cluster this is the REST API made up of all the
URIs underneath ``https://[cluster]:8080/platform/*``, also called the
"Platform API" or "PAPI"). The SDK also includes language bindings for
the OneFS RAN (i.e. RESTful Access to Namespace) interface, which
provides access to the OneFS filesystem namespace.
Installation
------------
``pip install PKG_NAME``
Documentation
-------------
The SDK documentation is auto generated by Swagger Codegen and is
located in the
`isilon\_sdk\_python <https://github.com/Isilon/isilon_sdk_python>`__
repository. Please select the repository branch that is applicable to
the SDK package and OneFS version for accurate documentation references.
All SDK methods and models are linked from the top level README file.
Example program
---------------
Here's an example of using the Python PAPI bindings to retrieve a list
of NFS exports from your cluster:
.. code:: python
from pprint import pprint
import urllib3
import PKG_NAME
from PKG_NAME.rest import ApiException
urllib3.disable_warnings()
# configure username and password
configuration = PKG_NAME.Configuration()
configuration.username = "YOUR_USERNAME"
configuration.password = "YOUR_PASSWORD"
configuration.verify_ssl = False
# configure host
configuration.host = "https://YOUR_CLUSTER_HOSTNAME_OR_NODE_IP:8080"
api_client = PKG_NAME.ApiClient(configuration)
protocols_api = PKG_NAME.ProtocolsApi(api_client)
# get all exports
sort = "description"
limit = 50
dir = "ASC"
try:
api_response = protocols_api.list_nfs_exports(sort=sort, limit=limit, dir=dir)
pprint(api_response)
except ApiException as e:
print "Exception when calling ProtocolsApi->list_nfs_exports: %s" % e
There are more examples of coding to the Python PAPI bindings in the
`tests <https://github.com/Isilon/isilon_sdk/tree/master/tests>`__
subdirectory of the repo. The tests currently run against a generic
``isi_sdk`` import which is how the bindings library is named by default
if you build your own bindings. If you want to run the tests against one
of the libraries you've downloaded from the prebuilt releases page, you
should change the ``import isi_sdk`` lines to ``import isi_sdk_7_2`` or
``import isi_sdk_9_0_0`` depending on which one you downloaded.
More info
---------
See the Github repo for more information:
https://github.com/isilon/isilon_sdk
""".replace('PKG_NAME', NAME.replace('-', '_'))
)
|
/*
* This header is generated by classdump-dyld 1.0
* on Saturday, August 24, 2019 at 9:50:59 PM Mountain Standard Time
* Operating System: Version 12.4 (Build 16M568)
* Image Source: /System/Library/PrivateFrameworks/UIKitCore.framework/UIKitCore
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
#import <UIKitCore/UIKitCore-Structs.h>
/// Private UIKit helper that stores up to 8 observed touch records in a
/// fixed-size array. NOTE(review): the count/offset pair suggests the array
/// is used as a ring buffer — inferred from field names, not confirmed.
@interface _UITouchObservation : NSObject {

	SCD_Struct_UI83 _observedTouches[8];       // fixed storage for touch records
	unsigned long long _observedTouchCount;    // how many touches were observed
	unsigned long long _observedTouchOffset;   // current slot/start offset into the array

}
-(void)dealloc;
-(void)reset;                                       // discard all recorded touches
-(unsigned long long)touchCount;                    // number of touches currently stored
-(void)observeTouch:(id)arg1 ;                      // record one touch
-(void)enumerateTouchesWithBlock:(/*^block*/id)arg1 ;  // visit each stored record
@end
|
"""Modules for working with data sources."""
from vectorbt.data.base import symbol_dict, Data
from vectorbt.data.updater import DataUpdater
from vectorbt.data.custom import SyntheticData, GBMData, YFData, BinanceData, CCXTData
__all__ = [
'symbol_dict',
'Data',
'DataUpdater',
'SyntheticData',
'GBMData',
'YFData',
'BinanceData',
'CCXTData'
]
__pdoc__ = {k: False for k in __all__}
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Unit tests for some Fx and Compound learners"""
import numpy as np
from mvpa2.testing import *
from mvpa2.base.learner import Learner, CompoundLearner, \
ChainLearner, CombinedLearner
from mvpa2.base.node import Node, CompoundNode, \
ChainNode, CombinedNode
from mvpa2.datasets.base import AttrDataset
class FxNode(Node):
    """Node that applies an arbitrary function to a dataset's samples."""

    def __init__(self, f, space='targets',
                 pass_attr=None, postproc=None, **kwargs):
        # Forward the standard Node configuration and remember the callable.
        super(FxNode, self).__init__(space, pass_attr, postproc, **kwargs)
        self.f = f

    def _call(self, ds):
        # Work on a copy so the input dataset is left untouched.
        transformed = ds.copy()
        transformed.samples = self.f(ds.samples)
        return transformed
class FxyLearner(Learner):
    """Learner wrapping a curried two-argument function.

    Training stores the training samples `x`; calling the trained learner
    applies ``f(x)`` to the incoming dataset's samples.
    """

    def __init__(self, f):
        super(FxyLearner, self).__init__()
        self.f = f
        # Populated by _train with the training samples.
        self.x = None

    def _train(self, ds):
        self.x = ds.samples

    def _call(self, ds):
        # Work on a copy so the input dataset is left untouched.
        result = ds.copy()
        result.samples = self.f(self.x)(ds.samples)
        return result
class CompoundTests(unittest.TestCase):
    """Tests for chaining and combining nodes/learners."""

    def test_compound_node(self):
        # Single-feature dataset: samples are the column vector [1..4].
        data = np.asarray([[1, 2, 3, 4]], dtype=np.float_).T
        ds = AttrDataset(data, sa=dict(targets=[0, 0, 1, 1]))

        # Curried elementwise arithmetic helpers.
        add = lambda x: lambda y: x + y
        mul = lambda x: lambda y: x * y

        add2 = FxNode(add(2))
        mul3 = FxNode(mul(3))

        assert_array_equal(add2(ds).samples, data + 2)

        # ChainNode applies its nodes left-to-right: (data + 2) * 3.
        add2mul3 = ChainNode([add2, mul3])
        assert_array_equal(add2mul3(ds), (data + 2) * 3)

        # CombinedNode stacks results vertically ('v') or horizontally ('h').
        add2_mul3v = CombinedNode([add2, mul3], 'v')
        add2_mul3h = CombinedNode([add2, mul3], 'h')
        assert_array_equal(add2_mul3v(ds).samples,
                           np.vstack((data + 2, data * 3)))
        assert_array_equal(add2_mul3h(ds).samples,
                           np.hstack((data + 2, data * 3)))

    def test_compound_learner(self):
        data = np.asarray([[1, 2, 3, 4]], dtype=np.float_).T
        ds = AttrDataset(data, sa=dict(targets=[0, 0, 1, 1]))
        # Split by target: first half trains, second half tests.
        train = ds[ds.sa.targets == 0]
        test = ds[ds.sa.targets == 1]
        dtrain = train.samples
        dtest = test.samples

        # FxyLearner captures the training samples x, then computes f(x)(test).
        sub = FxyLearner(lambda x: lambda y: x - y)
        assert_false(sub.is_trained)
        sub.train(train)
        assert_array_equal(sub(test).samples, dtrain - dtest)

        div = FxyLearner(lambda x: lambda y: x / y)
        div.train(train)
        assert_array_almost_equal(div(test).samples, dtrain / dtest)
        div.untrain()

        # A chained learner must be trained before it can be called.
        subdiv = ChainLearner((sub, div))
        assert_false(subdiv.is_trained)
        subdiv.train(train)
        assert_true(subdiv.is_trained)
        subdiv.untrain()
        assert_raises(RuntimeError, subdiv, test)
        subdiv.train(train)

        # Chain: first sub -> (train - test), then div -> train / (train - test).
        assert_array_almost_equal(subdiv(test).samples, dtrain / (dtrain - dtest))

        # CombinedLearner built from already-trained learners reports trained.
        sub_div = CombinedLearner((sub, div), 'v')
        assert_true(sub_div.is_trained)
        sub_div.untrain()
        subdiv.train(train)
        assert_true(sub_div.is_trained)

        assert_array_almost_equal(sub_div(test).samples,
                                  np.vstack((dtrain - dtest, dtrain / dtest)))
def suite():  # pragma: no cover
    """Build the unittest suite for this module (test-runner hook)."""
    return unittest.makeSuite(CompoundTests)
if __name__ == '__main__':  # pragma: no cover
    # Delegate to the project-local `runner` module when executed directly.
    import runner
    runner.run()
|
/**
* \file PnlWznmIexHeadbar.h
* API code for job PnlWznmIexHeadbar (declarations)
* \copyright (C) 2016-2020 MPSI Technologies GmbH
* \author Alexander Wirthmueller (auto-generation)
* \date created: 5 Dec 2020
*/
// IP header --- ABOVE
#ifndef PNLWZNMIEXHEADBAR_H
#define PNLWZNMIEXHEADBAR_H

#include "ApiWznm_blks.h"

// Flat aliases for the nested block types of this panel job.
#define StgInfWznmIexHeadbar PnlWznmIexHeadbar::StgInf
#define TagWznmIexHeadbar PnlWznmIexHeadbar::Tag
#define DpchEngWznmIexHeadbarData PnlWznmIexHeadbar::DpchEngData

/**
  * PnlWznmIexHeadbar
  */
namespace PnlWznmIexHeadbar {
	/**
	  * StgInf (full: StgInfWznmIexHeadbar)
	  * Width settings for the headbar's app and card menus.
	  */
	class StgInf : public Sbecore::Block {

	public:
		// item identifiers used in the masks returned by comm()/diff()
		static const Sbecore::uint MENAPPCPTWIDTH = 1;
		static const Sbecore::uint MENAPPWIDTH = 2;
		static const Sbecore::uint MENCRDCPTWIDTH = 3;
		static const Sbecore::uint MENCRDWIDTH = 4;

	public:
		StgInf(const Sbecore::uint MenAppCptwidth = 100, const Sbecore::uint MenAppWidth = 100, const Sbecore::uint MenCrdCptwidth = 100, const Sbecore::uint MenCrdWidth = 100);

	public:
		Sbecore::uint MenAppCptwidth;
		Sbecore::uint MenAppWidth;
		Sbecore::uint MenCrdCptwidth;
		Sbecore::uint MenCrdWidth;

	public:
		bool readXML(xmlXPathContext* docctx, std::string basexpath = "", bool addbasetag = false);
		// comm(): items equal in both blocks; diff(): items that differ
		std::set<Sbecore::uint> comm(const StgInf* comp);
		std::set<Sbecore::uint> diff(const StgInf* comp);
	};

	/**
	  * Tag (full: TagWznmIexHeadbar)
	  * Text strings for the headbar's app and card menus.
	  */
	class Tag : public Sbecore::Block {

	public:
		static const Sbecore::uint MENAPP = 1;
		static const Sbecore::uint MENCRD = 2;

	public:
		Tag(const std::string& MenApp = "", const std::string& MenCrd = "");

	public:
		std::string MenApp;
		std::string MenCrd;

	public:
		bool readXML(xmlXPathContext* docctx, std::string basexpath = "", bool addbasetag = false);
	};

	/**
	  * DpchEngData (full: DpchEngWznmIexHeadbarData)
	  * Engine-to-client dispatch carrying this panel's data blocks.
	  */
	class DpchEngData : public DpchEngWznm {

	public:
		// item identifiers for the dispatch content mask
		static const Sbecore::uint SCRJREF = 1;
		static const Sbecore::uint STGINF = 2;
		static const Sbecore::uint TAG = 3;

	public:
		DpchEngData();

	public:
		StgInf stginf;
		Tag tag;

	public:
		std::string getSrefsMask();
		void readXML(xmlXPathContext* docctx, std::string basexpath = "", bool addbasetag = false);
	};
};
#endif
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools
import os
class GflagsConan(ConanFile):
    """Conan recipe that builds and packages the gflags command-line flags
    processing library (shared/static, with or without thread support)."""

    name = "gflags"
    version = "2.2.1"
    description = "The gflags package contains a C++ library that implements commandline flags processing. "
    url = "https://github.com/bincrafters/conan-gflags"
    license = 'BSD 3-clause'
    exports = ["LICENSE.md"]
    exports_sources = ["CMakeLists.txt", "Findgflags.cmake"]
    source_subfolder = "source_subfolder"
    generators = "cmake"
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [True, False], "fPIC": [True, False], "nothreads": [True, False], "namespace": "ANY"}
    default_options = "shared=False", "fPIC=True", "nothreads=True", "namespace=gflags"

    def configure(self):
        # fPIC has no meaning on Windows, so drop the option there.
        if self.settings.os != "Windows":
            return
        self.options.remove("fPIC")

    def source(self):
        # Fetch and unpack the upstream release tarball, then rename the
        # extracted directory to the well-known source subfolder.
        base_url = "https://github.com/gflags/gflags"
        tools.get("{0}/archive/v{1}.tar.gz".format(base_url, self.version))
        extracted_dir = "%s-%s" % (self.name, self.version)
        os.rename(extracted_dir, self.source_subfolder)

    def build(self):
        cmake = CMake(self)
        shared = self.options.shared
        threaded = not self.options.nothreads
        # All CMake cache entries in one place; exactly one of the
        # threaded/non-threaded library targets is enabled.
        flag_defs = {
            "BUILD_SHARED_LIBS": shared,
            "BUILD_STATIC_LIBS": not shared,
            "BUILD_gflags_LIB": threaded,
            "BUILD_gflags_nothreads_LIB": not threaded,
            "BUILD_PACKAGING": False,
            "BUILD_TESTING": False,
            "INSTALL_HEADERS": True,
            "INSTALL_SHARED_LIBS": shared,
            "INSTALL_STATIC_LIBS": not shared,
            "REGISTER_BUILD_DIR": False,
            "REGISTER_INSTALL_PREFIX": False,
            "GFLAGS_NAMESPACE": self.options.namespace,
        }
        for key, val in flag_defs.items():
            cmake.definitions[key] = val
        cmake.configure()
        cmake.build()
        # Installation happens here; package() only copies extra files.
        cmake.install()

    def package(self):
        # Ship the custom find-module and the upstream license text.
        self.copy("Findgflags.cmake", ".", ".")
        self.copy("COPYING.txt", dst="licenses", src=self.source_subfolder)

    def package_info(self):
        self.cpp_info.libs = tools.collect_libs(self)
        # Platform link dependencies required by gflags.
        system_libs = ['shlwapi'] if self.settings.os == "Windows" else ["pthread"]
        self.cpp_info.libs.extend(system_libs)
|
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import select
from esphome.const import CONF_ADDRESS, CONF_ID, CONF_LAMBDA, CONF_OPTIMISTIC
from .. import (
SENSOR_VALUE_TYPE,
TYPE_REGISTER_MAP,
ModbusController,
SensorItem,
modbus_controller_ns,
)
from ..const import (
CONF_FORCE_NEW_RANGE,
CONF_MODBUS_CONTROLLER_ID,
CONF_REGISTER_COUNT,
CONF_SKIP_UPDATES,
CONF_USE_WRITE_MULTIPLE,
CONF_VALUE_TYPE,
CONF_WRITE_LAMBDA,
)
DEPENDENCIES = ["modbus_controller"]
CODEOWNERS = ["@martgras", "@stegm"]

# Configuration key for the option-name -> register-value mapping.
CONF_OPTIONSMAP = "optionsmap"

# Generated C++ class backing this select platform.
ModbusSelect = modbus_controller_ns.class_(
    "ModbusSelect", cg.Component, select.Select, SensorItem
)
def ensure_option_map():
    """Return a validator for the optionsmap: strict option-name strings
    mapped to unique signed 64-bit integer register values."""

    def validator(value):
        cv.check_not_templatable(value)
        name_schema = cv.All(cv.string_strict)
        register_schema = cv.All(cv.int_range(-(2**63), 2**63 - 1))
        validated = cv.Schema({name_schema: register_schema})(value)
        # Every option must map to a distinct register value; otherwise the
        # reverse lookup from register value to option would be ambiguous.
        if len(set(validated.values())) != len(validated):
            raise cv.Invalid("Mapping values must be unique.")
        return validated

    return validator
def register_count_value_type_min(value):
    """Cross-field validator: an explicitly configured register_count must be
    large enough to hold the configured value type."""
    reg_count = value.get(CONF_REGISTER_COUNT)
    if reg_count is None:
        # No explicit count given; the default is derived later from the
        # value type, so there is nothing to check.
        return value
    value_type = value[CONF_VALUE_TYPE]
    required = TYPE_REGISTER_MAP[value_type]
    if reg_count < required:
        raise cv.Invalid(
            f"Value type {value_type} needs at least {required} registers"
        )
    return value
# Only integer register types are selectable: option values are integers, so
# floating-point types (FP_*) are excluded.
INTEGER_SENSOR_VALUE_TYPE = {
    key: value for key, value in SENSOR_VALUE_TYPE.items() if not key.startswith("FP")
}

CONFIG_SCHEMA = cv.All(
    select.SELECT_SCHEMA.extend(cv.COMPONENT_SCHEMA).extend(
        {
            cv.GenerateID(): cv.declare_id(ModbusSelect),
            cv.GenerateID(CONF_MODBUS_CONTROLLER_ID): cv.use_id(ModbusController),
            cv.Required(CONF_ADDRESS): cv.positive_int,
            cv.Optional(CONF_VALUE_TYPE, default="U_WORD"): cv.enum(
                INTEGER_SENSOR_VALUE_TYPE
            ),
            cv.Optional(CONF_REGISTER_COUNT): cv.positive_int,
            cv.Optional(CONF_SKIP_UPDATES, default=0): cv.positive_int,
            cv.Optional(CONF_FORCE_NEW_RANGE, default=False): cv.boolean,
            cv.Required(CONF_OPTIONSMAP): ensure_option_map(),
            cv.Optional(CONF_USE_WRITE_MULTIPLE, default=False): cv.boolean,
            cv.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
            cv.Optional(CONF_LAMBDA): cv.returning_lambda,
            cv.Optional(CONF_WRITE_LAMBDA): cv.returning_lambda,
        },
    ),
    # Cross-field check: register_count must fit the chosen value type.
    register_count_value_type_min,
)
async def to_code(config):
    """Generate the C++ setup code for a modbus_controller select entity."""
    value_type = config[CONF_VALUE_TYPE]
    reg_count = config.get(CONF_REGISTER_COUNT)
    if reg_count is None:
        # Default to the minimum register count required by the value type.
        reg_count = TYPE_REGISTER_MAP[value_type]

    options_map = config[CONF_OPTIONSMAP]

    var = cg.new_Pvariable(
        config[CONF_ID],
        value_type,
        config[CONF_ADDRESS],
        reg_count,
        config[CONF_SKIP_UPDATES],
        config[CONF_FORCE_NEW_RANGE],
        list(options_map.values()),
    )
    await cg.register_component(var, config)
    # Option names become the select choices; their mapped integers were
    # passed to the constructor above in matching order.
    await select.register_select(var, config, options=list(options_map.keys()))

    parent = await cg.get_variable(config[CONF_MODBUS_CONTROLLER_ID])
    cg.add(parent.add_sensor_item(var))
    cg.add(var.set_parent(parent))
    # NOTE(review): "mutiple" presumably mirrors the (misspelled) C++ setter
    # name -- confirm against the C++ ModbusSelect class before renaming.
    cg.add(var.set_use_write_mutiple(config[CONF_USE_WRITE_MULTIPLE]))
    cg.add(var.set_optimistic(config[CONF_OPTIMISTIC]))

    if CONF_LAMBDA in config:
        # Read-path lambda: maps a raw register value (and raw data bytes)
        # to an optional option string.
        template_ = await cg.process_lambda(
            config[CONF_LAMBDA],
            [
                (ModbusSelect.operator("const_ptr"), "item"),
                (cg.int64, "x"),
                (
                    cg.std_vector.template(cg.uint8).operator("const").operator("ref"),
                    "data",
                ),
            ],
            return_type=cg.optional.template(cg.std_string),
        )
        cg.add(var.set_template(template_))

    if CONF_WRITE_LAMBDA in config:
        # Write-path lambda: maps the chosen option string (and its mapped
        # value) to an optional value/payload to write.
        template_ = await cg.process_lambda(
            config[CONF_WRITE_LAMBDA],
            [
                (ModbusSelect.operator("const_ptr"), "item"),
                (cg.std_string.operator("const").operator("ref"), "x"),
                (cg.int64, "value"),
                (cg.std_vector.template(cg.uint16).operator("ref"), "payload"),
            ],
            return_type=cg.optional.template(cg.int64),
        )
        cg.add(var.set_write_template(template_))
|
/*global mocha, MocksHelper, loadBodyHTML, MockL10n, ThreadListUI,
MessageManager, WaitingScreen, Threads, Template, MockMessages,
MockThreadList, MockTimeHeaders, Draft, Drafts, Thread, ThreadUI,
MockOptionMenu
*/
'use strict';
// remove this when https://github.com/visionmedia/mocha/issues/819 is merged in
// mocha and when we have that new mocha in test agent
mocha.setup({ globals: ['alert', 'confirm'] });
requireApp('sms/js/utils.js');
requireApp('sms/js/recipients.js');
requireApp('sms/js/drafts.js');
requireApp('sms/js/threads.js');
requireApp('sms/js/thread_list_ui.js');
requireApp('sms/test/unit/mock_async_storage.js');
requireApp('sms/test/unit/mock_contacts.js');
requireApp('sms/test/unit/mock_time_headers.js');
requireApp('sms/test/unit/mock_l10n.js');
requireApp('sms/test/unit/mock_message_manager.js');
requireApp('sms/test/unit/mock_messages.js');
requireApp('sms/test/unit/mock_utils.js');
requireApp('sms/test/unit/mock_waiting_screen.js');
require('/shared/test/unit/mocks/mock_contact_photo_helper.js');
require('/test/unit/thread_list_mockup.js');
require('/test/unit/utils_mockup.js');
requireApp('sms/test/unit/mock_thread_ui.js');
requireApp('sms/test/unit/mock_action_menu.js');
// Mocks shared by every test in this file; init() hooks them into the mocha
// lifecycle so they are installed/restored automatically.
var mocksHelperForThreadListUI = new MocksHelper([
  'asyncStorage',
  'Contacts',
  'MessageManager',
  'Utils',
  'WaitingScreen',
  'TimeHeaders',
  'ThreadUI',
  'ContactPhotoHelper',
  'OptionMenu'
]).init();
suite('thread_list_ui', function() {
var nativeMozL10n = navigator.mozL10n;
var draftSavedBanner;
mocksHelperForThreadListUI.attachTestHelpers();
suiteSetup(function() {
loadBodyHTML('/index.html');
navigator.mozL10n = MockL10n;
draftSavedBanner = document.getElementById('threads-draft-saved-banner');
ThreadListUI.init();
// Clear drafts as leftovers in the profile might break the tests
Drafts.clear();
});
suiteTeardown(function() {
navigator.mozL10n = nativeMozL10n;
});
function insertMockMarkup(someDate) {
someDate = +someDate;
var markup =
'<header></header>' +
'<ul id="threadsContainer_' + someDate + '">' +
'<li id="thread-1" data-time="' + someDate + '"></li>' +
'<li id="thread-2" data-time="' + someDate + '"></li>' +
'</ul>';
ThreadListUI.container.innerHTML = markup;
}
suite('delayed rendering loops', function() {
suite('multiple render calls', function() {
var appendThread;
var appendCallCount;
suiteSetup(function() {
appendThread = ThreadListUI.appendThread;
ThreadListUI.appendThread = function(thread) {
appendCallCount++;
assert.ok(thread.okay);
};
});
suiteTeardown(function() {
ThreadListUI.appendThread = appendThread;
});
setup(function() {
appendCallCount = 0;
});
});
});
suite('setEmpty', function() {
suite('(true)', function() {
setup(function() {
// set wrong states
ThreadListUI.noMessages.classList.add('hide');
ThreadListUI.container.classList.remove('hide');
// make sure it sets em all
ThreadListUI.setEmpty(true);
});
test('removes noMessages hide', function() {
assert.isFalse(ThreadListUI.noMessages.classList.contains('hide'));
});
test('adds container hide', function() {
assert.isTrue(ThreadListUI.container.classList.contains('hide'));
});
});
suite('(false)', function() {
setup(function() {
// set wrong states
ThreadListUI.noMessages.classList.remove('hide');
ThreadListUI.container.classList.add('hide');
// make sure it sets em all
ThreadListUI.setEmpty(false);
});
test('adds noMessages hide', function() {
assert.isTrue(ThreadListUI.noMessages.classList.contains('hide'));
});
test('removes container hide', function() {
assert.isFalse(ThreadListUI.container.classList.contains('hide'));
});
});
});
suite('showOptions', function() {
setup(function() {
MockOptionMenu.mSetup();
});
teardown(function() {
MockOptionMenu.mTeardown();
});
test('show settings/cancel options when list is empty', function() {
ThreadListUI.setEmpty(true);
ThreadListUI.showOptions();
var optionItems = MockOptionMenu.calls[0].items;
assert.equal(optionItems.length, 2);
assert.equal(optionItems[0].l10nId, 'settings');
assert.equal(optionItems[1].l10nId, 'cancel');
});
test('show delete/settings/cancel options when list existed', function() {
ThreadListUI.setEmpty(false);
ThreadListUI.showOptions();
var optionItems = MockOptionMenu.calls[0].items;
assert.equal(optionItems.length, 3);
assert.equal(optionItems[0].l10nId, 'deleteMessages-label');
assert.equal(optionItems[1].l10nId, 'settings');
assert.equal(optionItems[2].l10nId, 'cancel');
});
});
suite('removeThread', function() {
setup(function() {
ThreadListUI.container.innerHTML = '<h2 id="header-1"></h2>' +
'<ul id="list-1"><li id="thread-1"></li>' +
'<li id="thread-2"></li></ul>' +
'<h2 id="header-2"></h2>' +
'<ul id="list-2"><li id="thread-3"></li></ul>';
});
suite('remove last thread in header', function() {
setup(function() {
ThreadListUI.removeThread(3);
});
test('leaves other threads alone', function() {
assert.ok(ThreadListUI.container.querySelector('#thread-1'));
assert.ok(ThreadListUI.container.querySelector('#thread-2'));
});
test('removes threads', function() {
assert.ok(!ThreadListUI.container.querySelector('#thread-3'));
});
test('removes empty header', function() {
assert.ok(!ThreadListUI.container.querySelector('#header-2'));
});
test('removes empty list', function() {
assert.ok(!ThreadListUI.container.querySelector('#list-2'));
});
});
suite('remove thread with others in header', function() {
setup(function() {
ThreadListUI.removeThread(2);
});
test('leaves other threads alone', function() {
assert.ok(ThreadListUI.container.querySelector('#thread-1'));
assert.ok(ThreadListUI.container.querySelector('#thread-3'));
});
test('removes threads', function() {
assert.ok(!ThreadListUI.container.querySelector('#thread-2'));
});
test('retains non-empty header', function() {
assert.ok(ThreadListUI.container.querySelector('#header-1'));
});
test('retains non-empty list', function() {
assert.ok(ThreadListUI.container.querySelector('#list-1'));
});
});
suite('remove all threads', function() {
setup(function() {
this.sinon.stub(ThreadListUI, 'setEmpty');
ThreadListUI.removeThread(1);
ThreadListUI.removeThread(2);
ThreadListUI.removeThread(3);
});
test('calls setEmpty(true)', function() {
assert.ok(ThreadListUI.setEmpty.calledWith(true));
});
});
suite('remove draft links', function() {
setup(function() {
this.sinon.stub(ThreadListUI.draftLinks, 'get').returns(1);
this.sinon.stub(ThreadListUI.draftLinks, 'delete');
ThreadListUI.removeThread(1);
});
test('calls draftLinks.get()', function() {
assert.isTrue(ThreadListUI.draftLinks.get.called);
});
test('calls draftLinks.delete()', function() {
assert.isTrue(ThreadListUI.draftLinks.delete.called);
});
});
suite('remove draft registry item', function() {
setup(function() {
ThreadListUI.draftRegistry = {1: true};
this.sinon.stub(ThreadListUI.draftLinks, 'get').returns(1);
this.sinon.stub(ThreadListUI.draftLinks, 'delete');
ThreadListUI.removeThread(1);
});
test('clears draftRegistry', function() {
assert.isTrue(
typeof ThreadListUI.draftRegistry[1] === 'undefined'
);
});
});
});
suite('updateThread', function() {
setup(function() {
this.sinon.spy(Thread, 'create');
this.sinon.spy(Threads, 'has');
this.sinon.spy(Threads, 'set');
this.sinon.spy(ThreadListUI, 'removeThread');
this.sinon.spy(ThreadListUI, 'appendThread');
this.sinon.spy(ThreadListUI, 'mark');
this.sinon.spy(ThreadListUI, 'setEmpty');
});
teardown(function() {
Threads.clear();
ThreadListUI.container.innerHTML = '';
});
suite(' > in empty welcome screen,', function() {
var message;
setup(function() {
message = MockMessages.sms();
ThreadListUI.updateThread(message);
});
test('setEmpty & appended', function() {
sinon.assert.calledOnce(ThreadListUI.setEmpty);
sinon.assert.calledWithMatch(ThreadListUI.appendThread, {
id: message.threadId,
body: message.body,
lastMessageSubject: message.lastMessageSubject,
lastMessageType: 'sms',
messages: [],
participants: ['sender'],
timestamp: message.timestamp,
unreadCount: 0
});
});
});
suite(' > Method ', function() {
var message;
setup(function() {
var someDate = new Date(2013, 1, 1);
insertMockMarkup(someDate);
// A new message of a previous thread
var nextDate = new Date(2013, 1, 2);
message = MockMessages.sms({
threadId: 2,
timestamp: +nextDate
});
ThreadListUI.updateThread(message);
});
test(' > create is called', function() {
sinon.assert.calledOnce(Thread.create);
});
test(' > removeThread is called', function() {
sinon.assert.calledOnce(ThreadListUI.removeThread);
sinon.assert.calledOnce(ThreadListUI.appendThread);
});
test(' > new message, new thread.', function() {
var newDate = new Date(2013, 1, 2);
var newMessage = MockMessages.sms({
threadId: 20,
timestamp: +newDate
});
ThreadListUI.updateThread(newMessage, { unread: true });
// As this is a new message we dont have to remove threads
// So we have only one removeThread for the first appending
sinon.assert.calledOnce(ThreadListUI.removeThread);
// But we have appended twice
sinon.assert.calledTwice(ThreadListUI.appendThread);
});
});
suite(' > same thread exist, older', function() {
var message, thread;
setup(function() {
var someDate = new Date(2013, 1, 1);
insertMockMarkup(someDate);
var nextDate = new Date(2013, 1, 2);
message = MockMessages.sms({
threadId: 2,
timestamp: +nextDate
});
thread = Thread.create(message);
ThreadListUI.updateThread(message);
});
teardown(function() {
message = null;
thread = null;
});
test('new thread is appended/updated', function() {
sinon.assert.calledOnce(ThreadListUI.appendThread);
// first call, first argument
sinon.assert.calledWith(ThreadListUI.appendThread, thread);
});
test('old thread is removed', function() {
sinon.assert.calledOnce(ThreadListUI.removeThread);
sinon.assert.calledWith(ThreadListUI.removeThread, message.threadId);
});
});
suite(' > other threads exist', function() {
var message, thread;
setup(function() {
var someDate = new Date(2013, 1, 1);
insertMockMarkup(someDate);
var nextDate = new Date(2013, 1, 2);
message = MockMessages.sms({
threadId: 3,
timestamp: +nextDate
});
thread = Thread.create(message);
ThreadListUI.updateThread(message);
});
teardown(function() {
message = null;
thread = null;
});
test('new thread is appended', function() {
sinon.assert.calledOnce(ThreadListUI.appendThread);
// first call, first argument
sinon.assert.calledWith(ThreadListUI.appendThread, thread);
});
test('no thread is removed', function() {
assert.isFalse(ThreadListUI.removeThread.called);
});
});
suite(' > same thread exist, but newer', function() {
var message;
setup(function() {
var someDate = new Date(2013, 1, 1);
insertMockMarkup(someDate);
var prevDate = new Date(2013, 1, 0);
message = MockMessages.sms({
threadId: 2,
timestamp: +prevDate
});
ThreadListUI.updateThread(message, { unread: true });
});
test('no new thread is appended', function() {
assert.isFalse(ThreadListUI.appendThread.called);
});
test('no old thread is removed', function() {
assert.isFalse(ThreadListUI.removeThread.called);
});
test('old thread is marked unread', function() {
sinon.assert.called(ThreadListUI.mark);
sinon.assert.calledWith(ThreadListUI.mark, message.threadId, 'unread');
var container = document.getElementById('thread-2');
assert.isTrue(container.classList.contains('unread'));
});
});
suite(' > delete old message in a thread', function() {
var message, threadContainer;
/**
* When an old message is deleted, the thread UI has the same timestamp
* as the last message.
*/
setup(function() {
var someDate = new Date(2013, 1, 1);
insertMockMarkup(someDate);
message = MockMessages.sms({
threadId: 2,
timestamp: +someDate
});
threadContainer = document.getElementById('thread-2');
ThreadListUI.updateThread(message, { deleted: true });
});
test('> the thread is not updated', function() {
assert.equal(threadContainer, document.getElementById('thread-2'));
});
});
suite(' > delete latest message in a thread', function() {
var message, threadContainer;
/**
* When the latest message is deleted, the thread UI has a newer timestamp
* than the last message.
*/
setup(function() {
var someDate = new Date(2013, 1, 1);
insertMockMarkup(someDate);
var newDate = new Date(2013, 1, 2);
message = MockMessages.sms({
threadId: 2,
timestamp: +newDate
});
threadContainer = document.getElementById('thread-2');
ThreadListUI.updateThread(message, { deleted: true });
});
test('> the thread is updated', function() {
assert.ok(threadContainer !== document.getElementById('thread-2'));
});
test('> the thread is marked as read', function() {
var newContainer = document.getElementById('thread-2');
assert.isFalse(newContainer.classList.contains('unread'));
});
});
suite(' > update in-memory threads', function() {
setup(function() {
Threads.set(1, {
id: 1,
participants: ['555'],
lastMessageType: 'sms',
body: 'Hello 555',
timestamp: Date.now(),
unreadCount: 0
});
// This is used to reset the spy record
Threads.set.reset();
});
test('Threads.has is called', function() {
ThreadListUI.updateThread({
id: 1
});
assert.isTrue(Threads.has.calledOnce);
});
test('Threads.set is called', function() {
ThreadListUI.updateThread({
id: 1
});
assert.isTrue(Threads.set.calledOnce);
});
test('Threads.set is not called when id has no match', function() {
ThreadListUI.updateThread({
id: 2
});
assert.isTrue(Threads.has.calledOnce);
assert.isFalse(Threads.set.calledOnce);
});
});
});
suite('delete', function() {
setup(function() {
this.selectedInputs = [
{value: 1, dataset: { mode: 'threads'} },
{value: 2, dataset: { mode: 'threads'} }
];
this.sinon.stub(ThreadListUI, 'getSelectedInputs', function() {
return this.selectedInputs;
}.bind(this));
this.sinon.stub(MessageManager, 'getMessages');
});
suite('confirm false', function() {
setup(function() {
this.sinon.stub(window, 'confirm').returns(false);
ThreadListUI.delete();
});
test('called confirm with proper message', function() {
assert.deepEqual(window.confirm.args[0],
['deleteThreads-confirmation2']);
});
});
suite('confirm true', function() {
setup(function() {
this.sinon.stub(WaitingScreen, 'show');
this.sinon.stub(WaitingScreen, 'hide');
this.sinon.stub(window, 'confirm').returns(true);
ThreadListUI.delete();
});
test('shows WaitingScreen', function() {
assert.ok(WaitingScreen.show.called);
});
test('called confirm with proper message', function() {
assert.deepEqual(window.confirm.args[0],
['deleteThreads-confirmation2']);
});
test('called MessageManager.getMessages twice', function() {
assert.equal(MessageManager.getMessages.args.length, 2);
});
suite('getMessages({ each: })', function() {
setup(function() {
this.sinon.stub(MessageManager, 'deleteMessage');
// call the "each" function passed to getMessages with fake message
MessageManager.getMessages.args[0][0].each({ id: 3 });
});
test('MessageManager.deleteMessage called', function() {
assert.ok(MessageManager.deleteMessage.calledWith(3));
});
});
suite('first getMessages', function() {
setup(function() {
this.sinon.stub(Threads, 'delete');
this.sinon.stub(ThreadListUI, 'removeThread');
// call the "end" function passed to getMessages with fake message
MessageManager.getMessages.args[0][0].end();
});
test('is for the right thread', function() {
assert.equal(
MessageManager.getMessages.args[0][0].filter.threadId, 2);
});
test('end calls removeThread for correct thread', function() {
assert.equal(ThreadListUI.removeThread.args[0][0], 2);
});
test('end calls Threads.delete with correct thread', function() {
assert.equal(Threads.delete.args[0][0], 2);
});
test('end doesnt hide waiting screen (yet)', function() {
assert.isFalse(WaitingScreen.hide.called);
});
suite('sencond getMessages', function() {
setup(function() {
MessageManager.getMessages.args[1][0].end();
});
test('is for the right thread', function() {
assert.equal(
MessageManager.getMessages.args[1][0].filter.threadId, 1);
});
test('end calls removeThread for correct thread', function() {
assert.equal(ThreadListUI.removeThread.args[1][0], 1);
});
test('end calls Threads.delete with correct thread', function() {
assert.equal(Threads.delete.args[1][0], 1);
});
test('end calls hide waiting screen', function() {
assert.isTrue(WaitingScreen.hide.called);
});
});
});
});
});
suite('createThread', function() {
setup(function() {
this.sinon.spy(Template, 'escape');
this.sinon.spy(MockTimeHeaders, 'update');
});
function buildSMSThread(payload) {
var o = {
id: 1,
lastMessageType: 'sms',
participants: ['1234'],
body: payload,
timestamp: Date.now()
};
return o;
}
function buildMMSThread(payload) {
var o = {
id: 1,
lastMessageType: 'mms',
participants: ['1234', '5678'],
body: payload,
timestamp: Date.now()
};
return o;
}
test('escapes the body for SMS', function() {
var payload = 'hello <a href="world">world</a>';
ThreadListUI.createThread(buildSMSThread(payload));
assert.ok(Template.escape.calledWith(payload));
assert.ok(MockTimeHeaders.update.called);
});
test('escapes the body for MMS', function() {
var payload = 'hello <a href="world">world</a>';
ThreadListUI.createThread(buildMMSThread(payload));
assert.ok(Template.escape.calledWith(payload));
assert.ok(MockTimeHeaders.update.called);
});
suite('Correctly displayed content', function() {
var now, message, li;
setup(function() {
this.sinon.stub(Threads, 'get').returns({
hasDrafts: true
});
now = Date.now();
message = MockMessages.sms({
delivery: 'delivered',
threadId: 1,
timestamp: now,
body: 'from a message'
});
});
test('Message newer than draft is used', function() {
this.sinon.stub(Drafts, 'byThreadId').returns({
latest: {
timestamp: now - 60000,
content: ['from a draft']
}
});
li = ThreadListUI.createThread(
Thread.create(message)
);
assert.equal(
li.querySelector('.body-text').textContent, 'from a message'
);
});
test('Draft newer than content is used', function() {
this.sinon.stub(Drafts, 'byThreadId').returns({
latest: {
timestamp: now,
content: ['from a draft']
}
});
message.timestamp = now - 60000;
li = ThreadListUI.createThread(
Thread.create(message)
);
assert.equal(
li.querySelector('.body-text').textContent, 'from a draft'
);
});
test('Draft newer, but has no content', function() {
this.sinon.stub(Drafts, 'byThreadId').returns({
latest: {
timestamp: now,
content: []
}
});
message.timestamp = now - 60000;
li = ThreadListUI.createThread(
Thread.create(message)
);
assert.equal(
li.querySelector('.body-text').textContent, ''
);
});
test('Last message type for draft', function() {
this.sinon.stub(Drafts, 'byThreadId').returns({
latest: {
timestamp: now,
content: [],
type: 'mms'
}
});
li = ThreadListUI.createThread(
Thread.create(message)
);
assert.ok(li.dataset.lastMessageType, 'mms');
});
});
});
suite('onMessageReceived', function() {
var updateThreadSpy;
setup(function() {
updateThreadSpy = this.sinon.spy(ThreadListUI, 'updateThread');
var message = MockMessages.sms();
ThreadListUI.onMessageReceived(message);
});
teardown(function() {
updateThreadSpy = null;
});
test(' updateThread is called when a new message is received', function() {
assert.ok(updateThreadSpy.called);
});
});
suite('appendThread', function() {
setup(function() {
this.sinon.stub(ThreadListUI, 'setContact');
this.sinon.stub(ThreadListUI, 'checkInputs');
});
suite('new thread and new message in a day', function() {
var thread;
setup(function() {
var someDate = new Date(2013, 1, 1).getTime();
insertMockMarkup(someDate);
var nextDate = new Date(2013, 1, 2);
var message = MockMessages.sms({
threadId: 3,
timestamp: +nextDate
});
thread = Thread.create(message);
ThreadListUI.appendThread(thread);
});
test('show up in a new container', function() {
var newContainerId = 'threadsContainer_' + (+thread.timestamp);
var newContainer = document.getElementById(newContainerId);
assert.ok(newContainer);
assert.ok(newContainer.querySelector('li'));
var expectedThreadId = 'thread-' + thread.id;
assert.equal(newContainer.querySelector('li').id, expectedThreadId);
});
});
suite('existing thread and new message in a day', function() {
var thread;
setup(function() {
var someDate = new Date(2013, 1, 1).getTime();
insertMockMarkup(someDate);
var nextDate = new Date(2013, 1, 2);
var message = MockMessages.sms({
threadId: 2,
timestamp: +nextDate
});
thread = Thread.create(message);
ThreadListUI.appendThread(thread);
});
test('show up in a new container', function() {
var newContainerId = 'threadsContainer_' + (+thread.timestamp);
var newContainer = document.getElementById(newContainerId);
assert.ok(newContainer);
assert.ok(newContainer.querySelector('li'));
var expectedThreadId = 'thread-' + thread.id;
assert.equal(newContainer.querySelector('li').id, expectedThreadId);
});
});
});
suite('renderThreads', function() {
setup(function() {
this.sinon.spy(ThreadListUI, 'setEmpty');
this.sinon.spy(ThreadListUI, 'prepareRendering');
this.sinon.spy(ThreadListUI, 'startRendering');
this.sinon.spy(ThreadListUI, 'finalizeRendering');
this.sinon.spy(ThreadListUI, 'renderThreads');
this.sinon.spy(ThreadListUI, 'appendThread');
this.sinon.spy(ThreadListUI, 'createThread');
this.sinon.spy(ThreadListUI, 'setContact');
this.sinon.spy(ThreadListUI, 'renderDrafts');
});
test('Rendering an empty screen', function(done) {
this.sinon.stub(MessageManager, 'getThreads', function(options) {
options.end();
options.done();
});
ThreadListUI.renderThreads(function() {
done(function checks() {
sinon.assert.called(ThreadListUI.renderDrafts);
sinon.assert.calledWith(ThreadListUI.finalizeRendering, true);
assert.isFalse(ThreadListUI.noMessages.classList.contains('hide'));
assert.isTrue(ThreadListUI.container.classList.contains('hide'));
});
});
});
test('Rendering a few threads', function(done) {
var container = ThreadListUI.container;
this.sinon.stub(MessageManager, 'getThreads',
function(options) {
var threadsMockup = new MockThreadList();
var each = options.each;
var end = options.end;
var done = options.done;
for (var i = 0; i < threadsMockup.length; i++) {
each && each(threadsMockup[i]);
var threads = container.querySelectorAll(
'[data-last-message-type="sms"],' +
'[data-last-message-type="mms"]'
);
// Check that a thread is inserted per iteration
assert.equal(threads.length, i + 1);
}
end && end();
done && done();
});
ThreadListUI.renderThreads(function() {
done(function checks() {
sinon.assert.calledWith(ThreadListUI.finalizeRendering, false);
assert.isTrue(ThreadListUI.noMessages.classList.contains('hide'));
assert.isFalse(ThreadListUI.container.classList.contains('hide'));
var mmsThreads = container.querySelectorAll(
'[data-last-message-type="mms"]'
);
var smsThreads = container.querySelectorAll(
'[data-last-message-type="sms"]'
);
// Check that all threads have been properly inserted in the list
assert.equal(mmsThreads.length, 1);
assert.equal(smsThreads.length, 4);
});
});
});
});
suite('renderDrafts', function() {
var draft;
var thread, threadDraft;
setup(function() {
this.sinon.spy(ThreadListUI, 'renderThreads');
this.sinon.spy(ThreadListUI, 'appendThread');
this.sinon.spy(ThreadListUI, 'createThread');
this.sinon.spy(ThreadListUI, 'updateThread');
this.sinon.spy(ThreadListUI, 'setContact');
var someDate = new Date(2013, 1, 1).getTime();
insertMockMarkup(someDate);
var nextDate = new Date(2013, 1, 2);
var message = MockMessages.sms({
threadId: 3,
timestamp: +nextDate
});
Threads.registerMessage(message);
thread = Threads.get(3);
ThreadListUI.appendThread(thread);
threadDraft = new Draft({
id: 102,
threadId: 3,
recipients: [],
content: ['An explicit id'],
timestamp: Date.now(),
type: 'sms'
});
Drafts.add(threadDraft);
draft = new Draft({
id: 101,
threadId: null,
recipients: [],
content: ['An explicit id'],
timestamp: Date.now(),
type: 'sms'
});
Drafts.add(draft);
this.sinon.stub(Drafts, 'request', function(callback) {
callback([draft, threadDraft]);
});
ThreadListUI.draftLinks = new Map();
ThreadListUI.draftRegistry = {};
ThreadListUI.renderDrafts();
});
teardown(function() {
Drafts.clear();
});
test('Draft.request is called', function() {
sinon.assert.called(Drafts.request);
});
test('ThreadListUI.appendThread is called', function() {
sinon.assert.called(ThreadListUI.appendThread);
});
test('ThreadListUI.createThread is called', function() {
sinon.assert.called(ThreadListUI.createThread);
});
test('ThreadListUI.updateThread is called', function() {
sinon.assert.called(ThreadListUI.updateThread);
});
test('ThreadListUI.setContact is called', function() {
sinon.assert.called(ThreadListUI.setContact);
});
test('click on a draft populates ThreadUI.draft', function() {
document.querySelector('#thread-101 a').click();
assert.equal(ThreadUI.draft, draft);
});
});
suite('draftSaved', function() {
setup(function() {
this.sinon.useFakeTimers();
});
test('draft saved banner shown and hidden', function() {
assert.isTrue(draftSavedBanner.classList.contains('hide'));
ThreadListUI.onDraftSaved();
assert.isFalse(draftSavedBanner.classList.contains('hide'));
this.sinon.clock.tick(ThreadListUI.DRAFT_SAVED_DURATION - 1);
assert.isFalse(draftSavedBanner.classList.contains('hide'));
this.sinon.clock.tick(1);
assert.isTrue(draftSavedBanner.classList.contains('hide'));
});
});
});
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import testtools
from tests.functional import utils
class DeploymentTestCase(testtools.TestCase):
    def test_create_deployment_from_env(self):
        """Deployment created with --fromenv reads OS_* env variables.

        Seeds fake OpenStack credentials into the environment, creates a
        deployment from them, then checks the stored deployment config
        mirrors those credentials.
        """
        fake_credentials = {
            "OS_AUTH_URL": "http://fake",
            "OS_USERNAME": "fake",
            "OS_PASSWORD": "fake"
        }
        os.environ.update(fake_credentials)
        rally = utils.Rally(plugin_path="tests/functional/extra")
        rally("deployment create --name fromenv --fromenv")
        expected_spec = {
            "good@fake": {
                "auth_url": "http://fake",
                "username": "fake",
                "password": "fake"
            }
        }
        self.assertEqual(expected_spec,
                         rally("deployment config", getjson=True))
|
#!/usr/bin/env python
from __future__ import division, absolute_import, print_function
from future.builtins import super
from iris_sdk.models.base_resource import BaseResource
from iris_sdk.models.data.lnpchecker import LnpCheckerData
from iris_sdk.models.lnpchecker_response import LnpCheckerResponse
# XML root element name and resource path used by the LNP
# (local number portability) checker below.
XML_NAME_LNP_CHECKER = "NumberPortabilityRequest"
XPATH_LNP_CHECKER = "/lnpchecker"
class LnpChecker(BaseResource, LnpCheckerData):
    """Check local number portability.

    Callable resource: invoking an instance posts a
    NumberPortabilityRequest containing the given telephone numbers and
    returns the parsed response object.
    """
    # NOTE(review): presumably tells BaseResource to POST (not PUT) when
    # saving — confirm against BaseResource._post_data.
    _save_post = True
    # XML root element used when serializing the request payload.
    _node_name = XML_NAME_LNP_CHECKER
    # Resource path, relative to the parent resource.
    _xpath = XPATH_LNP_CHECKER
    def __call__(self, numbers, params=None):
        """Run a portability check for ``numbers``.

        :param numbers: iterable of telephone number strings to check.
        :param params: optional extra parameters forwarded to the POST.
        :returns: an :class:`LnpCheckerResponse` populated by the server.
        """
        self.clear()
        self.tn_list.items.extend(numbers)
        return self._post_data(LnpCheckerResponse(), params)
    def __init__(self, parent=None, client=None):
        # LnpCheckerData.__init__ is invoked explicitly to guarantee its
        # state is set up; super() alone may stop at BaseResource if the
        # chain is not fully cooperative.
        super().__init__(parent, client)
        LnpCheckerData.__init__(self)
|