#ifndef _cGameObject_HG_
#define _cGameObject_HG_
#include <glm/vec3.hpp>
#include <glm/vec4.hpp>
#include <string>
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/gtx/quaternion.hpp>
#include <queue>
#include <vector>
#include "iDebugRenderer.h"
#include "Physics/cPhysicalProperties.h"
#include "Physics/iPhysicalObject.h" // For the access to things that will update the positions, etc.
#include "sMeshDrawInfo.h"
class cSimpleAssimpSkinnedMesh; // Forward declare
class cAnimationState; // Forward declare
class cGameObject : public iPhysicalObject
{
public:
cGameObject(); // constructor
~cGameObject(); // destructor
// cGameObject(const cGameObject &obj); // copy constructor
//***** from the iPhysicalObject interface ********************
virtual void SetPhysState( cPhysicalProperties &PhysState );
virtual cPhysicalProperties GetPhysState( void );
virtual void GetPhysState( cPhysicalProperties &PhysState );
//*************************************************************
glm::vec3 getPosition(void);
// bOverwriteOldPositionToo effectively stops the object if the "past position" is being used
void overwritePosition( glm::vec3 newPosition, bool bOverwriteOldPositionToo = true );
glm::quat getQOrientation(void); // Post-rotation
void overwriteQOrientation( glm::quat newOrientation );
void overwriteQOrientationEuler( glm::vec3 newOrientationEuler, bool bIsDegrees = true );
void adjQOrientation( glm::quat newOrientation );
void adjQOrientationEuler( glm::vec3 newOrientationEuler, bool bIsDegrees = true );
//****************************************************************************************
//TODO: Add the mediator code
std::string friendlyName;
inline unsigned int getUniqueID(void) { return this->m_UniqueID; }
iDebugRenderer* pDebugRenderer;
// Mesh information (if drawn)
// Note: Meshes have a separate orientation and offset from
// the object, in case you want the mesh(es) to be
// loaded in different alignment from the game object.
// If the object alignment is the same as the mesh
// alignment, then don't set the orientation and offset
// in the mesh information.
std::vector<sMeshDrawInfo> vecMeshes;
glm::quat getFinalMeshQOrientation(unsigned int meshID);
glm::quat getFinalMeshQOrientation(glm::quat &meshQOrientation);
bool bIsVisible; // If false, any meshes are NOT drawn (though child objects could still be visible)
// Our "child" objects
std::vector< cGameObject* > vec_pChildObjects;
void DeleteChildren(void);
// Returns NULL if not found
cGameObject* FindChildByFriendlyName( std::string name );
cGameObject* FindChildByID( unsigned int ID );
// Used when there is only one game object (like with text), but we're drawing it many times
//void pushRenderingState(void);
//// Ignores call if nothing on stack
//void popRenderingState(void);
// If NULL, then object ISN'T a skinned mesh
cSimpleAssimpSkinnedMesh* pSimpleSkinnedMesh;
cAnimationState* pAniState;
private:
unsigned int m_UniqueID;
// Used when creating objects
static unsigned int m_nextUniqueID;
// All the properties of a physical object
cPhysicalProperties m_PhysicalProps;
//// Any former render states (that have been pushed)
//struct sRenderState
//{
// glm::vec3 position;
// glm::quat oriention;
// bool bIsWireframe;
// glm::vec4 diffuse; // Alpha is 4th value
// glm::vec4 ambient;
// glm::vec3 specular;
// float shininess;
// glm::vec3 debugColour;
// bool bUseDebugColour;
// float scale;
// bool bIsVisible;
//};
//std::queue< sRenderState > m_stackRenderState;
};
#endif
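// Usage sketch (illustrative only; assumes the engine creates, updates and
// draws the object elsewhere — only calls declared in this header are used):
//   cGameObject* pBall = new cGameObject();
//   pBall->friendlyName = "ball";
//   pBall->overwritePosition(glm::vec3(0.0f, 5.0f, 0.0f));
//   pBall->overwriteQOrientationEuler(glm::vec3(0.0f, 90.0f, 0.0f), true); // degrees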
|
const date = new Date();
let ecriture = new TypeIt('#line-1', {
speed: 30,
startDelay: 900
})
.type(date.toLocaleString())
.pause(500)
.break()
.break()
.type("Après quelques jours, les parents de Paul ont remarqué que son comportement avait changé. Après qu'ils lui eurent tiré les vers du nez, ils ont engagé des procédure judiciaires contre Paul, qui a fini par être expulsé de l'établissement. Vous vous en sortez sans dommages, mais Eric ne vous adresse plus la parole...")
.break()
.break()
.type("<a href='./index.html'>Fin n°5</a>")
.go();
|
const path = require(`path`)
const _ = require(`lodash`)
const { createFilePath } = require(`gatsby-source-filesystem`)
const createPaginatedPages = require('gatsby-paginate')
exports.createPages = ({ graphql, actions }) => {
const { createPage } = actions
return new Promise((resolve, reject) => {
const blogPost = path.resolve(`./src/templates/blog-post.jsx`)
const tagsPage = path.resolve(`./src/templates/tag.js`)
resolve(
graphql(
`
{
allMarkdownRemark(
sort: { fields: [frontmatter___date], order: DESC }
limit: 1000
) {
edges {
node {
id
fields {
slug
}
frontmatter {
title
tags
}
excerpt(pruneLength: 230)
}
}
}
}
`
).then(result => {
if (result.errors) {
console.log(result.errors)
return reject(result.errors)
}
createPaginatedPages({
edges: result.data.allMarkdownRemark.edges,
createPage: createPage,
pageTemplate: 'src/templates/index.js',
pageLength: 8, // This is optional and defaults to 10 if not used
pathPrefix: '', // This is optional and defaults to an empty string if not used
context: {}, // This is optional and defaults to an empty object if not used
})
// Create blog posts pages.
const posts = result.data.allMarkdownRemark.edges
const tagsSet = new Set()
posts.forEach((post, index) => {
const previous = index === posts.length - 1 ? null : posts[index + 1].node
const next = index === 0 ? null : posts[index - 1].node
if (post.node.frontmatter.tags) {
post.node.frontmatter.tags.forEach(tag => {
tagsSet.add(tag)
})
}
createPage({
path: post.node.fields.slug,
component: blogPost,
context: {
slug: post.node.fields.slug,
previous,
next,
},
})
})
// Create tag pages.
const tagsList = Array.from(tagsSet)
tagsList.forEach(tag => {
createPage({
path: `/tag/${_.kebabCase(tag)}/`,
component: tagsPage,
context: {
tag
}
})
})
})
)
})
}
exports.onCreateNode = ({ node, actions, getNode }) => {
const { createNodeField } = actions
if (node.internal.type === `MarkdownRemark`) {
const value = createFilePath({ node, getNode })
createNodeField({
name: `slug`,
node,
value,
})
}
}
|
"""
Demo/test program for the MQTT utilities.
See https://github.com/sensemakersamsterdam/astroplant_explorer
"""
#
# (c) Sensemakersams.org and others. See https://github.com/sensemakersamsterdam/astroplant_explorer
# Author: Gijs Mos
#
##
# H O W T O U S E
#
# Edit configuration.json and pick a nice 'ae_id' for yourself.
#
# Now start a terminal window #1 on your Pi and run:
# python 1_mqtt_receiver_demo.py
# To monitor MQTT traffic open a second terminal window #2 and run:
# mosquitto_sub -v -t "#"
# Then open a terminal window #3 and run:
# python 1_mqtt_sender_demo.py
# This should get things started. You can run the 1_mqtt_sender_demo.py
# repeatedly. The 1_mqtt_receiver_demo and mosquitto_sub will show the
# messages each time you run it.
# And if you want to send the stop-request to the 1_mqtt_receiver_demo.py, run
# python 1_mqtt_stop_demo.py
# in terminal window #3.
# The mosquitto_sub in terminal #2 you can abort with control-c.
###
# Warning: if import of ae_* module(s) fails, then you need to set up PYTHONPATH.
# To test, start python, import sys and type sys.path. The ae 'lib' directory
# should be included in the printed path.
# First we import the variable 'cfg' from the configuration library.
# It reads the JSON file configuration.json from your current directory
# and makes it available to this script as the dictionary 'cfg'.
# (sys.path must include the ae 'lib' directory before that import runs.)
import sys
sys.path.append('../lib')
from ae_util.configuration import cfg
# From the standard time library we now import the function sleep()
from time import sleep
# From the mqtt library we import the AE_Local_MQTT class which contains a bunch
# of functions we will use in this script
from ae_util.mqtt import AE_Local_MQTT
# Here we initialize our local MQTT agent.
# It imports your MQTT settings automatically from the configuration.json file.
loc_mqtt = AE_Local_MQTT()
# And now we activate the MQTT connection.
loc_mqtt.setup()
# For the rest it is simple. We send some stuff and print the return code.
# We sleep a bit, send again etc.
# and then we are done.
# Send in sequence a dictionary, a string and an integer.
# A dict will automatically be JSON formatted before sending.
# All other stuff is formatted to a str first.
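# As a rough illustration of that rule (not the library's actual code), the
# payload preparation presumably behaves like:
#   import json
#   def _format(payload):
#       return json.dumps(payload) if isinstance(payload, dict) else str(payload)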
print('sub-topic: dict', loc_mqtt.publish('dict', cfg))
sleep(0.4)
print('sub-topic: str', loc_mqtt.publish('string',
'waar eens de boterbloemen bloeiden'))
print('sub-topic: int', loc_mqtt.publish('int', 33))
# sleeeeeeeppppp...
print('Take 5')
sleep(5)
# And send some more
#
print('sub-topic: aap', loc_mqtt.publish('aap', 'payload for aap'))
print('sub-topic: aap/noot', loc_mqtt.publish('aap/noot', 'payload for aap/noot'))
print('sub-topic: test', loc_mqtt.publish('test',
{"topic": "test", "payload": [44, 33]}))
print('And 4 more...')
# Sleep and quit.
sleep(1)
print('Bye bye..')
print('Remember, you can run 1_mqtt_stop_demo.py to stop the 1_mqtt_receiver_demo,')
print('or run this program again.')
|
/* Created RJudd December 30, 1997 */
/* SPAWARSYSCEN D881 */
/**********************************************************************
// For TASP VSIPL Documentation and Code neither the United States /
// Government, the United States Navy, nor any of their employees, /
// makes any warranty, express or implied, including the warranties /
// of merchantability and fitness for a particular purpose, or /
// assumes any legal liability or responsibility for the accuracy, /
// completeness, or usefulness of any information, apparatus, /
// product, or process disclosed, or represents that its use would /
// not infringe privately owned rights /
**********************************************************************/
/* $Id: VI_vmul_d.h,v 2.0 2003/02/22 15:18:34 judd Exp $ */
#include"vsip.h"
#include"vsip_vviewattributes_d.h"
#ifndef VI_VMUL_D_H
#define VI_VMUL_D_H
static
void
VI_vmul_d(
const vsip_vview_d *a,
const vsip_vview_d *b,
const vsip_vview_d *r) {
/*define variables*/
/* register */ vsip_length n = r->length;
/* register */ vsip_stride ast = a->stride * a->block->rstride,
bst = b->stride * b->block->rstride,
rst = r->stride * r->block->rstride;
vsip_scalar_d *ap = (a->block->array) + a->offset * a->block->rstride,
*bp = (b->block->array) + b->offset * b->block->rstride,
*rp = (r->block->array) + r->offset * r->block->rstride;
/*end define*/
while(n-- > 0){
*rp = *ap * *bp;
ap += ast; bp += bst; rp += rst;
}
}
#endif
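/* Illustration (not part of the original TASP code): with unit strides and
 * zero offsets, the loop above reduces to a plain elementwise multiply,
 *     for (i = 0; i < n; i++) r[i] = a[i] * b[i];
 * the stride/offset arithmetic simply generalizes this to strided views.
 */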
|
/*
* Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
*
* Copyright (c) 2011, 2012, Intel Corporation.
*
* This file is part of Portals
* http://sourceforge.net/projects/sandiaportals/
*
* Portals is free software; you can redistribute it and/or
* modify it under the terms of version 2 of the GNU General Public
* License as published by the Free Software Foundation.
*
* Portals is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*/
#define DEBUG_SUBSYSTEM S_LNET
#include "../../include/linux/libcfs/libcfs.h"
#include "../../include/linux/lnet/lib-lnet.h"
/*
* This is really lnet_proc.c. You might need to update sanity test 215
* if any file format is changed.
*/
#define LNET_LOFFT_BITS (sizeof(loff_t) * 8)
/*
* NB: max allowed LNET_CPT_BITS is 8 on 64-bit system and 2 on 32-bit system
*/
#define LNET_PROC_CPT_BITS (LNET_CPT_BITS + 1)
/* change version, 16 bits or 8 bits */
#define LNET_PROC_VER_BITS max_t(size_t, min_t(size_t, LNET_LOFFT_BITS, 64) / 4, 8)
#define LNET_PROC_HASH_BITS LNET_PEER_HASH_BITS
/*
* bits for peer hash offset
* NB: we don't use the highest bit of *ppos because it's signed
*/
#define LNET_PROC_HOFF_BITS (LNET_LOFFT_BITS - \
LNET_PROC_CPT_BITS - \
LNET_PROC_VER_BITS - \
LNET_PROC_HASH_BITS - 1)
/* bits for hash index + position */
#define LNET_PROC_HPOS_BITS (LNET_PROC_HASH_BITS + LNET_PROC_HOFF_BITS)
/* bits for peer hash table + hash version */
#define LNET_PROC_VPOS_BITS (LNET_PROC_HPOS_BITS + LNET_PROC_VER_BITS)
#define LNET_PROC_CPT_MASK ((1ULL << LNET_PROC_CPT_BITS) - 1)
#define LNET_PROC_VER_MASK ((1ULL << LNET_PROC_VER_BITS) - 1)
#define LNET_PROC_HASH_MASK ((1ULL << LNET_PROC_HASH_BITS) - 1)
#define LNET_PROC_HOFF_MASK ((1ULL << LNET_PROC_HOFF_BITS) - 1)
#define LNET_PROC_CPT_GET(pos) \
(int)(((pos) >> LNET_PROC_VPOS_BITS) & LNET_PROC_CPT_MASK)
#define LNET_PROC_VER_GET(pos) \
(int)(((pos) >> LNET_PROC_HPOS_BITS) & LNET_PROC_VER_MASK)
#define LNET_PROC_HASH_GET(pos) \
(int)(((pos) >> LNET_PROC_HOFF_BITS) & LNET_PROC_HASH_MASK)
#define LNET_PROC_HOFF_GET(pos) \
(int)((pos) & LNET_PROC_HOFF_MASK)
#define LNET_PROC_POS_MAKE(cpt, ver, hash, off) \
(((((loff_t)(cpt)) & LNET_PROC_CPT_MASK) << LNET_PROC_VPOS_BITS) | \
((((loff_t)(ver)) & LNET_PROC_VER_MASK) << LNET_PROC_HPOS_BITS) | \
((((loff_t)(hash)) & LNET_PROC_HASH_MASK) << LNET_PROC_HOFF_BITS) | \
((off) & LNET_PROC_HOFF_MASK))
#define LNET_PROC_VERSION(v) ((unsigned int)((v) & LNET_PROC_VER_MASK))
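/*
 * Worked example (illustrative, not from the original source): on a 64-bit
 * system with LNET_CPT_BITS == 8 and LNET_PEER_HASH_BITS == 9, the field
 * widths come out as CPT:9 VER:16 HASH:9 HOFF:29, so *ppos is laid out
 * (MSB to LSB, top bit unused because loff_t is signed):
 *
 *   [unused:1][cpt:9][ver:16][hash:9][hoff:29]
 */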
static int __proc_lnet_stats(void *data, int write,
loff_t pos, void __user *buffer, int nob)
{
int rc;
lnet_counters_t *ctrs;
int len;
char *tmpstr;
const int tmpsiz = 256; /* 7 %u and 4 %llu */
if (write) {
lnet_counters_reset();
return 0;
}
/* read */
LIBCFS_ALLOC(ctrs, sizeof(*ctrs));
if (!ctrs)
return -ENOMEM;
LIBCFS_ALLOC(tmpstr, tmpsiz);
if (!tmpstr) {
LIBCFS_FREE(ctrs, sizeof(*ctrs));
return -ENOMEM;
}
lnet_counters_get(ctrs);
len = snprintf(tmpstr, tmpsiz,
"%u %u %u %u %u %u %u %llu %llu %llu %llu",
ctrs->msgs_alloc, ctrs->msgs_max,
ctrs->errors,
ctrs->send_count, ctrs->recv_count,
ctrs->route_count, ctrs->drop_count,
ctrs->send_length, ctrs->recv_length,
ctrs->route_length, ctrs->drop_length);
if (pos >= min_t(int, len, strlen(tmpstr)))
rc = 0;
else
rc = cfs_trace_copyout_string(buffer, nob,
tmpstr + pos, "\n");
LIBCFS_FREE(tmpstr, tmpsiz);
LIBCFS_FREE(ctrs, sizeof(*ctrs));
return rc;
}
static int proc_lnet_stats(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp, loff_t *ppos)
{
return lprocfs_call_handler(table->data, write, ppos, buffer, lenp,
__proc_lnet_stats);
}
static int proc_lnet_routes(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp, loff_t *ppos)
{
const int tmpsiz = 256;
char *tmpstr;
char *s;
int rc = 0;
int len;
int ver;
int off;
CLASSERT(sizeof(loff_t) >= 4);
off = LNET_PROC_HOFF_GET(*ppos);
ver = LNET_PROC_VER_GET(*ppos);
LASSERT(!write);
if (!*lenp)
return 0;
LIBCFS_ALLOC(tmpstr, tmpsiz);
if (!tmpstr)
return -ENOMEM;
s = tmpstr; /* points to current position in tmpstr[] */
if (!*ppos) {
s += snprintf(s, tmpstr + tmpsiz - s, "Routing %s\n",
the_lnet.ln_routing ? "enabled" : "disabled");
LASSERT(tmpstr + tmpsiz - s > 0);
s += snprintf(s, tmpstr + tmpsiz - s, "%-8s %4s %8s %7s %s\n",
"net", "hops", "priority", "state", "router");
LASSERT(tmpstr + tmpsiz - s > 0);
lnet_net_lock(0);
ver = (unsigned int)the_lnet.ln_remote_nets_version;
lnet_net_unlock(0);
*ppos = LNET_PROC_POS_MAKE(0, ver, 0, off);
} else {
struct list_head *n;
struct list_head *r;
lnet_route_t *route = NULL;
lnet_remotenet_t *rnet = NULL;
int skip = off - 1;
struct list_head *rn_list;
int i;
lnet_net_lock(0);
if (ver != LNET_PROC_VERSION(the_lnet.ln_remote_nets_version)) {
lnet_net_unlock(0);
LIBCFS_FREE(tmpstr, tmpsiz);
return -ESTALE;
}
for (i = 0; i < LNET_REMOTE_NETS_HASH_SIZE && !route; i++) {
rn_list = &the_lnet.ln_remote_nets_hash[i];
n = rn_list->next;
while (n != rn_list && !route) {
rnet = list_entry(n, lnet_remotenet_t,
lrn_list);
r = rnet->lrn_routes.next;
while (r != &rnet->lrn_routes) {
lnet_route_t *re =
list_entry(r, lnet_route_t,
lr_list);
if (!skip) {
route = re;
break;
}
skip--;
r = r->next;
}
n = n->next;
}
}
if (route) {
__u32 net = rnet->lrn_net;
__u32 hops = route->lr_hops;
unsigned int priority = route->lr_priority;
lnet_nid_t nid = route->lr_gateway->lp_nid;
int alive = lnet_is_route_alive(route);
s += snprintf(s, tmpstr + tmpsiz - s,
"%-8s %4u %8u %7s %s\n",
libcfs_net2str(net), hops,
priority,
alive ? "up" : "down",
libcfs_nid2str(nid));
LASSERT(tmpstr + tmpsiz - s > 0);
}
lnet_net_unlock(0);
}
len = s - tmpstr; /* how many bytes were written */
if (len > *lenp) { /* linux-supplied buffer is too small */
rc = -EINVAL;
} else if (len > 0) { /* wrote something */
if (copy_to_user(buffer, tmpstr, len)) {
rc = -EFAULT;
} else {
off += 1;
*ppos = LNET_PROC_POS_MAKE(0, ver, 0, off);
}
}
LIBCFS_FREE(tmpstr, tmpsiz);
if (!rc)
*lenp = len;
return rc;
}
static int proc_lnet_routers(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp, loff_t *ppos)
{
int rc = 0;
char *tmpstr;
char *s;
const int tmpsiz = 256;
int len;
int ver;
int off;
off = LNET_PROC_HOFF_GET(*ppos);
ver = LNET_PROC_VER_GET(*ppos);
LASSERT(!write);
if (!*lenp)
return 0;
LIBCFS_ALLOC(tmpstr, tmpsiz);
if (!tmpstr)
return -ENOMEM;
s = tmpstr; /* points to current position in tmpstr[] */
if (!*ppos) {
s += snprintf(s, tmpstr + tmpsiz - s,
"%-4s %7s %9s %6s %12s %9s %8s %7s %s\n",
"ref", "rtr_ref", "alive_cnt", "state",
"last_ping", "ping_sent", "deadline",
"down_ni", "router");
LASSERT(tmpstr + tmpsiz - s > 0);
lnet_net_lock(0);
ver = (unsigned int)the_lnet.ln_routers_version;
lnet_net_unlock(0);
*ppos = LNET_PROC_POS_MAKE(0, ver, 0, off);
} else {
struct list_head *r;
struct lnet_peer *peer = NULL;
int skip = off - 1;
lnet_net_lock(0);
if (ver != LNET_PROC_VERSION(the_lnet.ln_routers_version)) {
lnet_net_unlock(0);
LIBCFS_FREE(tmpstr, tmpsiz);
return -ESTALE;
}
r = the_lnet.ln_routers.next;
while (r != &the_lnet.ln_routers) {
lnet_peer_t *lp = list_entry(r, lnet_peer_t,
lp_rtr_list);
if (!skip) {
peer = lp;
break;
}
skip--;
r = r->next;
}
if (peer) {
lnet_nid_t nid = peer->lp_nid;
unsigned long now = cfs_time_current();
unsigned long deadline = peer->lp_ping_deadline;
int nrefs = peer->lp_refcount;
int nrtrrefs = peer->lp_rtr_refcount;
int alive_cnt = peer->lp_alive_count;
int alive = peer->lp_alive;
int pingsent = !peer->lp_ping_notsent;
int last_ping = cfs_duration_sec(cfs_time_sub(now,
peer->lp_ping_timestamp));
int down_ni = 0;
lnet_route_t *rtr;
if ((peer->lp_ping_feats &
LNET_PING_FEAT_NI_STATUS)) {
list_for_each_entry(rtr, &peer->lp_routes,
lr_gwlist) {
/*
* downis on any route should be the
* number of downis on the gateway
*/
if (rtr->lr_downis) {
down_ni = rtr->lr_downis;
break;
}
}
}
if (!deadline)
s += snprintf(s, tmpstr + tmpsiz - s,
"%-4d %7d %9d %6s %12d %9d %8s %7d %s\n",
nrefs, nrtrrefs, alive_cnt,
alive ? "up" : "down", last_ping,
pingsent, "NA", down_ni,
libcfs_nid2str(nid));
else
s += snprintf(s, tmpstr + tmpsiz - s,
"%-4d %7d %9d %6s %12d %9d %8lu %7d %s\n",
nrefs, nrtrrefs, alive_cnt,
alive ? "up" : "down", last_ping,
pingsent,
cfs_duration_sec(cfs_time_sub(deadline, now)),
down_ni, libcfs_nid2str(nid));
LASSERT(tmpstr + tmpsiz - s > 0);
}
lnet_net_unlock(0);
}
len = s - tmpstr; /* how many bytes were written */
if (len > *lenp) { /* linux-supplied buffer is too small */
rc = -EINVAL;
} else if (len > 0) { /* wrote something */
if (copy_to_user(buffer, tmpstr, len)) {
rc = -EFAULT;
} else {
off += 1;
*ppos = LNET_PROC_POS_MAKE(0, ver, 0, off);
}
}
LIBCFS_FREE(tmpstr, tmpsiz);
if (!rc)
*lenp = len;
return rc;
}
static int proc_lnet_peers(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp, loff_t *ppos)
{
const int tmpsiz = 256;
struct lnet_peer_table *ptable;
char *tmpstr;
char *s;
int cpt = LNET_PROC_CPT_GET(*ppos);
int ver = LNET_PROC_VER_GET(*ppos);
int hash = LNET_PROC_HASH_GET(*ppos);
int hoff = LNET_PROC_HOFF_GET(*ppos);
int rc = 0;
int len;
CLASSERT(LNET_PROC_HASH_BITS >= LNET_PEER_HASH_BITS);
LASSERT(!write);
if (!*lenp)
return 0;
if (cpt >= LNET_CPT_NUMBER) {
*lenp = 0;
return 0;
}
LIBCFS_ALLOC(tmpstr, tmpsiz);
if (!tmpstr)
return -ENOMEM;
s = tmpstr; /* points to current position in tmpstr[] */
if (!*ppos) {
s += snprintf(s, tmpstr + tmpsiz - s,
"%-24s %4s %5s %5s %5s %5s %5s %5s %5s %s\n",
"nid", "refs", "state", "last", "max",
"rtr", "min", "tx", "min", "queue");
LASSERT(tmpstr + tmpsiz - s > 0);
hoff++;
} else {
struct lnet_peer *peer;
struct list_head *p;
int skip;
again:
p = NULL;
peer = NULL;
skip = hoff - 1;
lnet_net_lock(cpt);
ptable = the_lnet.ln_peer_tables[cpt];
if (hoff == 1)
ver = LNET_PROC_VERSION(ptable->pt_version);
if (ver != LNET_PROC_VERSION(ptable->pt_version)) {
lnet_net_unlock(cpt);
LIBCFS_FREE(tmpstr, tmpsiz);
return -ESTALE;
}
while (hash < LNET_PEER_HASH_SIZE) {
if (!p)
p = ptable->pt_hash[hash].next;
while (p != &ptable->pt_hash[hash]) {
lnet_peer_t *lp = list_entry(p, lnet_peer_t,
lp_hashlist);
if (!skip) {
peer = lp;
/*
* minor optimization: start from idx+1
* on next iteration if we've just
* drained lp_hashlist
*/
if (lp->lp_hashlist.next ==
&ptable->pt_hash[hash]) {
hoff = 1;
hash++;
} else {
hoff++;
}
break;
}
skip--;
p = lp->lp_hashlist.next;
}
if (peer)
break;
p = NULL;
hoff = 1;
hash++;
}
if (peer) {
lnet_nid_t nid = peer->lp_nid;
int nrefs = peer->lp_refcount;
int lastalive = -1;
char *aliveness = "NA";
int maxcr = peer->lp_ni->ni_peertxcredits;
int txcr = peer->lp_txcredits;
int mintxcr = peer->lp_mintxcredits;
int rtrcr = peer->lp_rtrcredits;
int minrtrcr = peer->lp_minrtrcredits;
int txqnob = peer->lp_txqnob;
if (lnet_isrouter(peer) ||
lnet_peer_aliveness_enabled(peer))
aliveness = peer->lp_alive ? "up" : "down";
if (lnet_peer_aliveness_enabled(peer)) {
unsigned long now = cfs_time_current();
long delta;
delta = cfs_time_sub(now, peer->lp_last_alive);
lastalive = cfs_duration_sec(delta);
/* No need to mess up the peer's contents with
* arbitrarily long integers - it suffices to
* know that lastalive is more than 10000s old
*/
if (lastalive >= 10000)
lastalive = 9999;
}
lnet_net_unlock(cpt);
s += snprintf(s, tmpstr + tmpsiz - s,
"%-24s %4d %5s %5d %5d %5d %5d %5d %5d %d\n",
libcfs_nid2str(nid), nrefs, aliveness,
lastalive, maxcr, rtrcr, minrtrcr, txcr,
mintxcr, txqnob);
LASSERT(tmpstr + tmpsiz - s > 0);
} else { /* peer is NULL */
lnet_net_unlock(cpt);
}
if (hash == LNET_PEER_HASH_SIZE) {
cpt++;
hash = 0;
hoff = 1;
if (!peer && cpt < LNET_CPT_NUMBER)
goto again;
}
}
len = s - tmpstr; /* how many bytes were written */
if (len > *lenp) { /* linux-supplied buffer is too small */
rc = -EINVAL;
} else if (len > 0) { /* wrote something */
if (copy_to_user(buffer, tmpstr, len))
rc = -EFAULT;
else
*ppos = LNET_PROC_POS_MAKE(cpt, ver, hash, hoff);
}
LIBCFS_FREE(tmpstr, tmpsiz);
if (!rc)
*lenp = len;
return rc;
}
static int __proc_lnet_buffers(void *data, int write,
loff_t pos, void __user *buffer, int nob)
{
char *s;
char *tmpstr;
int tmpsiz;
int idx;
int len;
int rc;
int i;
LASSERT(!write);
/* (4 %d) * 4 * LNET_CPT_NUMBER */
tmpsiz = 64 * (LNET_NRBPOOLS + 1) * LNET_CPT_NUMBER;
LIBCFS_ALLOC(tmpstr, tmpsiz);
if (!tmpstr)
return -ENOMEM;
s = tmpstr; /* points to current position in tmpstr[] */
s += snprintf(s, tmpstr + tmpsiz - s,
"%5s %5s %7s %7s\n",
"pages", "count", "credits", "min");
LASSERT(tmpstr + tmpsiz - s > 0);
if (!the_lnet.ln_rtrpools)
goto out; /* I'm not a router */
for (idx = 0; idx < LNET_NRBPOOLS; idx++) {
lnet_rtrbufpool_t *rbp;
lnet_net_lock(LNET_LOCK_EX);
cfs_percpt_for_each(rbp, i, the_lnet.ln_rtrpools) {
s += snprintf(s, tmpstr + tmpsiz - s,
"%5d %5d %7d %7d\n",
rbp[idx].rbp_npages,
rbp[idx].rbp_nbuffers,
rbp[idx].rbp_credits,
rbp[idx].rbp_mincredits);
LASSERT(tmpstr + tmpsiz - s > 0);
}
lnet_net_unlock(LNET_LOCK_EX);
}
out:
len = s - tmpstr;
if (pos >= min_t(int, len, strlen(tmpstr)))
rc = 0;
else
rc = cfs_trace_copyout_string(buffer, nob,
tmpstr + pos, NULL);
LIBCFS_FREE(tmpstr, tmpsiz);
return rc;
}
static int proc_lnet_buffers(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp, loff_t *ppos)
{
return lprocfs_call_handler(table->data, write, ppos, buffer, lenp,
__proc_lnet_buffers);
}
static int proc_lnet_nis(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp, loff_t *ppos)
{
int tmpsiz = 128 * LNET_CPT_NUMBER;
int rc = 0;
char *tmpstr;
char *s;
int len;
LASSERT(!write);
if (!*lenp)
return 0;
LIBCFS_ALLOC(tmpstr, tmpsiz);
if (!tmpstr)
return -ENOMEM;
s = tmpstr; /* points to current position in tmpstr[] */
if (!*ppos) {
s += snprintf(s, tmpstr + tmpsiz - s,
"%-24s %6s %5s %4s %4s %4s %5s %5s %5s\n",
"nid", "status", "alive", "refs", "peer",
"rtr", "max", "tx", "min");
LASSERT(tmpstr + tmpsiz - s > 0);
} else {
struct list_head *n;
lnet_ni_t *ni = NULL;
int skip = *ppos - 1;
lnet_net_lock(0);
n = the_lnet.ln_nis.next;
while (n != &the_lnet.ln_nis) {
lnet_ni_t *a_ni = list_entry(n, lnet_ni_t, ni_list);
if (!skip) {
ni = a_ni;
break;
}
skip--;
n = n->next;
}
if (ni) {
struct lnet_tx_queue *tq;
char *stat;
time64_t now = ktime_get_real_seconds();
int last_alive = -1;
int i;
int j;
if (the_lnet.ln_routing)
last_alive = now - ni->ni_last_alive;
/* @lo forever alive */
if (ni->ni_lnd->lnd_type == LOLND)
last_alive = 0;
lnet_ni_lock(ni);
LASSERT(ni->ni_status);
stat = (ni->ni_status->ns_status ==
LNET_NI_STATUS_UP) ? "up" : "down";
lnet_ni_unlock(ni);
/*
* we actually output credits information for
* TX queue of each partition
*/
cfs_percpt_for_each(tq, i, ni->ni_tx_queues) {
for (j = 0; ni->ni_cpts &&
j < ni->ni_ncpts; j++) {
if (i == ni->ni_cpts[j])
break;
}
if (j == ni->ni_ncpts)
continue;
if (i)
lnet_net_lock(i);
s += snprintf(s, tmpstr + tmpsiz - s,
"%-24s %6s %5d %4d %4d %4d %5d %5d %5d\n",
libcfs_nid2str(ni->ni_nid), stat,
last_alive, *ni->ni_refs[i],
ni->ni_peertxcredits,
ni->ni_peerrtrcredits,
tq->tq_credits_max,
tq->tq_credits,
tq->tq_credits_min);
if (i)
lnet_net_unlock(i);
}
LASSERT(tmpstr + tmpsiz - s > 0);
}
lnet_net_unlock(0);
}
len = s - tmpstr; /* how many bytes were written */
if (len > *lenp) { /* linux-supplied buffer is too small */
rc = -EINVAL;
} else if (len > 0) { /* wrote something */
if (copy_to_user(buffer, tmpstr, len))
rc = -EFAULT;
else
*ppos += 1;
}
LIBCFS_FREE(tmpstr, tmpsiz);
if (!rc)
*lenp = len;
return rc;
}
struct lnet_portal_rotors {
int pr_value;
const char *pr_name;
const char *pr_desc;
};
static struct lnet_portal_rotors portal_rotors[] = {
{
.pr_value = LNET_PTL_ROTOR_OFF,
.pr_name = "OFF",
.pr_desc = "Turn off message rotor for wildcard portals"
},
{
.pr_value = LNET_PTL_ROTOR_ON,
.pr_name = "ON",
.pr_desc = "round-robin dispatch all PUT messages for wildcard portals"
},
{
.pr_value = LNET_PTL_ROTOR_RR_RT,
.pr_name = "RR_RT",
.pr_desc = "round-robin dispatch routed PUT message for wildcard portals"
},
{
.pr_value = LNET_PTL_ROTOR_HASH_RT,
.pr_name = "HASH_RT",
.pr_desc = "dispatch routed PUT message by hashing source NID for wildcard portals"
},
{
.pr_value = -1,
.pr_name = NULL,
.pr_desc = NULL
},
};
static int __proc_lnet_portal_rotor(void *data, int write,
loff_t pos, void __user *buffer, int nob)
{
const int buf_len = 128;
char *buf;
char *tmp;
int rc;
int i;
LIBCFS_ALLOC(buf, buf_len);
if (!buf)
return -ENOMEM;
if (!write) {
lnet_res_lock(0);
for (i = 0; portal_rotors[i].pr_value >= 0; i++) {
if (portal_rotors[i].pr_value == portal_rotor)
break;
}
LASSERT(portal_rotors[i].pr_value == portal_rotor);
lnet_res_unlock(0);
rc = snprintf(buf, buf_len,
"{\n\tportals: all\n"
"\trotor: %s\n\tdescription: %s\n}",
portal_rotors[i].pr_name,
portal_rotors[i].pr_desc);
if (pos >= min_t(int, rc, buf_len)) {
rc = 0;
} else {
rc = cfs_trace_copyout_string(buffer, nob,
buf + pos, "\n");
}
goto out;
}
rc = cfs_trace_copyin_string(buf, buf_len, buffer, nob);
if (rc < 0)
goto out;
tmp = cfs_trimwhite(buf);
rc = -EINVAL;
lnet_res_lock(0);
for (i = 0; portal_rotors[i].pr_name; i++) {
if (!strncasecmp(portal_rotors[i].pr_name, tmp,
strlen(portal_rotors[i].pr_name))) {
portal_rotor = portal_rotors[i].pr_value;
rc = 0;
break;
}
}
lnet_res_unlock(0);
out:
LIBCFS_FREE(buf, buf_len);
return rc;
}
static int proc_lnet_portal_rotor(struct ctl_table *table, int write,
void __user *buffer, size_t *lenp,
loff_t *ppos)
{
return lprocfs_call_handler(table->data, write, ppos, buffer, lenp,
__proc_lnet_portal_rotor);
}
static struct ctl_table lnet_table[] = {
/*
* NB No .strategy entries have been provided since sysctl(8) prefers
* to go via /proc for portability.
*/
{
.procname = "stats",
.mode = 0644,
.proc_handler = &proc_lnet_stats,
},
{
.procname = "routes",
.mode = 0444,
.proc_handler = &proc_lnet_routes,
},
{
.procname = "routers",
.mode = 0444,
.proc_handler = &proc_lnet_routers,
},
{
.procname = "peers",
.mode = 0444,
.proc_handler = &proc_lnet_peers,
},
{
.procname = "buffers",
.mode = 0444,
.proc_handler = &proc_lnet_buffers,
},
{
.procname = "nis",
.mode = 0444,
.proc_handler = &proc_lnet_nis,
},
{
.procname = "portal_rotor",
.mode = 0644,
.proc_handler = &proc_lnet_portal_rotor,
},
{
}
};
void lnet_router_debugfs_init(void)
{
lustre_insert_debugfs(lnet_table, NULL);
}
void lnet_router_debugfs_fini(void)
{
}
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[1],{507:function(module,exports,__webpack_require__){__webpack_require__(508),__webpack_require__(895),__webpack_require__(905),__webpack_require__(906),__webpack_require__(901),__webpack_require__(899),__webpack_require__(898),__webpack_require__(897),__webpack_require__(902),__webpack_require__(900),__webpack_require__(896),__webpack_require__(903),__webpack_require__(904),module.exports=__webpack_require__(893)},575:function(module,exports){}},[[507,2,3]]]);
|
#ifndef __NEOOBD2SIM_H_
#define __NEOOBD2SIM_H_
#ifdef __cplusplus
#include "icsneo/device/device.h"
#include "icsneo/device/devicetype.h"
#include "icsneo/platform/cdcacm.h"
namespace icsneo {
class NeoOBD2SIM : public Device {
public:
// Serial numbers are OS****
static constexpr DeviceType::Enum DEVICE_TYPE = DeviceType::OBD2_SIM;
static constexpr const uint16_t PRODUCT_ID = 0x1100;
static std::vector<std::shared_ptr<Device>> Find() {
std::vector<std::shared_ptr<Device>> found;
for(auto neodevice : CDCACM::FindByProduct(PRODUCT_ID))
found.emplace_back(new NeoOBD2SIM(neodevice));
return found;
}
static const std::vector<Network>& GetSupportedNetworks() {
static std::vector<Network> supportedNetworks = {
Network::NetID::HSCAN,
Network::NetID::HSCAN2
};
return supportedNetworks;
}
private:
NeoOBD2SIM(neodevice_t neodevice) : Device(neodevice) {
initialize<CDCACM>();
getWritableNeoDevice().type = DEVICE_TYPE;
productId = PRODUCT_ID;
}
virtual void setupSupportedRXNetworks(std::vector<Network>& rxNetworks) override {
for(auto& netid : GetSupportedNetworks())
rxNetworks.emplace_back(netid);
}
// The supported TX networks are the same as the supported RX networks for this device
virtual void setupSupportedTXNetworks(std::vector<Network>& txNetworks) override { setupSupportedRXNetworks(txNetworks); }
bool requiresVehiclePower() const override { return false; }
};
}
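// Usage sketch (illustrative): enumerate OBD2 SIM devices currently attached
// over CDC ACM; Find() is the only entry point this header exposes.
//   std::vector<std::shared_ptr<icsneo::Device>> sims = icsneo::NeoOBD2SIM::Find();
//   // sims.size() == number of simulators matching product ID 0x1100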
#endif // __cplusplus
#endif
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Nov 11 2015 18:36:30).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#pragma once
#import <Foundation/NSObject.h>
#import <net/if.h>
extern char g_if_name[IF_NAMESIZE];
struct Apple80211;
typedef struct Apple80211 *Apple80211Ref;
int Apple80211Open(Apple80211Ref *handle); //Open Connection
int Apple80211Close(Apple80211Ref handle);
int Apple80211BindToInterface(Apple80211Ref handle, CFStringRef interface);
int Apple80211Scan(Apple80211Ref handle, CFArrayRef *scanResult, CFDictionaryRef parameters);
int Apple80211SetPower(Apple80211Ref handle, uint32_t power);
int Apple80211GetPower(Apple80211Ref wref, uint32_t *power);
//int Apple80211Set(Apple80211Ref handle, CFStringRef str, uint32_t val);
int Apple80211Disassociate(Apple80211Ref wref);
int Apple80211Associate(Apple80211Ref handle, CFDictionaryRef SSID, CFStringRef pass);
//int Apple80211Associate2();
//int Apple80211Get(Apple80211Ref ref, CFStringRef chr, uint32_t val, CFTypeRef var, uint32_t var2);
int Apple80211Get();
int Apple80211Set();
char *Apple80211ErrToStr(uint32_t errCode);
int Apple80211GetIfListCopy(Apple80211Ref handle, CFArrayRef *If_name_array);
int Apple80211GetInfoCopy(Apple80211Ref wref, CFDictionaryRef *dict);
int Apple80211CopyValue();
@interface AirPort : NSObject
{
struct _AirPort *_ref;
}
+ (id)interfaceNames:(id *)arg1;
- (id)initWithInterfaceName:(id)arg1;
- (void)dealloc;
- (id)interfaceName;
- (void)setPower:(BOOL)arg1 error:(id *)arg2;
- (void)getPower:(id *)arg1 error:(id *)arg2;
- (BOOL)powerWithError:(id *)arg1;
- (void)setChannel:(id)arg1 error:(id *)arg2;
- (id)channelWithError:(id *)arg1;
- (id)staticConnectionParameters:(id *)arg1;
- (id)dynamicConnectionParameters:(id *)arg1;
- (id)regulatoryInfo:(id *)arg1;
- (id)capabilities:(id *)arg1;
- (id)preference:(id)arg1 error:(id *)arg2;
- (void)setPreference:(id)arg1 toValue:(id)arg2 error:(id *)arg3;
- (void)commitPreferencesWithError:(id *)arg1;
- (id)scanWithArgs:(id)arg1 error:(id *)arg2;
- (BOOL)associateToNetwork:(id)arg1 withArgs:(id)arg2 error:(id *)arg3;
- (BOOL)disassociateWithError:(id *)arg1;
- (BOOL)createIBSS:(id)arg1 error:(id *)arg2;
- (void)startMonitoringEvents:(id)arg1 onRunloop:(id)arg2 error:(id *)arg3;
- (void)stopMonitoringEventsWithError:(id *)arg1;
@end
|
/*
* Copyright (c) 2017-2018 ARM Limited.
*
* SPDX-License-Identifier: MIT
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef __ARM_COMPUTE_TEST_GC_HELPER_H__
#define __ARM_COMPUTE_TEST_GC_HELPER_H__
#include "tests/Globals.h"
#include "arm_compute/core/ITensor.h"
#include "arm_compute/runtime/GLES_COMPUTE/GCTensor.h"
#include <iostream>
namespace arm_compute
{
namespace test
{
/** Helper to create an empty tensor.
*
* @param[in] shape Desired shape.
* @param[in] data_type Desired data type.
* @param[in] num_channels (Optional) It indicates the number of channels for each tensor element
* @param[in] fixed_point_position (Optional) Fixed point position that expresses the number of bits for the fractional part of the number when the tensor's data type is QS8 or QS16.
*
* @return Empty @ref GCTensor with the specified shape and data type.
*/
inline GCTensor create_tensor(const TensorShape &shape, DataType data_type, int num_channels = 1, int fixed_point_position = 0)
{
GCTensor tensor;
tensor.allocator()->init(TensorInfo(shape, num_channels, data_type, fixed_point_position));
return tensor;
}
/** Helper to create an empty tensor.
*
* @param[in] name File name from which to get the dimensions.
* @param[in] data_type Desired data type.
*
* @return Empty @ref GCTensor with the specified shape and data type.
*/
inline GCTensor create_tensor(const std::string &name, DataType data_type)
{
constexpr unsigned int num_channels = 1;
const RawTensor &raw = library->get(name);
GCTensor tensor;
tensor.allocator()->init(TensorInfo(raw.shape(), num_channels, data_type));
return tensor;
}
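// Usage sketch (illustrative; assumes the GLES compute runtime is initialized):
//   GCTensor t = create_tensor(TensorShape(16U, 16U), DataType::F32);
//   t.allocator()->allocate();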
/** Helper to print tensor.
*
* @param[in] tensor Tensor to print.
* @param[in] name Tensor name.
* @param[in] info Format information.
*/
inline void print_tensor(ITensor &tensor, const std::string &name, IOFormatInfo info = IOFormatInfo(IOFormatInfo::PrintRegion::Full))
{
std::ostringstream s;
IGCTensor &t = dynamic_cast<IGCTensor &>(tensor);
t.map();
t.print(s, info);
std::cout << name << ":" << std::endl;
std::cout << s.str().c_str();
t.unmap();
}
} // namespace test
} // namespace arm_compute
#endif /* __ARM_COMPUTE_TEST_GC_HELPER_H__ */
|
angular
.module('addressBook')
.factory('Person', function($http, API_URL) {
return {
getOne(id, cb) { //ES6 enhanced obj literal. Old version - getOne: function(id, cb) {
$http
.get(`${API_URL}/people/${id}.json`) //returning an obj with a prop of getOne
.success(cb);
},
getAll(cb) {
$http
.get(`${API_URL}/people/.json`)
.success(cb);
},
create(data, cb) {
$http
.post(`${API_URL}/people.json`, data)
.success(cb);
},
update(id, data, cb) {
$http
.put(`${API_URL}/people/${id}.json`, data)
.success(cb);
},
destroy(id, cb) {
$http
.delete(`${API_URL}/people/${id}.json`)
.success(cb);
}
}
});
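// Usage sketch (illustrative; assumes a controller with Person injected):
//   Person.getOne(42, function(person) { $scope.person = person; });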
|
# Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import textwrap
import unittest
from skydoc import load_extractor
class LoadExtractorTest(unittest.TestCase):
def check_symbols(self, src, expected):
with tempfile.NamedTemporaryFile(mode='w+') as tf:
tf.write(src)
tf.flush()
extractor = load_extractor.LoadExtractor()
load_symbols = extractor.extract(tf.name)
self.assertEqual(expected, load_symbols)
def test_load(self):
src = textwrap.dedent("""\
load("//foo/bar:bar.bzl", "foo_library")
load("//foo/bar:baz.bzl", "foo_test", orig_foo_binary = "foo_binary")
""")
expected = [
load_extractor.LoadSymbol('//foo/bar:bar.bzl', 'foo_library', None),
load_extractor.LoadSymbol('//foo/bar:baz.bzl', 'foo_test', None),
load_extractor.LoadSymbol('//foo/bar:baz.bzl', 'foo_binary',
'orig_foo_binary'),
]
self.check_symbols(src, expected)
def raises_error(self, src):
with tempfile.NamedTemporaryFile(mode='w+') as tf:
tf.write(src)
tf.flush()
extractor = load_extractor.LoadExtractor()
self.assertRaises(load_extractor.LoadExtractorError,
extractor.extract, tf.name)
def test_invalid_non_string_literal_in_label(self):
src = textwrap.dedent("""\
load(load_label, "foo_library")
""")
self.raises_error(src)
def test_invalid_non_string_literal_in_keywords(self):
src = textwrap.dedent("""\
load("//foo/bar:bar.bzl", loaded_symbol)
""")
self.raises_error(src)
def test_invalid_symbol_conflict(self):
src = textwrap.dedent("""\
load("//foo:bar.bzl", "foo_binary", "foo_library")
load("//foo:baz.bzl", "foo_library")
""")
self.raises_error(src)
def test_invalid_symbol_alias_conflict(self):
src = textwrap.dedent("""\
load("//foo:bar.bzl", foo_library="some_foo_library")
load("//foo:baz.bzl", "foo_library")
""")
self.raises_error(src)
def test_invalid_duplicate_symbol_loaded(self):
src = textwrap.dedent("""\
load("//foo:bar.bzl", "foo_library", "foo_library")
""")
self.raises_error(src)
if __name__ == '__main__':
unittest.main()
|
var Collection, TaskCollection, _,
extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
hasProp = {}.hasOwnProperty;
_ = require('lodash');
Collection = require('../common/Collection');
TaskCollection = (function(superClass) {
extend(TaskCollection, superClass);
function TaskCollection() {}
return TaskCollection;
})(Collection);
module.exports = TaskCollection;
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var React = _interopRequireWildcard(require("react"));
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _default = (0, _createSvgIcon["default"])( /*#__PURE__*/React.createElement(React.Fragment, null, /*#__PURE__*/React.createElement("path", {
d: "M4 17.17L5.17 16H20V4H4v13.17zM15 9h2v2h-2V9zm-4 0h2v2h-2V9zM7 9h2v2H7V9z",
opacity: ".3"
}), /*#__PURE__*/React.createElement("path", {
d: "M20 2H4c-1.1 0-2 .9-2 2v18l4-4h14c1.1 0 2-.9 2-2V4c0-1.1-.9-2-2-2zm0 14H5.17L4 17.17V4h16v12zM7 9h2v2H7zm8 0h2v2h-2zm-4 0h2v2h-2z"
})), 'SmsTwoTone');
exports["default"] = _default;
|
# -*- coding: utf-8 -*-
# This file is part of the Ingram Micro Cloud Blue Connect connect-cli.
# Copyright (c) 2021 Ingram Micro. All Rights Reserved.
from connect.cli.plugins.play.context import Context
from connect.cli.plugins.play.script import Script
class Save(Script):
def do(self, filename=Context.context_file_name, context=None):
super().do(context=context)
self.context.save(filename=filename)
|
def validate_BST_helper(node, min, max):
    # An empty subtree is trivially a valid BST.
    if node is None:
        return True
    # Each node must respect the (min, max) bounds inherited from its ancestors.
    if node.val < min or node.val > max:
        return False
    # Left descendants must stay below node.val; right descendants above it.
    return validate_BST_helper(node.left, min, node.val) and validate_BST_helper(node.right, node.val, max)
def validate_BST(node):
    return validate_BST_helper(node, -1*float('inf'), float('inf'))
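# Usage sketch (illustrative; the original snippet does not define a node type,
# so this assumes a minimal Node with val/left/right attributes):
class Node:
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

root = Node(8, Node(3, Node(1), Node(6)), Node(10, None, Node(14)))
assert validate_BST(root) is True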
|
# -*- coding: utf-8 -*-
__version__ = "0.2.5"
|
/* eslint-disable */
var icon = require('vue-svgicon');
icon.register({
maximize: {
width: 32,
height: 32,
viewBox: '0 0 24 24',
data: '<path pid="0" d="M8 3H5a2 2 0 00-2 2v3m18 0V5a2 2 0 00-2-2h-3m0 18h3a2 2 0 002-2v-3M3 16v3a2 2 0 002 2h3"/>',
},
});
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import codecs
import json
import logging
import os
import sys
import time
logger = logging.getLogger(__name__)
class Nefnir(object):
"""
A rule-based lemmatizer
"""
def __init__(self):
"""
Initialize an instance of the Nefnir lemmatizer.
"""
nefnir_dir = os.path.dirname(sys.argv[0])
# Load rules
rules_path = os.path.join(nefnir_dir, 'rules.json')
with open(rules_path, encoding='utf-8') as f:
self.rules = json.load(f)
# Load tagset
tag_path = os.path.join(nefnir_dir, 'tags.json')
with open(tag_path, encoding='utf-8') as f:
self.tagmap = json.load(f)
self.proper = {t for t in self.tagmap if t[0] == 'n' and t[-1] in {'m', 'ö', 's'}}
self.unanalyzed = {t for t in self.tagmap if t[:2] == 'nx'} | {'x', 'e', 'as'}
def lemmatize(self, form, tag):
"""
Lemmatize a word form given its part-of-speech tag.
:param form: A word form.
:param tag: The word form's part-of-speech tag.
:return: The word form's lemma.
"""
try:
ntag = self.tagmap[tag]
except KeyError:
if any((c for c in tag if c.isalpha())):
logger.warning("Unknown tag: {}".format((form, tag)))
return form
# Websites and interjections
if tag in {'v', 'au'}:
return form.lower()
# Unanalyzed words
if tag in self.unanalyzed:
return form
# Words that end with a hyphen
if form[-1] == '-':
if tag in self.proper:
return self.recase(form, tag, form)
return form.lower()
# Words that end with a punctuation mark
if not form[-1].isalpha():
return form
form_lower = form.lower()
if ntag not in self.rules:
logger.debug("No rules for this tag: {} {} {}".format(form, tag, ntag))
return self.recase(form, tag, form)
if form_lower in self.rules[ntag]['form']:
suffix_from, suffix_to = self.rules[ntag]['form'][form_lower]
else:
suffixes = get_suffixes(form_lower)
try:
target = next(s for s in suffixes if s in self.rules[ntag]['suffix'])
suffix_from, suffix_to = self.rules[ntag]['suffix'][target]
except StopIteration:
logger.debug("No rules for this word form: {} {} {}".format(form, tag, ntag))
return self.recase(form, tag, form)
form_prefix = form_lower[:-len(suffix_from)] if suffix_from else form_lower
lemma = form_prefix + suffix_to
if not lemma:
logger.warning("Rule produced an empty lemma: ({}, {}, {}) ('{}' -> '{}')".format(form, tag, ntag,
suffix_from, suffix_to))
lemma = form_lower
return self.recase(form, tag, lemma)
def recase(self, form, tag, lemma):
"""
Determine how to properly case a lemma given the word form and part of speech tag it was derived from.
Nefnir transforms words into lowercase prior to lemmatization. Some words, such as proper nouns, abbreviations
and foreign words therefore need to be re-capitalized or changed back into uppercase.
:param form: A word form, cased as it was written.
:param tag: The word form's part-of-speech tag.
:param lemma: The word form's lemma, in lowercase.
:return: A properly cased lemma.
"""
# Hyphenated words: try to maintain original casing in every part
# 1) (DNA-þræðinum, nþeþg) -> dna-þráður -> DNA-þráður
# 2) (Vestur-Íslendingum, nkfþ-s) -> vestur-íslendingur -> Vestur-Íslendingur
# 3) (Stoke-on-Trent, e) -> stoke-on-trent -> Stoke-on-Trent
if '-' in form[1:-1]:
fparts = form.split('-')
lparts = lemma.split('-')
result = []
for fpart, lpart in zip(fparts, lparts):
if fpart.lower() == lpart.lower():
# part was not transformed by lemmatization
result.append(fpart)
elif fpart.isupper():
# part was transformed and was uppercase
result.append(lpart.upper())
elif fpart.istitle():
# part was transformed and was capitalized
result.append(lpart.title())
else:
# part was transformed and not uppercase or capitalized
result.append(lpart.lower())
if tag in self.proper and not result[0].isupper():
result[0] = result[0].title()
return "-".join(result)
# Proper nouns: capitalize the lemma
# 1) (Halldórs, nken-s) -> halldór -> Halldór
# 2) (HALLDÓRS, nken-s) -> halldór -> Halldór
if tag in self.proper:
# if len(form) > 1 and form.isupper():
# return lemma.upper()
return lemma.title()
# If none of the above applies, return lemma in lowercase
return lemma
def get_suffixes(s):
"""
Return an iterator yielding a string's suffixes, from the largest to the smallest.
:param s: A text string.
:return: An iterator for the string's suffixes.
"""
return (s[pos:] for pos in range(len(s) + 1))
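# Example (illustrative): list(get_suffixes('hús')) == ['hús', 'ús', 's', '']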
def main():
# Command line interface
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input-file", help="read input from specified file", required=True)
parser.add_argument("-o", "--output-file", help="write output to specified file", required=True)
parser.add_argument("-f", "--from-encoding", help="character encoding of input file (default: utf-8)",
default="utf-8")
parser.add_argument("-t", "--to-encoding", help="character encoding of output file (default: utf-8)",
default="utf-8")
parser.add_argument("-s", "--separator", help="the string separating word forms, tags and lemmas (default: \\t)",
default='\t')
args = parser.parse_args()
args.separator = codecs.decode(args.separator, 'unicode_escape')
# Lemmatize input
time_start = time.time()
nefnir = Nefnir()
logger.info("Reading input from {} ({})".format(args.input_file, args.from_encoding))
logger.info("Separator set to {}".format(repr(args.separator)))
with open(args.input_file, encoding=args.from_encoding) as f:
lines = f.read().splitlines()
num_lines = len(lines)
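# Deduplicate: each distinct "form<sep>tag" line is lemmatized only once;
# the results are then written back out per input line further below.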
lines = {l: None for l in lines}
for line in lines:
try:
form, tag = line.split(args.separator)
if form:
lemma = nefnir.lemmatize(form, tag)
lines[line] = args.separator.join((form, tag, lemma))
except ValueError:
if line.strip():
logger.warning('Ignoring line: {}'.format(line))
# Stats
time_elapsed = time.time() - time_start
lines_per_second = num_lines / time_elapsed
stats = "{:,} lines processed in {:.2f} s ({:,.1f} lines/s)".format(num_lines, time_elapsed, lines_per_second)
logger.info(stats)
# Write output
logger.info("Writing output to {} ({})".format(args.output_file, args.to_encoding))
with open(args.input_file, encoding=args.from_encoding) as f_in:
with open(args.output_file, 'w', encoding=args.to_encoding) as f_out:
for line in f_in:
line = line.rstrip('\n')
output = lines[line] or ''
f_out.write(output + '\n')
if __name__ == '__main__':
FORMAT = '%(asctime)s - %(levelname)s %(message)s'
logging.basicConfig(level=logging.INFO, format=FORMAT)
main()
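# Example invocation (illustrative; input holds one "form<TAB>tag" pair per line):
#   python3 nefnir.py -i tagged.txt -o lemmatized.txt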
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = getTSType;
var _astTypes = require("ast-types");
var _getPropertyName = _interopRequireDefault(require("./getPropertyName"));
var _printValue = _interopRequireDefault(require("./printValue"));
var _getTypeAnnotation = _interopRequireDefault(require("../utils/getTypeAnnotation"));
var _resolveToValue = _interopRequireDefault(require("../utils/resolveToValue"));
var _resolveObjectKeysToArray = require("../utils/resolveObjectKeysToArray");
var _getTypeParameters = _interopRequireDefault(require("../utils/getTypeParameters"));
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
*
*/
const tsTypes = {
TSAnyKeyword: 'any',
TSBooleanKeyword: 'boolean',
TSUnknownKeyword: 'unknown',
TSNeverKeyword: 'never',
TSNullKeyword: 'null',
TSUndefinedKeyword: 'undefined',
TSNumberKeyword: 'number',
TSStringKeyword: 'string',
TSSymbolKeyword: 'symbol',
TSThisType: 'this',
TSObjectKeyword: 'object',
TSVoidKeyword: 'void'
};
const namedTypes = {
TSArrayType: handleTSArrayType,
TSTypeReference: handleTSTypeReference,
TSTypeLiteral: handleTSTypeLiteral,
TSInterfaceDeclaration: handleTSInterfaceDeclaration,
TSUnionType: handleTSUnionType,
TSFunctionType: handleTSFunctionType,
TSIntersectionType: handleTSIntersectionType,
TSMappedType: handleTSMappedType,
TSTupleType: handleTSTupleType,
TSTypeQuery: handleTSTypeQuery,
TSTypeOperator: handleTSTypeOperator,
TSIndexedAccessType: handleTSIndexedAccessType
};
function handleTSArrayType(path, typeParams) {
return {
name: 'Array',
elements: [getTSTypeWithResolvedTypes(path.get('elementType'), typeParams)],
raw: (0, _printValue.default)(path)
};
}
function handleTSTypeReference(path, typeParams) {
let type;
if (_astTypes.namedTypes.TSQualifiedName.check(path.node.typeName)) {
const typeName = path.get('typeName');
if (typeName.node.left.name === 'React') {
type = {
name: `${typeName.node.left.name}${typeName.node.right.name}`,
raw: (0, _printValue.default)(typeName)
};
} else {
type = {
name: (0, _printValue.default)(typeName).replace(/<.*>$/, '')
};
}
} else {
type = {
name: path.node.typeName.name
};
}
const resolvedPath = typeParams && typeParams[type.name] || (0, _resolveToValue.default)(path.get('typeName'));
if (path.node.typeParameters && resolvedPath.node.typeParameters) {
typeParams = (0, _getTypeParameters.default)(resolvedPath.get('typeParameters'), path.get('typeParameters'), typeParams);
}
if (typeParams && typeParams[type.name]) {
type = getTSTypeWithResolvedTypes(resolvedPath);
}
if (resolvedPath && resolvedPath.node.typeAnnotation) {
type = getTSTypeWithResolvedTypes(resolvedPath.get('typeAnnotation'), typeParams);
} else if (path.node.typeParameters) {
const params = path.get('typeParameters').get('params');
type = { ...type,
elements: params.map(param => getTSTypeWithResolvedTypes(param, typeParams)),
raw: (0, _printValue.default)(path)
};
}
return type;
}
function getTSTypeWithRequirements(path, typeParams) {
const type = getTSTypeWithResolvedTypes(path, typeParams);
type.required = !path.parentPath.node.optional;
return type;
}
function handleTSTypeLiteral(path, typeParams) {
const type = {
name: 'signature',
type: 'object',
raw: (0, _printValue.default)(path),
signature: {
properties: []
}
};
path.get('members').each(param => {
if (_astTypes.namedTypes.TSPropertySignature.check(param.node) || _astTypes.namedTypes.TSMethodSignature.check(param.node)) {
const propName = (0, _getPropertyName.default)(param);
if (!propName) {
return;
}
type.signature.properties.push({
key: propName,
value: getTSTypeWithRequirements(param.get('typeAnnotation'), typeParams)
});
} else if (_astTypes.namedTypes.TSCallSignatureDeclaration.check(param.node)) {
type.signature.constructor = handleTSFunctionType(param, typeParams);
} else if (_astTypes.namedTypes.TSIndexSignature.check(param.node)) {
type.signature.properties.push({
key: getTSTypeWithResolvedTypes(param.get('parameters').get(0).get('typeAnnotation'), typeParams),
value: getTSTypeWithRequirements(param.get('typeAnnotation'), typeParams)
});
}
});
return type;
}
function handleTSInterfaceDeclaration(path) {
// Interfaces are handled like references which would be documented separately,
// rather than inlined like type aliases.
return {
name: path.node.id.name
};
}
function handleTSUnionType(path, typeParams) {
return {
name: 'union',
raw: (0, _printValue.default)(path),
elements: path.get('types').map(subType => getTSTypeWithResolvedTypes(subType, typeParams))
};
}
function handleTSIntersectionType(path, typeParams) {
return {
name: 'intersection',
raw: (0, _printValue.default)(path),
elements: path.get('types').map(subType => getTSTypeWithResolvedTypes(subType, typeParams))
};
}
function handleTSMappedType(path, typeParams) {
const key = getTSTypeWithResolvedTypes(path.get('typeParameter').get('constraint'), typeParams);
key.required = !path.node.optional;
return {
name: 'signature',
type: 'object',
raw: (0, _printValue.default)(path),
signature: {
properties: [{
key,
value: getTSTypeWithResolvedTypes(path.get('typeAnnotation'), typeParams)
}]
}
};
}
function handleTSFunctionType(path, typeParams) {
const type = {
name: 'signature',
type: 'function',
raw: (0, _printValue.default)(path),
signature: {
arguments: [],
return: getTSTypeWithResolvedTypes(path.get('typeAnnotation'), typeParams)
}
};
path.get('parameters').each(param => {
const typeAnnotation = (0, _getTypeAnnotation.default)(param);
const arg = {
name: param.node.name || '',
type: typeAnnotation ? getTSTypeWithResolvedTypes(typeAnnotation, typeParams) : undefined
};
if (param.node.name === 'this') {
type.signature.this = arg.type;
return;
}
if (param.node.type === 'RestElement') {
arg.name = param.node.argument.name;
arg.rest = true;
}
type.signature.arguments.push(arg);
});
return type;
}
function handleTSTupleType(path, typeParams) {
const type = {
name: 'tuple',
raw: (0, _printValue.default)(path),
elements: []
};
path.get('elementTypes').each(param => {
type.elements.push(getTSTypeWithResolvedTypes(param, typeParams));
});
return type;
}
function handleTSTypeQuery(path, typeParams) {
const resolvedPath = (0, _resolveToValue.default)(path.get('exprName'));
if (resolvedPath && resolvedPath.node.typeAnnotation) {
return getTSTypeWithResolvedTypes(resolvedPath.get('typeAnnotation'), typeParams);
}
return {
name: path.node.exprName.name
};
}
function handleTSTypeOperator(path) {
if (path.node.operator !== 'keyof') {
return null;
}
let value = path.get('typeAnnotation');
if (_astTypes.namedTypes.TSTypeQuery.check(value.node)) {
value = value.get('exprName');
} else if (value.node.id) {
value = value.get('id');
}
const resolvedPath = (0, _resolveToValue.default)(value);
if (resolvedPath && (_astTypes.namedTypes.ObjectExpression.check(resolvedPath.node) || _astTypes.namedTypes.TSTypeLiteral.check(resolvedPath.node))) {
const keys = (0, _resolveObjectKeysToArray.resolveObjectToNameArray)(resolvedPath, true);
if (keys) {
return {
name: 'union',
raw: (0, _printValue.default)(path),
elements: keys.map(key => ({
name: 'literal',
value: key
}))
};
}
}
}
function handleTSIndexedAccessType(path, typeParams) {
// eslint-disable-next-line no-undef
const objectType = getTSTypeWithResolvedTypes(path.get('objectType'), typeParams); // eslint-disable-next-line no-undef
const indexType = getTSTypeWithResolvedTypes(path.get('indexType'), typeParams); // We only get the signature if the objectType is a type (vs interface)
if (!objectType.signature) return {
name: `${objectType.name}[${indexType.value && indexType.value.toString()}]`,
raw: (0, _printValue.default)(path)
};
const resolvedType = objectType.signature.properties.find(p => {
// indexType.value = "'foo'"
return indexType.value && p.key === indexType.value.replace(/['"]+/g, '');
});
if (!resolvedType) {
return {
name: 'unknown'
};
}
return {
name: resolvedType.value.name,
raw: (0, _printValue.default)(path)
};
}
let visitedTypes = {};
function getTSTypeWithResolvedTypes(path, typeParams) {
if (_astTypes.namedTypes.TSTypeAnnotation.check(path.node)) {
path = path.get('typeAnnotation');
}
const node = path.node;
let type;
const isTypeAlias = _astTypes.namedTypes.TSTypeAliasDeclaration.check(path.parentPath.node); // When we see a type alias, mark it as visited so that the next
// call of this function does not run into an endless loop
if (isTypeAlias) {
if (visitedTypes[path.parentPath.node.id.name] === true) {
      // if we are currently visiting this node, just return the name,
      // as we would otherwise start an endless loop
return {
name: path.parentPath.node.id.name
};
    } else if (typeof visitedTypes[path.parentPath.node.id.name] === 'object') {
      // if we already resolved the type, simply return it
      return visitedTypes[path.parentPath.node.id.name];
    }
    // mark the type as visited
    visitedTypes[path.parentPath.node.id.name] = true;
}
if (node.type in tsTypes) {
type = {
name: tsTypes[node.type]
};
} else if (_astTypes.namedTypes.TSLiteralType.check(node)) {
type = {
name: 'literal',
value: node.literal.raw || `${node.literal.value}`
};
} else if (node.type in namedTypes) {
type = namedTypes[node.type](path, typeParams);
}
if (!type) {
type = {
name: 'unknown'
};
}
if (isTypeAlias) {
    // cache the resolved type so that further lookups can reuse it
visitedTypes[path.parentPath.node.id.name] = type;
}
return type;
}
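// Why the visitedTypes guard exists (explanatory note, not original source):
// a self-referential alias such as "type Tree = { parent: Tree }" re-enters
// this function while resolving itself; the boolean marker short-circuits the
// nested visit and returns just { name: 'Tree' } instead of recursing forever.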
/**
* Tries to identify the typescript type by inspecting the path for known
 * typescript type names. This method doesn't check whether the found type
 * actually exists. It simply assumes that a match is always valid.
*
* If there is no match, "unknown" is returned.
*/
function getTSType(path, typeParamMap) {
  // Empty visited types before and after each run
  // Before: in case the previous detection threw and we run again
  // After: clean up memory after we are done here
visitedTypes = {};
const type = getTSTypeWithResolvedTypes(path, typeParamMap);
visitedTypes = {};
return type;
}
|
import React from 'react'
import {Alert} from 'react-bootstrap'
const Message = ({ variant, children }) => {
return (
<Alert variant={variant}>
{children}
</Alert>
)
}
Message.defaultProps = {
  variant: 'info'
}
export default Message
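// Usage sketch (illustrative, not part of the original file):
// <Message variant="danger">Something went wrong</Message>
// <Message>Falls back to the "info" variant</Message>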
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.network import network_service
from openstack import resource
class Port(resource.Resource):
resource_key = 'port'
resources_key = 'ports'
base_path = '/ports'
service = network_service.NetworkService()
# capabilities
allow_create = True
allow_retrieve = True
allow_update = True
allow_delete = True
allow_list = True
# Properties
#: The administrative state of the port, which is up ``True`` or
#: down ``False``. *Type: bool*
admin_state_up = resource.prop('admin_state_up', type=bool)
#: Allowed address pairs. *Type: dict*
allowed_address_pairs = resource.prop('allowed_address_pairs', type=dict)
#: The ID of the host where the port is allocated. In some cases,
#: different implementations can run on different hosts.
binding_host_id = resource.prop('binding:host_id')
    #: A dictionary that enables the application running on the specified
    #: host to pass and receive vif port-specific information to the plug-in.
binding_profile = resource.prop('binding:profile')
#: Read-only. A dictionary that enables the application to pass
#: information about functions that the Networking API provides.
#: To enable or disable port filtering features such as security group
#: and anti-MAC/IP spoofing, specify ``port_filter: True`` or
#: ``port_filter: False``.
binding_vif_details = resource.prop('binding:vif_details', type=dict)
#: Read-only. The vif type for the specified port.
binding_vif_type = resource.prop('binding:vif_type')
#: The vnic type that is bound to the neutron port.
#:
#: In POST and PUT operations, specify a value of ``normal`` (virtual nic),
#: ``direct`` (pci passthrough), or ``macvtap``
#: (virtual interface with a tap-like software interface).
#: These values support SR-IOV PCI passthrough networking.
#: The ML2 plug-in supports the vnic_type.
#:
#: In GET operations, the binding:vnic_type extended attribute is
#: visible to only port owners and administrative users.
binding_vnic_type = resource.prop('binding:vnic_type')
#: The ID of the device that uses this port. For example, a virtual server.
device_id = resource.prop('device_id')
#: The ID of the entity that uses this port. For example, a dhcp agent.
device_owner = resource.prop('device_owner')
#: Extra DHCP options.
extra_dhcp_opts = resource.prop('extra_dhcp_opts', type=dict)
#: IP addresses for the port. Includes the IP address and subnet ID.
fixed_ips = resource.prop('fixed_ips')
#: The MAC address of the port.
mac_address = resource.prop('mac_address')
#: The port name.
name = resource.prop('name')
#: The ID of the attached network.
network_id = resource.prop('network_id')
    #: The ID of the project that owns the network. Only administrative
    #: users can specify a project ID other than their own.
project_id = resource.prop('tenant_id')
#: The IDs of any attached security groups.
security_groups = resource.prop('security_groups')
#: The port status. Value is ``ACTIVE`` or ``DOWN``.
status = resource.prop('status')
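# Illustrative sketch only (hypothetical `session`; not part of this module):
# with an authenticated session from this SDK generation, a port could be
# created roughly as
#
#     port = Port.new(network_id='<net-id>', name='demo-port')
#     port.create(session)
#
# Resource.new()/create() are assumed from the surrounding SDK version.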
|
/* $OpenBSD: signal.h,v 1.7 2011/03/23 16:54:37 pirofti Exp $ */
/* $NetBSD: signal.h,v 1.4 1995/01/10 19:01:52 jtc Exp $ */
/*
* Copyright (c) 1982, 1986, 1989, 1991 Regents of the University of California.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the University nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* @(#)signal.h 7.16 (Berkeley) 3/17/91
*/
/* All bugs are subject to removal without further notice */
#ifndef _MACHINE_SIGNAL_H_
#define _MACHINE_SIGNAL_H_
#include <sys/cdefs.h>
typedef int sig_atomic_t;
#if __BSD_VISIBLE || __XPG_VISIBLE >= 420
/*
* Information pushed on stack when a signal is delivered.
* This is used by the kernel to restore state following
* execution of the signal handler. It is also made available
* to the handler to allow it to restore state properly if
* a non-standard exit is performed.
*/
struct sigcontext {
int sc_onstack; /* sigstack state to restore */
int sc_mask; /* signal mask to restore */
int sc_sp; /* sp to restore */
int sc_fp; /* fp to restore */
int sc_ap; /* ap to restore */
int sc_pc; /* pc to restore */
int sc_ps; /* psl to restore */
int sc_r[12]; /* registers to restore */
};
#endif /* __BSD_VISIBLE || __XPG_VISIBLE >= 420 */
#endif /* !_MACHINE_SIGNAL_H_ */
|
import torch
import numpy as np
import imageio
import cv2
from time import time
def timereps(reps, func):
start = time()
for i in range(0, reps):
func()
end = time()
return (end - start) / reps
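# For example (illustrative): timereps(10, lambda: sum(range(10000)))
# returns the average wall-clock seconds per call across 10 runs.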
path_to_video = '/media/toby/Blade/Users/tobyc/Documents/Voice2Face/data/GRID/s2/swwc5p.mpg'
n_iter = 100
def get_video_length_imageio():
video = imageio.get_reader(path_to_video, 'ffmpeg')
return video._meta['nframes']
def get_video_length_cv2():
cap = cv2.VideoCapture(str(path_to_video))
return int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
duration = timereps(n_iter, get_video_length_imageio)
print(f'image io get length : {duration}')
duration = timereps(n_iter, get_video_length_cv2)
print(f'cv2 get length : {duration}')
|
/*!
* Bootstrap-select v1.14.0-beta2 (https://developer.snapappointments.com/bootstrap-select)
*
* Copyright 2012-2021 SnapAppointments, LLC
* Licensed under MIT (https://github.com/snapappointments/bootstrap-select/blob/master/LICENSE)
*/
(function (root, factory) {
if (root === undefined && window !== undefined) root = window;
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module unless amdModuleId is set
define(["jquery"], function (a0) {
return (factory(a0));
});
} else if (typeof module === 'object' && module.exports) {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory(require("jquery"));
} else {
factory(root["jQuery"]);
}
}(this, function (jQuery) {
(function ($) {
$.fn.selectpicker.defaults = {
noneSelectedText: 'Inget valt',
noneResultsText: 'Inget sökresultat matchar {0}',
countSelectedText: function (numSelected, numTotal) {
return (numSelected === 1) ? '{0} alternativ valt' : '{0} alternativ valda';
},
maxOptionsText: function (numAll, numGroup) {
return [
'Gräns uppnåd (max {n} alternativ)',
'Gräns uppnåd (max {n} gruppalternativ)'
];
},
selectAllText: 'Markera alla',
deselectAllText: 'Avmarkera alla',
multipleSeparator: ', '
};
})(jQuery);
}));
//# sourceMappingURL=defaults-sv_SE.js.map
|
import { fromJS } from 'immutable';
import * as constants from '../constants';
import createReducer from '../utils/createReducer';
const initialState = {
error: null,
loading: false,
requesting: false,
user: null
};
export const userDelete = createReducer(fromJS(initialState), { // eslint-disable-line import/prefer-default-export
[constants.REQUEST_DELETE_USER]: (state, action) =>
state.merge({
...initialState,
user: action.user,
requesting: true
}),
[constants.CANCEL_DELETE_USER]: (state) =>
state.merge({
...initialState
}),
[constants.DELETE_USER_PENDING]: (state) =>
state.merge({
loading: true
}),
[constants.DELETE_USER_REJECTED]: (state, action) =>
state.merge({
loading: false,
error: action.errorData
}),
[constants.DELETE_USER_FULFILLED]: (state) =>
state.merge({
...initialState
})
});
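// Illustrative flow (not part of the original file): dispatching
// { type: constants.REQUEST_DELETE_USER, user } resets to initialState but
// keeps the target user with requesting=true; DELETE_USER_PENDING flips
// loading, and DELETE_USER_FULFILLED returns the state to initialState.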
|
/* global cy */
import { editorVisible } from '../support'
// usually we would visit the page before each test,
// but these tests depend on the url, so a shared beforeEach visit would waste a page load;
// instead each test visits the url it needs
describe('localStorage', () => {
const themeDropdown = () => cy.get('.toolbar .dropdown-container').first()
const pickTheme = (name = 'Blackboard') =>
themeDropdown()
.click()
.contains(name)
.click()
it.skip('is empty initially', () => {
cy.visit('/')
editorVisible()
cy.window()
.its('localStorage')
.should('have.length', 0)
})
it('saves on theme change', () => {
cy.visit('/')
editorVisible()
pickTheme('Blackboard')
themeDropdown()
.click()
.contains('Blackboard')
cy.wait(1000) // URL updates are debounced
cy.window()
.its('localStorage.CARBON_STATE')
.then(JSON.parse)
.its('theme')
.should('equal', 'blackboard')
// visiting page again restores theme from localStorage
cy.visit('/')
themeDropdown()
.click()
.contains('Blackboard')
cy.url().should('contain', 't=blackboard')
})
})
|
/*************************************************************
*
* MathJax/localization/uk/TeX.js
*
* Copyright (c) 2009-2016 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
MathJax.Localization.addTranslation("uk","TeX",{
version: "2.7.0-beta",
isLoaded: true,
strings: {
ExtraOpenMissingClose: "\u0417\u0430\u0439\u0432\u0430 \u0432\u0456\u0434\u043A\u0440\u0438\u0442\u0430 \u0444\u0456\u0433\u0443\u0440\u043D\u0430 \u0434\u0443\u0436\u043A\u0430 \u0430\u0431\u043E \u0432\u0456\u0434\u0441\u0443\u0442\u043D\u044F \u0437\u0430\u043A\u0440\u0438\u0442\u0430 \u0444\u0456\u0433\u0443\u0440\u043D\u0430 \u0434\u0443\u0436\u043A\u0430",
ExtraCloseMissingOpen: "\u0417\u0430\u0439\u0432\u0430 \u0437\u0430\u043A\u0440\u0438\u0442\u0430 \u0444\u0456\u0433\u0443\u0440\u043D\u0430 \u0434\u0443\u0436\u043A\u0430 \u0430\u0431\u043E \u0432\u0456\u0434\u0441\u0443\u0442\u043D\u044F \u0432\u0456\u0434\u043A\u0440\u0438\u0442\u0430 \u0444\u0456\u0433\u0443\u0440\u043D\u0430 \u0434\u0443\u0436\u043A\u0430",
MissingLeftExtraRight: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u0456\u0439 \\left \u0430\u0431\u043E \u0437\u0430\u0439\u0432\u0438\u0439 \\right",
MissingScript: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u0456 \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442\u0438 \u0432\u0435\u0440\u0445\u043D\u044C\u043E\u0433\u043E \u0456 \u043D\u0438\u0436\u043D\u044C\u043E\u0433\u043E \u0456\u043D\u0434\u0435\u043A\u0441\u0456\u0432",
ExtraLeftMissingRight: "\u0417\u0430\u0439\u0432\u0438\u0439 \\left \u0430\u0431\u043E \u0432\u0456\u0434\u0441\u0443\u0442\u043D\u0456\u0439 \\right",
Misplaced: "\u041D\u0435\u0432\u0456\u0440\u043D\u0438\u0439 %1",
MissingOpenForSub: "\u0411\u0440\u0430\u043A\u0443\u0454 \u0432\u0456\u0434\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u043E\u0457 \u0434\u0443\u0436\u043A\u0438 \u0434\u043B\u044F \u043D\u0438\u0436\u043D\u044C\u043E\u0433\u043E \u0456\u043D\u0434\u0435\u043A\u0441\u0443",
MissingOpenForSup: "\u0411\u0440\u0430\u043A\u0443\u0454 \u0432\u0456\u0434\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u043E\u0457 \u0434\u0443\u0436\u043A\u0438 \u0434\u043B\u044F \u0432\u0435\u0440\u0445\u043D\u044C\u043E\u0433\u043E \u0456\u043D\u0434\u0435\u043A\u0441\u0443",
AmbiguousUseOf: "\u041D\u0435\u043E\u0434\u043D\u043E\u0437\u043D\u0430\u0447\u043D\u0435 \u0432\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u0430\u043D\u043D\u044F %1",
EnvBadEnd: "\\begin{%1} \u0437\u0430\u043A\u0456\u043D\u0447\u0438\u0432\u0441\u044F \\end{%2}",
EnvMissingEnd: "\u0411\u0440\u0430\u043A\u0443\u0454 \\end{%1}",
MissingBoxFor: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u0454 \u043F\u043E\u043B\u0435 \u0434\u043B\u044F %1",
MissingCloseBrace: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u044F \u0437\u0430\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u0430 \u0434\u0443\u0436\u043A\u0430",
UndefinedControlSequence: "\u041D\u0435\u0432\u0438\u0437\u043D\u0430\u0447\u0435\u043D\u0430 \u043A\u043E\u043C\u0430\u043D\u0434\u0430 %1",
DoubleExponent: "\u041F\u043E\u0434\u0432\u0456\u0439\u043D\u0438\u0439 \u0441\u0442\u0435\u043F\u0456\u043D\u044C: \u0432\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u043E\u0432\u0443\u0439\u0442\u0435 \u0434\u0443\u0436\u043A\u0438 \u0434\u043B\u044F \u0443\u0442\u043E\u0447\u043D\u0435\u043D\u043D\u044F",
DoubleSubscripts: "\u041F\u043E\u0434\u0432\u0456\u0439\u043D\u0438\u0439 \u043D\u0438\u0436\u043D\u0456\u0439 \u0456\u043D\u0434\u0435\u043A\u0441: \u0432\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u043E\u0432\u0443\u0439\u0442\u0435 \u0434\u0443\u0436\u043A\u0438 \u0443\u0442\u043E\u0447\u0435\u043D\u043D\u044F",
DoubleExponentPrime: "\u0417\u043D\u0430\u043A \u0448\u0442\u0440\u0438\u0445\u0430 \u0441\u043F\u0440\u0438\u0447\u0438\u043D\u0438\u0432 \u043F\u043E\u0434\u0432\u0456\u0439\u043D\u0438\u0439 \u0441\u0442\u0435\u043F\u0456\u043D\u044C: \u0432\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u0430\u0439\u0442\u0435 \u0434\u0443\u0436\u043A\u0438 \u0434\u043B\u044F \u0443\u0442\u043E\u0447\u043D\u0435\u043D\u043D\u044F",
CantUseHash1: "\u041D\u0435 \u043C\u043E\u0436\u043D\u0430 \u0432\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u043E\u0432\u0443\u0432\u0430\u0442\u0438 \u0441\u0438\u043C\u0432\u043E\u043B \u043F\u0430\u0440\u0430\u043C\u0435\u0442\u0440\u0430 \u043C\u0430\u043A\u0440\u043E\u0441\u0443 # \u0443 \u043C\u0430\u0442\u0435\u043C\u0430\u0442\u0438\u0447\u043D\u043E\u043C\u0443 \u0440\u0435\u0436\u0438\u043C\u0456",
MisplacedMiddle: "%1 \u043C\u0430\u0454 \u0431\u0443\u0442\u0438 \u0432 \u043C\u0435\u0436\u0430\u0445 \\left \u0456 \\right",
MisplacedLimits: "%1 \u0434\u043E\u0437\u0432\u043E\u043B\u0435\u043D\u043E \u0442\u0456\u043B\u044C\u043A\u0438 \u043D\u0430 \u043E\u043F\u0435\u0440\u0430\u0442\u043E\u0440\u0430\u0445",
MisplacedMoveRoot: "%1 \u043C\u043E\u0436\u0435 \u0437'\u044F\u0432\u0438\u0442\u0438\u0441\u044F \u043B\u0438\u0448\u0435 \u043F\u0456\u0434 \u043A\u043E\u0440\u0435\u043D\u0435\u043C",
MultipleCommand: "\u041A\u0456\u043B\u044C\u043A\u0430 %1",
IntegerArg: "\u0410\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0434\u043B\u044F %1 \u043C\u0430\u0454 \u0431\u0443\u0442\u0438 \u0446\u0456\u043B\u0438\u043C \u0447\u0438\u0441\u043B\u043E\u043C",
NotMathMLToken: "%1 \u043D\u0435 \u0435\u043B\u0435\u043C\u0435\u043D\u0442\u043E\u043C \u043C\u0430\u0440\u043A\u0435\u0440\u0430",
InvalidMathMLAttr: "\u041D\u0435\u043F\u0440\u0438\u043F\u0443\u0441\u0442\u0438\u043C\u0438\u0439 \u0430\u0442\u0440\u0438\u0431\u0443\u0442 MathML: %1",
UnknownAttrForElement: "%1 \u043D\u0435\u0440\u043E\u0437\u043F\u0456\u0437\u043D\u0430\u043D\u0438\u0439 \u0430\u0442\u0440\u0438\u0431\u0443\u0442 \u0434\u043B\u044F %2",
MaxMacroSub1: "\u041F\u0435\u0440\u0435\u0432\u0438\u0449\u0435\u043D\u043E \u043C\u0430\u043A\u0441\u0438\u043C\u0430\u043B\u044C\u043D\u0443 \u043A\u0456\u043B\u044C\u043A\u0456\u0441\u0442\u044C \u0432\u0438\u043A\u043B\u0438\u043A\u0456\u0432 \u043C\u0430\u043A\u0440\u043E\u0441\u0456\u0432; \u0430\u0431\u043E \u0432\u0456\u0434\u0431\u0443\u0432\u0430\u0454\u0442\u044C\u0441\u044F \u0440\u0435\u043A\u0443\u0440\u0441\u0438\u0432\u043D\u0438\u0439 \u0432\u0438\u043A\u043B\u0438\u043A \u043C\u0430\u043A\u0440\u043E\u0441\u0443?",
MaxMacroSub2: "\u041F\u0435\u0440\u0435\u0432\u0438\u0449\u0435\u043D\u043E \u043C\u0430\u043A\u0441\u0438\u043C\u0430\u043B\u044C\u043D\u0443 \u043A\u0456\u043B\u044C\u043A\u0456\u0441\u0442\u044C \u043F\u0456\u0434\u0441\u0442\u0430\u043D\u043E\u0432\u043E\u043A MathJax; \u041C\u043E\u0436\u043B\u0438\u0432\u043E, \u0440\u0435\u043A\u0443\u0440\u0441\u0456\u044F \u0432 \u0441\u0435\u0440\u0435\u0434\u043E\u0432\u0438\u0449\u0456 LaTeX?",
MissingArgFor: "\u041F\u0440\u043E\u043F\u0443\u0449\u0435\u043D\u0438\u0439 \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0434\u043B\u044F %1",
ExtraAlignTab: "\u0417\u0430\u0439\u0432\u0438\u0439 \u0442\u0430\u0431\u0443\u043B\u044F\u0442\u043E\u0440 \u0442\u0435\u043A\u0441\u0442\u0443 \u0443 \\cases",
BracketMustBeDimension: "\u0410\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0443 \u0434\u0443\u0436\u043A\u0430\u0445 \u0434\u043B\u044F %1 \u043C\u0430\u0454 \u0431\u0443\u0442\u0438 \u0440\u043E\u0437\u043C\u0456\u0440\u043E\u043C",
InvalidEnv: "\u041D\u0435\u0432\u0456\u0440\u043D\u0435 \u0456\u043C'\u044F \u0441\u0435\u0440\u0435\u0434\u043E\u0432\u0438\u0449\u0430 '%1'",
UnknownEnv: "\u041D\u0435\u0432\u0456\u0434\u043E\u043C\u0435 \u0441\u0435\u0440\u0435\u0434\u043E\u0432\u0438\u0449\u0435 '%1'",
ExtraCloseLooking: "\u0417\u0430\u0439\u0432\u0430 \u0437\u0430\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u0430 \u0434\u0443\u0436\u043A\u0430 \u0432\u0438\u043D\u0438\u043A\u043B\u0430 \u043F\u0456\u0434 \u0447\u0430\u0441 \u043F\u043E\u0448\u0443\u043A\u0443 %1",
MissingCloseBracket: "\u041D\u0435 \u043C\u043E\u0436\u043B\u0438\u0432\u043E \u0437\u043D\u0430\u0439\u0442\u0438 \u0437\u0430\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u0443 ']' \u0434\u043B\u044F \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442\u0443 \u0434\u043E %1",
MissingOrUnrecognizedDelim: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u0456\u0439 \u0430\u0431\u043E \u043D\u0435\u0440\u043E\u0437\u043F\u0456\u0437\u043D\u0430\u043D\u0438\u0439 \u0440\u043E\u0437\u0434\u0456\u043B\u044C\u043D\u0438\u043A \u0434\u043B\u044F %1",
MissingDimOrUnits: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u0456\u0439 \u0440\u043E\u0437\u043C\u0456\u0440 \u0430\u0431\u043E \u0439\u043E\u0433\u043E \u043E\u0434\u0438\u043D\u0438\u0446\u0456 \u0434\u043B\u044F %1",
TokenNotFoundForCommand: "\u041D\u0435 \u043C\u043E\u0436\u043B\u0438\u0432\u043E \u0437\u043D\u0430\u0439\u0442\u0438 %1 \u0434\u043B\u044F %2",
MathNotTerminated: "\u041C\u0430\u0442\u0435\u043C\u0430\u0442\u0438\u043A\u0430 \u043D\u0435 \u0437\u0430\u0432\u0435\u0440\u0448\u0443\u0454\u0442\u044C\u0441\u044F \u0432 \u0442\u0435\u043A\u0441\u0442\u043E\u0432\u043E\u043C\u0443 \u043F\u043E\u043B\u0456",
IllegalMacroParam: "\u041D\u0435\u0437\u0430\u043A\u043E\u043D\u043D\u0435 \u043F\u043E\u0441\u0438\u043B\u0430\u043D\u043D\u044F \u043D\u0430 \u043F\u0430\u0440\u0430\u043C\u0435\u0442\u0440 \u043C\u0430\u043A\u0440\u043E\u0441\u0443",
MaxBufferSize: "\u041F\u0435\u0440\u0435\u0432\u0438\u0449\u0435\u043D\u043E \u0440\u043E\u0437\u043C\u0456\u0440 \u0432\u043D\u0443\u0442\u0440\u0456\u0448\u043D\u044C\u043E\u0433\u043E \u0431\u0443\u0444\u0435\u0440\u0430 MathJax; \u0442\u0430\u043C \u0454 \u0440\u0435\u043A\u0443\u0440\u0441\u0438\u0432\u043D\u0438\u0439 \u0432\u0438\u043A\u043B\u0438\u043A \u043C\u0430\u043A\u0440\u043E\u0441\u0443?",
CommandNotAllowedInEnv: "%1 \u043D\u0435 \u0434\u043E\u043F\u0443\u0441\u043A\u0430\u0454\u0442\u044C\u0441\u044F \u0432 \u0441\u0435\u0440\u0435\u0434\u043E\u0432\u0438\u0449\u0456 %2",
MultipleLabel: "\u041C\u0456\u0442\u043A\u0430 '%1' \u0432\u0438\u0437\u043D\u0430\u0447\u0435\u043D\u0430 \u043A\u0456\u043B\u044C\u043A\u0430 \u0440\u0430\u0437\u0456\u0432",
CommandAtTheBeginingOfLine: "%1 \u043C\u0430\u0454 \u0437\u043D\u0430\u0445\u043E\u0434\u0438\u0442\u0438\u0441\u044F \u043D\u0430 \u043F\u043E\u0447\u0430\u0442\u043A\u0443 \u0440\u044F\u0434\u043A\u0430",
IllegalAlign: "\u041D\u0435\u0437\u0430\u043A\u043E\u043D\u043D\u0435 \u0432\u0438\u0440\u0456\u0432\u043D\u044E\u0432\u0430\u043D\u043D\u044F \u0432\u043A\u0430\u0437\u0430\u043D\u0435 \u0443 %1",
BadMathStyleFor: "\u041D\u0435\u0432\u0456\u0440\u043D\u0438\u0439 \u0441\u0438\u043D\u0442\u0430\u043A\u0441\u0438\u0441 \u0434\u043B\u044F %1",
PositiveIntegerArg: "\u0410\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0434\u043B\u044F %1 \u043F\u043E\u0432\u0438\u043D\u0435\u043D \u0431\u0443\u0442\u0438 \u0434\u043E\u0434\u0430\u0442\u043D\u0438\u043C \u0446\u0456\u043B\u0438\u043C \u0447\u0438\u0441\u043B\u043E\u043C",
ErroneousNestingEq: "\u041F\u043E\u043C\u0438\u043B\u043A\u043E\u0432\u0456 \u0432\u043A\u043B\u0430\u0434\u0435\u043D\u043E\u0441\u0442\u0456 \u0441\u0442\u0440\u0443\u043A\u0442\u0443\u0440 \u0440\u0456\u0432\u043D\u044F\u043D\u043D\u044F",
MultlineRowsOneCol: "\u0420\u044F\u0434\u043A\u0438 \u0443 \u0441\u0435\u0440\u0435\u0434\u043E\u0432\u0438\u0449\u0456 %1 \u043C\u0443\u0441\u044F\u0442\u044C \u043C\u0430\u0442\u0438 \u043B\u0438\u0448\u0435 \u043E\u0434\u0438\u043D \u0441\u0442\u043E\u0432\u043F\u0435\u0446\u044C",
MultipleBBoxProperty: "%1 \u0432\u043A\u0430\u0437\u0430\u043D\u043E \u0434\u0432\u0456\u0447\u0456 \u0432 %2",
InvalidBBoxProperty: "'%1' \u0437\u043E\u0432\u0441\u0456\u043C \u043D\u0435 \u0441\u0445\u043E\u0436\u0438\u0439 \u043D\u0430 \u043A\u043E\u043B\u0456\u0440, \u0434\u043E\u0434\u0430\u0442\u043A\u043E\u0432\u0438\u0439 \u0440\u043E\u0437\u043C\u0456\u0440 \u0430\u0431\u043E \u0441\u0442\u0438\u043B\u044C",
ExtraEndMissingBegin: "\u0417\u0430\u0439\u0432\u0430 %1 \u0430\u0431\u043E \u0432\u0456\u0434\u0441\u0443\u0442\u043D\u044F \\begingroup",
GlobalNotFollowedBy: "%1 \u043D\u0435 \u0441\u043B\u0456\u0434\u0443\u0454 \u043F\u0456\u0441\u043B\u044F \\let, \\def \u0430\u0431\u043E \\newcommand",
UndefinedColorModel: "\u041A\u043E\u043B\u0456\u0440\u043D\u0430 \u043C\u043E\u0434\u0435\u043B\u044C '%1' \u043D\u0435 \u0432\u0438\u0437\u043D\u0430\u0447\u0435\u043D\u0430",
ModelArg1: "\u0417\u043D\u0430\u0447\u0435\u043D\u043D\u044F \u043A\u043E\u043B\u044C\u043E\u0440\u0443 \u0434\u043B\u044F \u043C\u043E\u0434\u0435\u043B\u0456 %1 \u0432\u0438\u043C\u0430\u0433\u0430\u044E\u0442\u044C 3 \u0446\u0438\u0444\u0440\u0438",
InvalidDecimalNumber: "\u041D\u0435\u0432\u0456\u0440\u043D\u0435 \u0434\u0435\u0441\u044F\u0442\u043A\u043E\u0432\u0435 \u0447\u0438\u0441\u043B\u043E",
ModelArg2: "\u0417\u043D\u0430\u0447\u0435\u043D\u043D\u044F \u043A\u043E\u043B\u044C\u043E\u0440\u0443 \u0434\u043B\u044F \u043C\u043E\u0434\u0435\u043B\u0456 %1 \u043F\u043E\u0432\u0438\u043D\u043D\u043E \u0431\u0443\u0442\u0438 \u043C\u0456\u0436 \u0442\u0435\u0433\u0430\u043C\u0438 %2 \u0456 %3",
InvalidNumber: "\u041D\u0435\u043F\u0440\u0430\u0432\u0438\u043B\u044C\u043D\u0435 \u0447\u0438\u0441\u043B\u043E",
NewextarrowArg1: "\u041F\u0435\u0440\u0448\u0438\u0439 \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442 %1 \u043C\u0430\u0454 \u0431\u0443\u0442\u0438 \u043D\u0430\u0437\u0432\u043E\u044E \u0435\u043B\u0435\u043C\u0435\u043D\u0442\u0430 \u043A\u0435\u0440\u0456\u0432\u043D\u043E\u0457 \u043F\u043E\u0441\u043B\u0456\u0434\u043E\u0432\u043D\u043E\u0441\u0442\u0456",
NewextarrowArg2: "\u0414\u0440\u0443\u0433\u0438\u0439 \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0434\u043B\u044F %1 \u043C\u0430\u0454 \u0431\u0443\u0442\u0438 \u0434\u0432\u043E\u043C\u0430 \u0446\u0456\u043B\u0438\u043C\u0438 \u0447\u0438\u0441\u043B\u0430\u043C\u0438, \u0440\u043E\u0437\u0434\u0456\u043B\u0435\u043D\u0438\u043C\u0438 \u043A\u043E\u043C\u0430\u043C\u0438",
NewextarrowArg3: "\u0422\u0440\u0435\u0442\u0456\u0439 \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0434\u043B\u044F %1 \u043C\u0430\u0454 \u0431\u0443\u0442\u0438 \u0447\u0438\u0441\u043B\u043E\u0432\u0438\u043C \u044E\u043D\u0456\u043A\u043E\u0434\u043D\u0438\u043C \u0441\u0438\u043C\u0432\u043E\u043B\u043E\u043C",
NoClosingChar: "\u041D\u0435 \u043C\u043E\u0436\u043B\u0438\u0432\u043E \u0437\u043D\u0430\u0439\u0442\u0438 \u0432\u0456\u0434\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u043E\u0457 %1",
IllegalControlSequenceName: "\u041D\u0435\u0432\u0456\u0440\u043D\u0435 \u0456\u043C'\u044F \u043A\u0435\u0440\u0456\u0432\u043D\u043E\u0457 \u043F\u043E\u0441\u043B\u0456\u0434\u043E\u0432\u043D\u043E\u0441\u0442\u0456 \u0434\u043B\u044F %1",
IllegalParamNumber: "\u041D\u0435\u0432\u0456\u0440\u043D\u0435 \u0447\u0438\u0441\u043B\u043E \u043F\u0430\u0440\u0430\u043C\u0435\u0442\u0440\u0456\u0432, \u0437\u0430\u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0445 \u0443 %1",
MissingCS: "\u041F\u0456\u0441\u043B\u044F %1 \u043C\u0443\u0441\u0438\u0442\u044C \u0441\u043B\u0456\u0434\u0443\u0432\u0430\u0442\u0438 \u043A\u0435\u0440\u0456\u0432\u043D\u0430 \u043F\u043E\u0441\u043B\u0456\u0434\u043E\u0432\u043D\u0456\u0441\u0442\u044C",
CantUseHash2: "\u041D\u0435\u0437\u0430\u043A\u043E\u043D\u043D\u0435 \u0432\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u0430\u043D\u043D\u044F # \u0443 \u0448\u0430\u0431\u043B\u043E\u043D\u0456 \u0434\u043B\u044F %1",
SequentialParam: "\u041F\u0430\u0440\u0430\u043C\u0435\u0442\u0440\u0438 \u0434\u043B\u044F %1 \u043F\u043E\u0432\u0438\u043D\u043D\u0456 \u0431\u0443\u0442\u0438 \u043F\u0440\u043E\u043D\u0443\u043C\u0435\u0440\u043E\u0432\u0430\u043D\u0456 \u043F\u043E\u0441\u043B\u0456\u0434\u043E\u0432\u043D\u043E",
MissingReplacementString: "\u0412\u0456\u0434\u0441\u0443\u0442\u043D\u0456\u0439 \u0440\u044F\u0434\u043E\u043A \u0437\u0430\u043C\u0456\u043D\u0438 \u0434\u043B\u044F \u0432\u0438\u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044F %1",
MismatchUseDef: "\u0412\u0438\u043A\u043E\u0440\u0438\u0441\u0442\u0430\u043D\u043D\u044F %1 \u043D\u0435 \u0432\u0456\u0434\u043F\u043E\u0432\u0456\u0434\u0430\u0454 \u0439\u043E\u0433\u043E \u0432\u0438\u0437\u043D\u0430\u0447\u0435\u043D\u043D\u044E",
RunawayArgument: "\u0412\u0442\u0440\u0430\u0447\u0435\u043D\u0438\u0439 \u0430\u0440\u0433\u0443\u043C\u0435\u043D\u0442 \u0434\u043B\u044F %1?",
NoClosingDelim: "\u041D\u0435 \u0432\u0434\u0430\u043B\u043E\u0441\u044F \u0437\u043D\u0430\u0439\u0442\u0438 \u0437\u0430\u043A\u0440\u0438\u0432\u0430\u044E\u0447\u043E\u0433\u043E \u0440\u043E\u0437\u0434\u0456\u043B\u044C\u043D\u0438\u043A\u0430 \u0434\u043B\u044F%1"
}
});
MathJax.Ajax.loadComplete("[MathJax]/localization/uk/TeX.js");
|
from tkinter import Tk, font, StringVar, RAISED, BOTH, RIGHT
from tkinter.ttk import Frame, Button, Style, Label
from functions import functions
import cv2
from PIL import ImageGrab
from configparser import ConfigParser
#loads the configuration file and reads it
config = ConfigParser()
config.read('config.ini')
click1 = False
point1 = (0,0)
class GUI(Frame):
def __init__(self):
super().__init__()
self.style = Style()
self.initGUI()
self.run = True
def initGUI(self):
#adds additional frame
self.master.title("auto OCR jp-en translator")
self.frame = Frame(self,relief=RAISED,borderwidth=1)
self.frame.pack(expand=True,fill=BOTH)
self.pack(fill=BOTH, expand=True)
#buttons
#self.show_area_button = Button(self.master, text="Show Area", command=self.show_area).pack(side=RIGHT)
self.area_button = Button(self.master, text="Area", command=self.area).pack(side=RIGHT)
self.translate_button = Button(self.master, text="Translate", command=self.translate).pack(side=RIGHT)
self.auto_translate_button = Button(self.master, text="Auto",command= self.start_translate).pack(side=RIGHT)
self.stop_translate_button = Button(self.master,text="Stop",command=self.stop_translate).pack(side=RIGHT)
        self.master.update_idletasks() # finish laying out the buttons and GUI (the Label is still added right after); needed to refresh winfo_width
        self.text_var = StringVar() # StringVar instance used to dynamically change the text in translated_label
self.translated_label = Label(self.frame, textvariable=self.text_var,font=(config.get('text', 'font_style'),config.get('text','font_size')), wraplength=self.master.winfo_width()).pack(fill=BOTH)
def area(self):
img = ImageGrab.grab() #basically screenshots the entire screen
img.save('Assets/screenshot.png')
instance = MouseEvents(img)
instance.initialize()
def show_area(self):
coordinates = MouseEvents.click.start_point + MouseEvents.click.end_point
print(coordinates)
    # one-time translation; TODO: screenshot first, then translate
def translate(self):
coordinates = MouseEvents.click.start_point + MouseEvents.click.end_point
translated = functions.image_reader(coordinates)
self.text_var.set(translated)
def start_translate(self):
self.run=True
self.auto_translate()
#calls the function in functions.py and translatordeepl.py
def auto_translate(self):
coordinates = MouseEvents.click.start_point + MouseEvents.click.end_point
translated = functions.auto_image_reader(coordinates)
        if translated is not None:
            self.text_var.set(translated)
        # repeat auto_translate every x seconds
        if self.run:
            self.master.after(config.getint('translator','time_wait_to_check_for_screen_update')*1000, self.auto_translate)
def stop_translate(self):
self.run=False
class MouseEvents:
    def __init__(self, img):
        # reload the saved screenshot with cv2; the PIL image passed in is not used directly
        self.img = cv2.imread("Assets/screenshot.png", 1)
def click(self, event,x,y,flags, params):
global click1, point1, img
if event == cv2.EVENT_LBUTTONDOWN:
            # if mousedown, store the x,y position of the mouse
click1 = True
point1 = (x,y)
MouseEvents.click.start_point = point1
elif event == cv2.EVENT_MOUSEMOVE and click1:
            # while dragging with the button pressed, draw the selection rectangle on the image
img_copy = self.img.copy()
cv2.rectangle(img_copy, point1, (x,y), (245, 245, 66),1)
cv2.imshow("Image", img_copy)
elif event == cv2.EVENT_LBUTTONUP:
# on mouseUp
click1 = False
cv2.destroyAllWindows()
MouseEvents.click.end_point = (x,y)
def initialize(self):
cv2.namedWindow("Image", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("Image",cv2.WND_PROP_FULLSCREEN,cv2.WINDOW_FULLSCREEN)
cv2.setMouseCallback("Image", self.click)
cv2.imshow("Image", self.img)
cv2.waitKey(0)
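    # Explanatory note (not in the original): the selected rectangle is handed
    # back to the GUI through attributes stored on the `click` function itself
    # (MouseEvents.click.start_point / MouseEvents.click.end_point), which is
    # why the GUI methods read coordinates from the class rather than an instance.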
def main():
root = Tk()
root.iconbitmap("Assets/mashiro.ico")
default_font = font.nametofont("TkDefaultFont")
default_font.configure(family=config.get('text','font_style'))
root.attributes('-topmost',True, '-alpha',config.getfloat('main','window_transparency'))
root.geometry(f"{config.get('main','window_size')}+300+300")
my_gui = GUI()
root.mainloop()
if __name__ == "__main__":
main()
|
module.exports={A:{A:{"1":"E A B","2":"I D F nB"},B:{"1":"C O H P J K L Z MB M N S T U V W"},C:{"1":"0 1 2 3 4 5 6 7 8 9 G a I D F E A B C O H P J K L b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB SB IB JB KB LB R NB OB PB QB HB Y XB TB UB VB WB RB Z MB M lB N S T U V","2":"mB cB uB xB"},D:{"1":"0 1 2 3 4 5 6 7 8 9 G a I D F E A B C O H P J K L b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB FB GB SB IB JB KB LB R NB OB PB QB HB Y XB TB UB VB WB RB Z MB M N S T U V W zB dB eB"},E:{"1":"I D F E A B C O H hB iB jB kB ZB X Q oB pB","2":"G fB YB","16":"a"},F:{"1":"0 1 2 3 4 5 6 7 8 9 B C P J K L b c d e f g h i j k l m n o p q r s t u v w x y z AB BB CB DB EB GB IB JB KB LB R NB OB PB QB HB Y X aB vB Q","2":"E qB rB sB tB"},G:{"1":"F wB bB yB XC 0B 1B 2B 3B 4B 5B 6B 7B 8B 9B AC BC CC DC EC","16":"YB"},H:{"1":"FC"},I:{"1":"cB G M IC JC bB KC LC","16":"GC HC"},J:{"1":"D A"},K:{"1":"B C FB X aB Q","2":"A"},L:{"1":"W"},M:{"1":"N"},N:{"1":"A B"},O:{"1":"MC"},P:{"1":"G NC OC PC QC RC ZB SC TC UC"},Q:{"1":"VC"},R:{"1":"WC"},S:{"1":"gB"}},B:1,C:"document.head"};
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[35],{"76GY":function(t,a,n){"use strict";n.d(a,"a",(function(){return v}));var e=n("ykax"),o=n("fXoL"),c=n("ofXK"),i=n("vYAO"),s=n("iCsr"),r=n("sYmb"),l=n("j7f2"),p=n("6ako"),b=n("QUb3");function u(t,a){if(1&t&&(o.Zb(0,"os-icon-container",9),o.Sc(1),o.mc(2,"translate"),o.mc(3,"pollKeyVerbose"),o.Yb()),2&t){const t=o.lc().$implicit,a=o.lc(2);o.tc("icon",t.value[0].icon)("size",a.iconSize),o.Gb(1),o.Uc(" ",o.nc(2,3,o.nc(3,5,t.votingOption))," ")}}function h(t,a){if(1&t&&(o.Zb(0,"span"),o.Sc(1),o.mc(2,"translate"),o.mc(3,"pollKeyVerbose"),o.Yb()),2&t){const t=o.lc().$implicit;o.Gb(1),o.Uc(" ",o.nc(2,1,o.nc(3,3,t.votingOption))," ")}}function d(t,a){if(1&t&&(o.Zb(0,"span"),o.Sc(1),o.mc(2,"pollPercentBase"),o.Yb()),2&t){const t=o.lc().$implicit,a=o.lc(2);o.Gb(1),o.Uc(" ",o.oc(2,1,t.value[0].amount,a.poll)," ")}}function g(t,a){if(1&t&&(o.Zb(0,"tr"),o.Zb(1,"td"),o.Qc(2,u,4,7,"os-icon-container",6),o.Qc(3,h,4,5,"span",7),o.Yb(),o.Zb(4,"td",8),o.Qc(5,d,3,4,"span",7),o.Yb(),o.Zb(6,"td",8),o.Sc(7),o.mc(8,"parsePollNumber"),o.Yb(),o.Yb()),2&t){const t=a.$implicit;o.Ib(t.votingOption),o.Gb(2),o.tc("ngIf",t.value[0].icon),o.Gb(1),o.tc("ngIf",!t.value[0].icon),o.Gb(2),o.tc("ngIf",t.value[0].showPercent),o.Gb(2),o.Uc(" ",o.nc(8,6,t.value[0].amount)," ")}}function f(t,a){if(1&t&&(o.Zb(0,"div",10),o.Ub(1,"os-charts",11),o.mc(2,"async"),o.Yb()),2&t){const t=o.lc(2);o.Gb(1),o.tc("data",o.nc(2,1,t.chartData))}}function m(t,a){if(1&t&&(o.Zb(0,"div",1),o.Zb(1,"table",2),o.Zb(2,"tbody"),o.Zb(3,"tr"),o.Ub(4,"th"),o.Zb(5,"th",3),o.Sc(6),o.mc(7,"translate"),o.Yb(),o.Yb(),o.Qc(8,g,9,8,"tr",4),o.Yb(),o.Yb(),o.Qc(9,f,3,3,"div",5),o.Yb()),2&t){const t=o.lc();o.Gb(6),o.Tc(o.nc(7,3,"Votes")),o.Gb(2),o.tc("ngForOf",t.getTableData()),o.Gb(1),o.tc("ngIf",t.showChart)}}let v=(()=>{class t{constructor(t){this.motionPollService=t,this.iconSize="large"}get hasVotes(){return this.poll&&!!this.poll.options}ngOnInit(){}getTableData(){return this.motionPollService.generateTableData(this.poll)}get showChart(){return this.motionPollService.showChart(this.poll)&&this.chartData&&!!this.chartData.value}}return t.\u0275fac=function(a){return new(a||t)(o.Tb(e.a))},t.\u0275cmp=o.Nb({type:t,selectors:[["os-motion-poll-detail-content"]],inputs:{poll:"poll",chartData:"chartData",iconSize:"iconSize"},decls:1,vars:1,consts:[["class","result-wrapper",4,"ngIf"],[1,"result-wrapper"],[1,"result-table"],["colspan","2"],[3,"class",4,"ngFor","ngForOf"],["class","doughnut-chart",4,"ngIf"],[3,"icon","size",4,"ngIf"],[4,"ngIf"],[1,"result-cell-definition"],[3,"icon","size"],[1,"doughnut-chart"],["type","doughnut",3,"data"]],template:function(t,a){1&t&&o.Qc(0,m,10,5,"div",0),2&t&&o.tc("ngIf",a.hasVotes)},directives:[c.t,c.s,i.a,s.a],pipes:[r.j,l.a,p.a,b.a,c.b],styles:[".yes[_ngcontent-%COMP%]{color:#4caf50}.no[_ngcontent-%COMP%]{color:#cc6c5b}.abstain[_ngcontent-%COMP%]{color:#a6a6a6}.voted-yes[_ngcontent-%COMP%]{background-color:#4caf50;color:#fff}.voted-no[_ngcontent-%COMP%]{background-color:#cc6c5b;color:#fff}.voted-abstain[_ngcontent-%COMP%]{background-color:#a6a6a6;color:#fff}.start-poll-button[_ngcontent-%COMP%]{color:#4caf50}.stop-poll-button[_ngcontent-%COMP%]{color:#ff5252}.publish-poll-button[_ngcontent-%COMP%]{color:#e6b100}.result-wrapper[_ngcontent-%COMP%]{display:grid;grid-gap:2em;margin:2em;grid-template-columns:repeat(auto-fit,minmax(150px,1fr))}.result-wrapper[_ngcontent-%COMP%] .result-table[_ngcontent-%COMP%] 
th[_ngcontent-%COMP%]{text-align:right;font-weight:400}.result-wrapper[_ngcontent-%COMP%] .result-table[_ngcontent-%COMP%] tr[_ngcontent-%COMP%]{height:48px;border-bottom:none!important}.result-wrapper[_ngcontent-%COMP%] .result-table[_ngcontent-%COMP%] tr[_ngcontent-%COMP%] .result-cell-definition[_ngcontent-%COMP%]{text-align:right}.result-wrapper[_ngcontent-%COMP%] .doughnut-chart[_ngcontent-%COMP%]{display:block}"]}),t})()},"9st3":function(t,a,n){"use strict";n.d(a,"a",(function(){return o})),n("Ri39");var e=n("fXoL");let o=(()=>{class t{constructor(){}}return t.\u0275fac=function(a){return new(a||t)},t.\u0275dir=e.Ob({type:t,inputs:{data:"data",projector:"projector"}}),t})()},SQVg:function(t,a,n){"use strict";n.r(a),n.d(a,"MotionPollSlideModule",(function(){return g}));var e=n("ofXK"),o=n("PCNd"),c=n("DGVe"),i=n("qnZ9"),s=n("ykax"),r=n("JTqh"),l=n("fXoL"),p=n("76GY");function b(t,a){if(1&t&&(l.Zb(0,"span"),l.Sc(1),l.Yb()),2&t){const t=l.lc(2);l.Gb(1),l.Uc("",t.data.data.motion.identifier,":")}}function u(t,a){if(1&t&&(l.Zb(0,"div"),l.Ub(1,"os-motion-poll-detail-content",4),l.Yb()),2&t){const t=l.lc(2);l.Gb(1),l.tc("poll",t.pollData)("chartData",t.chartDataSubject)}}function h(t,a){if(1&t&&(l.Xb(0),l.Zb(1,"div",1),l.Zb(2,"h1",2),l.Qc(3,b,2,1,"span",0),l.Sc(4),l.Yb(),l.Zb(5,"h2",3),l.Sc(6),l.Yb(),l.Yb(),l.Qc(7,u,2,2,"div",0),l.Wb()),2&t){const t=l.lc();l.Gb(3),l.tc("ngIf",t.data.data.motion.identifier),l.Gb(1),l.Uc(" ",t.data.data.motion.title," "),l.Gb(2),l.Tc(t.data.data.poll.title),l.Gb(1),l.tc("ngIf",t.data.data.poll.state===t.PollState.Published)}}let d=(()=>{class t extends r.a{constructor(t){super(t),this.PollState=i.e,this.chartDataSubject.subscribe(()=>{if(this.data&&this.data.data){this.pollData=this.data.data.poll;const t=this.pollData.options[0];this.voteYes=t.yes,this.voteNo=t.no,this.voteAbstain=t.abstain}})}showChart(){return this.pollService.showChart(this.pollData)}getTableData(){return this.pollService.generateTableData(this.pollData)}}return t.\u0275fac=function(a){return new(a||t)(l.Tb(s.a))},t.\u0275cmp=l.Nb({type:t,selectors:[["os-motion-poll-slide"]],features:[l.Db],decls:1,vars:1,consts:[[4,"ngIf"],[1,"slidetitle"],[1,"motion-title"],[1,"poll-title"],["iconSize","gigantic",3,"poll","chartData"]],template:function(t,a){1&t&&l.Qc(0,h,8,4,"ng-container",0),2&t&&l.tc("ngIf",a.data&&a.data.data)},directives:[e.t,p.a],styles:[".motion-title[_ngcontent-%COMP%]{margin:0 0 10px}"]}),t})(),g=(()=>{class t{}return t.\u0275mod=l.Rb({type:t}),t.\u0275inj=l.Qb({factory:function(a){return new(a||t)},providers:[{provide:c.a.token,useValue:d}],imports:[[e.c,o.a]]}),t})()},iCsr:function(t,a,n){"use strict";n.d(a,"a",(function(){return b}));var e=n("mV3u"),o=n("fXoL"),c=n("jhN1"),i=n("sYmb"),s=n("dNgK"),r=n("ofXK"),l=n("LPYB");function p(t,a){if(1&t&&o.Ub(0,"canvas",2),2&t){const t=o.lc();o.tc("datasets",t.isCircle?null:t.chartData)("data",t.isCircle?t.chartData:null)("colors",t.circleColors)("labels",t.labels)("options",t.chartOptions)("chartType",t.type)("legend",t.legend)}}let b=(()=>{class t extends e.a{constructor(t,a,n){super(t,a,n),this.type="horizontalBar",this.legend=!1}set data(t){this.progressInputData(t)}get chartOptions(){return 
this.isCircle?{responsive:!0,maintainAspectRatio:!1,tooltips:{enabled:!1},legend:{position:"left"}}:{responsive:!0,maintainAspectRatio:!1,tooltips:{enabled:!1},scales:{xAxes:[{gridLines:{drawOnChartArea:!1},ticks:{beginAtZero:!0,stepSize:1},stacked:!0}],yAxes:[{gridLines:{drawBorder:!1,drawOnChartArea:!1,drawTicks:!1},ticks:{mirror:!0,labelOffset:-20},stacked:!0}]}}}get isCircle(){return"pie"===this.type||"doughnut"===this.type}calcBarChartHeight(){if(!this.isCircle)return 120+60*this.labels.length+"px"}progressInputData(t){this.isCircle?(this.chartData=t.flatMap(t=>t.data),this.circleColors=[{backgroundColor:t.map(t=>t.backgroundColor).filter(t=>!!t),hoverBackgroundColor:t.map(t=>t.hoverBackgroundColor).filter(t=>!!t)}]):this.chartData=t,this.labels||(this.labels=t.map(t=>t.label))}}return t.\u0275fac=function(a){return new(a||t)(o.Tb(c.d),o.Tb(i.k),o.Tb(s.a))},t.\u0275cmp=o.Nb({type:t,selectors:[["os-charts"]],inputs:{type:"type",labels:"labels",legend:"legend",data:"data"},features:[o.Db],decls:2,vars:3,consts:[[1,"charts-wrapper"],["class","chart-js-canvas","baseChart","",3,"datasets","data","colors","labels","options","chartType","legend",4,"ngIf"],["baseChart","",1,"chart-js-canvas",3,"datasets","data","colors","labels","options","chartType","legend"]],template:function(t,a){1&t&&(o.Zb(0,"div",0),o.Qc(1,p,1,7,"canvas",1),o.Yb()),2&t&&(o.Pc("height",a.calcBarChartHeight()),o.Gb(1),o.tc("ngIf",a.chartData&&a.chartData.length))},directives:[r.t,l.a],styles:[".charts-wrapper[_ngcontent-%COMP%]{position:relative;display:block;height:100%;margin:auto}"],changeDetection:0}),t})()}}]);
|
from conans import ConanFile, CMake, tools
import os
class TestPackageConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
generators = "cmake", "cmake_find_package_multi"
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def test(self):
if not tools.cross_building(self.settings):
self.run(os.path.join("bin", "test_package"), run_environment=True)
|
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
def test_tutorial(clear_sqlmodel):
from docs_src.tutorial.one import tutorial001 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
calls = []
new_print = get_testing_print_function(calls)
with patch("builtins.print", new=new_print):
mod.main()
assert calls == [
[
"Hero:",
{
"name": "Tarantula",
"secret_name": "Natalia Roman-on",
"age": 32,
"id": 4,
},
]
]
|
const { ALLOWED_IMAGE_FORMATE } = require('../constents');
const hasValidImage = (url = '') => {
const extention = getImageExtention(url);
return ALLOWED_IMAGE_FORMATE.includes(extention.toLocaleLowerCase());
}
const getImageExtention = (url = '') => {
const fragments = url.split('.');
    // split always yields at least one fragment, so require a real extension
    if (fragments.length > 1) {
        return fragments[fragments.length - 1];
    }
return '';
}
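// Usage sketch (illustrative): assuming ALLOWED_IMAGE_FORMATE contains 'png',
// hasValidImage('cat.png') === true, while hasValidImage('cat') === false
// because a lone fragment is no longer treated as an extension.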
module.exports = {
hasValidImage,
}
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
#
# Thread Harness Automation documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 29 15:18:24 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon'
]
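# Illustrative only: with sphinxcontrib.napoleon enabled, Google-style
# docstrings such as
#
#     def connect(host):
#         """Open a connection.
#
#         Args:
#             host (str): Target hostname.
#         """
#
# are rewritten into reStructuredText field lists during the autodoc pass.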
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Thread Harness Automation'
copyright = u'2016, Yakun Xu'
author = u'Yakun Xu'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.5'
# The full version, including alpha/beta/rc tags.
release = u'0.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Thread Harness Automation v0.5'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ThreadHarnessAutomationdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ThreadHarnessAutomation.tex', u'Thread Harness Automation Documentation',
u'Yakun Xu', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'threadharnessautomation', u'Thread Harness Automation Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ThreadHarnessAutomation', u'Thread Harness Automation Documentation',
author, 'ThreadHarnessAutomation', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
|
/**
******************************************************************************
* @file stm32l0xx_hal_conf.h
* @author MCD Application Team
* @version V1.8.0
* @date 25-November-2016
* @brief HAL configuration template file.
* This file should be copied to the application folder and renamed
* to stm32l0xx_hal_conf.h.
******************************************************************************
* @attention
*
* <h2><center>© COPYRIGHT(c) 2016 STMicroelectronics</center></h2>
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of STMicroelectronics nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __STM32L0xx_HAL_CONF_H
#define __STM32L0xx_HAL_CONF_H
#ifdef __cplusplus
extern "C" {
#endif
/* Exported types ------------------------------------------------------------*/
/* Exported constants --------------------------------------------------------*/
/* ########################## Module Selection ############################## */
/**
* @brief This is the list of modules to be used in the HAL driver
*/
#define HAL_MODULE_ENABLED
#define HAL_ADC_MODULE_ENABLED
// #define HAL_COMP_MODULE_ENABLED
// #define HAL_CRC_MODULE_ENABLED
// #define HAL_CRYP_MODULE_ENABLED
#define HAL_DAC_MODULE_ENABLED
#define HAL_DMA_MODULE_ENABLED
// #define HAL_FIREWALL_MODULE_ENABLED
#define HAL_FLASH_MODULE_ENABLED
#define HAL_GPIO_MODULE_ENABLED
#define HAL_I2C_MODULE_ENABLED
// #define HAL_I2S_MODULE_ENABLED
// #define HAL_IWDG_MODULE_ENABLED
// #define HAL_LCD_MODULE_ENABLED
// #define HAL_LPTIM_MODULE_ENABLED
#define HAL_PWR_MODULE_ENABLED
#define HAL_RCC_MODULE_ENABLED
// #define HAL_RNG_MODULE_ENABLED
#define HAL_RTC_MODULE_ENABLED
#define HAL_SPI_MODULE_ENABLED
#define HAL_TIM_MODULE_ENABLED
// #define HAL_TSC_MODULE_ENABLED
// #define HAL_UART_MODULE_ENABLED
// #define HAL_USART_MODULE_ENABLED
// #define HAL_IRDA_MODULE_ENABLED
// #define HAL_SMARTCARD_MODULE_ENABLED
// #define HAL_SMBUS_MODULE_ENABLED
// #define HAL_WWDG_MODULE_ENABLED
#define HAL_CORTEX_MODULE_ENABLED
// #define HAL_PCD_MODULE_ENABLED
/* ########################## Oscillator Values adaptation ####################*/
/**
* @brief Adjust the value of External High Speed oscillator (HSE) used in your application.
* This value is used by the RCC HAL module to compute the system frequency
* (when HSE is used as system clock source, directly or through the PLL).
*/
#if !defined (HSE_VALUE)
#define HSE_VALUE ((uint32_t)8000000U) /*!< Value of the External oscillator in Hz */
#endif /* HSE_VALUE */
#if !defined (HSE_STARTUP_TIMEOUT)
#define HSE_STARTUP_TIMEOUT ((uint32_t)100U) /*!< Time out for HSE start up, in ms */
#endif /* HSE_STARTUP_TIMEOUT */
/**
* @brief Internal Multiple Speed oscillator (MSI) default value.
* This value is the default MSI range value after Reset.
*/
#if !defined (MSI_VALUE)
#define MSI_VALUE ((uint32_t)2000000U) /*!< Value of the Internal oscillator in Hz*/
#endif /* MSI_VALUE */
/**
* @brief Internal High Speed oscillator (HSI) value.
* This value is used by the RCC HAL module to compute the system frequency
* (when HSI is used as system clock source, directly or through the PLL).
*/
#if !defined (HSI_VALUE)
#define HSI_VALUE ((uint32_t)16000000U) /*!< Value of the Internal oscillator in Hz*/
#endif /* HSI_VALUE */
/**
* @brief Internal High Speed oscillator for USB (HSI48) value.
*/
#if !defined (HSI48_VALUE)
#define HSI48_VALUE ((uint32_t)48000000U) /*!< Value of the Internal High Speed oscillator for USB in Hz.
The real value may vary depending on the variations
in voltage and temperature. */
#endif /* HSI48_VALUE */
/**
* @brief Internal Low Speed oscillator (LSI) value.
*/
#if !defined (LSI_VALUE)
#define LSI_VALUE ((uint32_t)37000U) /*!< Typical value of the Internal Low Speed oscillator in Hz.
The real value may vary depending on the variations
in voltage and temperature. */
#endif /* LSI_VALUE */
/**
* @brief External Low Speed oscillator (LSE) value.
* This value is used by the UART and RTC HAL modules to compute the system frequency.
*/
#if !defined (LSE_VALUE)
#define LSE_VALUE ((uint32_t)32768U) /*!< Value of the External oscillator in Hz*/
#endif /* LSE_VALUE */
/**
* @brief Time out for LSE start up value in ms.
*/
#if !defined (LSE_STARTUP_TIMEOUT)
#define LSE_STARTUP_TIMEOUT ((uint32_t)5000U) /*!< Time out for LSE start up, in ms */
#endif /* LSE_STARTUP_TIMEOUT */
/* Tip: To avoid modifying this file each time you need to use a different HSE,
=== you can define the HSE value in your toolchain compiler preprocessor. */
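/* Example (an illustrative sketch; the exact flag syntax varies by toolchain/IDE):
arm-none-eabi-gcc ... -DHSE_VALUE=24000000U ...
The #if !defined (HSE_VALUE) guard above then leaves that value in effect. */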
/* ########################### System Configuration ######################### */
/**
* @brief This is the HAL system configuration section
*/
#define VDD_VALUE ((uint32_t)3300U) /*!< Value of VDD in mV */
#define TICK_INT_PRIORITY (((uint32_t)1U<<__NVIC_PRIO_BITS) - 1U) /*!< tick interrupt priority */
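/* Worked example: STM32L0 devices are Cortex-M0+ parts, for which __NVIC_PRIO_BITS is 2,
so TICK_INT_PRIORITY = ((uint32_t)1U << 2) - 1U = 3U, i.e. the lowest NVIC priority. */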
#define USE_RTOS 0U
#define PREFETCH_ENABLE 1U
#define PREREAD_ENABLE 0U
#define BUFFER_CACHE_DISABLE 0U
/* ########################## Assert Selection ############################## */
/**
* @brief Uncomment the line below to expand the "assert_param" macro in the
* HAL drivers code
*/
/* #define USE_FULL_ASSERT 1 */
/* Includes ------------------------------------------------------------------*/
/**
* @brief Include module's header file
*/
#ifdef HAL_RCC_MODULE_ENABLED
#include "stm32l0xx_hal_rcc.h"
#endif /* HAL_RCC_MODULE_ENABLED */
#ifdef HAL_GPIO_MODULE_ENABLED
#include "stm32l0xx_hal_gpio.h"
#endif /* HAL_GPIO_MODULE_ENABLED */
#ifdef HAL_DMA_MODULE_ENABLED
#include "stm32l0xx_hal_dma.h"
#endif /* HAL_DMA_MODULE_ENABLED */
#ifdef HAL_CORTEX_MODULE_ENABLED
#include "stm32l0xx_hal_cortex.h"
#endif /* HAL_CORTEX_MODULE_ENABLED */
#ifdef HAL_ADC_MODULE_ENABLED
#include "stm32l0xx_hal_adc.h"
#endif /* HAL_ADC_MODULE_ENABLED */
#ifdef HAL_COMP_MODULE_ENABLED
#include "stm32l0xx_hal_comp.h"
#endif /* HAL_COMP_MODULE_ENABLED */
#ifdef HAL_CRC_MODULE_ENABLED
#include "stm32l0xx_hal_crc.h"
#endif /* HAL_CRC_MODULE_ENABLED */
#ifdef HAL_CRYP_MODULE_ENABLED
#include "stm32l0xx_hal_cryp.h"
#endif /* HAL_CRYP_MODULE_ENABLED */
#ifdef HAL_DAC_MODULE_ENABLED
#include "stm32l0xx_hal_dac.h"
#endif /* HAL_DAC_MODULE_ENABLED */
#ifdef HAL_FIREWALL_MODULE_ENABLED
#include "stm32l0xx_hal_firewall.h"
#endif /* HAL_FIREWALL_MODULE_ENABLED */
#ifdef HAL_FLASH_MODULE_ENABLED
#include "stm32l0xx_hal_flash.h"
#endif /* HAL_FLASH_MODULE_ENABLED */
#ifdef HAL_I2C_MODULE_ENABLED
#include "stm32l0xx_hal_i2c.h"
#endif /* HAL_I2C_MODULE_ENABLED */
#ifdef HAL_I2S_MODULE_ENABLED
#include "stm32l0xx_hal_i2s.h"
#endif /* HAL_I2S_MODULE_ENABLED */
#ifdef HAL_IWDG_MODULE_ENABLED
#include "stm32l0xx_hal_iwdg.h"
#endif /* HAL_IWDG_MODULE_ENABLED */
#ifdef HAL_LCD_MODULE_ENABLED
#include "stm32l0xx_hal_lcd.h"
#endif /* HAL_LCD_MODULE_ENABLED */
#ifdef HAL_LPTIM_MODULE_ENABLED
#include "stm32l0xx_hal_lptim.h"
#endif /* HAL_LPTIM_MODULE_ENABLED */
#ifdef HAL_PWR_MODULE_ENABLED
#include "stm32l0xx_hal_pwr.h"
#endif /* HAL_PWR_MODULE_ENABLED */
#ifdef HAL_RNG_MODULE_ENABLED
#include "stm32l0xx_hal_rng.h"
#endif /* HAL_RNG_MODULE_ENABLED */
#ifdef HAL_RTC_MODULE_ENABLED
#include "stm32l0xx_hal_rtc.h"
#endif /* HAL_RTC_MODULE_ENABLED */
#ifdef HAL_SPI_MODULE_ENABLED
#include "stm32l0xx_hal_spi.h"
#endif /* HAL_SPI_MODULE_ENABLED */
#ifdef HAL_TIM_MODULE_ENABLED
#include "stm32l0xx_hal_tim.h"
#endif /* HAL_TIM_MODULE_ENABLED */
#ifdef HAL_TSC_MODULE_ENABLED
#include "stm32l0xx_hal_tsc.h"
#endif /* HAL_TSC_MODULE_ENABLED */
#ifdef HAL_UART_MODULE_ENABLED
#include "stm32l0xx_hal_uart.h"
#endif /* HAL_UART_MODULE_ENABLED */
#ifdef HAL_USART_MODULE_ENABLED
#include "stm32l0xx_hal_usart.h"
#endif /* HAL_USART_MODULE_ENABLED */
#ifdef HAL_IRDA_MODULE_ENABLED
#include "stm32l0xx_hal_irda.h"
#endif /* HAL_IRDA_MODULE_ENABLED */
#ifdef HAL_SMARTCARD_MODULE_ENABLED
#include "stm32l0xx_hal_smartcard.h"
#endif /* HAL_SMARTCARD_MODULE_ENABLED */
#ifdef HAL_SMBUS_MODULE_ENABLED
#include "stm32l0xx_hal_smbus.h"
#endif /* HAL_SMBUS_MODULE_ENABLED */
#ifdef HAL_WWDG_MODULE_ENABLED
#include "stm32l0xx_hal_wwdg.h"
#endif /* HAL_WWDG_MODULE_ENABLED */
#ifdef HAL_PCD_MODULE_ENABLED
#include "stm32l0xx_hal_pcd.h"
#endif /* HAL_PCD_MODULE_ENABLED */
/* Exported macro ------------------------------------------------------------*/
#ifdef USE_FULL_ASSERT
/**
* @brief The assert_param macro is used for function's parameters check.
* @param expr: If expr is false, it calls assert_failed function
* which reports the name of the source file and the source
* line number of the call that failed.
* If expr is true, it returns no value.
* @retval None
*/
#define assert_param(expr) ((expr) ? (void)0U : assert_failed((uint8_t *)__FILE__, __LINE__))
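/* Illustrative usage (a sketch; IS_GPIO_PIN stands in for any of the IS_... parameter-check
macros provided by the HAL drivers):
assert_param(IS_GPIO_PIN(GPIO_Pin));
With USE_FULL_ASSERT defined, a false expression calls assert_failed() with the file name
and line number; without it, the macro below compiles to (void)0U. */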
/* Exported functions ------------------------------------------------------- */
void assert_failed(uint8_t* file, uint32_t line);
#else
#define assert_param(expr) ((void)0U)
#endif /* USE_FULL_ASSERT */
#ifdef __cplusplus
}
#endif
#endif /* __STM32L0xx_HAL_CONF_H */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Anaconda, Inc
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod, abstractproperty
from errno import EXDEV
from logging import getLogger
from os.path import basename, dirname, getsize, join
import re
from uuid import uuid4
from .envs_manager import USER_ENVIRONMENTS_TXT_FILE, register_env, unregister_env
from .portability import _PaddingError, update_prefix
from .prefix_data import PrefixData
from .. import CondaError
from .._vendor.auxlib.compat import with_metaclass
from .._vendor.auxlib.ish import dals
from .._vendor.toolz import concat
from ..base.constants import CONDA_TARBALL_EXTENSION
from ..base.context import context
from ..common.compat import iteritems, on_win, text_type
from ..common.path import (get_bin_directory_short_path, get_leaf_directories,
get_python_noarch_target_path, get_python_short_path,
parse_entry_point_def,
pyc_path, url_to_path, win_path_ok)
from ..common.url import has_platform, path_to_url, unquote
from ..exceptions import CondaUpgradeError, CondaVerificationError, PaddingError, SafetyError
from ..gateways.connection.download import download
from ..gateways.disk.create import (compile_multiple_pyc, copy,
create_hard_link_or_copy, create_link,
create_python_entry_point, extract_tarball,
make_menu, mkdir_p, write_as_json_to_file)
from ..gateways.disk.delete import rm_rf, try_rmdir_all_empty
from ..gateways.disk.permissions import make_writable
from ..gateways.disk.read import (compute_md5sum, compute_sha256sum, islink, lexists,
read_index_json)
from ..gateways.disk.update import backoff_rename, touch
from ..history import History
from ..models.channel import Channel
from ..models.enums import LinkType, NoarchType, PathType
from ..models.match_spec import MatchSpec
from ..models.records import (Link, PackageCacheRecord, PackageRecord, PathDataV1, PathsData,
PrefixRecord)
log = getLogger(__name__)
REPR_IGNORE_KWARGS = (
'transaction_context',
'package_info',
'hold_path',
)
@with_metaclass(ABCMeta)
class PathAction(object):
_verified = False
@abstractmethod
def verify(self):
# if verify fails, it should return an exception object rather than raise
# at the end of a verification run, all errors will be raised as a CondaMultiError
# after successful verification, the verify method should set self._verified = True
raise NotImplementedError()
@abstractmethod
def execute(self):
raise NotImplementedError()
@abstractmethod
def reverse(self):
raise NotImplementedError()
@abstractmethod
def cleanup(self):
raise NotImplementedError()
@abstractproperty
def target_full_path(self):
raise NotImplementedError()
@property
def verified(self):
return self._verified
def __repr__(self):
args = ('%s=%r' % (key, value) for key, value in iteritems(vars(self))
if key not in REPR_IGNORE_KWARGS)
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
@with_metaclass(ABCMeta)
class MultiPathAction(object):
_verified = False
@abstractmethod
def verify(self):
# if verify fails, it should return an exception object rather than raise
# at the end of a verification run, all errors will be raised as a CondaMultiError
# after successful verification, the verify method should set self._verified = True
raise NotImplementedError()
@abstractmethod
def execute(self):
raise NotImplementedError()
@abstractmethod
def reverse(self):
raise NotImplementedError()
@abstractmethod
def cleanup(self):
raise NotImplementedError()
@abstractproperty
def target_full_paths(self):
raise NotImplementedError()
@property
def verified(self):
return self._verified
def __repr__(self):
args = ('%s=%r' % (key, value) for key, value in iteritems(vars(self))
if key not in REPR_IGNORE_KWARGS)
return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
@with_metaclass(ABCMeta)
class PrefixPathAction(PathAction):
def __init__(self, transaction_context, target_prefix, target_short_path):
self.transaction_context = transaction_context
self.target_prefix = target_prefix
self.target_short_path = target_short_path
@property
def target_full_path(self):
trgt, shrt_pth = self.target_prefix, self.target_short_path
if trgt is not None and shrt_pth is not None:
return join(trgt, win_path_ok(shrt_pth))
else:
return None
# ######################################################
# Creation of Paths within a Prefix
# ######################################################
@with_metaclass(ABCMeta)
class CreateInPrefixPathAction(PrefixPathAction):
# All CreatePathAction subclasses must create a SINGLE new path
# the short/in-prefix version of that path must be returned by execute()
def __init__(self, transaction_context, package_info, source_prefix, source_short_path,
target_prefix, target_short_path):
super(CreateInPrefixPathAction, self).__init__(transaction_context,
target_prefix, target_short_path)
self.package_info = package_info
self.source_prefix = source_prefix
self.source_short_path = source_short_path
def verify(self):
self._verified = True
def cleanup(self):
# create actions typically won't need cleanup
pass
@property
def source_full_path(self):
prfx, shrt_pth = self.source_prefix, self.source_short_path
return join(prfx, win_path_ok(shrt_pth)) if prfx and shrt_pth else None
class LinkPathAction(CreateInPrefixPathAction):
@classmethod
def create_file_link_actions(cls, transaction_context, package_info, target_prefix,
requested_link_type):
def get_prefix_replace(source_path_data):
if source_path_data.path_type == PathType.softlink:
link_type = LinkType.copy
prefix_placeholder, file_mode = '', None
elif source_path_data.prefix_placeholder:
link_type = LinkType.copy
prefix_placeholder = source_path_data.prefix_placeholder
file_mode = source_path_data.file_mode
elif source_path_data.no_link:
link_type = LinkType.copy
prefix_placeholder, file_mode = '', None
else:
link_type = requested_link_type
prefix_placeholder, file_mode = '', None
return link_type, prefix_placeholder, file_mode
def make_file_link_action(source_path_data):
# TODO: this inner function is still kind of a mess
noarch = package_info.repodata_record.noarch
if noarch == NoarchType.python:
sp_dir = transaction_context['target_site_packages_short_path']
if sp_dir is None:
raise CondaError("Unable to determine python site-packages "
"dir in target_prefix!\nPlease make sure "
"python is installed in %s" % target_prefix)
target_short_path = get_python_noarch_target_path(source_path_data.path, sp_dir)
elif noarch is None or noarch == NoarchType.generic:
target_short_path = source_path_data.path
else:
raise CondaUpgradeError(dals("""
The current version of conda is too old to install this package.
Please update conda."""))
link_type, placeholder, fmode = get_prefix_replace(source_path_data)
if placeholder:
return PrefixReplaceLinkAction(transaction_context, package_info,
package_info.extracted_package_dir,
source_path_data.path,
target_prefix, target_short_path,
requested_link_type,
placeholder, fmode, source_path_data)
else:
return LinkPathAction(transaction_context, package_info,
package_info.extracted_package_dir, source_path_data.path,
target_prefix, target_short_path,
link_type, source_path_data)
return tuple(make_file_link_action(spi) for spi in package_info.paths_data.paths)
@classmethod
def create_directory_actions(cls, transaction_context, package_info, target_prefix,
requested_link_type, file_link_actions):
leaf_directories = get_leaf_directories(axn.target_short_path for axn in file_link_actions)
return tuple(
cls(transaction_context, package_info, None, None,
target_prefix, directory_short_path, LinkType.directory, None)
for directory_short_path in leaf_directories
)
@classmethod
def create_python_entry_point_windows_exe_action(cls, transaction_context, package_info,
target_prefix, requested_link_type,
entry_point_def):
source_directory = context.conda_prefix
source_short_path = 'Scripts/conda.exe'
command, _, _ = parse_entry_point_def(entry_point_def)
target_short_path = "Scripts/%s.exe" % command
source_path_data = PathDataV1(
_path=target_short_path,
path_type=PathType.windows_python_entry_point_exe,
)
return cls(transaction_context, package_info, source_directory,
source_short_path, target_prefix, target_short_path,
requested_link_type, source_path_data)
def __init__(self, transaction_context, package_info,
extracted_package_dir, source_short_path,
target_prefix, target_short_path, link_type, source_path_data):
super(LinkPathAction, self).__init__(transaction_context, package_info,
extracted_package_dir, source_short_path,
target_prefix, target_short_path)
self.link_type = link_type
self._execute_successful = False
self.source_path_data = source_path_data
self.prefix_path_data = None
def verify(self):
if self.link_type != LinkType.directory and not lexists(self.source_full_path): # pragma: no cover # NOQA
return CondaVerificationError(dals("""
The package for %s located at %s
appears to be corrupted. The path '%s'
specified in the package manifest cannot be found.
""" % (self.package_info.repodata_record.name,
self.package_info.extracted_package_dir,
self.source_short_path)))
source_path_data = self.source_path_data
try:
source_path_type = source_path_data.path_type
except AttributeError:
source_path_type = None
if source_path_type in PathType.basic_types:
# this lets us keep the non-generic path types like windows_python_entry_point_exe
source_path_type = None
if self.link_type == LinkType.directory:
self.prefix_path_data = None
elif self.link_type == LinkType.softlink:
self.prefix_path_data = PathDataV1.from_objects(
self.source_path_data,
path_type=source_path_type or PathType.softlink,
)
elif self.link_type == LinkType.copy and source_path_data.path_type == PathType.softlink:
self.prefix_path_data = PathDataV1.from_objects(
self.source_path_data,
path_type=source_path_type or PathType.softlink,
)
elif source_path_data.path_type == PathType.hardlink:
try:
reported_sha256 = source_path_data.sha256
except AttributeError:
reported_sha256 = None
source_sha256 = compute_sha256sum(self.source_full_path)
if reported_sha256 and reported_sha256 != source_sha256:
return SafetyError(dals("""
The package for %s located at %s
appears to be corrupted. The path '%s'
has a sha256 mismatch.
reported sha256: %s
actual sha256: %s
""" % (self.package_info.repodata_record.name,
self.package_info.extracted_package_dir,
self.source_short_path,
reported_sha256,
source_sha256,
)))
try:
reported_size_in_bytes = source_path_data.size_in_bytes
except AttributeError:
reported_size_in_bytes = None
if reported_size_in_bytes:
source_size_in_bytes = getsize(self.source_full_path)
if reported_size_in_bytes != source_size_in_bytes:
return SafetyError(dals("""
The package for %s located at %s
appears to be corrupted. The path '%s'
has an incorrect size.
reported size: %s bytes
actual size: %s bytes
""" % (self.package_info.repodata_record.name,
self.package_info.extracted_package_dir,
self.source_short_path,
reported_size_in_bytes,
source_size_in_bytes,
)))
self.prefix_path_data = PathDataV1.from_objects(
source_path_data,
sha256=reported_sha256,
sha256_in_prefix=reported_sha256,
path_type=source_path_type or PathType.hardlink,
)
elif source_path_data.path_type == PathType.windows_python_entry_point_exe:
self.prefix_path_data = source_path_data
else:
raise NotImplementedError()
self._verified = True
def execute(self):
log.trace("linking %s => %s", self.source_full_path, self.target_full_path)
create_link(self.source_full_path, self.target_full_path, self.link_type,
force=context.force)
self._execute_successful = True
def reverse(self):
if self._execute_successful:
log.trace("reversing link creation %s", self.target_prefix)
if self.link_type == LinkType.directory:
try_rmdir_all_empty(self.target_full_path)
else:
rm_rf(self.target_full_path)
class PrefixReplaceLinkAction(LinkPathAction):
def __init__(self, transaction_context, package_info,
extracted_package_dir, source_short_path,
target_prefix, target_short_path,
link_type,
prefix_placeholder, file_mode, source_path_data):
# This link_type is used in execute(). Make sure we always respect a LinkType.copy request.
link_type = LinkType.copy if link_type == LinkType.copy else LinkType.hardlink
super(PrefixReplaceLinkAction, self).__init__(transaction_context, package_info,
extracted_package_dir, source_short_path,
target_prefix, target_short_path,
link_type, source_path_data)
self.prefix_placeholder = prefix_placeholder
self.file_mode = file_mode
self.intermediate_path = None
def verify(self):
validation_error = super(PrefixReplaceLinkAction, self).verify()
if validation_error:
return validation_error
if islink(self.source_full_path):
log.trace("ignoring prefix update for symlink with source path %s",
self.source_full_path)
# return
assert False, "I don't think this is the right place to ignore this"
mkdir_p(self.transaction_context['temp_dir'])
self.intermediate_path = join(self.transaction_context['temp_dir'], text_type(uuid4()))
log.trace("copying %s => %s", self.source_full_path, self.intermediate_path)
create_link(self.source_full_path, self.intermediate_path, LinkType.copy)
make_writable(self.intermediate_path)
try:
log.trace("rewriting prefixes in %s", self.target_full_path)
update_prefix(self.intermediate_path,
context.target_prefix_override or self.target_prefix,
self.prefix_placeholder,
self.file_mode)
except _PaddingError:
raise PaddingError(self.target_full_path, self.prefix_placeholder,
len(self.prefix_placeholder))
sha256_in_prefix = compute_sha256sum(self.intermediate_path)
self.prefix_path_data = PathDataV1.from_objects(
self.prefix_path_data,
file_mode=self.file_mode,
path_type=PathType.hardlink,
prefix_placeholder=self.prefix_placeholder,
sha256_in_prefix=sha256_in_prefix,
)
self._verified = True
def execute(self):
if not self._verified:
self.verify()
source_path = self.intermediate_path or self.source_full_path
log.trace("linking %s => %s", source_path, self.target_full_path)
create_link(source_path, self.target_full_path, self.link_type)
self._execute_successful = True
class MakeMenuAction(CreateInPrefixPathAction):
@classmethod
def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type):
if on_win and context.shortcuts:
MENU_RE = re.compile(r'^menu/.*\.json$', re.IGNORECASE)
return tuple(cls(transaction_context, package_info, target_prefix, spi.path)
for spi in package_info.paths_data.paths if bool(MENU_RE.match(spi.path)))
else:
return ()
def __init__(self, transaction_context, package_info, target_prefix, target_short_path):
super(MakeMenuAction, self).__init__(transaction_context, package_info,
None, None, target_prefix, target_short_path)
self._execute_successful = False
def execute(self):
log.trace("making menu for %s", self.target_full_path)
make_menu(self.target_prefix, self.target_short_path, remove=False)
self._execute_successful = True
def reverse(self):
if self._execute_successful:
log.trace("removing menu for %s", self.target_full_path)
make_menu(self.target_prefix, self.target_short_path, remove=True)
class CreateNonadminAction(CreateInPrefixPathAction):
@classmethod
def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type):
if on_win and lexists(join(context.root_prefix, '.nonadmin')):
return cls(transaction_context, package_info, target_prefix),
else:
return ()
def __init__(self, transaction_context, package_info, target_prefix):
super(CreateNonadminAction, self).__init__(transaction_context, package_info, None, None,
target_prefix, '.nonadmin')
self._file_created = False
def execute(self):
log.trace("touching nonadmin %s", self.target_full_path)
self._file_created = touch(self.target_full_path)
def reverse(self):
if self._file_created:
log.trace("removing nonadmin file %s", self.target_full_path)
rm_rf(self.target_full_path)
class CompileMultiPycAction(MultiPathAction):
@classmethod
def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type,
file_link_actions):
noarch = package_info.package_metadata and package_info.package_metadata.noarch
if noarch is not None and noarch.type == NoarchType.python:
noarch_py_file_re = re.compile(r'^site-packages[/\\][^\t\n\r\f\v]+\.py$')
py_ver = transaction_context['target_python_version']
py_files = tuple((axn.target_short_path for axn in file_link_actions
if noarch_py_file_re.match(axn.source_short_path)))
pyc_files = tuple((pyc_path(pf, py_ver) for pf in py_files))
return (cls(transaction_context, package_info, target_prefix, py_files, pyc_files), )
else:
return ()
def __init__(self, transaction_context, package_info, target_prefix,
source_short_paths, target_short_paths):
self.transaction_context = transaction_context
self.package_info = package_info
self.target_prefix = target_prefix
self.source_short_paths = source_short_paths
self.target_short_paths = target_short_paths
self.prefix_path_data = None
self.prefix_paths_data = [
PathDataV1(_path=p, path_type=PathType.pyc_file,) for p in self.target_short_paths]
self._execute_successful = False
@property
def target_full_paths(self):
def join_or_none(prefix, short_path):
if prefix is None or short_path is None:
return None
else:
return join(prefix, win_path_ok(short_path))
return (join_or_none(self.target_prefix, p) for p in self.target_short_paths)
@property
def source_full_paths(self):
def join_or_none(prefix, short_path):
if prefix is None or short_path is None:
return None
else:
return join(prefix, win_path_ok(short_path))
return (join_or_none(self.target_prefix, p) for p in self.source_short_paths)
def verify(self):
self._verified = True
def cleanup(self):
# create actions typically won't need cleanup
pass
def execute(self):
# compile_pyc is sometimes expected to fail, for example when a python 3.6 file
# is installed into a python 2 environment, with no code paths actually importing it.
# Technically, such a file should then be removed from the manifest in conda-meta,
# but at the time of this writing that's not currently happening.
log.trace("compiling %s", ' '.join(self.target_full_paths))
target_python_version = self.transaction_context['target_python_version']
python_short_path = get_python_short_path(target_python_version)
python_full_path = join(self.target_prefix, win_path_ok(python_short_path))
compile_multiple_pyc(python_full_path, self.source_full_paths, self.target_full_paths,
self.target_prefix, self.transaction_context['target_python_version'])
self._execute_successful = True
def reverse(self):
# this removes all pyc files even if they were not created
if self._execute_successful:
log.trace("reversing pyc creation %s", ' '.join(self.target_full_paths))
for target_full_path in self.target_full_paths:
rm_rf(target_full_path)
class CreatePythonEntryPointAction(CreateInPrefixPathAction):
@classmethod
def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type):
noarch = package_info.package_metadata and package_info.package_metadata.noarch
if noarch is not None and noarch.type == NoarchType.python:
def this_triplet(entry_point_def):
command, module, func = parse_entry_point_def(entry_point_def)
target_short_path = "%s/%s" % (get_bin_directory_short_path(), command)
if on_win:
target_short_path += "-script.py"
return target_short_path, module, func
actions = tuple(cls(transaction_context, package_info, target_prefix,
*this_triplet(ep_def))
for ep_def in noarch.entry_points or ())
if on_win: # pragma: unix no cover
actions += tuple(
LinkPathAction.create_python_entry_point_windows_exe_action(
transaction_context, package_info, target_prefix,
requested_link_type, ep_def
) for ep_def in noarch.entry_points or ()
)
return actions
else:
return ()
def __init__(self, transaction_context, package_info, target_prefix, target_short_path,
module, func):
super(CreatePythonEntryPointAction, self).__init__(transaction_context, package_info,
None, None,
target_prefix, target_short_path)
self.module = module
self.func = func
if on_win:
path_type = PathType.windows_python_entry_point_script
else:
path_type = PathType.unix_python_entry_point
self.prefix_path_data = PathDataV1(
_path=self.target_short_path,
path_type=path_type,
)
self._execute_successful = False
def execute(self):
log.trace("creating python entry point %s", self.target_full_path)
if on_win:
python_full_path = None
else:
target_python_version = self.transaction_context['target_python_version']
python_short_path = get_python_short_path(target_python_version)
python_full_path = join(self.target_prefix, win_path_ok(python_short_path))
create_python_entry_point(self.target_full_path, python_full_path,
self.module, self.func)
self._execute_successful = True
def reverse(self):
if self._execute_successful:
log.trace("reversing python entry point creation %s", self.target_full_path)
rm_rf(self.target_full_path)
# class CreateApplicationEntryPointWindowsExeAction(LinkPathAction):
#
# @classmethod
# def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type, # NOQA
# exe_path):
# source_directory = context.conda_prefix
# source_short_path = 'Scripts/conda.exe'
# target_short_path = exe_path
# return cls(transaction_context, package_info, source_directory,
# source_short_path, target_prefix, target_short_path, requested_link_type)
#
# def __init__(self, transaction_context, package_info, source_prefix, source_short_path,
# target_prefix, target_short_path, requested_link_type):
# super(CreateApplicationEntryPointWindowsExeAction, self).__init__(
# transaction_context, package_info, source_prefix, source_short_path,
# target_prefix, target_short_path, requested_link_type,
# )
# self.leased_path_entry = LeasedPathEntry(
# _path=target_short_path,
# target_path=self.source_full_path,
# target_prefix=source_prefix,
# leased_path=self.target_full_path,
# package_name=package_info.index_json_record.name,
# leased_path_type=self.leased_path_type,
# )
#
# @property
# def leased_path_type(self):
# return LeasedPathType.application_entry_point_windows_exe
# class CreateApplicationEntryPointAction(CreateLeasedPathAction):
#
# @classmethod
# def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): # NOQA
# preferred_env = package_info.repodata_record.preferred_env
# if preferred_env_matches_prefix(preferred_env, target_prefix, context.root_prefix):
# exe_paths = (package_info.package_metadata
# and package_info.package_metadata.preferred_env
# and package_info.package_metadata.preferred_env.executable_paths
# or ())
#
# # target_prefix for the instantiated path action is the root prefix, not the same
# # as target_prefix for the larger transaction
# assert is_private_env_path(target_prefix)
# root_prefix = dirname(dirname(target_prefix))
#
# if on_win:
# def make_app_entry_point_axns(exe_path):
# assert exe_path.endswith(('.exe', '.bat'))
# target_short_path = exe_path[:-4] + "-script.py"
# yield cls(transaction_context, package_info, target_prefix, exe_path,
# root_prefix, target_short_path)
#
# yield CreateApplicationEntryPointWindowsExeAction.create_actions(
# transaction_context, package_info, root_prefix,
# LinkType.hardlink, exe_path[:-4] + ".exe"
# )
# return tuple(concat(make_app_entry_point_axns(executable_short_path)
# for executable_short_path in exe_paths))
#
# else:
# return tuple(
# cls(transaction_context, package_info, target_prefix, executable_short_path,
# root_prefix, executable_short_path)
# for executable_short_path in exe_paths
# )
# else:
# return ()
#
# def execute(self):
# log.trace("creating application entry point %s => %s",
# self.source_full_path, self.target_full_path)
# if self.source_prefix == context.conda_prefix:
# # this could blow up for the special case of application entry points in conda's
# # private environment
# # in that case, probably should use the python version from transaction_context
# conda_python_version = self.transaction_context['target_python_version']
# else:
# conda_python_version = get_python_version_for_prefix(context.conda_prefix)
# conda_python_short_path = get_python_short_path(conda_python_version)
# conda_python_full_path = join(context.conda_prefix, win_path_ok(conda_python_short_path))
# create_application_entry_point(self.source_full_path, self.target_full_path,
# conda_python_full_path)
# self._execute_successful = True
#
# @property
# def leased_path_type(self):
# return LeasedPathType.application_entry_point
#
#
# class CreateApplicationSoftlinkAction(CreateLeasedPathAction):
#
# @classmethod
# def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): # NOQA
# preferred_env = package_info.repodata_record.preferred_env
# if preferred_env_matches_prefix(preferred_env, target_prefix, context.root_prefix):
# softlink_paths = (package_info.package_metadata
# and package_info.package_metadata.preferred_env
# and package_info.package_metadata.preferred_env.softlink_paths
# or ())
#
# # target_prefix for the instantiated path action is the root prefix, not the same
# # as target_prefix for the larger transaction
# assert is_private_env_path(target_prefix)
# root_prefix = dirname(dirname(target_prefix))
# softlink_supported_test_file = join(target_prefix, PREFIX_MAGIC_FILE)
#
# def make_softlink_exe_axn(softlink_short_path):
# if not on_win: # pragma: win no cover
# root_short_path = softlink_short_path
# softlink_method = 'softlink'
# else: # pragma: unix no cover
# windows_pathext = os.getenv('PATHEXT', '').lower().split(';')
# path_root, path_ext = splitext(softlink_short_path)
#
# if softlink_supported(softlink_supported_test_file, root_prefix):
# root_short_path = softlink_short_path
# softlink_method = 'softlink'
# elif path_ext.lower() in windows_pathext:
# root_short_path = splitext(softlink_short_path)[0] + '.bat'
# softlink_method = 'fake_exe_softlink'
# else:
# root_short_path = softlink_short_path
# softlink_method = 'softlink_or_fail_ok'
#
# return cls(transaction_context, package_info, target_prefix, softlink_short_path,
# root_prefix, root_short_path, softlink_method)
#
# return tuple(make_softlink_exe_axn(softlink_short_path)
# for softlink_short_path in softlink_paths)
#
# else:
# return ()
#
# def __init__(self, transaction_context, package_info, source_prefix, source_short_path,
# target_prefix, target_short_path, softlink_method):
# super(CreateApplicationSoftlinkAction, self).__init__(transaction_context, package_info,
# source_prefix, source_short_path,
# target_prefix, target_short_path)
# self.softlink_method = softlink_method
#
# def execute(self):
# log.trace("creating application softlink via %s %s => %s",
# self.softlink_method, self.source_full_path, self.target_full_path)
# getattr(self, self.softlink_method)()
# self._execute_successful = True
#
# def softlink(self):
# symlink(self.source_full_path, self.target_full_path)
# assert islink(self.target_full_path)
#
# def fake_exe_softlink(self): # pragma: unix no cover
# create_fake_executable_softlink(self.source_full_path, self.target_full_path)
#
# def softlink_or_fail_ok(self): # pragma: unix no cover
# try:
# symlink(self.source_full_path, self.target_full_path)
# except (IOError, OSError) as e:
# log.trace('%r', e)
#
# @property
# def leased_path_type(self):
# return LeasedPathType.application_softlink
class CreatePrefixRecordAction(CreateInPrefixPathAction):
# this is the action that creates a package's json file in the conda-meta/ directory
@classmethod
def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type,
requested_spec, all_link_path_actions):
extracted_package_dir = package_info.extracted_package_dir
target_short_path = 'conda-meta/%s.json' % basename(extracted_package_dir)
return cls(transaction_context, package_info, target_prefix, target_short_path,
requested_link_type, requested_spec, all_link_path_actions),
def __init__(self, transaction_context, package_info, target_prefix, target_short_path,
requested_link_type, requested_spec, all_link_path_actions):
super(CreatePrefixRecordAction, self).__init__(transaction_context, package_info,
None, None, target_prefix,
target_short_path)
self.requested_link_type = requested_link_type
self.requested_spec = requested_spec
self.all_link_path_actions = all_link_path_actions
self._execute_successful = False
def execute(self):
link = Link(
source=self.package_info.extracted_package_dir,
type=self.requested_link_type,
)
extracted_package_dir = self.package_info.extracted_package_dir
package_tarball_full_path = extracted_package_dir + CONDA_TARBALL_EXTENSION
# TODO: don't make above assumption; put package_tarball_full_path in package_info
def files_from_action(link_path_action):
if isinstance(link_path_action, CompileMultiPycAction):
return link_path_action.target_short_paths
else:
return (link_path_action.target_short_path, )
def paths_from_action(link_path_action):
if isinstance(link_path_action, CompileMultiPycAction):
return link_path_action.prefix_paths_data
else:
if link_path_action.prefix_path_data is None:
return ()
else:
return (link_path_action.prefix_path_data, )
files = concat((files_from_action(x) for x in self.all_link_path_actions if x))
paths_data = PathsData(
paths_version=1,
paths=concat((paths_from_action(x) for x in self.all_link_path_actions if x)),
)
self.prefix_record = PrefixRecord.from_objects(
self.package_info.repodata_record,
# self.package_info.index_json_record,
self.package_info.package_metadata,
requested_spec=text_type(self.requested_spec),
paths_data=paths_data,
files=files,
link=link,
url=self.package_info.url,
extracted_package_dir=extracted_package_dir,
package_tarball_full_path=package_tarball_full_path,
)
log.trace("creating linked package record %s", self.target_full_path)
PrefixData(self.target_prefix).insert(self.prefix_record)
self._execute_successful = True
def reverse(self):
log.trace("reversing linked package record creation %s", self.target_full_path)
if self._execute_successful:
PrefixData(self.target_prefix).remove(self.package_info.repodata_record.name)
class UpdateHistoryAction(CreateInPrefixPathAction):
@classmethod
def create_actions(cls, transaction_context, target_prefix, remove_specs, update_specs):
target_short_path = join('conda-meta', 'history')
return cls(transaction_context, target_prefix, target_short_path,
remove_specs, update_specs),
def __init__(self, transaction_context, target_prefix, target_short_path, remove_specs,
update_specs):
super(UpdateHistoryAction, self).__init__(transaction_context, None, None, None,
target_prefix, target_short_path)
self.remove_specs = remove_specs
self.update_specs = update_specs
self.hold_path = self.target_full_path + '.c~'
def execute(self):
log.trace("updating environment history %s", self.target_full_path)
if lexists(self.target_full_path):
copy(self.target_full_path, self.hold_path)
h = History(self.target_prefix)
h.update()
h.write_specs(self.remove_specs, self.update_specs)
def reverse(self):
if lexists(self.hold_path):
log.trace("moving %s => %s", self.hold_path, self.target_full_path)
backoff_rename(self.hold_path, self.target_full_path, force=True)
def cleanup(self):
rm_rf(self.hold_path)
class RegisterEnvironmentLocationAction(PathAction):
def __init__(self, transaction_context, target_prefix):
self.transaction_context = transaction_context
self.target_prefix = target_prefix
self._execute_successful = False
def verify(self):
touch(USER_ENVIRONMENTS_TXT_FILE, mkdir=True, sudo_safe=True)
self._verified = True
def execute(self):
log.trace("registering environment in catalog %s", self.target_prefix)
register_env(self.target_prefix)
self._execute_successful = True
def reverse(self):
pass
def cleanup(self):
pass
@property
def target_full_path(self):
raise NotImplementedError()
# ######################################################
# Removal of Paths within a Prefix
# ######################################################
@with_metaclass(ABCMeta)
class RemoveFromPrefixPathAction(PrefixPathAction):
def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path):
super(RemoveFromPrefixPathAction, self).__init__(transaction_context,
target_prefix, target_short_path)
self.linked_package_data = linked_package_data
def verify(self):
# inability to remove will trigger a rollback
# we can't know definitively whether a path can be removed until removal is attempted and fails
self._verified = True
class UnlinkPathAction(RemoveFromPrefixPathAction):
def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path,
link_type=LinkType.hardlink):
super(UnlinkPathAction, self).__init__(transaction_context, linked_package_data,
target_prefix, target_short_path)
conda_temp_extension = '.c~'
self.holding_short_path = self.target_short_path + conda_temp_extension
self.holding_full_path = self.target_full_path + conda_temp_extension
self.link_type = link_type
def execute(self):
if self.link_type != LinkType.directory:
log.trace("renaming %s => %s", self.target_short_path, self.holding_short_path)
backoff_rename(self.target_full_path, self.holding_full_path, force=True)
def reverse(self):
if self.link_type != LinkType.directory and lexists(self.holding_full_path):
log.trace("reversing rename %s => %s", self.holding_short_path, self.target_short_path)
backoff_rename(self.holding_full_path, self.target_full_path, force=True)
def cleanup(self):
if self.link_type == LinkType.directory:
try_rmdir_all_empty(self.target_full_path)
else:
rm_rf(self.holding_full_path)
class RemoveMenuAction(RemoveFromPrefixPathAction):
@classmethod
def create_actions(cls, transaction_context, linked_package_data, target_prefix):
if on_win:
MENU_RE = re.compile(r'^menu/.*\.json$', re.IGNORECASE)
return tuple(cls(transaction_context, linked_package_data, target_prefix, trgt)
for trgt in linked_package_data.files if bool(MENU_RE.match(trgt)))
else:
return ()
def __init__(self, transaction_context, linked_package_data,
target_prefix, target_short_path):
super(RemoveMenuAction, self).__init__(transaction_context, linked_package_data,
target_prefix, target_short_path)
def execute(self):
log.trace("removing menu for %s ", self.target_prefix)
make_menu(self.target_prefix, self.target_short_path, remove=True)
def reverse(self):
log.trace("re-creating menu for %s ", self.target_prefix)
make_menu(self.target_prefix, self.target_short_path, remove=False)
def cleanup(self):
pass
class RemoveLinkedPackageRecordAction(UnlinkPathAction):
def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path):
super(RemoveLinkedPackageRecordAction, self).__init__(transaction_context,
linked_package_data,
target_prefix, target_short_path)
def execute(self):
super(RemoveLinkedPackageRecordAction, self).execute()
PrefixData(self.target_prefix).remove(self.linked_package_data.name)
def reverse(self):
super(RemoveLinkedPackageRecordAction, self).reverse()
PrefixData(self.target_prefix)._load_single_record(self.target_full_path)
class UnregisterEnvironmentLocationAction(PathAction):
def __init__(self, transaction_context, target_prefix):
self.transaction_context = transaction_context
self.target_prefix = target_prefix
self._execute_successful = False
def verify(self):
self._verified = True
def execute(self):
log.trace("unregistering environment in catalog %s", self.target_prefix)
unregister_env(self.target_prefix)
self._execute_successful = True
def reverse(self):
pass
def cleanup(self):
pass
@property
def target_full_path(self):
raise NotImplementedError()
# ######################################################
# Fetch / Extract Actions
# ######################################################
class CacheUrlAction(PathAction):
def __init__(self, url, target_pkgs_dir, target_package_basename,
md5sum=None, expected_size_in_bytes=None):
self.url = url
self.target_pkgs_dir = target_pkgs_dir
self.target_package_basename = target_package_basename
self.md5sum = md5sum
self.expected_size_in_bytes = expected_size_in_bytes
self.hold_path = self.target_full_path + '.c~'
def verify(self):
assert '::' not in self.url
self._verified = True
def execute(self, progress_update_callback=None):
# I hate inline imports, but I guess it's ok since we're importing from within conda.core
# The alternative is passing the PackageCacheData class to CacheUrlAction __init__
from .package_cache_data import PackageCacheData
target_package_cache = PackageCacheData(self.target_pkgs_dir)
log.trace("caching url %s => %s", self.url, self.target_full_path)
if lexists(self.hold_path):
rm_rf(self.hold_path)
if lexists(self.target_full_path):
if self.url.startswith('file:/') and self.url == path_to_url(self.target_full_path):
# the source and destination are the same file, so we're done
return
else:
backoff_rename(self.target_full_path, self.hold_path, force=True)
if self.url.startswith('file:/'):
source_path = unquote(url_to_path(self.url))
if dirname(source_path) in context.pkgs_dirs:
# if url points to another package cache, link to the writable cache
create_hard_link_or_copy(source_path, self.target_full_path)
source_package_cache = PackageCacheData(dirname(source_path))
# the package is already in a cache, so it came from a remote url somewhere;
# make sure that remote url is the most recent url in the
# writable cache urls.txt
origin_url = source_package_cache._urls_data.get_url(self.target_package_basename)
if origin_url and has_platform(origin_url, context.known_subdirs):
target_package_cache._urls_data.add_url(origin_url)
else:
# so our tarball source isn't a package cache, but that doesn't mean it's not
# in another package cache somewhere
# let's try to find the actual, remote source url by matching md5sums, and then
# record that url as the remote source url in urls.txt
# we do the search part of this operation before the create_link so that we
# don't md5sum-match the file created by 'create_link'
# there is no point in looking for the tarball in the cache that we are writing
# this file into because we have already removed the previous file if there was
# any. This also makes sure that we ignore the md5sum of a possible extracted
# directory that might exist in this cache because we are going to overwrite it
# anyway when we extract the tarball.
source_md5sum = compute_md5sum(source_path)
exclude_caches = self.target_pkgs_dir,
pc_entry = PackageCacheData.tarball_file_in_cache(source_path, source_md5sum,
exclude_caches=exclude_caches)
if pc_entry:
origin_url = target_package_cache._urls_data.get_url(
pc_entry.extracted_package_dir
)
else:
origin_url = None
# copy the tarball to the writable cache
create_link(source_path, self.target_full_path, link_type=LinkType.copy,
force=context.force)
if origin_url and has_platform(origin_url, context.known_subdirs):
target_package_cache._urls_data.add_url(origin_url)
else:
target_package_cache._urls_data.add_url(self.url)
else:
download(self.url, self.target_full_path, self.md5sum,
progress_update_callback=progress_update_callback)
target_package_cache._urls_data.add_url(self.url)
def reverse(self):
if lexists(self.hold_path):
log.trace("moving %s => %s", self.hold_path, self.target_full_path)
backoff_rename(self.hold_path, self.target_full_path, force=True)
def cleanup(self):
rm_rf(self.hold_path)
@property
def target_full_path(self):
return join(self.target_pkgs_dir, self.target_package_basename)
def __str__(self):
return 'CacheUrlAction<url=%r, target_full_path=%r>' % (self.url, self.target_full_path)
class ExtractPackageAction(PathAction):
def __init__(self, source_full_path, target_pkgs_dir, target_extracted_dirname,
record_or_spec, md5sum):
self.source_full_path = source_full_path
self.target_pkgs_dir = target_pkgs_dir
self.target_extracted_dirname = target_extracted_dirname
self.hold_path = self.target_full_path + '.c~'
self.record_or_spec = record_or_spec
self.md5sum = md5sum
def verify(self):
self._verified = True
def execute(self, progress_update_callback=None):
# I hate inline imports, but I guess it's ok since we're importing from within conda.core
# The alternative is passing the classes to ExtractPackageAction __init__
from .package_cache_data import PackageCacheData
log.trace("extracting %s => %s", self.source_full_path, self.target_full_path)
if lexists(self.hold_path):
rm_rf(self.hold_path)
if lexists(self.target_full_path):
try:
backoff_rename(self.target_full_path, self.hold_path)
except (IOError, OSError) as e:
if e.errno == EXDEV:
# OSError(18, 'Invalid cross-device link')
# https://github.com/docker/docker/issues/25409
# ignore, but we won't be able to roll back
log.debug("Invalid cross-device link on rename %s => %s",
self.target_full_path, self.hold_path)
rm_rf(self.target_full_path)
else:
raise
extract_tarball(self.source_full_path, self.target_full_path,
progress_update_callback=progress_update_callback)
raw_index_json = read_index_json(self.target_full_path)
if isinstance(self.record_or_spec, MatchSpec):
url = self.record_or_spec.get_raw_value('url')
assert url
channel = Channel(url) if has_platform(url, context.known_subdirs) else Channel(None)
fn = basename(url)
md5 = self.md5sum or compute_md5sum(self.source_full_path)
repodata_record = PackageRecord.from_objects(raw_index_json, url=url,
channel=channel, fn=fn, md5=md5)
else:
repodata_record = PackageRecord.from_objects(self.record_or_spec, raw_index_json)
repodata_record_path = join(self.target_full_path, 'info', 'repodata_record.json')
write_as_json_to_file(repodata_record_path, repodata_record)
target_package_cache = PackageCacheData(self.target_pkgs_dir)
package_cache_record = PackageCacheRecord.from_objects(
repodata_record,
package_tarball_full_path=self.source_full_path,
extracted_package_dir=self.target_full_path,
)
target_package_cache.insert(package_cache_record)
# dist = Dist(recorded_url) if recorded_url else Dist(path_to_url(self.source_full_path))
# package_cache_entry = PackageCacheRecord.make_legacy(self.target_pkgs_dir, dist)
# target_package_cache[package_cache_entry.dist] = package_cache_entry
def reverse(self):
rm_rf(self.target_full_path)
if lexists(self.hold_path):
log.trace("moving %s => %s", self.hold_path, self.target_full_path)
rm_rf(self.target_full_path)
backoff_rename(self.hold_path, self.target_full_path)
def cleanup(self):
rm_rf(self.hold_path)
@property
def target_full_path(self):
return join(self.target_pkgs_dir, self.target_extracted_dirname)
def __str__(self):
return ('ExtractPackageAction<source_full_path=%r, target_full_path=%r>'
% (self.source_full_path, self.target_full_path))
|
/*! For license information please see 111.75c17ec3.chunk.js.LICENSE.txt */
(this["webpackJsonpmy-app"]=this["webpackJsonpmy-app"]||[]).push([[111],{475:function(e,a,n){"use strict";var r,i;n.r(a),n.d(a,"S",(function(){return r})),(i=r||(r={})).green="checkCircle",i.yellow="exclamationMarkTriangle",i.red="exclamationMarkTriangle",i.blue="lightbulb"}}]);
//# sourceMappingURL=111.75c17ec3.chunk.js.map
|
import operator
from typing import Any, Dict
import sqlalchemy as sa
import sqlalchemy.sql as sql
import ibis
import ibis.common.exceptions as com
import ibis.expr.analysis as L
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.expr.types as ir
import ibis.expr.window as W
from .database import AlchemyTable
from .geospatial import geospatial_supported
def variance_reduction(func_name):
suffix = {'sample': 'samp', 'pop': 'pop'}
def variance_compiler(t, expr):
arg, how, where = expr.op().args
if arg.type().equals(dt.boolean):
arg = arg.cast('int32')
func = getattr(
sa.func, '{}_{}'.format(func_name, suffix.get(how, 'samp'))
)
if where is not None:
arg = where.ifelse(arg, None)
return func(t.translate(arg))
return variance_compiler
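# Illustrative usage (a sketch; concrete entries live in each dialect's operation
# registry): a dialect whose database exposes VAR_SAMP/VAR_POP and
# STDDEV_SAMP/STDDEV_POP could register
# ops.Variance: variance_reduction('var')
# ops.StandardDev: variance_reduction('stddev')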
def infix_op(infix_sym):
def formatter(t, expr):
op = expr.op()
left, right = op.args
left_arg = t.translate(left)
right_arg = t.translate(right)
return left_arg.op(infix_sym)(right_arg)
return formatter
def fixed_arity(sa_func, arity):
if isinstance(sa_func, str):
sa_func = getattr(sa.func, sa_func)
def formatter(t, expr):
if arity != len(expr.op().args):
raise com.IbisError('incorrect number of args')
return _varargs_call(sa_func, t, expr)
return formatter
def varargs(sa_func):
def formatter(t, expr):
op = expr.op()
trans_args = [t.translate(arg) for arg in op.arg]
return sa_func(*trans_args)
return formatter
def _varargs_call(sa_func, t, expr):
op = expr.op()
trans_args = [t.translate(arg) for arg in op.args]
return sa_func(*trans_args)
def get_sqla_table(ctx, table):
if ctx.has_ref(table, parent_contexts=True):
ctx_level = ctx
sa_table = ctx_level.get_ref(table)
while sa_table is None and ctx_level.parent is not ctx_level:
ctx_level = ctx_level.parent
sa_table = ctx_level.get_ref(table)
else:
op = table.op()
if isinstance(op, AlchemyTable):
sa_table = op.sqla_table
else:
sa_table = ctx.get_compiled_expr(table)
return sa_table
def _table_column(t, expr):
op = expr.op()
ctx = t.context
table = op.table
sa_table = get_sqla_table(ctx, table)
out_expr = getattr(sa_table.c, op.name)
# If the column does not originate from the table set in the current SELECT
# context, we should format as a subquery
if t.permit_subquery and ctx.is_foreign_expr(table):
return sa.select([out_expr])
return out_expr
def _table_array_view(t, expr):
ctx = t.context
table = ctx.get_compiled_expr(expr.op().table)
return table
def _exists_subquery(t, expr):
from .query_builder import AlchemyCompiler
op = expr.op()
ctx = t.context
filtered = op.foreign_table.filter(op.predicates).projection(
[ir.literal(1).name(ir.unnamed)]
)
sub_ctx = ctx.subcontext()
clause = AlchemyCompiler.to_sql(filtered, sub_ctx, exists=True)
if isinstance(op, ops.NotExistsSubquery):
clause = sa.not_(clause)
return clause
def _cast(t, expr):
op = expr.op()
arg, target_type = op.args
sa_arg = t.translate(arg)
sa_type = t.get_sqla_type(target_type)
if isinstance(arg, ir.CategoryValue) and target_type == 'int32':
return sa_arg
else:
return sa.cast(sa_arg, sa_type)
def _contains(t, expr):
op = expr.op()
left, right = (t.translate(arg) for arg in op.args)
return left.in_(right)
def _not_contains(t, expr):
return sa.not_(_contains(t, expr))
def reduction(sa_func):
def formatter(t, expr):
op = expr.op()
if op.where is not None:
arg = t.translate(op.where.ifelse(op.arg, ibis.NA))
else:
arg = t.translate(op.arg)
return sa_func(arg)
return formatter
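# e.g. ops.Sum with a where filter is emitted (roughly) as
# SUM(CASE WHEN <where> THEN <arg> ELSE NULL END),
# since where.ifelse(arg, ibis.NA) nulls out the filtered rows before the
# aggregate sees them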
def _group_concat(t, expr):
op = expr.op()
sep = t.translate(op.sep)
if op.where is not None:
arg = t.translate(op.where.ifelse(op.arg, ibis.NA))
else:
arg = t.translate(op.arg)
return sa.func.group_concat(arg, sep)
def _literal(t, expr):
dtype = expr.type()
value = expr.op().value
if isinstance(dtype, dt.Set):
return list(map(sa.literal, value))
return sa.literal(value)
def _value_list(t, expr):
return [t.translate(x) for x in expr.op().values]
def _is_null(t, expr):
arg = t.translate(expr.op().args[0])
return arg.is_(sa.null())
def _not_null(t, expr):
arg = t.translate(expr.op().args[0])
return arg.isnot(sa.null())
def _round(t, expr):
op = expr.op()
arg, digits = op.args
sa_arg = t.translate(arg)
f = sa.func.round
if digits is not None:
sa_digits = t.translate(digits)
return f(sa_arg, sa_digits)
else:
return f(sa_arg)
def _floor_divide(t, expr):
left, right = map(t.translate, expr.op().args)
return sa.func.floor(left / right)
def _count_distinct(t, expr):
arg, where = expr.op().args
if where is not None:
sa_arg = t.translate(where.ifelse(arg, None))
else:
sa_arg = t.translate(arg)
return sa.func.count(sa_arg.distinct())
def _simple_case(t, expr):
op = expr.op()
cases = [op.base == case for case in op.cases]
return _translate_case(t, cases, op.results, op.default)
def _searched_case(t, expr):
op = expr.op()
return _translate_case(t, op.cases, op.results, op.default)
def _translate_case(t, cases, results, default):
case_args = [t.translate(arg) for arg in cases]
result_args = [t.translate(arg) for arg in results]
whens = zip(case_args, result_args)
default = t.translate(default)
return sa.case(list(whens), else_=default)
def _negate(t, expr):
op = expr.op()
(arg,) = map(t.translate, op.args)
return sa.not_(arg) if isinstance(expr, ir.BooleanValue) else -arg
def unary(sa_func):
return fixed_arity(sa_func, 1)
def _string_like(t, expr):
arg, pattern, escape = expr.op().args
result = t.translate(arg).like(t.translate(pattern), escape=escape)
return result
def _startswith(t, expr):
arg, start = expr.op().args
return t.translate(arg).startswith(t.translate(start))
def _endswith(t, expr):
arg, end = expr.op().args
return t.translate(arg).endswith(t.translate(end))
_cumulative_to_reduction = {
ops.CumulativeSum: ops.Sum,
ops.CumulativeMin: ops.Min,
ops.CumulativeMax: ops.Max,
ops.CumulativeMean: ops.Mean,
ops.CumulativeAny: ops.Any,
ops.CumulativeAll: ops.All,
}
def _cumulative_to_window(translator, expr, window):
win = W.cumulative_window()
win = win.group_by(window._group_by).order_by(window._order_by)
op = expr.op()
klass = _cumulative_to_reduction[type(op)]
new_op = klass(*op.args)
new_expr = expr._factory(new_op, name=expr._name)
if type(new_op) in translator._rewrites:
new_expr = translator._rewrites[type(new_op)](new_expr)
return L.windowize_function(new_expr, win)
def _window(t, expr):
op = expr.op()
arg, window = op.args
reduction = t.translate(arg)
window_op = arg.op()
_require_order_by = (
ops.DenseRank,
ops.MinRank,
ops.NTile,
ops.PercentRank,
)
if isinstance(window_op, ops.CumulativeOp):
arg = _cumulative_to_window(t, arg, window)
return t.translate(arg)
if window.max_lookback is not None:
raise NotImplementedError(
'Rows with max lookback is not implemented '
'for SQLAlchemy-based backends.'
)
# Some analytic functions need to have the expression of interest in
# the ORDER BY part of the window clause
if isinstance(window_op, _require_order_by) and not window._order_by:
order_by = t.translate(window_op.args[0])
else:
order_by = list(map(t.translate, window._order_by))
partition_by = list(map(t.translate, window._group_by))
frame_clause_not_allowed = (
ops.Lag,
ops.Lead,
ops.DenseRank,
ops.MinRank,
ops.NTile,
ops.PercentRank,
ops.RowNumber,
)
how = {'range': 'range_'}.get(window.how, window.how)
preceding = window.preceding
additional_params = (
{}
if isinstance(window_op, frame_clause_not_allowed)
else {
how: (
-preceding if preceding is not None else preceding,
window.following,
)
}
)
result = reduction.over(
partition_by=partition_by, order_by=order_by, **additional_params
)
if isinstance(
window_op, (ops.RowNumber, ops.DenseRank, ops.MinRank, ops.NTile)
):
return result - 1
else:
return result
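# Note (inferred intent): SQL ranking functions such as ROW_NUMBER() and RANK()
# are 1-based, while ibis exposes 0-based results, hence the `result - 1`
# adjustment above for RowNumber, DenseRank, MinRank and NTile.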
def _lag(t, expr):
arg, offset, default = expr.op().args
if default is not None:
raise NotImplementedError()
sa_arg = t.translate(arg)
sa_offset = t.translate(offset) if offset is not None else 1
return sa.func.lag(sa_arg, sa_offset)
def _lead(t, expr):
arg, offset, default = expr.op().args
if default is not None:
raise NotImplementedError()
sa_arg = t.translate(arg)
sa_offset = t.translate(offset) if offset is not None else 1
return sa.func.lead(sa_arg, sa_offset)
def _ntile(t, expr):
op = expr.op()
args = op.args
arg, buckets = map(t.translate, args)
return sa.func.ntile(buckets)
def _sort_key(t, expr):
# We need to define this for window functions that have an order by
by, ascending = expr.op().args
sort_direction = sa.asc if ascending else sa.desc
return sort_direction(t.translate(by))
sqlalchemy_operation_registry: Dict[Any, Any] = {
ops.And: fixed_arity(sql.and_, 2),
ops.Or: fixed_arity(sql.or_, 2),
ops.Not: unary(sa.not_),
ops.Abs: unary(sa.func.abs),
ops.Cast: _cast,
ops.Coalesce: varargs(sa.func.coalesce),
ops.NullIf: fixed_arity(sa.func.nullif, 2),
ops.Contains: _contains,
ops.NotContains: _not_contains,
ops.Count: reduction(sa.func.count),
ops.Sum: reduction(sa.func.sum),
ops.Mean: reduction(sa.func.avg),
ops.Min: reduction(sa.func.min),
ops.Max: reduction(sa.func.max),
ops.CountDistinct: _count_distinct,
ops.GroupConcat: _group_concat,
ops.Between: fixed_arity(sa.between, 3),
ops.IsNull: _is_null,
ops.NotNull: _not_null,
ops.Negate: _negate,
ops.Round: _round,
ops.TypeOf: unary(sa.func.typeof),
ops.Literal: _literal,
ops.ValueList: _value_list,
ops.NullLiteral: lambda *args: sa.null(),
ops.SimpleCase: _simple_case,
ops.SearchedCase: _searched_case,
ops.TableColumn: _table_column,
ops.TableArrayView: _table_array_view,
ops.ExistsSubquery: _exists_subquery,
ops.NotExistsSubquery: _exists_subquery,
# miscellaneous varargs
ops.Least: varargs(sa.func.least),
ops.Greatest: varargs(sa.func.greatest),
# string
ops.LPad: fixed_arity(sa.func.lpad, 3),
ops.RPad: fixed_arity(sa.func.rpad, 3),
ops.Strip: unary(sa.func.trim),
ops.LStrip: unary(sa.func.ltrim),
ops.RStrip: unary(sa.func.rtrim),
ops.Repeat: fixed_arity(sa.func.repeat, 2),
ops.Reverse: unary(sa.func.reverse),
ops.StrRight: fixed_arity(sa.func.right, 2),
ops.Lowercase: unary(sa.func.lower),
ops.Uppercase: unary(sa.func.upper),
ops.StringAscii: unary(sa.func.ascii),
ops.StringLength: unary(sa.func.length),
ops.StringReplace: fixed_arity(sa.func.replace, 3),
ops.StringSQLLike: _string_like,
ops.StartsWith: _startswith,
ops.EndsWith: _endswith,
# math
ops.Ln: unary(sa.func.ln),
ops.Exp: unary(sa.func.exp),
ops.Sign: unary(sa.func.sign),
ops.Sqrt: unary(sa.func.sqrt),
ops.Ceil: unary(sa.func.ceil),
ops.Floor: unary(sa.func.floor),
ops.Power: fixed_arity(sa.func.pow, 2),
ops.FloorDivide: _floor_divide,
# other
ops.SortKey: _sort_key,
}
# TODO: unit tests for each of these
_binary_ops = {
# Binary arithmetic
ops.Add: operator.add,
ops.Subtract: operator.sub,
ops.Multiply: operator.mul,
# XXX `ops.Divide` is overwritten in `translator.py` with a custom
# function `_true_divide`, but for some reason both are required
ops.Divide: operator.truediv,
ops.Modulus: operator.mod,
# Comparisons
ops.Equals: operator.eq,
ops.NotEquals: operator.ne,
ops.Less: operator.lt,
ops.LessEqual: operator.le,
ops.Greater: operator.gt,
ops.GreaterEqual: operator.ge,
ops.IdenticalTo: lambda x, y: x.op('IS NOT DISTINCT FROM')(y),
# Boolean comparisons
# TODO
}
sqlalchemy_window_functions_registry = {
ops.Lag: _lag,
ops.Lead: _lead,
ops.NTile: _ntile,
ops.FirstValue: unary(sa.func.first_value),
ops.LastValue: unary(sa.func.last_value),
ops.RowNumber: fixed_arity(lambda: sa.func.row_number(), 0),
ops.DenseRank: unary(lambda arg: sa.func.dense_rank()),
ops.MinRank: unary(lambda arg: sa.func.rank()),
ops.PercentRank: unary(lambda arg: sa.func.percent_rank()),
ops.WindowOp: _window,
ops.CumulativeOp: _window,
ops.CumulativeMax: unary(sa.func.max),
ops.CumulativeMin: unary(sa.func.min),
ops.CumulativeSum: unary(sa.func.sum),
ops.CumulativeMean: unary(sa.func.avg),
}
if geospatial_supported:
_geospatial_functions = {
ops.GeoArea: unary(sa.func.ST_Area),
ops.GeoAsBinary: unary(sa.func.ST_AsBinary),
ops.GeoAsEWKB: unary(sa.func.ST_AsEWKB),
ops.GeoAsEWKT: unary(sa.func.ST_AsEWKT),
ops.GeoAsText: unary(sa.func.ST_AsText),
ops.GeoAzimuth: fixed_arity(sa.func.ST_Azimuth, 2),
ops.GeoBuffer: fixed_arity(sa.func.ST_Buffer, 2),
ops.GeoCentroid: unary(sa.func.ST_Centroid),
ops.GeoContains: fixed_arity(sa.func.ST_Contains, 2),
ops.GeoContainsProperly: fixed_arity(sa.func.ST_Contains, 2),
ops.GeoCovers: fixed_arity(sa.func.ST_Covers, 2),
ops.GeoCoveredBy: fixed_arity(sa.func.ST_CoveredBy, 2),
ops.GeoCrosses: fixed_arity(sa.func.ST_Crosses, 2),
ops.GeoDFullyWithin: fixed_arity(sa.func.ST_DFullyWithin, 3),
ops.GeoDifference: fixed_arity(sa.func.ST_Difference, 2),
ops.GeoDisjoint: fixed_arity(sa.func.ST_Disjoint, 2),
ops.GeoDistance: fixed_arity(sa.func.ST_Distance, 2),
ops.GeoDWithin: fixed_arity(sa.func.ST_DWithin, 3),
ops.GeoEndPoint: unary(sa.func.ST_EndPoint),
ops.GeoEnvelope: unary(sa.func.ST_Envelope),
ops.GeoEquals: fixed_arity(sa.func.ST_Equals, 2),
ops.GeoGeometryN: fixed_arity(sa.func.ST_GeometryN, 2),
ops.GeoGeometryType: unary(sa.func.ST_GeometryType),
ops.GeoIntersection: fixed_arity(sa.func.ST_Intersection, 2),
ops.GeoIntersects: fixed_arity(sa.func.ST_Intersects, 2),
ops.GeoIsValid: unary(sa.func.ST_IsValid),
ops.GeoLineLocatePoint: fixed_arity(sa.func.ST_LineLocatePoint, 2),
ops.GeoLineMerge: unary(sa.func.ST_LineMerge),
ops.GeoLineSubstring: fixed_arity(sa.func.ST_LineSubstring, 3),
ops.GeoLength: unary(sa.func.ST_Length),
ops.GeoNPoints: unary(sa.func.ST_NPoints),
ops.GeoOrderingEquals: fixed_arity(sa.func.ST_OrderingEquals, 2),
ops.GeoOverlaps: fixed_arity(sa.func.ST_Overlaps, 2),
ops.GeoPerimeter: unary(sa.func.ST_Perimeter),
ops.GeoSimplify: fixed_arity(sa.func.ST_Simplify, 3),
ops.GeoSRID: unary(sa.func.ST_SRID),
ops.GeoSetSRID: fixed_arity(sa.func.ST_SetSRID, 2),
ops.GeoStartPoint: unary(sa.func.ST_StartPoint),
ops.GeoTouches: fixed_arity(sa.func.ST_Touches, 2),
ops.GeoTransform: fixed_arity(sa.func.ST_Transform, 2),
ops.GeoUnaryUnion: unary(sa.func.ST_Union),
ops.GeoUnion: fixed_arity(sa.func.ST_Union, 2),
ops.GeoWithin: fixed_arity(sa.func.ST_Within, 2),
ops.GeoX: unary(sa.func.ST_X),
ops.GeoY: unary(sa.func.ST_Y),
# Missing Geospatial ops:
# ST_AsGML
# ST_AsGeoJSON
# ST_AsKML
# ST_AsRaster
# ST_AsSVG
# ST_AsTWKB
# ST_Distance_Sphere
# ST_Dump
# ST_DumpPoints
# ST_GeogFromText
# ST_GeomFromEWKB
# ST_GeomFromEWKT
# ST_GeomFromText
}
sqlalchemy_operation_registry.update(_geospatial_functions)
for _k, _v in _binary_ops.items():
sqlalchemy_operation_registry[_k] = fixed_arity(_v, 2)
|
import pytest
# import mock
import qarnot
from qarnot.pool import Pool
from qarnot.task import Task
from qarnot.bucket import Bucket
from qarnot.advanced_bucket import BucketPrefixFiltering, PrefixResourcesTransformation
import datetime
from .mock_connection import MockConnection
class TestAdvancedResourceBucketsMethods:
def test_check_the_init_values(self):
bucket = Bucket(MockConnection(), "name", False)
bucket2 = bucket.with_filtering(BucketPrefixFiltering("test")).with_resource_transformation(PrefixResourcesTransformation("test2"))
assert "name" == bucket2.uuid
assert "test" == bucket2._filtering._filters["prefixFiltering"].prefix
assert "test2" == bucket2._resources_transformation._resource_transformers["stripPrefix"].prefix
bucket = Bucket(MockConnection(), "name", False)
bucket2 = bucket.with_resource_transformation(PrefixResourcesTransformation("test2")).with_filtering(BucketPrefixFiltering("test"))
assert "name" == bucket2.uuid
assert "test" == bucket2._filtering._filters["prefixFiltering"].prefix
assert "test2" == bucket2._resources_transformation._resource_transformers["stripPrefix"].prefix
def test_create_an_advance_resource_json(self):
"""
{
BucketName:"name",
Filtering: {
BucketPrefixFiltering: {
Prefix:"prefix"
}
},
ResourcesTransformation: {
StripPrefix: {
Prefix:"prefix"
}
}
}
"""
bucket = Bucket(MockConnection(), "name", False)
bucket2 = bucket.with_filtering(BucketPrefixFiltering("prefix1")).with_resource_transformation(PrefixResourcesTransformation("prefix2"))
json_dict = bucket2.to_json()
assert "name" == json_dict["bucketName"]
assert "prefix1" == json_dict["filtering"]["prefixFiltering"]["prefix"]
assert "prefix2" == json_dict["resourcesTransformation"]["stripPrefix"]["prefix"]
def test_create_an_advance_bucket_from_a_json(self):
json = {
"bucketName": "name",
"filtering": {
"prefixFiltering": {
"prefix": "prefix1"
}
},
"resourcesTransformation": {
"stripPrefix": {
"prefix": "prefix2"
}
}
}
bucket = Bucket.from_json(MockConnection(), json)
assert "name" == bucket.uuid
assert "prefix1" == bucket._filtering._filters["prefixFiltering"].prefix
assert "prefix2" == bucket._resources_transformation._resource_transformers["stripPrefix"].prefix
|
const Discord = require("discord.js");
/** @type {import("../../types/command").CommandFile} */
module.exports = {
commandDescription: "Affiche des informations sur le serveur actuel.",
async execute(message) {
if (message.guild) {
            /** @type {string[]} */
            let admins = [];
            // Fetch the current guild's data.
const currentGuild = await message.client.guilds.fetch(message.guild.id);
const channelsCount = currentGuild.channels.cache.size;
            // Fetch the guild's members.
const currentGuildMembers = await currentGuild.members.fetch();
const membersCount = currentGuildMembers.filter(member => !member.user.bot).size;
const botsCount = currentGuildMembers.filter(member => member.user.bot).size;
            // Collect the guild's administrators.
currentGuildMembers.each(
/** @param {Discord.GuildMember} member */
member => {
if (member.permissions.has("ADMINISTRATOR")) {
admins.push(member.displayName);
}
}
);
            // Build the embed.
const embed = new Discord.MessageEmbed()
.setTitle(message.guild.name)
.setDescription("**﹒ ﹕ ̟乀 Informations sur ce serveur.꒷.✦**")
.setThumbnail(message.guild.iconURL())
.addFields(
{
name: "₊˚દ Administrateurs ┊ ⋆ 。 ",
value: admins.join(", ")
},
{
name: "₊˚દ Date de création ┊ ⋆ 。 ",
value: `${message.guild.createdAt.toDateString()} à ${message.guild.createdAt.toTimeString()}`
},
{
name: "₊˚દ Salons ┊ ⋆ 。 ",
value: String(channelsCount ?? 0),
inline: true
},
{
name: "₊˚દ Membres ┊ ⋆ 。 ",
value: String(membersCount ?? 0),
inline: true
},
{
name: "₊˚દ Bots ┊ ⋆ 。 ",
value: String(botsCount ?? 0),
inline: true
}
)
.setTimestamp()
.setFooter(`ID: ${message.guild.id}`);
await message.channel.send({
embeds: [embed]
});
}
        // Message sent as a DM; the guild info cannot be retrieved.
else {
return message.reply(
"﹒ ﹕ ̟乀 :warning: - Tu dois envoyer cette commande depuis un serveur qui possède ce bot. ꒷꒦︶︶"
);
}
}
};
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Abaxis VetScan VS2
"""
from datetime import datetime
from bika.lims.exportimport.instruments.resultsimport import \
AnalysisResultsImporter, InstrumentCSVResultsFileParser
class AbaxisVetScanCSVParser(InstrumentCSVResultsFileParser):
def __init__(self, csv):
InstrumentCSVResultsFileParser.__init__(self, csv)
self._columns = [] # The different columns names
self._linedata = {} # The line with the data
self._rownum = None
self._end_header = False
def _parseline(self, line):
sline = line.split(';')
if len(sline) > 0 and not self._end_header:
self._columns = sline
self._end_header = True
return 0
        elif len(sline) > 0 and self._end_header:
            return self.parse_data_line(sline)
else:
self.err("Unexpected data format", numline=self._numline)
return -1
def parse_data_line(self, sline):
"""
Parses the data line and builds the dictionary.
:param sline: a split data line to parse
:return: the number of rows to jump and parse the next data line or return the code error -1
"""
        # if there are fewer values found than headers, it's an error
if len(sline) != len(self._columns):
self.err("One data line has the wrong number of items")
return -1
values = {}
remark = ''
date = ''
resid = ''
for idx, result in enumerate(sline):
if self._columns[idx] == 'Date':
date = self.csvDate2BikaDate(result)
elif self._columns[idx] == 'Patient no.':
resid = result
elif self._columns[idx] == 'Customer no.':
remark = result
elif self._columns[idx] != '':
values[self._columns[idx]] = {
'result': result,
'DefaultResult': 'result',
'Remarks': remark,
'DateTime': date,
}
self._addRawResult(resid, values, False)
return 0
def csvDate2BikaDate(self, DateTime):
        # example: 11/03/2014 02:46:46 P.M. --> %d/%m/%Y %I:%M:%S %p
        Date, Time, locale = DateTime.replace('.', '').split(' ')
        dtobj = datetime.strptime(Date + ' ' + Time + ' ' + locale, "%d/%m/%Y %I:%M:%S %p")
return dtobj.strftime("%Y%m%d %H:%M")
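# Illustrative example (assumption, based on the format strings above):
# csvDate2BikaDate("11/03/2014 02:46:46 P.M.") returns "20140311 14:46".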
class AbaxisVetScanImporter(AnalysisResultsImporter):
def __init__(self, parser, context, idsearchcriteria, override,
allowed_ar_states=None, allowed_analysis_states=None,
instrument_uid=None):
AnalysisResultsImporter.__init__(self, parser, context,
idsearchcriteria, override,
allowed_ar_states,
allowed_analysis_states,
instrument_uid)
|
/*******************************************************************************
* Copyright 2020 FUJITSU LIMITED
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/* 2020/06/29 22:25 */
void Xbyak::CodeGenerator::translateVFNMADD231PS(xed_decoded_inst_t *p) {
namespace xa = Xbyak_aarch64;
struct xt_a64fx_operands_structV3_t a64_opt;
struct xt_a64fx_operands_structV3_t a64;
xt_construct_a64fx_operandsV3(p, &a64_opt, false, true);
xt_construct_a64fx_operandsV3(p, &a64);
bool isValid = false;
xt_reg_idx_t dstIdx = XT_REG_INVALID;
xt_reg_idx_t srcIdx = XT_REG_INVALID;
xt_reg_idx_t src2Idx = XT_REG_INVALID;
xt_reg_idx_t maskIdx = XT_REG_INVALID;
xt_reg_idx_t zTmpIdx = XT_REG_INVALID;
/* Col=AE119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
zTmpIdx = xt_push_zreg(&a64);
}
/* Col=AL119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 256 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
dstIdx = a64.operands[0].regIdx;
}
/* Col=AO119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 256 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
srcIdx = a64.operands[2].regIdx;
}
/* Col=AQ119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 256 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
src2Idx = a64.operands[3].regIdx;
}
/* Col=AR119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
src2Idx = zTmpIdx;
}
/* Col=BB119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
xa_->ldr(xa::ZReg(zTmpIdx), xa::ptr(X_TMP_ADDR));
}
/* Col=BF119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 256 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
xa_->fmls(xa::ZRegS(dstIdx), P_ALL_ONE_0_7 / xa::T_m, xa::ZRegS(srcIdx),
xa::ZRegS(src2Idx));
}
/* Col=BJ119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 256 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
xa_->mov(xa::ZRegS(dstIdx), P_MSB_256 / xa::T_m, 0);
}
/* Col=BQ119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
xt_pop_zreg();
}
/* Col=BY119*/
if (false ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 256 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_REG3 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true) ||
(a64_opt.operands[2].opName == XED_OPERAND_REG2 &&
a64_opt.operands[3].opName == XED_OPERAND_MEM0 &&
a64_opt.operands[0].opWidth == 512 && a64_opt.predType == A64_PRED_NO &&
a64_opt.EVEXb == 0 && isAvailAll1Preg0_7() == true && true)) {
return;
}
/* Col=T119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true)) {
XT_UNIMPLEMENTED;
}
/* Col=U119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
maskIdx = xt_push_preg(&a64);
}
/* Col=W119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
zTmpIdx = xt_push_zreg(&a64);
}
/* Col=X119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->mov(xa::PRegB(maskIdx), P_ALL_ONE.b);
}
/* Col=AD119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
dstIdx = a64.operands[0].regIdx;
}
/* Col=AF119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true)) {
srcIdx = a64.operands[1].regIdx;
}
/* Col=AG119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
srcIdx = a64.operands[2].regIdx;
}
/* Col=AH119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true)) {
src2Idx = a64.operands[2].regIdx;
}
/* Col=AI119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
src2Idx = a64.operands[3].regIdx;
}
/* Col=AJ119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
src2Idx = zTmpIdx;
}
/* Col=AN119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->ptrue(xa::PRegS(maskIdx), xa::VL8);
}
/* Col=AQ119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->ldr(xa::QReg(zTmpIdx), xa::ptr(X_TMP_ADDR));
}
/* Col=AS119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->ld1w(xa::ZRegS(zTmpIdx), xa::PReg(maskIdx) / xa::T_z,
xa::ptr(X_TMP_ADDR));
}
/* Col=AT119*/
if (false || (a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->ldr(xa::ZReg(zTmpIdx), xa::ptr(X_TMP_ADDR));
}
/* Col=AW119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->fmls(xa::VReg4S(dstIdx), xa::VReg4S(srcIdx), xa::VReg4S(src2Idx));
}
/* Col=AX119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->fmls(xa::ZRegS(dstIdx), xa::PReg(maskIdx) / xa::T_m, xa::ZRegS(srcIdx),
xa::ZRegS(src2Idx));
}
/* Col=BB119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xa_->mov(xa::ZRegS(dstIdx), P_MSB_256 / xa::T_m, 0);
}
/* Col=BI119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xt_pop_zreg();
}
/* Col=BJ119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true)) {
xt_pop_preg();
}
/* Col=BQ119*/
if (false ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 128 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_MEM0 &&
a64.operands[3].opName == XED_OPERAND_INVALID &&
a64.operands[0].opWidth == 256 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 128 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 256 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_NO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_ZERO &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_REG3 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 0 && true) ||
(a64.operands[2].opName == XED_OPERAND_REG2 &&
a64.operands[3].opName == XED_OPERAND_MEM0 &&
a64.operands[0].opWidth == 512 && a64.predType == A64_PRED_MERG &&
a64.EVEXb == 1 && true)) {
XT_VALID_CHECK;
}
XT_VALID_CHECK_IF;
}
|
import { render, createElement, useEffect } from 'rax';
import unmountComponentAtNode from 'rax-unmount-component-at-node';
function Portal(props) {
useEffect(() => {
    // A nested render during hydration would throw, so trigger it in useEffect instead.
render(props.element, props.container, {
parent: this
});
});
useEffect(() => {
return () => {
unmountComponentAtNode(props.container);
};
}, [props.container]);
return null;
}
export default function createPortal(element, container) {
return createElement(Portal, {
element,
container,
});
}
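// Usage sketch (hypothetical names; assumes a mounted host node is available):
//   const overlayRoot = document.getElementById('overlay-root');
//   function App() {
//     return createPortal(<Modal />, overlayRoot);
//   }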
|
# This is here to help code readability
# it helps furbot find relevant tags
# Thanks to \u\_PM_ME_GFUR_ for compiling the lists this bot uses
# Priority goes to gender, fetish, pairing, body, act, positions and then others.
tag_limit = 25
# Get the list from a text file.
def get_list(list_url):
    if isinstance(list_url, str):
        # Read and close the file promptly instead of leaking the handle.
        with open(list_url, 'r') as list_file:
            full_list = list_file.read()
        split_list = full_list.split('|')
        finished_list = list(filter(None, split_list))
        return finished_list
# Sort tags into categories, based on provided categories.
def find_tags(tags, search_list):
    # Keep the tags that appear in the category list, preserving their order.
    return [tag for tag in tags if tag in search_list]
def find_other_tags(tags, search_list):
    # Keep the tags that did not match any category list.
    return [tag for tag in tags if tag not in search_list]
# Starts the search
# Called by Furbot
def start_searching(tags):
# start by filling all the tag lists.
gender_tags = find_tags(tags, get_list('tag/gender.txt'))
fetish_tags = find_tags(tags, get_list('tag/fetishes.txt'))
pairing_tags = find_tags(tags, get_list('tag/pairings.txt'))
body_tags = find_tags(tags, get_list('tag/body.txt'))
act_tags = find_tags(tags, get_list('tag/sex_acts.txt'))
position_tags = find_tags(tags, get_list('tag/positions.txt'))
# If it was not caught before, it's not in a previous list.
other_tags = find_other_tags(tags, gender_tags + fetish_tags + pairing_tags + body_tags + act_tags + position_tags)
fixed_tag_list = gender_tags + fetish_tags + pairing_tags + body_tags + act_tags + position_tags + other_tags
# Create the short list by slicing
short_list = fixed_tag_list[:tag_limit]
tag_count = len(short_list)
extra_tags = len(fixed_tag_list) - tag_count
if len(fixed_tag_list) == tag_limit + 1:
short_list.append('**^^^^And ^^^^' + str(extra_tags) + ' ^^^^other ^^^^tag**')
elif len(fixed_tag_list) > tag_limit + 1:
short_list.append('**^^^^And ^^^^' + str(extra_tags) + ' ^^^^other ^^^^tags**')
return short_list
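# Minimal usage sketch (the sample tags are hypothetical, and the tag/*.txt
# category files are assumed to sit next to this script):
if __name__ == '__main__':
    sample_tags = ['male', 'female', 'hug', 'wolf']
    for tag in start_searching(sample_tags):
        print(tag)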
|
module.exports = function(obj, keys){
obj = obj || {};
if ('string' == typeof keys) keys = keys.split(/ +/);
return keys.reduce(function(ret, key){
ret[key] = obj[key];
return ret;
}, {});
};
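// Usage sketch (hypothetical values): keep only an allow-listed subset of an object.
//   const pick = require('./pick'); // path is illustrative
//   pick({ a: 1, b: 2, c: 3 }, 'a c'); // => { a: 1, c: 3 }
//   pick({ a: 1, b: 2 }, ['b']);       // => { b: 2 }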
|
// WARNING: ODEBUG bug in corrupted
// https://syzkaller.appspot.com/bug?id=06d4181acaa4748544f0
// status:6
// autogenerated by syzkaller (https://github.com/google/syzkaller)
#define _GNU_SOURCE
#include <arpa/inet.h>
#include <dirent.h>
#include <endian.h>
#include <errno.h>
#include <fcntl.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <netinet/in.h>
#include <pthread.h>
#include <sched.h>
#include <setjmp.h>
#include <signal.h>
#include <stdarg.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/epoll.h>
#include <sys/ioctl.h>
#include <sys/mount.h>
#include <sys/prctl.h>
#include <sys/resource.h>
#include <sys/socket.h>
#include <sys/stat.h>
#include <sys/syscall.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <sys/wait.h>
#include <time.h>
#include <unistd.h>
#include <linux/capability.h>
#include <linux/futex.h>
#include <linux/genetlink.h>
#include <linux/if_addr.h>
#include <linux/if_ether.h>
#include <linux/if_link.h>
#include <linux/if_tun.h>
#include <linux/in6.h>
#include <linux/ip.h>
#include <linux/neighbour.h>
#include <linux/net.h>
#include <linux/netlink.h>
#include <linux/rfkill.h>
#include <linux/rtnetlink.h>
#include <linux/tcp.h>
#include <linux/veth.h>
static unsigned long long procid;
static __thread int skip_segv;
static __thread jmp_buf segv_env;
static void segv_handler(int sig, siginfo_t* info, void* ctx)
{
uintptr_t addr = (uintptr_t)info->si_addr;
const uintptr_t prog_start = 1 << 20;
const uintptr_t prog_end = 100 << 20;
int skip = __atomic_load_n(&skip_segv, __ATOMIC_RELAXED) != 0;
int valid = addr < prog_start || addr > prog_end;
if (skip && valid) {
_longjmp(segv_env, 1);
}
exit(sig);
}
static void install_segv_handler(void)
{
struct sigaction sa;
memset(&sa, 0, sizeof(sa));
sa.sa_handler = SIG_IGN;
syscall(SYS_rt_sigaction, 0x20, &sa, NULL, 8);
syscall(SYS_rt_sigaction, 0x21, &sa, NULL, 8);
memset(&sa, 0, sizeof(sa));
sa.sa_sigaction = segv_handler;
sa.sa_flags = SA_NODEFER | SA_SIGINFO;
sigaction(SIGSEGV, &sa, NULL);
sigaction(SIGBUS, &sa, NULL);
}
#define NONFAILING(...) \
{ \
__atomic_fetch_add(&skip_segv, 1, __ATOMIC_SEQ_CST); \
if (_setjmp(segv_env) == 0) { \
__VA_ARGS__; \
} \
__atomic_fetch_sub(&skip_segv, 1, __ATOMIC_SEQ_CST); \
}
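/* Illustrative note (inferred intent): NONFAILING(stmt) executes stmt while
 * SIGSEGV/SIGBUS are absorbed via _longjmp in segv_handler above, e.g.
 * NONFAILING(*(volatile char*)addr = 0) probes a possibly-unmapped address
 * without killing the reproducer. */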
static void sleep_ms(uint64_t ms)
{
usleep(ms * 1000);
}
static uint64_t current_time_ms(void)
{
struct timespec ts;
if (clock_gettime(CLOCK_MONOTONIC, &ts))
exit(1);
return (uint64_t)ts.tv_sec * 1000 + (uint64_t)ts.tv_nsec / 1000000;
}
static void use_temporary_dir(void)
{
char tmpdir_template[] = "./syzkaller.XXXXXX";
char* tmpdir = mkdtemp(tmpdir_template);
if (!tmpdir)
exit(1);
if (chmod(tmpdir, 0777))
exit(1);
if (chdir(tmpdir))
exit(1);
}
static void thread_start(void* (*fn)(void*), void* arg)
{
pthread_t th;
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_attr_setstacksize(&attr, 128 << 10);
int i = 0;
for (; i < 100; i++) {
if (pthread_create(&th, &attr, fn, arg) == 0) {
pthread_attr_destroy(&attr);
return;
}
if (errno == EAGAIN) {
usleep(50);
continue;
}
break;
}
exit(1);
}
typedef struct {
int state;
} event_t;
static void event_init(event_t* ev)
{
ev->state = 0;
}
static void event_reset(event_t* ev)
{
ev->state = 0;
}
static void event_set(event_t* ev)
{
if (ev->state)
exit(1);
__atomic_store_n(&ev->state, 1, __ATOMIC_RELEASE);
syscall(SYS_futex, &ev->state, FUTEX_WAKE | FUTEX_PRIVATE_FLAG, 1000000);
}
static void event_wait(event_t* ev)
{
while (!__atomic_load_n(&ev->state, __ATOMIC_ACQUIRE))
syscall(SYS_futex, &ev->state, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, 0, 0);
}
static int event_isset(event_t* ev)
{
return __atomic_load_n(&ev->state, __ATOMIC_ACQUIRE);
}
static int event_timedwait(event_t* ev, uint64_t timeout)
{
uint64_t start = current_time_ms();
uint64_t now = start;
for (;;) {
uint64_t remain = timeout - (now - start);
struct timespec ts;
ts.tv_sec = remain / 1000;
ts.tv_nsec = (remain % 1000) * 1000 * 1000;
syscall(SYS_futex, &ev->state, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, 0, &ts);
if (__atomic_load_n(&ev->state, __ATOMIC_ACQUIRE))
return 1;
now = current_time_ms();
if (now - start > timeout)
return 0;
}
}
static bool write_file(const char* file, const char* what, ...)
{
char buf[1024];
va_list args;
va_start(args, what);
vsnprintf(buf, sizeof(buf), what, args);
va_end(args);
buf[sizeof(buf) - 1] = 0;
int len = strlen(buf);
int fd = open(file, O_WRONLY | O_CLOEXEC);
if (fd == -1)
return false;
if (write(fd, buf, len) != len) {
int err = errno;
close(fd);
errno = err;
return false;
}
close(fd);
return true;
}
struct nlmsg {
char* pos;
int nesting;
struct nlattr* nested[8];
char buf[1024];
};
static struct nlmsg nlmsg;
static void netlink_init(struct nlmsg* nlmsg, int typ, int flags,
const void* data, int size)
{
memset(nlmsg, 0, sizeof(*nlmsg));
struct nlmsghdr* hdr = (struct nlmsghdr*)nlmsg->buf;
hdr->nlmsg_type = typ;
hdr->nlmsg_flags = NLM_F_REQUEST | NLM_F_ACK | flags;
memcpy(hdr + 1, data, size);
nlmsg->pos = (char*)(hdr + 1) + NLMSG_ALIGN(size);
}
static void netlink_attr(struct nlmsg* nlmsg, int typ, const void* data,
int size)
{
struct nlattr* attr = (struct nlattr*)nlmsg->pos;
attr->nla_len = sizeof(*attr) + size;
attr->nla_type = typ;
memcpy(attr + 1, data, size);
nlmsg->pos += NLMSG_ALIGN(attr->nla_len);
}
static void netlink_nest(struct nlmsg* nlmsg, int typ)
{
struct nlattr* attr = (struct nlattr*)nlmsg->pos;
attr->nla_type = typ;
nlmsg->pos += sizeof(*attr);
nlmsg->nested[nlmsg->nesting++] = attr;
}
static void netlink_done(struct nlmsg* nlmsg)
{
struct nlattr* attr = nlmsg->nested[--nlmsg->nesting];
attr->nla_len = nlmsg->pos - (char*)attr;
}
static int netlink_send_ext(struct nlmsg* nlmsg, int sock, uint16_t reply_type,
int* reply_len)
{
if (nlmsg->pos > nlmsg->buf + sizeof(nlmsg->buf) || nlmsg->nesting)
exit(1);
struct nlmsghdr* hdr = (struct nlmsghdr*)nlmsg->buf;
hdr->nlmsg_len = nlmsg->pos - nlmsg->buf;
struct sockaddr_nl addr;
memset(&addr, 0, sizeof(addr));
addr.nl_family = AF_NETLINK;
unsigned n = sendto(sock, nlmsg->buf, hdr->nlmsg_len, 0,
(struct sockaddr*)&addr, sizeof(addr));
if (n != hdr->nlmsg_len)
exit(1);
n = recv(sock, nlmsg->buf, sizeof(nlmsg->buf), 0);
if (reply_len)
*reply_len = 0;
if (hdr->nlmsg_type == NLMSG_DONE)
return 0;
if (n < sizeof(struct nlmsghdr))
exit(1);
if (reply_len && hdr->nlmsg_type == reply_type) {
*reply_len = n;
return 0;
}
if (n < sizeof(struct nlmsghdr) + sizeof(struct nlmsgerr))
exit(1);
if (hdr->nlmsg_type != NLMSG_ERROR)
exit(1);
return -((struct nlmsgerr*)(hdr + 1))->error;
}
static int netlink_send(struct nlmsg* nlmsg, int sock)
{
return netlink_send_ext(nlmsg, sock, 0, NULL);
}
static int netlink_next_msg(struct nlmsg* nlmsg, unsigned int offset,
unsigned int total_len)
{
struct nlmsghdr* hdr = (struct nlmsghdr*)(nlmsg->buf + offset);
if (offset == total_len || offset + hdr->nlmsg_len > total_len)
return -1;
return hdr->nlmsg_len;
}
static void netlink_add_device_impl(struct nlmsg* nlmsg, const char* type,
const char* name)
{
struct ifinfomsg hdr;
memset(&hdr, 0, sizeof(hdr));
netlink_init(nlmsg, RTM_NEWLINK, NLM_F_EXCL | NLM_F_CREATE, &hdr,
sizeof(hdr));
if (name)
netlink_attr(nlmsg, IFLA_IFNAME, name, strlen(name));
netlink_nest(nlmsg, IFLA_LINKINFO);
netlink_attr(nlmsg, IFLA_INFO_KIND, type, strlen(type));
}
static void netlink_add_device(struct nlmsg* nlmsg, int sock, const char* type,
const char* name)
{
netlink_add_device_impl(nlmsg, type, name);
netlink_done(nlmsg);
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_add_veth(struct nlmsg* nlmsg, int sock, const char* name,
const char* peer)
{
netlink_add_device_impl(nlmsg, "veth", name);
netlink_nest(nlmsg, IFLA_INFO_DATA);
netlink_nest(nlmsg, VETH_INFO_PEER);
nlmsg->pos += sizeof(struct ifinfomsg);
netlink_attr(nlmsg, IFLA_IFNAME, peer, strlen(peer));
netlink_done(nlmsg);
netlink_done(nlmsg);
netlink_done(nlmsg);
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_add_hsr(struct nlmsg* nlmsg, int sock, const char* name,
const char* slave1, const char* slave2)
{
netlink_add_device_impl(nlmsg, "hsr", name);
netlink_nest(nlmsg, IFLA_INFO_DATA);
int ifindex1 = if_nametoindex(slave1);
netlink_attr(nlmsg, IFLA_HSR_SLAVE1, &ifindex1, sizeof(ifindex1));
int ifindex2 = if_nametoindex(slave2);
netlink_attr(nlmsg, IFLA_HSR_SLAVE2, &ifindex2, sizeof(ifindex2));
netlink_done(nlmsg);
netlink_done(nlmsg);
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_add_linked(struct nlmsg* nlmsg, int sock, const char* type,
const char* name, const char* link)
{
netlink_add_device_impl(nlmsg, type, name);
netlink_done(nlmsg);
int ifindex = if_nametoindex(link);
netlink_attr(nlmsg, IFLA_LINK, &ifindex, sizeof(ifindex));
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_add_vlan(struct nlmsg* nlmsg, int sock, const char* name,
const char* link, uint16_t id, uint16_t proto)
{
netlink_add_device_impl(nlmsg, "vlan", name);
netlink_nest(nlmsg, IFLA_INFO_DATA);
netlink_attr(nlmsg, IFLA_VLAN_ID, &id, sizeof(id));
netlink_attr(nlmsg, IFLA_VLAN_PROTOCOL, &proto, sizeof(proto));
netlink_done(nlmsg);
netlink_done(nlmsg);
int ifindex = if_nametoindex(link);
netlink_attr(nlmsg, IFLA_LINK, &ifindex, sizeof(ifindex));
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_add_macvlan(struct nlmsg* nlmsg, int sock, const char* name,
const char* link)
{
netlink_add_device_impl(nlmsg, "macvlan", name);
netlink_nest(nlmsg, IFLA_INFO_DATA);
uint32_t mode = MACVLAN_MODE_BRIDGE;
netlink_attr(nlmsg, IFLA_MACVLAN_MODE, &mode, sizeof(mode));
netlink_done(nlmsg);
netlink_done(nlmsg);
int ifindex = if_nametoindex(link);
netlink_attr(nlmsg, IFLA_LINK, &ifindex, sizeof(ifindex));
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_add_geneve(struct nlmsg* nlmsg, int sock, const char* name,
uint32_t vni, struct in_addr* addr4,
struct in6_addr* addr6)
{
netlink_add_device_impl(nlmsg, "geneve", name);
netlink_nest(nlmsg, IFLA_INFO_DATA);
netlink_attr(nlmsg, IFLA_GENEVE_ID, &vni, sizeof(vni));
if (addr4)
netlink_attr(nlmsg, IFLA_GENEVE_REMOTE, addr4, sizeof(*addr4));
if (addr6)
netlink_attr(nlmsg, IFLA_GENEVE_REMOTE6, addr6, sizeof(*addr6));
netlink_done(nlmsg);
netlink_done(nlmsg);
int err = netlink_send(nlmsg, sock);
(void)err;
}
#define IFLA_IPVLAN_FLAGS 2
#define IPVLAN_MODE_L3S 2
#undef IPVLAN_F_VEPA
#define IPVLAN_F_VEPA 2
static void netlink_add_ipvlan(struct nlmsg* nlmsg, int sock, const char* name,
const char* link, uint16_t mode, uint16_t flags)
{
netlink_add_device_impl(nlmsg, "ipvlan", name);
netlink_nest(nlmsg, IFLA_INFO_DATA);
netlink_attr(nlmsg, IFLA_IPVLAN_MODE, &mode, sizeof(mode));
netlink_attr(nlmsg, IFLA_IPVLAN_FLAGS, &flags, sizeof(flags));
netlink_done(nlmsg);
netlink_done(nlmsg);
int ifindex = if_nametoindex(link);
netlink_attr(nlmsg, IFLA_LINK, &ifindex, sizeof(ifindex));
int err = netlink_send(nlmsg, sock);
(void)err;
}
static void netlink_device_change(struct nlmsg* nlmsg, int sock,
const char* name, bool up, const char* master,
const void* mac, int macsize,
const char* new_name)
{
struct ifinfomsg hdr;
memset(&hdr, 0, sizeof(hdr));
if (up)
hdr.ifi_flags = hdr.ifi_change = IFF_UP;
hdr.ifi_index = if_nametoindex(name);
netlink_init(nlmsg, RTM_NEWLINK, 0, &hdr, sizeof(hdr));
if (new_name)
netlink_attr(nlmsg, IFLA_IFNAME, new_name, strlen(new_name));
if (master) {
int ifindex = if_nametoindex(master);
netlink_attr(nlmsg, IFLA_MASTER, &ifindex, sizeof(ifindex));
}
if (macsize)
netlink_attr(nlmsg, IFLA_ADDRESS, mac, macsize);
int err = netlink_send(nlmsg, sock);
(void)err;
}
static int netlink_add_addr(struct nlmsg* nlmsg, int sock, const char* dev,
const void* addr, int addrsize)
{
struct ifaddrmsg hdr;
memset(&hdr, 0, sizeof(hdr));
hdr.ifa_family = addrsize == 4 ? AF_INET : AF_INET6;
hdr.ifa_prefixlen = addrsize == 4 ? 24 : 120;
hdr.ifa_scope = RT_SCOPE_UNIVERSE;
hdr.ifa_index = if_nametoindex(dev);
netlink_init(nlmsg, RTM_NEWADDR, NLM_F_CREATE | NLM_F_REPLACE, &hdr,
sizeof(hdr));
netlink_attr(nlmsg, IFA_LOCAL, addr, addrsize);
netlink_attr(nlmsg, IFA_ADDRESS, addr, addrsize);
return netlink_send(nlmsg, sock);
}
static void netlink_add_addr4(struct nlmsg* nlmsg, int sock, const char* dev,
const char* addr)
{
struct in_addr in_addr;
inet_pton(AF_INET, addr, &in_addr);
int err = netlink_add_addr(nlmsg, sock, dev, &in_addr, sizeof(in_addr));
(void)err;
}
static void netlink_add_addr6(struct nlmsg* nlmsg, int sock, const char* dev,
const char* addr)
{
struct in6_addr in6_addr;
inet_pton(AF_INET6, addr, &in6_addr);
int err = netlink_add_addr(nlmsg, sock, dev, &in6_addr, sizeof(in6_addr));
(void)err;
}
static void netlink_add_neigh(struct nlmsg* nlmsg, int sock, const char* name,
const void* addr, int addrsize, const void* mac,
int macsize)
{
struct ndmsg hdr;
memset(&hdr, 0, sizeof(hdr));
hdr.ndm_family = addrsize == 4 ? AF_INET : AF_INET6;
hdr.ndm_ifindex = if_nametoindex(name);
hdr.ndm_state = NUD_PERMANENT;
netlink_init(nlmsg, RTM_NEWNEIGH, NLM_F_EXCL | NLM_F_CREATE, &hdr,
sizeof(hdr));
netlink_attr(nlmsg, NDA_DST, addr, addrsize);
netlink_attr(nlmsg, NDA_LLADDR, mac, macsize);
int err = netlink_send(nlmsg, sock);
(void)err;
}
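/* TUN/TAP setup: creates a TAP interface named "syz_tun" (fd pinned to
 * 240), disables IPv6 DAD and router solicitations on it, assigns the
 * fixed local IPv4/IPv6 addresses, and installs permanent neighbor
 * entries for the remote addresses so injected packets need no ARP/NDP
 * resolution. */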
static int tunfd = -1;
#define TUN_IFACE "syz_tun"
#define LOCAL_MAC 0xaaaaaaaaaaaa
#define REMOTE_MAC 0xaaaaaaaaaabb
#define LOCAL_IPV4 "172.20.20.170"
#define REMOTE_IPV4 "172.20.20.187"
#define LOCAL_IPV6 "fe80::aa"
#define REMOTE_IPV6 "fe80::bb"
#define IFF_NAPI 0x0010
static void initialize_tun(void)
{
tunfd = open("/dev/net/tun", O_RDWR | O_NONBLOCK);
if (tunfd == -1) {
printf("tun: can't open /dev/net/tun: please enable CONFIG_TUN=y\n");
printf("otherwise fuzzing or reproducing might not work as intended\n");
return;
}
const int kTunFd = 240;
if (dup2(tunfd, kTunFd) < 0)
exit(1);
close(tunfd);
tunfd = kTunFd;
struct ifreq ifr;
memset(&ifr, 0, sizeof(ifr));
strncpy(ifr.ifr_name, TUN_IFACE, IFNAMSIZ);
ifr.ifr_flags = IFF_TAP | IFF_NO_PI;
if (ioctl(tunfd, TUNSETIFF, (void*)&ifr) < 0) {
exit(1);
}
char sysctl[64];
sprintf(sysctl, "/proc/sys/net/ipv6/conf/%s/accept_dad", TUN_IFACE);
write_file(sysctl, "0");
sprintf(sysctl, "/proc/sys/net/ipv6/conf/%s/router_solicitations", TUN_IFACE);
write_file(sysctl, "0");
int sock = socket(AF_NETLINK, SOCK_RAW, NETLINK_ROUTE);
if (sock == -1)
exit(1);
netlink_add_addr4(&nlmsg, sock, TUN_IFACE, LOCAL_IPV4);
netlink_add_addr6(&nlmsg, sock, TUN_IFACE, LOCAL_IPV6);
uint64_t macaddr = REMOTE_MAC;
struct in_addr in_addr;
inet_pton(AF_INET, REMOTE_IPV4, &in_addr);
netlink_add_neigh(&nlmsg, sock, TUN_IFACE, &in_addr, sizeof(in_addr),
&macaddr, ETH_ALEN);
struct in6_addr in6_addr;
inet_pton(AF_INET6, REMOTE_IPV6, &in6_addr);
netlink_add_neigh(&nlmsg, sock, TUN_IFACE, &in6_addr, sizeof(in6_addr),
&macaddr, ETH_ALEN);
macaddr = LOCAL_MAC;
netlink_device_change(&nlmsg, sock, TUN_IFACE, true, 0, &macaddr, ETH_ALEN,
NULL);
close(sock);
}
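/* devlink port discovery: resolves the generic-netlink family id of
 * "devlink", dumps the ports of the given bus/device, and renames each
 * port's netdev to a predictable "<prefix><index>" name (used below for
 * netdevsim). */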
#define DEVLINK_FAMILY_NAME "devlink"
#define DEVLINK_CMD_PORT_GET 5
#define DEVLINK_ATTR_BUS_NAME 1
#define DEVLINK_ATTR_DEV_NAME 2
#define DEVLINK_ATTR_NETDEV_NAME 7
static int netlink_devlink_id_get(struct nlmsg* nlmsg, int sock)
{
struct genlmsghdr genlhdr;
memset(&genlhdr, 0, sizeof(genlhdr));
genlhdr.cmd = CTRL_CMD_GETFAMILY;
netlink_init(nlmsg, GENL_ID_CTRL, 0, &genlhdr, sizeof(genlhdr));
netlink_attr(nlmsg, CTRL_ATTR_FAMILY_NAME, DEVLINK_FAMILY_NAME,
strlen(DEVLINK_FAMILY_NAME) + 1);
int n = 0;
int err = netlink_send_ext(nlmsg, sock, GENL_ID_CTRL, &n);
if (err) {
return -1;
}
uint16_t id = 0;
struct nlattr* attr = (struct nlattr*)(nlmsg->buf + NLMSG_HDRLEN +
NLMSG_ALIGN(sizeof(genlhdr)));
for (; (char*)attr < nlmsg->buf + n;
attr = (struct nlattr*)((char*)attr + NLMSG_ALIGN(attr->nla_len))) {
if (attr->nla_type == CTRL_ATTR_FAMILY_ID) {
id = *(uint16_t*)(attr + 1);
break;
}
}
if (!id) {
return -1;
}
recv(sock, nlmsg->buf, sizeof(nlmsg->buf), 0);
return id;
}
static struct nlmsg nlmsg2;
static void initialize_devlink_ports(const char* bus_name, const char* dev_name,
const char* netdev_prefix)
{
struct genlmsghdr genlhdr;
int len, total_len, id, err, offset;
uint16_t netdev_index;
int sock = socket(AF_NETLINK, SOCK_RAW, NETLINK_GENERIC);
if (sock == -1)
exit(1);
int rtsock = socket(AF_NETLINK, SOCK_RAW, NETLINK_ROUTE);
if (rtsock == -1)
exit(1);
id = netlink_devlink_id_get(&nlmsg, sock);
if (id == -1)
goto error;
memset(&genlhdr, 0, sizeof(genlhdr));
genlhdr.cmd = DEVLINK_CMD_PORT_GET;
netlink_init(&nlmsg, id, NLM_F_DUMP, &genlhdr, sizeof(genlhdr));
netlink_attr(&nlmsg, DEVLINK_ATTR_BUS_NAME, bus_name, strlen(bus_name) + 1);
netlink_attr(&nlmsg, DEVLINK_ATTR_DEV_NAME, dev_name, strlen(dev_name) + 1);
err = netlink_send_ext(&nlmsg, sock, id, &total_len);
if (err) {
goto error;
}
offset = 0;
netdev_index = 0;
while ((len = netlink_next_msg(&nlmsg, offset, total_len)) != -1) {
struct nlattr* attr = (struct nlattr*)(nlmsg.buf + offset + NLMSG_HDRLEN +
NLMSG_ALIGN(sizeof(genlhdr)));
for (; (char*)attr < nlmsg.buf + offset + len;
attr = (struct nlattr*)((char*)attr + NLMSG_ALIGN(attr->nla_len))) {
if (attr->nla_type == DEVLINK_ATTR_NETDEV_NAME) {
char* port_name;
char netdev_name[IFNAMSIZ];
port_name = (char*)(attr + 1);
snprintf(netdev_name, sizeof(netdev_name), "%s%d", netdev_prefix,
netdev_index);
netlink_device_change(&nlmsg2, rtsock, port_name, true, 0, 0, 0,
netdev_name);
break;
}
}
offset += len;
netdev_index++;
}
error:
close(rtsock);
close(sock);
}
#define DEV_IPV4 "172.20.20.%d"
#define DEV_IPV6 "fe80::%02x"
#define DEV_MAC 0x00aaaaaaaaaa
static void netdevsim_add(unsigned int addr, unsigned int port_count)
{
char buf[16];
sprintf(buf, "%u %u", addr, port_count);
if (write_file("/sys/bus/netdevsim/new_device", buf)) {
snprintf(buf, sizeof(buf), "netdevsim%d", addr);
initialize_devlink_ports("netdevsim", buf, "netdevsim");
}
}
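/* WireGuard setup: creates three interfaces (wg0..wg2) over generic
 * netlink, listening on ports 20001..20003 on loopback, and
 * cross-connects them as peers; allowed-ips split the IPv4/IPv6 space
 * into two halves via a /1 CIDR mask. */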
#define WG_GENL_NAME "wireguard"
enum wg_cmd {
WG_CMD_GET_DEVICE,
WG_CMD_SET_DEVICE,
};
enum wgdevice_attribute {
WGDEVICE_A_UNSPEC,
WGDEVICE_A_IFINDEX,
WGDEVICE_A_IFNAME,
WGDEVICE_A_PRIVATE_KEY,
WGDEVICE_A_PUBLIC_KEY,
WGDEVICE_A_FLAGS,
WGDEVICE_A_LISTEN_PORT,
WGDEVICE_A_FWMARK,
WGDEVICE_A_PEERS,
};
enum wgpeer_attribute {
WGPEER_A_UNSPEC,
WGPEER_A_PUBLIC_KEY,
WGPEER_A_PRESHARED_KEY,
WGPEER_A_FLAGS,
WGPEER_A_ENDPOINT,
WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
WGPEER_A_LAST_HANDSHAKE_TIME,
WGPEER_A_RX_BYTES,
WGPEER_A_TX_BYTES,
WGPEER_A_ALLOWEDIPS,
WGPEER_A_PROTOCOL_VERSION,
};
enum wgallowedip_attribute {
WGALLOWEDIP_A_UNSPEC,
WGALLOWEDIP_A_FAMILY,
WGALLOWEDIP_A_IPADDR,
WGALLOWEDIP_A_CIDR_MASK,
};
static int netlink_wireguard_id_get(struct nlmsg* nlmsg, int sock)
{
struct genlmsghdr genlhdr;
memset(&genlhdr, 0, sizeof(genlhdr));
genlhdr.cmd = CTRL_CMD_GETFAMILY;
netlink_init(nlmsg, GENL_ID_CTRL, 0, &genlhdr, sizeof(genlhdr));
netlink_attr(nlmsg, CTRL_ATTR_FAMILY_NAME, WG_GENL_NAME,
strlen(WG_GENL_NAME) + 1);
int n = 0;
int err = netlink_send_ext(nlmsg, sock, GENL_ID_CTRL, &n);
if (err) {
return -1;
}
uint16_t id = 0;
struct nlattr* attr = (struct nlattr*)(nlmsg->buf + NLMSG_HDRLEN +
NLMSG_ALIGN(sizeof(genlhdr)));
for (; (char*)attr < nlmsg->buf + n;
attr = (struct nlattr*)((char*)attr + NLMSG_ALIGN(attr->nla_len))) {
if (attr->nla_type == CTRL_ATTR_FAMILY_ID) {
id = *(uint16_t*)(attr + 1);
break;
}
}
if (!id) {
return -1;
}
recv(sock, nlmsg->buf, sizeof(nlmsg->buf), 0);
return id;
}
static void netlink_wireguard_setup(void)
{
const char ifname_a[] = "wg0";
const char ifname_b[] = "wg1";
const char ifname_c[] = "wg2";
const char private_a[] =
"\xa0\x5c\xa8\x4f\x6c\x9c\x8e\x38\x53\xe2\xfd\x7a\x70\xae\x0f\xb2\x0f\xa1"
"\x52\x60\x0c\xb0\x08\x45\x17\x4f\x08\x07\x6f\x8d\x78\x43";
const char private_b[] =
"\xb0\x80\x73\xe8\xd4\x4e\x91\xe3\xda\x92\x2c\x22\x43\x82\x44\xbb\x88\x5c"
"\x69\xe2\x69\xc8\xe9\xd8\x35\xb1\x14\x29\x3a\x4d\xdc\x6e";
const char private_c[] =
"\xa0\xcb\x87\x9a\x47\xf5\xbc\x64\x4c\x0e\x69\x3f\xa6\xd0\x31\xc7\x4a\x15"
"\x53\xb6\xe9\x01\xb9\xff\x2f\x51\x8c\x78\x04\x2f\xb5\x42";
const char public_a[] =
"\x97\x5c\x9d\x81\xc9\x83\xc8\x20\x9e\xe7\x81\x25\x4b\x89\x9f\x8e\xd9\x25"
"\xae\x9f\x09\x23\xc2\x3c\x62\xf5\x3c\x57\xcd\xbf\x69\x1c";
const char public_b[] =
"\xd1\x73\x28\x99\xf6\x11\xcd\x89\x94\x03\x4d\x7f\x41\x3d\xc9\x57\x63\x0e"
"\x54\x93\xc2\x85\xac\xa4\x00\x65\xcb\x63\x11\xbe\x69\x6b";
const char public_c[] =
"\xf4\x4d\xa3\x67\xa8\x8e\xe6\x56\x4f\x02\x02\x11\x45\x67\x27\x08\x2f\x5c"
"\xeb\xee\x8b\x1b\xf5\xeb\x73\x37\x34\x1b\x45\x9b\x39\x22";
const uint16_t listen_a = 20001;
const uint16_t listen_b = 20002;
const uint16_t listen_c = 20003;
const uint16_t af_inet = AF_INET;
const uint16_t af_inet6 = AF_INET6;
const struct sockaddr_in endpoint_b_v4 = {
.sin_family = AF_INET,
.sin_port = htons(listen_b),
.sin_addr = {htonl(INADDR_LOOPBACK)}};
const struct sockaddr_in endpoint_c_v4 = {
.sin_family = AF_INET,
.sin_port = htons(listen_c),
.sin_addr = {htonl(INADDR_LOOPBACK)}};
struct sockaddr_in6 endpoint_a_v6 = {.sin6_family = AF_INET6,
.sin6_port = htons(listen_a)};
endpoint_a_v6.sin6_addr = in6addr_loopback;
struct sockaddr_in6 endpoint_c_v6 = {.sin6_family = AF_INET6,
.sin6_port = htons(listen_c)};
endpoint_c_v6.sin6_addr = in6addr_loopback;
const struct in_addr first_half_v4 = {0};
const struct in_addr second_half_v4 = {(uint32_t)htonl(128u << 24)};
const struct in6_addr first_half_v6 = {{{0}}};
const struct in6_addr second_half_v6 = {{{0x80}}};
const uint8_t half_cidr = 1;
const uint16_t persistent_keepalives[] = {1, 3, 7, 9, 14, 19};
struct genlmsghdr genlhdr = {.cmd = WG_CMD_SET_DEVICE, .version = 1};
int sock;
int id, err;
sock = socket(AF_NETLINK, SOCK_RAW, NETLINK_GENERIC);
if (sock == -1) {
return;
}
id = netlink_wireguard_id_get(&nlmsg, sock);
if (id == -1)
goto error;
netlink_init(&nlmsg, id, 0, &genlhdr, sizeof(genlhdr));
netlink_attr(&nlmsg, WGDEVICE_A_IFNAME, ifname_a, strlen(ifname_a) + 1);
netlink_attr(&nlmsg, WGDEVICE_A_PRIVATE_KEY, private_a, 32);
netlink_attr(&nlmsg, WGDEVICE_A_LISTEN_PORT, &listen_a, 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGDEVICE_A_PEERS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGPEER_A_PUBLIC_KEY, public_b, 32);
netlink_attr(&nlmsg, WGPEER_A_ENDPOINT, &endpoint_b_v4,
sizeof(endpoint_b_v4));
netlink_attr(&nlmsg, WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
&persistent_keepalives[0], 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGPEER_A_ALLOWEDIPS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &first_half_v4,
sizeof(first_half_v4));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet6, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &first_half_v6,
sizeof(first_half_v6));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGPEER_A_PUBLIC_KEY, public_c, 32);
netlink_attr(&nlmsg, WGPEER_A_ENDPOINT, &endpoint_c_v6,
sizeof(endpoint_c_v6));
netlink_attr(&nlmsg, WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
&persistent_keepalives[1], 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGPEER_A_ALLOWEDIPS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &second_half_v4,
sizeof(second_half_v4));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet6, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &second_half_v6,
sizeof(second_half_v6));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
err = netlink_send(&nlmsg, sock);
if (err) {
}
netlink_init(&nlmsg, id, 0, &genlhdr, sizeof(genlhdr));
netlink_attr(&nlmsg, WGDEVICE_A_IFNAME, ifname_b, strlen(ifname_b) + 1);
netlink_attr(&nlmsg, WGDEVICE_A_PRIVATE_KEY, private_b, 32);
netlink_attr(&nlmsg, WGDEVICE_A_LISTEN_PORT, &listen_b, 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGDEVICE_A_PEERS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGPEER_A_PUBLIC_KEY, public_a, 32);
netlink_attr(&nlmsg, WGPEER_A_ENDPOINT, &endpoint_a_v6,
sizeof(endpoint_a_v6));
netlink_attr(&nlmsg, WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
&persistent_keepalives[2], 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGPEER_A_ALLOWEDIPS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &first_half_v4,
sizeof(first_half_v4));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet6, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &first_half_v6,
sizeof(first_half_v6));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGPEER_A_PUBLIC_KEY, public_c, 32);
netlink_attr(&nlmsg, WGPEER_A_ENDPOINT, &endpoint_c_v4,
sizeof(endpoint_c_v4));
netlink_attr(&nlmsg, WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
&persistent_keepalives[3], 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGPEER_A_ALLOWEDIPS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &second_half_v4,
sizeof(second_half_v4));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet6, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &second_half_v6,
sizeof(second_half_v6));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
err = netlink_send(&nlmsg, sock);
if (err) {
}
netlink_init(&nlmsg, id, 0, &genlhdr, sizeof(genlhdr));
netlink_attr(&nlmsg, WGDEVICE_A_IFNAME, ifname_c, strlen(ifname_c) + 1);
netlink_attr(&nlmsg, WGDEVICE_A_PRIVATE_KEY, private_c, 32);
netlink_attr(&nlmsg, WGDEVICE_A_LISTEN_PORT, &listen_c, 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGDEVICE_A_PEERS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGPEER_A_PUBLIC_KEY, public_a, 32);
netlink_attr(&nlmsg, WGPEER_A_ENDPOINT, &endpoint_a_v6,
sizeof(endpoint_a_v6));
netlink_attr(&nlmsg, WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
&persistent_keepalives[4], 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGPEER_A_ALLOWEDIPS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &first_half_v4,
sizeof(first_half_v4));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet6, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &first_half_v6,
sizeof(first_half_v6));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGPEER_A_PUBLIC_KEY, public_b, 32);
netlink_attr(&nlmsg, WGPEER_A_ENDPOINT, &endpoint_b_v4,
sizeof(endpoint_b_v4));
netlink_attr(&nlmsg, WGPEER_A_PERSISTENT_KEEPALIVE_INTERVAL,
&persistent_keepalives[5], 2);
netlink_nest(&nlmsg, NLA_F_NESTED | WGPEER_A_ALLOWEDIPS);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &second_half_v4,
sizeof(second_half_v4));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_nest(&nlmsg, NLA_F_NESTED | 0);
netlink_attr(&nlmsg, WGALLOWEDIP_A_FAMILY, &af_inet6, 2);
netlink_attr(&nlmsg, WGALLOWEDIP_A_IPADDR, &second_half_v6,
sizeof(second_half_v6));
netlink_attr(&nlmsg, WGALLOWEDIP_A_CIDR_MASK, &half_cidr, 1);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
netlink_done(&nlmsg);
err = netlink_send(&nlmsg, sock);
if (err) {
}
error:
close(sock);
}
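/* initialize_netdevices(): instantiates one instance of each supported
 * virtual device type (bridge, bond, team, hsr, vlan, macvlan, ipvlan,
 * macsec, geneve, netdevsim, wireguard, ...), creates veth pairs whose
 * *_slave_* ends are enslaved to the master devices, and assigns each
 * device deterministic per-index IPv4/IPv6/MAC addresses before
 * bringing it up. */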
static void initialize_netdevices(void)
{
char netdevsim[16];
sprintf(netdevsim, "netdevsim%d", (int)procid);
struct {
const char* type;
const char* dev;
} devtypes[] = {
{"ip6gretap", "ip6gretap0"}, {"bridge", "bridge0"},
{"vcan", "vcan0"}, {"bond", "bond0"},
{"team", "team0"}, {"dummy", "dummy0"},
{"nlmon", "nlmon0"}, {"caif", "caif0"},
{"batadv", "batadv0"}, {"vxcan", "vxcan1"},
{"netdevsim", netdevsim}, {"veth", 0},
{"xfrm", "xfrm0"}, {"wireguard", "wg0"},
{"wireguard", "wg1"}, {"wireguard", "wg2"},
};
const char* devmasters[] = {"bridge", "bond", "team", "batadv"};
struct {
const char* name;
int macsize;
bool noipv6;
} devices[] = {
{"lo", ETH_ALEN},
{"sit0", 0},
{"bridge0", ETH_ALEN},
{"vcan0", 0, true},
{"tunl0", 0},
{"gre0", 0},
{"gretap0", ETH_ALEN},
{"ip_vti0", 0},
{"ip6_vti0", 0},
{"ip6tnl0", 0},
{"ip6gre0", 0},
{"ip6gretap0", ETH_ALEN},
{"erspan0", ETH_ALEN},
{"bond0", ETH_ALEN},
{"veth0", ETH_ALEN},
{"veth1", ETH_ALEN},
{"team0", ETH_ALEN},
{"veth0_to_bridge", ETH_ALEN},
{"veth1_to_bridge", ETH_ALEN},
{"veth0_to_bond", ETH_ALEN},
{"veth1_to_bond", ETH_ALEN},
{"veth0_to_team", ETH_ALEN},
{"veth1_to_team", ETH_ALEN},
{"veth0_to_hsr", ETH_ALEN},
{"veth1_to_hsr", ETH_ALEN},
{"hsr0", 0},
{"dummy0", ETH_ALEN},
{"nlmon0", 0},
{"vxcan0", 0, true},
{"vxcan1", 0, true},
{"caif0", ETH_ALEN},
{"batadv0", ETH_ALEN},
{netdevsim, ETH_ALEN},
{"xfrm0", ETH_ALEN},
{"veth0_virt_wifi", ETH_ALEN},
{"veth1_virt_wifi", ETH_ALEN},
{"virt_wifi0", ETH_ALEN},
{"veth0_vlan", ETH_ALEN},
{"veth1_vlan", ETH_ALEN},
{"vlan0", ETH_ALEN},
{"vlan1", ETH_ALEN},
{"macvlan0", ETH_ALEN},
{"macvlan1", ETH_ALEN},
{"ipvlan0", ETH_ALEN},
{"ipvlan1", ETH_ALEN},
{"veth0_macvtap", ETH_ALEN},
{"veth1_macvtap", ETH_ALEN},
{"macvtap0", ETH_ALEN},
{"macsec0", ETH_ALEN},
{"veth0_to_batadv", ETH_ALEN},
{"veth1_to_batadv", ETH_ALEN},
{"batadv_slave_0", ETH_ALEN},
{"batadv_slave_1", ETH_ALEN},
{"geneve0", ETH_ALEN},
{"geneve1", ETH_ALEN},
{"wg0", 0},
{"wg1", 0},
{"wg2", 0},
};
int sock = socket(AF_NETLINK, SOCK_RAW, NETLINK_ROUTE);
if (sock == -1)
exit(1);
unsigned i;
for (i = 0; i < sizeof(devtypes) / sizeof(devtypes[0]); i++)
netlink_add_device(&nlmsg, sock, devtypes[i].type, devtypes[i].dev);
for (i = 0; i < sizeof(devmasters) / (sizeof(devmasters[0])); i++) {
char master[32], slave0[32], veth0[32], slave1[32], veth1[32];
sprintf(slave0, "%s_slave_0", devmasters[i]);
sprintf(veth0, "veth0_to_%s", devmasters[i]);
netlink_add_veth(&nlmsg, sock, slave0, veth0);
sprintf(slave1, "%s_slave_1", devmasters[i]);
sprintf(veth1, "veth1_to_%s", devmasters[i]);
netlink_add_veth(&nlmsg, sock, slave1, veth1);
sprintf(master, "%s0", devmasters[i]);
netlink_device_change(&nlmsg, sock, slave0, false, master, 0, 0, NULL);
netlink_device_change(&nlmsg, sock, slave1, false, master, 0, 0, NULL);
}
netlink_device_change(&nlmsg, sock, "bridge_slave_0", true, 0, 0, 0, NULL);
netlink_device_change(&nlmsg, sock, "bridge_slave_1", true, 0, 0, 0, NULL);
netlink_add_veth(&nlmsg, sock, "hsr_slave_0", "veth0_to_hsr");
netlink_add_veth(&nlmsg, sock, "hsr_slave_1", "veth1_to_hsr");
netlink_add_hsr(&nlmsg, sock, "hsr0", "hsr_slave_0", "hsr_slave_1");
netlink_device_change(&nlmsg, sock, "hsr_slave_0", true, 0, 0, 0, NULL);
netlink_device_change(&nlmsg, sock, "hsr_slave_1", true, 0, 0, 0, NULL);
netlink_add_veth(&nlmsg, sock, "veth0_virt_wifi", "veth1_virt_wifi");
netlink_add_linked(&nlmsg, sock, "virt_wifi", "virt_wifi0",
"veth1_virt_wifi");
netlink_add_veth(&nlmsg, sock, "veth0_vlan", "veth1_vlan");
netlink_add_vlan(&nlmsg, sock, "vlan0", "veth0_vlan", 0, htons(ETH_P_8021Q));
netlink_add_vlan(&nlmsg, sock, "vlan1", "veth0_vlan", 1, htons(ETH_P_8021AD));
netlink_add_macvlan(&nlmsg, sock, "macvlan0", "veth1_vlan");
netlink_add_macvlan(&nlmsg, sock, "macvlan1", "veth1_vlan");
netlink_add_ipvlan(&nlmsg, sock, "ipvlan0", "veth0_vlan", IPVLAN_MODE_L2, 0);
netlink_add_ipvlan(&nlmsg, sock, "ipvlan1", "veth0_vlan", IPVLAN_MODE_L3S,
IPVLAN_F_VEPA);
netlink_add_veth(&nlmsg, sock, "veth0_macvtap", "veth1_macvtap");
netlink_add_linked(&nlmsg, sock, "macvtap", "macvtap0", "veth0_macvtap");
netlink_add_linked(&nlmsg, sock, "macsec", "macsec0", "veth1_macvtap");
char addr[32];
sprintf(addr, DEV_IPV4, 14 + 10);
struct in_addr geneve_addr4;
if (inet_pton(AF_INET, addr, &geneve_addr4) <= 0)
exit(1);
struct in6_addr geneve_addr6;
if (inet_pton(AF_INET6, "fc00::01", &geneve_addr6) <= 0)
exit(1);
netlink_add_geneve(&nlmsg, sock, "geneve0", 0, &geneve_addr4, 0);
netlink_add_geneve(&nlmsg, sock, "geneve1", 1, 0, &geneve_addr6);
netdevsim_add((int)procid, 4);
netlink_wireguard_setup();
for (i = 0; i < sizeof(devices) / (sizeof(devices[0])); i++) {
char addr[32];
sprintf(addr, DEV_IPV4, i + 10);
netlink_add_addr4(&nlmsg, sock, devices[i].name, addr);
if (!devices[i].noipv6) {
sprintf(addr, DEV_IPV6, i + 10);
netlink_add_addr6(&nlmsg, sock, devices[i].name, addr);
}
uint64_t macaddr = DEV_MAC + ((i + 10ull) << 40);
netlink_device_change(&nlmsg, sock, devices[i].name, true, 0, &macaddr,
devices[i].macsize, NULL);
}
close(sock);
}
static void initialize_netdevices_init(void)
{
int sock = socket(AF_NETLINK, SOCK_RAW, NETLINK_ROUTE);
if (sock == -1)
exit(1);
struct {
const char* type;
int macsize;
bool noipv6;
bool noup;
} devtypes[] = {
{"nr", 7, true},
{"rose", 5, true, true},
};
unsigned i;
for (i = 0; i < sizeof(devtypes) / sizeof(devtypes[0]); i++) {
char dev[32], addr[32];
sprintf(dev, "%s%d", devtypes[i].type, (int)procid);
sprintf(addr, "172.30.%d.%d", i, (int)procid + 1);
netlink_add_addr4(&nlmsg, sock, dev, addr);
if (!devtypes[i].noipv6) {
sprintf(addr, "fe88::%02x:%02x", i, (int)procid + 1);
netlink_add_addr6(&nlmsg, sock, dev, addr);
}
int macsize = devtypes[i].macsize;
uint64_t macaddr = 0xbbbbbb +
((unsigned long long)i << (8 * (macsize - 2))) +
(procid << (8 * (macsize - 1)));
netlink_device_change(&nlmsg, sock, dev, !devtypes[i].noup, 0, &macaddr,
macsize, NULL);
}
close(sock);
}
static int read_tun(char* data, int size)
{
if (tunfd < 0)
return -1;
int rv = read(tunfd, data, size);
if (rv < 0) {
if (errno == EAGAIN || errno == EBADFD)
return -1;
exit(1);
}
return rv;
}
static void flush_tun()
{
char data[1000];
while (read_tun(&data[0], sizeof(data)) != -1) {
}
}
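/* Bluetooth emulation: the structs below mirror the kernel's HCI packet
 * and event layouts so the program can drive /dev/vhci directly; a
 * helper thread answers HCI commands, and initialize_vhci() fakes an
 * ACL connection and an LE connection on the virtual controller. */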
#define MAX_FDS 30
#define BTPROTO_HCI 1
#define ACL_LINK 1
#define SCAN_PAGE 2
typedef struct {
uint8_t b[6];
} __attribute__((packed)) bdaddr_t;
#define HCI_COMMAND_PKT 1
#define HCI_EVENT_PKT 4
#define HCI_VENDOR_PKT 0xff
struct hci_command_hdr {
uint16_t opcode;
uint8_t plen;
} __attribute__((packed));
struct hci_event_hdr {
uint8_t evt;
uint8_t plen;
} __attribute__((packed));
#define HCI_EV_CONN_COMPLETE 0x03
struct hci_ev_conn_complete {
uint8_t status;
uint16_t handle;
bdaddr_t bdaddr;
uint8_t link_type;
uint8_t encr_mode;
} __attribute__((packed));
#define HCI_EV_CONN_REQUEST 0x04
struct hci_ev_conn_request {
bdaddr_t bdaddr;
uint8_t dev_class[3];
uint8_t link_type;
} __attribute__((packed));
#define HCI_EV_REMOTE_FEATURES 0x0b
struct hci_ev_remote_features {
uint8_t status;
uint16_t handle;
uint8_t features[8];
} __attribute__((packed));
#define HCI_EV_CMD_COMPLETE 0x0e
struct hci_ev_cmd_complete {
uint8_t ncmd;
uint16_t opcode;
} __attribute__((packed));
#define HCI_OP_WRITE_SCAN_ENABLE 0x0c1a
#define HCI_OP_READ_BUFFER_SIZE 0x1005
struct hci_rp_read_buffer_size {
uint8_t status;
uint16_t acl_mtu;
uint8_t sco_mtu;
uint16_t acl_max_pkt;
uint16_t sco_max_pkt;
} __attribute__((packed));
#define HCI_OP_READ_BD_ADDR 0x1009
struct hci_rp_read_bd_addr {
uint8_t status;
bdaddr_t bdaddr;
} __attribute__((packed));
#define HCI_EV_LE_META 0x3e
struct hci_ev_le_meta {
uint8_t subevent;
} __attribute__((packed));
#define HCI_EV_LE_CONN_COMPLETE 0x01
struct hci_ev_le_conn_complete {
uint8_t status;
uint16_t handle;
uint8_t role;
uint8_t bdaddr_type;
bdaddr_t bdaddr;
uint16_t interval;
uint16_t latency;
uint16_t supervision_timeout;
uint8_t clk_accurancy;
} __attribute__((packed));
struct hci_dev_req {
uint16_t dev_id;
uint32_t dev_opt;
};
struct vhci_vendor_pkt {
uint8_t type;
uint8_t opcode;
uint16_t id;
};
#define HCIDEVUP _IOW('H', 201, int)
#define HCISETSCAN _IOW('H', 221, int)
static int vhci_fd = -1;
static void rfkill_unblock_all()
{
int fd = open("/dev/rfkill", O_WRONLY);
if (fd < 0)
exit(1);
struct rfkill_event event = {0};
event.idx = 0;
event.type = RFKILL_TYPE_ALL;
event.op = RFKILL_OP_CHANGE_ALL;
event.soft = 0;
event.hard = 0;
if (write(fd, &event, sizeof(event)) < 0)
exit(1);
close(fd);
}
static void hci_send_event_packet(int fd, uint8_t evt, void* data,
size_t data_len)
{
struct iovec iv[3];
struct hci_event_hdr hdr;
hdr.evt = evt;
hdr.plen = data_len;
uint8_t type = HCI_EVENT_PKT;
iv[0].iov_base = &type;
iv[0].iov_len = sizeof(type);
iv[1].iov_base = &hdr;
iv[1].iov_len = sizeof(hdr);
iv[2].iov_base = data;
iv[2].iov_len = data_len;
if (writev(fd, iv, sizeof(iv) / sizeof(struct iovec)) < 0)
exit(1);
}
static void hci_send_event_cmd_complete(int fd, uint16_t opcode, void* data,
size_t data_len)
{
struct iovec iv[4];
struct hci_event_hdr hdr;
hdr.evt = HCI_EV_CMD_COMPLETE;
hdr.plen = sizeof(struct hci_ev_cmd_complete) + data_len;
struct hci_ev_cmd_complete evt_hdr;
evt_hdr.ncmd = 1;
evt_hdr.opcode = opcode;
uint8_t type = HCI_EVENT_PKT;
iv[0].iov_base = &type;
iv[0].iov_len = sizeof(type);
iv[1].iov_base = &hdr;
iv[1].iov_len = sizeof(hdr);
iv[2].iov_base = &evt_hdr;
iv[2].iov_len = sizeof(evt_hdr);
iv[3].iov_base = data;
iv[3].iov_len = data_len;
if (writev(fd, iv, sizeof(iv) / sizeof(struct iovec)) < 0)
exit(1);
}
static bool process_command_pkt(int fd, char* buf, ssize_t buf_size)
{
struct hci_command_hdr* hdr = (struct hci_command_hdr*)buf;
if (buf_size < (ssize_t)sizeof(struct hci_command_hdr) ||
hdr->plen != buf_size - sizeof(struct hci_command_hdr)) {
exit(1);
}
switch (hdr->opcode) {
case HCI_OP_WRITE_SCAN_ENABLE: {
uint8_t status = 0;
hci_send_event_cmd_complete(fd, hdr->opcode, &status, sizeof(status));
return true;
}
case HCI_OP_READ_BD_ADDR: {
struct hci_rp_read_bd_addr rp = {0};
rp.status = 0;
memset(&rp.bdaddr, 0xaa, 6);
hci_send_event_cmd_complete(fd, hdr->opcode, &rp, sizeof(rp));
return false;
}
case HCI_OP_READ_BUFFER_SIZE: {
struct hci_rp_read_buffer_size rp = {0};
rp.status = 0;
rp.acl_mtu = 1021;
rp.sco_mtu = 96;
rp.acl_max_pkt = 4;
rp.sco_max_pkt = 6;
hci_send_event_cmd_complete(fd, hdr->opcode, &rp, sizeof(rp));
return false;
}
}
char dummy[0xf9] = {0};
hci_send_event_cmd_complete(fd, hdr->opcode, dummy, sizeof(dummy));
return false;
}
static void* event_thread(void* arg)
{
while (1) {
char buf[1024] = {0};
ssize_t buf_size = read(vhci_fd, buf, sizeof(buf));
if (buf_size < 0)
exit(1);
if (buf_size > 0 && buf[0] == HCI_COMMAND_PKT) {
if (process_command_pkt(vhci_fd, buf + 1, buf_size - 1))
break;
}
}
return NULL;
}
#define HCI_HANDLE_1 200
#define HCI_HANDLE_2 201
static void initialize_vhci()
{
int hci_sock = socket(AF_BLUETOOTH, SOCK_RAW, BTPROTO_HCI);
if (hci_sock < 0)
exit(1);
vhci_fd = open("/dev/vhci", O_RDWR);
if (vhci_fd == -1)
exit(1);
const int kVhciFd = 241;
if (dup2(vhci_fd, kVhciFd) < 0)
exit(1);
close(vhci_fd);
vhci_fd = kVhciFd;
struct vhci_vendor_pkt vendor_pkt;
if (read(vhci_fd, &vendor_pkt, sizeof(vendor_pkt)) != sizeof(vendor_pkt))
exit(1);
if (vendor_pkt.type != HCI_VENDOR_PKT)
exit(1);
pthread_t th;
if (pthread_create(&th, NULL, event_thread, NULL))
exit(1);
int ret = ioctl(hci_sock, HCIDEVUP, vendor_pkt.id);
if (ret) {
if (errno == ERFKILL) {
rfkill_unblock_all();
ret = ioctl(hci_sock, HCIDEVUP, vendor_pkt.id);
}
if (ret && errno != EALREADY)
exit(1);
}
struct hci_dev_req dr = {0};
dr.dev_id = vendor_pkt.id;
dr.dev_opt = SCAN_PAGE;
if (ioctl(hci_sock, HCISETSCAN, &dr))
exit(1);
struct hci_ev_conn_request request;
memset(&request, 0, sizeof(request));
memset(&request.bdaddr, 0xaa, 6);
*(uint8_t*)&request.bdaddr.b[5] = 0x10;
request.link_type = ACL_LINK;
hci_send_event_packet(vhci_fd, HCI_EV_CONN_REQUEST, &request,
sizeof(request));
struct hci_ev_conn_complete complete;
memset(&complete, 0, sizeof(complete));
complete.status = 0;
complete.handle = HCI_HANDLE_1;
memset(&complete.bdaddr, 0xaa, 6);
*(uint8_t*)&complete.bdaddr.b[5] = 0x10;
complete.link_type = ACL_LINK;
complete.encr_mode = 0;
hci_send_event_packet(vhci_fd, HCI_EV_CONN_COMPLETE, &complete,
sizeof(complete));
struct hci_ev_remote_features features;
memset(&features, 0, sizeof(features));
features.status = 0;
features.handle = HCI_HANDLE_1;
hci_send_event_packet(vhci_fd, HCI_EV_REMOTE_FEATURES, &features,
sizeof(features));
struct {
struct hci_ev_le_meta le_meta;
struct hci_ev_le_conn_complete le_conn;
} le_conn;
memset(&le_conn, 0, sizeof(le_conn));
le_conn.le_meta.subevent = HCI_EV_LE_CONN_COMPLETE;
memset(&le_conn.le_conn.bdaddr, 0xaa, 6);
*(uint8_t*)&le_conn.le_conn.bdaddr.b[5] = 0x11;
le_conn.le_conn.role = 1;
le_conn.le_conn.handle = HCI_HANDLE_2;
hci_send_event_packet(vhci_fd, HCI_EV_LE_META, &le_conn, sizeof(le_conn));
pthread_join(th, NULL);
close(hci_sock);
}
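/* Netfilter checkpoint/reset: the checkpoint_* functions snapshot the
 * initial ip/ip6/arp/ebtables rulesets via getsockopt(IPT_SO_GET_*),
 * and the reset_* functions restore any table whose contents changed,
 * so each test iteration starts from the same namespace state. */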
#define XT_TABLE_SIZE 1536
#define XT_MAX_ENTRIES 10
struct xt_counters {
uint64_t pcnt, bcnt;
};
struct ipt_getinfo {
char name[32];
unsigned int valid_hooks;
unsigned int hook_entry[5];
unsigned int underflow[5];
unsigned int num_entries;
unsigned int size;
};
struct ipt_get_entries {
char name[32];
unsigned int size;
void* entrytable[XT_TABLE_SIZE / sizeof(void*)];
};
struct ipt_replace {
char name[32];
unsigned int valid_hooks;
unsigned int num_entries;
unsigned int size;
unsigned int hook_entry[5];
unsigned int underflow[5];
unsigned int num_counters;
struct xt_counters* counters;
char entrytable[XT_TABLE_SIZE];
};
struct ipt_table_desc {
const char* name;
struct ipt_getinfo info;
struct ipt_replace replace;
};
static struct ipt_table_desc ipv4_tables[] = {
{.name = "filter"}, {.name = "nat"}, {.name = "mangle"},
{.name = "raw"}, {.name = "security"},
};
static struct ipt_table_desc ipv6_tables[] = {
{.name = "filter"}, {.name = "nat"}, {.name = "mangle"},
{.name = "raw"}, {.name = "security"},
};
#define IPT_BASE_CTL 64
#define IPT_SO_SET_REPLACE (IPT_BASE_CTL)
#define IPT_SO_GET_INFO (IPT_BASE_CTL)
#define IPT_SO_GET_ENTRIES (IPT_BASE_CTL + 1)
struct arpt_getinfo {
char name[32];
unsigned int valid_hooks;
unsigned int hook_entry[3];
unsigned int underflow[3];
unsigned int num_entries;
unsigned int size;
};
struct arpt_get_entries {
char name[32];
unsigned int size;
void* entrytable[XT_TABLE_SIZE / sizeof(void*)];
};
struct arpt_replace {
char name[32];
unsigned int valid_hooks;
unsigned int num_entries;
unsigned int size;
unsigned int hook_entry[3];
unsigned int underflow[3];
unsigned int num_counters;
struct xt_counters* counters;
char entrytable[XT_TABLE_SIZE];
};
struct arpt_table_desc {
const char* name;
struct arpt_getinfo info;
struct arpt_replace replace;
};
static struct arpt_table_desc arpt_tables[] = {
{.name = "filter"},
};
#define ARPT_BASE_CTL 96
#define ARPT_SO_SET_REPLACE (ARPT_BASE_CTL)
#define ARPT_SO_GET_INFO (ARPT_BASE_CTL)
#define ARPT_SO_GET_ENTRIES (ARPT_BASE_CTL + 1)
static void checkpoint_iptables(struct ipt_table_desc* tables, int num_tables,
int family, int level)
{
int fd = socket(family, SOCK_STREAM, IPPROTO_TCP);
if (fd == -1) {
switch (errno) {
case EAFNOSUPPORT:
case ENOPROTOOPT:
return;
}
exit(1);
}
for (int i = 0; i < num_tables; i++) {
struct ipt_table_desc* table = &tables[i];
strcpy(table->info.name, table->name);
strcpy(table->replace.name, table->name);
socklen_t optlen = sizeof(table->info);
if (getsockopt(fd, level, IPT_SO_GET_INFO, &table->info, &optlen)) {
switch (errno) {
case EPERM:
case ENOENT:
case ENOPROTOOPT:
continue;
}
exit(1);
}
if (table->info.size > sizeof(table->replace.entrytable))
exit(1);
if (table->info.num_entries > XT_MAX_ENTRIES)
exit(1);
struct ipt_get_entries entries;
memset(&entries, 0, sizeof(entries));
strcpy(entries.name, table->name);
entries.size = table->info.size;
optlen = sizeof(entries) - sizeof(entries.entrytable) + table->info.size;
if (getsockopt(fd, level, IPT_SO_GET_ENTRIES, &entries, &optlen))
exit(1);
table->replace.valid_hooks = table->info.valid_hooks;
table->replace.num_entries = table->info.num_entries;
table->replace.size = table->info.size;
memcpy(table->replace.hook_entry, table->info.hook_entry,
sizeof(table->replace.hook_entry));
memcpy(table->replace.underflow, table->info.underflow,
sizeof(table->replace.underflow));
memcpy(table->replace.entrytable, entries.entrytable, table->info.size);
}
close(fd);
}
static void reset_iptables(struct ipt_table_desc* tables, int num_tables,
int family, int level)
{
int fd = socket(family, SOCK_STREAM, IPPROTO_TCP);
if (fd == -1) {
switch (errno) {
case EAFNOSUPPORT:
case ENOPROTOOPT:
return;
}
exit(1);
}
for (int i = 0; i < num_tables; i++) {
struct ipt_table_desc* table = &tables[i];
if (table->info.valid_hooks == 0)
continue;
struct ipt_getinfo info;
memset(&info, 0, sizeof(info));
strcpy(info.name, table->name);
socklen_t optlen = sizeof(info);
if (getsockopt(fd, level, IPT_SO_GET_INFO, &info, &optlen))
exit(1);
if (memcmp(&table->info, &info, sizeof(table->info)) == 0) {
struct ipt_get_entries entries;
memset(&entries, 0, sizeof(entries));
strcpy(entries.name, table->name);
entries.size = table->info.size;
optlen = sizeof(entries) - sizeof(entries.entrytable) + entries.size;
if (getsockopt(fd, level, IPT_SO_GET_ENTRIES, &entries, &optlen))
exit(1);
if (memcmp(table->replace.entrytable, entries.entrytable,
table->info.size) == 0)
continue;
}
struct xt_counters counters[XT_MAX_ENTRIES];
table->replace.num_counters = info.num_entries;
table->replace.counters = counters;
optlen = sizeof(table->replace) - sizeof(table->replace.entrytable) +
table->replace.size;
if (setsockopt(fd, level, IPT_SO_SET_REPLACE, &table->replace, optlen))
exit(1);
}
close(fd);
}
static void checkpoint_arptables(void)
{
int fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (fd == -1) {
switch (errno) {
case EAFNOSUPPORT:
case ENOPROTOOPT:
return;
}
exit(1);
}
for (unsigned i = 0; i < sizeof(arpt_tables) / sizeof(arpt_tables[0]); i++) {
struct arpt_table_desc* table = &arpt_tables[i];
strcpy(table->info.name, table->name);
strcpy(table->replace.name, table->name);
socklen_t optlen = sizeof(table->info);
if (getsockopt(fd, SOL_IP, ARPT_SO_GET_INFO, &table->info, &optlen)) {
switch (errno) {
case EPERM:
case ENOENT:
case ENOPROTOOPT:
continue;
}
exit(1);
}
if (table->info.size > sizeof(table->replace.entrytable))
exit(1);
if (table->info.num_entries > XT_MAX_ENTRIES)
exit(1);
struct arpt_get_entries entries;
memset(&entries, 0, sizeof(entries));
strcpy(entries.name, table->name);
entries.size = table->info.size;
optlen = sizeof(entries) - sizeof(entries.entrytable) + table->info.size;
if (getsockopt(fd, SOL_IP, ARPT_SO_GET_ENTRIES, &entries, &optlen))
exit(1);
table->replace.valid_hooks = table->info.valid_hooks;
table->replace.num_entries = table->info.num_entries;
table->replace.size = table->info.size;
memcpy(table->replace.hook_entry, table->info.hook_entry,
sizeof(table->replace.hook_entry));
memcpy(table->replace.underflow, table->info.underflow,
sizeof(table->replace.underflow));
memcpy(table->replace.entrytable, entries.entrytable, table->info.size);
}
close(fd);
}
static void reset_arptables()
{
int fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (fd == -1) {
switch (errno) {
case EAFNOSUPPORT:
case ENOPROTOOPT:
return;
}
exit(1);
}
for (unsigned i = 0; i < sizeof(arpt_tables) / sizeof(arpt_tables[0]); i++) {
struct arpt_table_desc* table = &arpt_tables[i];
if (table->info.valid_hooks == 0)
continue;
struct arpt_getinfo info;
memset(&info, 0, sizeof(info));
strcpy(info.name, table->name);
socklen_t optlen = sizeof(info);
if (getsockopt(fd, SOL_IP, ARPT_SO_GET_INFO, &info, &optlen))
exit(1);
if (memcmp(&table->info, &info, sizeof(table->info)) == 0) {
struct arpt_get_entries entries;
memset(&entries, 0, sizeof(entries));
strcpy(entries.name, table->name);
entries.size = table->info.size;
optlen = sizeof(entries) - sizeof(entries.entrytable) + entries.size;
if (getsockopt(fd, SOL_IP, ARPT_SO_GET_ENTRIES, &entries, &optlen))
exit(1);
if (memcmp(table->replace.entrytable, entries.entrytable,
table->info.size) == 0)
continue;
} else {
}
struct xt_counters counters[XT_MAX_ENTRIES];
table->replace.num_counters = info.num_entries;
table->replace.counters = counters;
optlen = sizeof(table->replace) - sizeof(table->replace.entrytable) +
table->replace.size;
if (setsockopt(fd, SOL_IP, ARPT_SO_SET_REPLACE, &table->replace, optlen))
exit(1);
}
close(fd);
}
#define NF_BR_NUMHOOKS 6
#define EBT_TABLE_MAXNAMELEN 32
#define EBT_CHAIN_MAXNAMELEN 32
#define EBT_BASE_CTL 128
#define EBT_SO_SET_ENTRIES (EBT_BASE_CTL)
#define EBT_SO_GET_INFO (EBT_BASE_CTL)
#define EBT_SO_GET_ENTRIES (EBT_SO_GET_INFO + 1)
#define EBT_SO_GET_INIT_INFO (EBT_SO_GET_ENTRIES + 1)
#define EBT_SO_GET_INIT_ENTRIES (EBT_SO_GET_INIT_INFO + 1)
struct ebt_replace {
char name[EBT_TABLE_MAXNAMELEN];
unsigned int valid_hooks;
unsigned int nentries;
unsigned int entries_size;
struct ebt_entries* hook_entry[NF_BR_NUMHOOKS];
unsigned int num_counters;
struct ebt_counter* counters;
char* entries;
};
struct ebt_entries {
unsigned int distinguisher;
char name[EBT_CHAIN_MAXNAMELEN];
unsigned int counter_offset;
int policy;
unsigned int nentries;
char data[0] __attribute__((aligned(__alignof__(struct ebt_replace))));
};
struct ebt_table_desc {
const char* name;
struct ebt_replace replace;
char entrytable[XT_TABLE_SIZE];
};
static struct ebt_table_desc ebt_tables[] = {
{.name = "filter"},
{.name = "nat"},
{.name = "broute"},
};
static void checkpoint_ebtables(void)
{
int fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (fd == -1) {
switch (errno) {
case EAFNOSUPPORT:
case ENOPROTOOPT:
return;
}
exit(1);
}
for (size_t i = 0; i < sizeof(ebt_tables) / sizeof(ebt_tables[0]); i++) {
struct ebt_table_desc* table = &ebt_tables[i];
strcpy(table->replace.name, table->name);
socklen_t optlen = sizeof(table->replace);
if (getsockopt(fd, SOL_IP, EBT_SO_GET_INIT_INFO, &table->replace,
&optlen)) {
switch (errno) {
case EPERM:
case ENOENT:
case ENOPROTOOPT:
continue;
}
exit(1);
}
if (table->replace.entries_size > sizeof(table->entrytable))
exit(1);
table->replace.num_counters = 0;
table->replace.entries = table->entrytable;
optlen = sizeof(table->replace) + table->replace.entries_size;
if (getsockopt(fd, SOL_IP, EBT_SO_GET_INIT_ENTRIES, &table->replace,
&optlen))
exit(1);
}
close(fd);
}
static void reset_ebtables()
{
int fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (fd == -1) {
switch (errno) {
case EAFNOSUPPORT:
case ENOPROTOOPT:
return;
}
exit(1);
}
for (unsigned i = 0; i < sizeof(ebt_tables) / sizeof(ebt_tables[0]); i++) {
struct ebt_table_desc* table = &ebt_tables[i];
if (table->replace.valid_hooks == 0)
continue;
struct ebt_replace replace;
memset(&replace, 0, sizeof(replace));
strcpy(replace.name, table->name);
socklen_t optlen = sizeof(replace);
if (getsockopt(fd, SOL_IP, EBT_SO_GET_INFO, &replace, &optlen))
exit(1);
replace.num_counters = 0;
table->replace.entries = 0;
for (unsigned h = 0; h < NF_BR_NUMHOOKS; h++)
table->replace.hook_entry[h] = 0;
if (memcmp(&table->replace, &replace, sizeof(table->replace)) == 0) {
char entrytable[XT_TABLE_SIZE];
memset(&entrytable, 0, sizeof(entrytable));
replace.entries = entrytable;
optlen = sizeof(replace) + replace.entries_size;
if (getsockopt(fd, SOL_IP, EBT_SO_GET_ENTRIES, &replace, &optlen))
exit(1);
if (memcmp(table->entrytable, entrytable, replace.entries_size) == 0)
continue;
}
for (unsigned j = 0, h = 0; h < NF_BR_NUMHOOKS; h++) {
if (table->replace.valid_hooks & (1 << h)) {
table->replace.hook_entry[h] =
(struct ebt_entries*)table->entrytable + j;
j++;
}
}
table->replace.entries = table->entrytable;
optlen = sizeof(table->replace) + table->replace.entries_size;
if (setsockopt(fd, SOL_IP, EBT_SO_SET_ENTRIES, &table->replace, optlen))
exit(1);
}
close(fd);
}
static void checkpoint_net_namespace(void)
{
checkpoint_ebtables();
checkpoint_arptables();
checkpoint_iptables(ipv4_tables, sizeof(ipv4_tables) / sizeof(ipv4_tables[0]),
AF_INET, SOL_IP);
checkpoint_iptables(ipv6_tables, sizeof(ipv6_tables) / sizeof(ipv6_tables[0]),
AF_INET6, SOL_IPV6);
}
static void reset_net_namespace(void)
{
reset_ebtables();
reset_arptables();
reset_iptables(ipv4_tables, sizeof(ipv4_tables) / sizeof(ipv4_tables[0]),
AF_INET, SOL_IP);
reset_iptables(ipv6_tables, sizeof(ipv6_tables) / sizeof(ipv6_tables[0]),
AF_INET6, SOL_IPV6);
}
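/* cgroup setup: mounts a unified (v2) hierarchy plus two v1 hierarchies
 * under /syzcgroup; setup_cgroups_loop() then creates per-process
 * syz<procid> groups with pid and ~300MB memory limits and moves the
 * loop process into them, and setup_cgroups_test() symlinks the groups
 * into the per-iteration working directory. */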
static void setup_cgroups()
{
if (mkdir("/syzcgroup", 0777)) {
}
if (mkdir("/syzcgroup/unified", 0777)) {
}
if (mount("none", "/syzcgroup/unified", "cgroup2", 0, NULL)) {
}
if (chmod("/syzcgroup/unified", 0777)) {
}
write_file("/syzcgroup/unified/cgroup.subtree_control",
"+cpu +memory +io +pids +rdma");
if (mkdir("/syzcgroup/cpu", 0777)) {
}
if (mount("none", "/syzcgroup/cpu", "cgroup", 0,
"cpuset,cpuacct,perf_event,hugetlb")) {
}
write_file("/syzcgroup/cpu/cgroup.clone_children", "1");
write_file("/syzcgroup/cpu/cpuset.memory_pressure_enabled", "1");
if (chmod("/syzcgroup/cpu", 0777)) {
}
if (mkdir("/syzcgroup/net", 0777)) {
}
if (mount("none", "/syzcgroup/net", "cgroup", 0,
"net_cls,net_prio,devices,freezer")) {
}
if (chmod("/syzcgroup/net", 0777)) {
}
}
static void setup_cgroups_loop()
{
int pid = getpid();
char file[128];
char cgroupdir[64];
snprintf(cgroupdir, sizeof(cgroupdir), "/syzcgroup/unified/syz%llu", procid);
if (mkdir(cgroupdir, 0777)) {
}
snprintf(file, sizeof(file), "%s/pids.max", cgroupdir);
write_file(file, "32");
snprintf(file, sizeof(file), "%s/memory.low", cgroupdir);
write_file(file, "%d", 298 << 20);
snprintf(file, sizeof(file), "%s/memory.high", cgroupdir);
write_file(file, "%d", 299 << 20);
snprintf(file, sizeof(file), "%s/memory.max", cgroupdir);
write_file(file, "%d", 300 << 20);
snprintf(file, sizeof(file), "%s/cgroup.procs", cgroupdir);
write_file(file, "%d", pid);
snprintf(cgroupdir, sizeof(cgroupdir), "/syzcgroup/cpu/syz%llu", procid);
if (mkdir(cgroupdir, 0777)) {
}
snprintf(file, sizeof(file), "%s/cgroup.procs", cgroupdir);
write_file(file, "%d", pid);
snprintf(cgroupdir, sizeof(cgroupdir), "/syzcgroup/net/syz%llu", procid);
if (mkdir(cgroupdir, 0777)) {
}
snprintf(file, sizeof(file), "%s/cgroup.procs", cgroupdir);
write_file(file, "%d", pid);
}
static void setup_cgroups_test()
{
char cgroupdir[64];
snprintf(cgroupdir, sizeof(cgroupdir), "/syzcgroup/unified/syz%llu", procid);
if (symlink(cgroupdir, "./cgroup")) {
}
snprintf(cgroupdir, sizeof(cgroupdir), "/syzcgroup/cpu/syz%llu", procid);
if (symlink(cgroupdir, "./cgroup.cpu")) {
}
snprintf(cgroupdir, sizeof(cgroupdir), "/syzcgroup/net/syz%llu", procid);
if (symlink(cgroupdir, "./cgroup.net")) {
}
}
static void setup_common()
{
if (mount(0, "/sys/fs/fuse/connections", "fusectl", 0, 0)) {
}
setup_cgroups();
}
static void loop();
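/* sandbox_common(): detaches into its own session, applies rlimits, and
 * unshares the mount (remounted private), IPC, cgroup, UTS and SysV
 * semaphore namespaces; do_sandbox_none() forks the fuzzing loop inside
 * fresh PID and network namespaces with CAP_SYS_PTRACE/CAP_SYS_NICE
 * dropped. */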
static void sandbox_common()
{
prctl(PR_SET_PDEATHSIG, SIGKILL, 0, 0, 0);
setpgrp();
setsid();
struct rlimit rlim;
rlim.rlim_cur = rlim.rlim_max = (200 << 20);
setrlimit(RLIMIT_AS, &rlim);
rlim.rlim_cur = rlim.rlim_max = 32 << 20;
setrlimit(RLIMIT_MEMLOCK, &rlim);
rlim.rlim_cur = rlim.rlim_max = 136 << 20;
setrlimit(RLIMIT_FSIZE, &rlim);
rlim.rlim_cur = rlim.rlim_max = 1 << 20;
setrlimit(RLIMIT_STACK, &rlim);
rlim.rlim_cur = rlim.rlim_max = 0;
setrlimit(RLIMIT_CORE, &rlim);
rlim.rlim_cur = rlim.rlim_max = 256;
setrlimit(RLIMIT_NOFILE, &rlim);
if (unshare(CLONE_NEWNS)) {
}
if (mount(NULL, "/", NULL, MS_REC | MS_PRIVATE, NULL)) {
}
if (unshare(CLONE_NEWIPC)) {
}
if (unshare(0x02000000)) { /* 0x02000000 == CLONE_NEWCGROUP */
}
if (unshare(CLONE_NEWUTS)) {
}
if (unshare(CLONE_SYSVSEM)) {
}
typedef struct {
const char* name;
const char* value;
} sysctl_t;
static const sysctl_t sysctls[] = {
{"/proc/sys/kernel/shmmax", "16777216"},
{"/proc/sys/kernel/shmall", "536870912"},
{"/proc/sys/kernel/shmmni", "1024"},
{"/proc/sys/kernel/msgmax", "8192"},
{"/proc/sys/kernel/msgmni", "1024"},
{"/proc/sys/kernel/msgmnb", "1024"},
{"/proc/sys/kernel/sem", "1024 1048576 500 1024"},
};
unsigned i;
for (i = 0; i < sizeof(sysctls) / sizeof(sysctls[0]); i++)
write_file(sysctls[i].name, sysctls[i].value);
}
static int wait_for_loop(int pid)
{
if (pid < 0)
exit(1);
int status = 0;
while (waitpid(-1, &status, __WALL) != pid) {
}
return WEXITSTATUS(status);
}
static void drop_caps(void)
{
struct __user_cap_header_struct cap_hdr = {};
struct __user_cap_data_struct cap_data[2] = {};
cap_hdr.version = _LINUX_CAPABILITY_VERSION_3;
cap_hdr.pid = getpid();
if (syscall(SYS_capget, &cap_hdr, &cap_data))
exit(1);
const int drop = (1 << CAP_SYS_PTRACE) | (1 << CAP_SYS_NICE);
cap_data[0].effective &= ~drop;
cap_data[0].permitted &= ~drop;
cap_data[0].inheritable &= ~drop;
if (syscall(SYS_capset, &cap_hdr, &cap_data))
exit(1);
}
static int do_sandbox_none(void)
{
if (unshare(CLONE_NEWPID)) {
}
int pid = fork();
if (pid != 0)
return wait_for_loop(pid);
setup_common();
initialize_vhci();
sandbox_common();
drop_caps();
initialize_netdevices_init();
if (unshare(CLONE_NEWNET)) {
}
initialize_tun();
initialize_netdevices();
loop();
exit(1);
}
#define FS_IOC_SETFLAGS _IOW('f', 2, long)
static void remove_dir(const char* dir)
{
int iter = 0;
DIR* dp = 0;
retry:
while (umount2(dir, MNT_DETACH) == 0) {
}
dp = opendir(dir);
if (dp == NULL) {
if (errno == EMFILE) {
exit(1);
}
exit(1);
}
struct dirent* ep = 0;
while ((ep = readdir(dp))) {
if (strcmp(ep->d_name, ".") == 0 || strcmp(ep->d_name, "..") == 0)
continue;
char filename[FILENAME_MAX];
snprintf(filename, sizeof(filename), "%s/%s", dir, ep->d_name);
while (umount2(filename, MNT_DETACH) == 0) {
}
struct stat st;
if (lstat(filename, &st))
exit(1);
if (S_ISDIR(st.st_mode)) {
remove_dir(filename);
continue;
}
int i;
for (i = 0;; i++) {
if (unlink(filename) == 0)
break;
if (errno == EPERM) {
int fd = open(filename, O_RDONLY);
if (fd != -1) {
long flags = 0;
if (ioctl(fd, FS_IOC_SETFLAGS, &flags) == 0) {
}
close(fd);
continue;
}
}
if (errno == EROFS) {
break;
}
if (errno != EBUSY || i > 100)
exit(1);
if (umount2(filename, MNT_DETACH))
exit(1);
}
}
closedir(dp);
for (int i = 0;; i++) {
if (rmdir(dir) == 0)
break;
if (i < 100) {
if (errno == EPERM) {
int fd = open(dir, O_RDONLY);
if (fd != -1) {
long flags = 0;
if (ioctl(fd, FS_IOC_SETFLAGS, &flags) == 0) {
}
close(fd);
continue;
}
}
if (errno == EROFS) {
break;
}
if (errno == EBUSY) {
if (umount2(dir, MNT_DETACH))
exit(1);
continue;
}
if (errno == ENOTEMPTY) {
if (iter < 100) {
iter++;
goto retry;
}
}
}
exit(1);
}
}
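/* kill_and_wait(): SIGKILLs the test process and its process group; if
 * reaping stalls, aborts any lingering FUSE connections (a write to
 * /sys/fs/fuse/connections/<id>/abort) that could block the exit, then
 * waits for the target pid. */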
static void kill_and_wait(int pid, int* status)
{
kill(-pid, SIGKILL);
kill(pid, SIGKILL);
for (int i = 0; i < 100; i++) {
if (waitpid(-1, status, WNOHANG | __WALL) == pid)
return;
usleep(1000);
}
DIR* dir = opendir("/sys/fs/fuse/connections");
if (dir) {
for (;;) {
struct dirent* ent = readdir(dir);
if (!ent)
break;
if (strcmp(ent->d_name, ".") == 0 || strcmp(ent->d_name, "..") == 0)
continue;
char abort[300];
snprintf(abort, sizeof(abort), "/sys/fs/fuse/connections/%s/abort",
ent->d_name);
int fd = open(abort, O_WRONLY);
if (fd == -1) {
continue;
}
if (write(fd, abort, 1) < 0) { /* writing any byte aborts the connection */
}
close(fd);
}
closedir(dir);
} else {
}
while (waitpid(-1, status, __WALL) != pid) {
}
}
static void setup_loop()
{
setup_cgroups_loop();
checkpoint_net_namespace();
}
static void reset_loop()
{
reset_net_namespace();
}
static void setup_test()
{
prctl(PR_SET_PDEATHSIG, SIGKILL, 0, 0, 0);
setpgrp();
setup_cgroups_test();
write_file("/proc/self/oom_score_adj", "1000");
flush_tun();
}
static void close_fds()
{
for (int fd = 3; fd < MAX_FDS; fd++)
close(fd);
}
static void setup_binfmt_misc()
{
if (mount(0, "/proc/sys/fs/binfmt_misc", "binfmt_misc", 0, 0)) {
}
write_file("/proc/sys/fs/binfmt_misc/register", ":syz0:M:0:\x01::./file0:");
write_file("/proc/sys/fs/binfmt_misc/register",
":syz1:M:1:\x02::./file0:POC");
}
struct thread_t {
int created, call;
event_t ready, done;
};
static struct thread_t threads[16];
static void execute_call(int call);
static int running;
static void* thr(void* arg)
{
struct thread_t* th = (struct thread_t*)arg;
for (;;) {
event_wait(&th->ready);
event_reset(&th->ready);
execute_call(th->call);
__atomic_fetch_sub(&running, 1, __ATOMIC_RELAXED);
event_set(&th->done);
}
return 0;
}
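/* execute_one(): runs the reproducer's 5 calls once sequentially, each
 * on a worker thread with a 45ms timeout, then repeats them in a
 * "collide" pass where even-numbered calls are fired without waiting
 * for completion, to provoke races between the calls. */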
static void execute_one(void)
{
int i, call, thread;
int collide = 0;
again:
for (call = 0; call < 5; call++) {
for (thread = 0; thread < (int)(sizeof(threads) / sizeof(threads[0]));
thread++) {
struct thread_t* th = &threads[thread];
if (!th->created) {
th->created = 1;
event_init(&th->ready);
event_init(&th->done);
event_set(&th->done);
thread_start(thr, th);
}
if (!event_isset(&th->done))
continue;
event_reset(&th->done);
th->call = call;
__atomic_fetch_add(&running, 1, __ATOMIC_RELAXED);
event_set(&th->ready);
if (collide && (call % 2) == 0)
break;
event_timedwait(&th->done, 45);
break;
}
}
for (i = 0; i < 100 && __atomic_load_n(&running, __ATOMIC_RELAXED); i++)
sleep_ms(1);
close_fds();
if (!collide) {
collide = 1;
goto again;
}
}
#define WAIT_FLAGS __WALL
static void loop(void)
{
setup_loop();
int iter = 0;
for (;; iter++) {
char cwdbuf[32];
sprintf(cwdbuf, "./%d", iter);
if (mkdir(cwdbuf, 0777))
exit(1);
reset_loop();
int pid = fork();
if (pid < 0)
exit(1);
if (pid == 0) {
if (chdir(cwdbuf))
exit(1);
setup_test();
execute_one();
exit(0);
}
int status = 0;
uint64_t start = current_time_ms();
for (;;) {
if (waitpid(-1, &status, WNOHANG | WAIT_FLAGS) == pid)
break;
sleep_ms(1);
if (current_time_ms() - start < 5 * 1000)
continue;
kill_and_wait(pid, &status);
break;
}
remove_dir(cwdbuf);
}
}
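/* The reproducer body: r[] holds fds returned by earlier calls. The
 * sequence creates an epoll instance, opens /dev/adsp1 and registers it
 * in that instance, then creates a second epoll instance and nests the
 * first epoll fd inside it. */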
uint64_t r[3] = {0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff};
static void execute_call(int call)
{
intptr_t res = 0;
switch (call) {
case 0:
res = syscall(__NR_epoll_create, 1);
if (res != -1)
r[0] = res;
break;
case 1:
NONFAILING(memcpy((void*)0x20000000, "/dev/adsp1\000", 11));
res = syscall(__NR_openat, 0xffffffffffffff9cul, 0x20000000ul, 0ul, 0ul);
if (res != -1)
r[1] = res;
break;
case 2:
NONFAILING(*(uint32_t*)0x200000c0 = 0x60000001);
NONFAILING(*(uint64_t*)0x200000c4 = 0);
syscall(__NR_epoll_ctl, r[0], 1ul, r[1], 0x200000c0ul);
break;
case 3:
res = syscall(__NR_epoll_create, 0xaf);
if (res != -1)
r[2] = res;
break;
case 4:
NONFAILING(*(uint32_t*)0x20000100 = 0);
NONFAILING(*(uint64_t*)0x20000104 = 0);
syscall(__NR_epoll_ctl, r[2], 1ul, r[0], 0x20000100ul);
break;
}
}
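/* main(): maps the fixed address ranges used by the calls above,
 * registers two binfmt_misc handlers, installs the SEGV handler, and
 * forks 6 sandboxed worker processes (procid 0..5); the parent just
 * sleeps. */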
int main(void)
{
syscall(__NR_mmap, 0x1ffff000ul, 0x1000ul, 0ul, 0x32ul, -1, 0ul);
syscall(__NR_mmap, 0x20000000ul, 0x1000000ul, 7ul, 0x32ul, -1, 0ul);
syscall(__NR_mmap, 0x21000000ul, 0x1000ul, 0ul, 0x32ul, -1, 0ul);
setup_binfmt_misc();
install_segv_handler();
for (procid = 0; procid < 6; procid++) {
if (fork() == 0) {
use_temporary_dir();
do_sandbox_none();
}
}
sleep(1000000);
return 0;
}
|
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, June 7, 2020 at 11:45:59 AM Mountain Standard Time
* Operating System: Version 13.4.5 (Build 17L562)
* Image Source: /System/Library/PrivateFrameworks/UIKitCore.framework/UIKitCore
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
@protocol _UIAutoScrollBehavior <NSObject>
@required
-(SCD_Struct_UI120*)offsetForAutoScroller:(id)arg1 timeDelta:(double)arg2;
@end
|
import Typography from 'typography'
import theme from 'typography-theme-grand-view'
theme.overrideThemeStyles = ({ rhythm }, options) => ({
'article h1': {
textAlign: 'center',
},
'article h2': {
borderBottom: '.5rem solid #fff',
lineHeight: rhythm(1.5),
},
'article a': {
textDecoration: 'underline',
color: '#000',
},
  'article a:hover, article a:active': {
color: '#d65947',
},
})
const typography = new Typography(theme)
export default typography
|
module.exports={A:{A:{"2":"I D F E nB","8":"A B"},B:{"1":"Z MB M N S T U ZB W","8":"C O H P J K L"},C:{"1":"JB KB LB R NB OB PB HB Y X WB SB TB UB VB QB Z MB M mB N S T U","2":"lB cB G a I D F E A B C O H P J K L b c d e f g h i j k l uB xB","8":"0 1 2 3 4 5 m n o p q r s t u v w x y z","456":"6 7 8 9 AB BB CB DB EB","712":"FB GB RB IB"},D:{"1":"NB OB PB HB Y X WB SB TB UB VB QB Z MB M N S T U ZB W zB dB eB","2":"0 1 2 3 4 5 6 7 G a I D F E A B C O H P J K L b c d e f g h i j k l m n o p q r s t u v w x y z","8":"8 9","132":"AB BB CB DB EB FB GB RB IB JB KB LB R"},E:{"2":"G a I D fB XB hB iB jB","8":"F E A kB","132":"B C O H YB V Q oB pB"},F:{"1":"KB LB R NB OB PB HB Y X","2":"E B C P J K L b c d e f g h i j k l m n o p q r s t u v w qB rB sB tB V aB vB Q","132":"0 1 2 3 4 5 6 7 8 9 x y z AB BB CB DB EB GB IB JB"},G:{"2":"F XB wB bB yB XC 0B 1B 2B 3B 4B","132":"5B 6B 7B 8B 9B AC BC CC DC EC"},H:{"2":"FC"},I:{"1":"M","2":"cB G GC HC IC JC bB KC LC"},J:{"2":"D A"},K:{"1":"FB","2":"A B C V aB Q"},L:{"1":"W"},M:{"1":"N"},N:{"2":"A B"},O:{"1":"MC"},P:{"1":"OC PC QC RC YB SC TC UC","2":"G","132":"NC"},Q:{"132":"VC"},R:{"132":"WC"},S:{"8":"gB"}},B:1,C:"Custom Elements (V1)"};
|
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""gov_report dataset."""
import json
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.summarization.gov_report import gov_report
_REPORT = r"""
{
"section_title":"",
"paragraphs":[
],
"subsections":[
{
"section_title":"Introduction",
"paragraphs":[
"p1",
"p2"
],
"subsections":[
{
"section_title":"Background",
"paragraphs":[
"p3"
],
"subsections":[
]
}
]
},
{
"section_title":"Conclusion",
"paragraphs":[
"p6"
],
"subsections":[
]
}
]
}
"""
class GovReportTest(tfds.testing.DatasetBuilderTestCase):
"""Tests for gov_report dataset."""
DATASET_CLASS = gov_report.GovReport
SPLITS = {
tfds.Split.TRAIN: 1,
tfds.Split.VALIDATION: 1,
tfds.Split.TEST: 1,
}
def test_flatten_structures_whitespace(self):
self.assertEqual(
gov_report._flatten_structure(json.loads(_REPORT), " ", 1, False),
" Introduction p1 p2 Background p3 Conclusion p6")
def test_flatten_structures_html(self):
self.assertEqual(
gov_report._flatten_structure(json.loads(_REPORT), "\n", 1, True),
"<h1></h1>\n<h2>Introduction</h2>\np1\np2\n<h3>Background</h3>\np3\n<h2>Conclusion</h2>\np6"
)
if __name__ == "__main__":
tfds.testing.test_main()
|
__author__ = 'tsungyi'
from . import _mask
# Interface for manipulating masks stored in RLE format.
#
# RLE is a simple yet efficient format for storing binary masks. RLE
# first divides a vector (or vectorized image) into a series of piecewise
# constant regions and then for each piece simply stores the length of
# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would
# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1]
# (note that the odd counts are always the numbers of zeros). Instead of
# storing the counts directly, additional compression is achieved with a
# variable bitrate representation based on a common scheme called LEB128.
#
# Compression is greatest given large piecewise constant regions.
# Specifically, the size of the RLE is proportional to the number of
# *boundaries* in M (or for an image the number of boundaries in the y
# direction). Assuming fairly simple shapes, the RLE representation is
# O(sqrt(n)) where n is number of pixels in the object. Hence space usage
# is substantially lower, especially for large simple objects (large n).
#
# Many common operations on masks can be computed directly using the RLE
# (without need for decoding). This includes computations such as area,
# union, intersection, etc. All of these operations are linear in the
# size of the RLE, in other words they are O(sqrt(n)) where n is the area
# of the object. Computing these operations on the original mask is O(n).
# Thus, using the RLE can result in substantial computational savings.
#
# The following API functions are defined:
# encode - Encode binary masks using RLE.
# decode - Decode binary masks encoded via RLE.
# merge - Compute union or intersection of encoded masks.
# iou - Compute intersection over union between masks.
# area - Compute area of encoded masks.
# toBbox - Get bounding boxes surrounding encoded masks.
# frPyObjects - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask.
#
# Usage:
# Rs = encode( masks )
# masks = decode( Rs )
# R = merge( Rs, intersect=false )
# o = iou( dt, gt, iscrowd )
# a = area( Rs )
# bbs = toBbox( Rs )
# Rs = frPyObjects( [pyObjects], h, w )
#
# In the API the following formats are used:
# Rs - [dict] Run-length encoding of binary masks
# R - dict Run-length encoding of binary mask
# masks - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order)
# iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore
# bbs - [nx4] Bounding box(es) stored as [x y w h]
# poly - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list)
# dt,gt - May be either bounding boxes or encoded masks
# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel).
#
# Finally, a note about the intersection over union (iou) computation.
# The standard iou of a ground truth (gt) and detected (dt) object is
# iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt))
# For "crowd" regions, we use a modified criteria. If a gt object is
# marked as "iscrowd", we allow a dt to match any subregion of the gt.
# Choosing gt' in the crowd gt that best matches the dt can be done using
# gt'=intersect(dt,gt). Since by definition union(gt',dt)=dt, computing
# iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt)
# For crowd gt regions we use this modified criteria above for the iou.
#
# To compile run "python setup.py build_ext --inplace"
# Please do not contact us for help with compiling.
#
# Microsoft COCO Toolbox. version 2.0
# Data, paper, and tutorials available at: http://mscoco.org/
# Code written by Piotr Dollar and Tsung-Yi Lin, 2015.
# Licensed under the Simplified BSD License [see coco/license.txt]
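# Illustrative only: a minimal pure-Python sketch of the "counts" encoding
# described above (this helper is hypothetical and not part of this module's
# API; the real encoder lives in the compiled _mask extension).
# For M=[0,0,1,1,1,0,1] it yields [2,3,1,1]; for M=[1,1,1,1,1,1,0], [0,6,1].
def _rle_counts_sketch(m):
    counts, prev, run = [], 0, 0
    for v in m:
        if v == prev:
            run += 1
        else:
            counts.append(run)  # close the previous run (may be 0 leading zeros)
            prev, run = v, 1
    counts.append(run)
    return counts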
iou = _mask.iou
merge = _mask.merge
frPyObjects = _mask.frPyObjects
def encode(bimask):
    # Accept either a stack of masks (H x W x N) or a single mask (H x W).
    if len(bimask.shape) == 3:
        return _mask.encode(bimask)
    elif len(bimask.shape) == 2:
        h, w = bimask.shape
        return _mask.encode(bimask.reshape((h, w, 1), order='F'))[0]
    raise ValueError('bimask must be a 2D or 3D uint8 array')
def decode(rleObjs):
    if isinstance(rleObjs, list):
        return _mask.decode(rleObjs)
    else:
        return _mask.decode([rleObjs])[:, :, 0]
def area(rleObjs):
    if isinstance(rleObjs, list):
        return _mask.area(rleObjs)
    else:
        return _mask.area([rleObjs])[0]
def toBbox(rleObjs):
    if isinstance(rleObjs, list):
        return _mask.toBbox(rleObjs)
    else:
        return _mask.toBbox([rleObjs])[0]
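# Example (illustrative): round-tripping a single H x W uint8 mask `m`:
#   rle = encode(np.asfortranarray(m)); m2 = decode(rle)
# Lists of masks/RLEs are handled element-wise by the wrappers above.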
|
# OrcaWindTurbine imports
from IO import IO
import AuxFunctions as aux
from OrcaflexModel import OrcaflexModel
# Other imports
from collections import namedtuple
from itertools import product
import numpy as np
def set_and_run_batch(orca_model: OrcaflexModel, post) -> None:
    # Get the batch type name from the input data...
    batch_type = aux.get_ith_key(IO.input_data["Batch"], 0)
    batch_type = aux.to_title_and_remove_ws(batch_type)
    # ... look up the matching class in this module and run the analyses
    batch = globals()[batch_type](post)
print() # blank line
batch.execute_batch(orca_model, post)
class BatchSimulations:
"""[summary]"""
def __init__(self, post) -> None:
"""[summary]
Args:
post (Post): [description]
"""
class ThrustCurve(BatchSimulations):
"""[summary]"""
def __init__(self, post):
"""[summary]
Args:
post (Post): [description]
"""
super().__init__(post)
# Input options
opt = IO.input_data["Batch"]["thrust curve"]
# wind speed range to simulate
self.eval_range = aux.get_range_or_list(opt["wind speed"])
self.vars_to_eval, self.names = self.set_vars_to_eval(opt["monitors"])
self.wind_direction = opt.get("direction", 0.0)
self.profile = self.set_profile(opt.get("profile", None))
def execute_batch(self, orca_model: OrcaflexModel, post) -> None:
"""[summary]
Args:
orca_model (OrcaflexModel): [description]
post ([type]): [description]
"""
self.eval_thrust_curve(orca_model, post)
if IO.actions["plot results"]:
post.plot.plot_batch(post, self)
    def set_vars_to_eval(self, opt: dict) -> tuple[dict[str, list], list]:
        """Builds the collection of variables to monitor during the sweep.
        Args:
            opt (dict): the 'monitors' section of the input options.
        Returns:
            tuple[dict[str, list], list]: variables to evaluate and their names.
        """
to_eval = dict(
{
"vars": [],
"specific blade": [],
"specific position": [],
"specific blade and position": [],
}
)
var_names = []
keys = opt.keys()
if "rotor" in keys:
to_eval["vars"].extend(
["Rotor aero " + var_name.title() for var_name in opt["rotor"]]
)
if "generator" in keys:
to_eval["vars"].extend(
["Generator " + var_name for var_name in opt["generator"]]
)
if "connection" in keys:
to_eval["vars"].extend(
["Connection " + var_name for var_name in opt["connection"]]
)
# other variables defined just with its name (eg Angular Velocity)
if "others" in keys:
to_eval["vars"].extend([var_name for var_name in opt["others"]])
var_names.extend(to_eval["vars"])
if "specific blade" in keys:
for data in opt["specific blade"]:
to_eval["specific blade"].append((data["vars"], data["id"]))
# Get all combinations for (variable, blade id)
comb = product(data["vars"], aux.get_range_or_list(data["id"]))
var_names.extend([i[0] + " - " + str(i[1]) for i in comb])
return to_eval, var_names
def eval_thrust_curve(self, orca_model: OrcaflexModel, post) -> None:
"""[summary]
Args:
orca_model (OrcaflexModel): [description]
post ([type]): [description]
"""
expoent = str(
IO.input_data["Batch"]["thrust curve"]["profile"]["expoent"],
)
# Set wind profile (same for all velocities)
orca_model.create_wind_profile(
self.profile["height"],
self.profile["vertical factor"],
f"wind profile - exp{expoent}",
)
# Iterate speeds
for speed in self.eval_range:
# Set wind speed
orca_model.set_wind(
{
"type": "constant",
"speed": speed,
"direction": self.wind_direction,
}
)
# Run simulation
print("Running with wind speed: ", speed, "m/s")
orca_model.model.RunSimulation()
# Save
IO.save_step_from_batch(
orca_model.model,
"wind_speed_" + str(speed),
post,
)
# Get results
post.append_thrust_results(
orca_model.orca_refs["turbines"][1], self.vars_to_eval
)
# Mount curves data
post.set_thrust_curves(self.names, self.eval_range)
def set_profile(self, opt):
if opt is None:
return None
if isinstance(opt["height"], list) and isinstance(opt["factor"], list):
return {"height": opt["height"], "vertical factor": opt["factor"]}
height = aux.get_range_or_list(opt["height"])
h_ref = opt["reference height"]
coef = opt["expoent"]
return {
"height": height,
"vertical factor": [(h / h_ref) ** coef for h in height],
}
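    # Example (illustrative): with reference height 10.0 and exponent 0.14,
    # set_profile yields vertical factors (h / 10.0) ** 0.14 for each height,
    # i.e. the usual power-law wind shear profile.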
class VesselHarmonicMotion(BatchSimulations):
"""[summary]"""
def __init__(self, post) -> None:
"""[summary]
Args:
post (Post): [description]
"""
super().__init__(post)
# Input options
opt = IO.input_data["Batch"]["vessel harmonic motion"]
self.combine_dofs = opt.get("combine dofs", False)
self.dof_position = opt["position"]
self.dofs_to_oscilate = list(opt["position"].keys())
def execute_batch(self, orca_model: OrcaflexModel, post) -> None:
"""[summary]
Args:
orca_model (OrcaflexModel): [description]
post (Post): [description]
"""
self.impose_motion(orca_model, post)
if IO.actions["plot results"]:
post.plot.plot_batch(post, self)
def impose_motion(self, orca_model: OrcaflexModel, post) -> None:
"""[summary]
Args:
orca_model (OrcaflexModel): [description]
post ([type]): [description]
"""
DoF = namedtuple("DoF", ["name", "period", "amplitude", "phase"])
vessel = orca_model.orca_refs["vessels"][1]
vessel.SuperimposedMotion = "Displacement RAOs + harmonic motion"
        # Iterate through DoFs...
        for dof, cases in self.get_all_combinations().items():
            # ... skip DoFs with no oscillation defined ...
            if dof not in self.dofs_to_oscilate:
                continue
            # ... otherwise, simulate all combinations
for c in cases:
dof_name = dof.title()
dof_data = DoF(dof_name, c[0], c[1], c[2])
orca_model.set_vessel_harmonic_motion(dof_data)
# Run simulation
print(f"\nRunning scenario with oscilation in {dof_name}")
print(f"period= {c[0]}, amplitude= {c[1]}, phase= {c[2]}")
orca_model.model.RunSimulation()
# Get results
post.process_simulation_results(orca_model.orca_refs)
# Save
file_name = dof + f"_period{c[0]}_ampl{c[1]}_phase{c[2]}"
IO.save_step_from_batch(orca_model.model, file_name, post)
def get_all_combinations(self) -> dict[str, list]:
"""[summary]
Returns:
dict[str, list[tuple[float, float, float] | itertools.product]]:
[description]
"""
# Specify motion for each DoF individually
# if not self.combine_dofs:
combs = dict(
{
"surge": [(0.0, 0.0, 0.0)],
"sway": [(0.0, 0.0, 0.0)],
"heave": [(0.0, 0.0, 0.0)],
"roll": [(0.0, 0.0, 0.0)],
"pitch": [(0.0, 0.0, 0.0)],
"yaw": [(0.0, 0.0, 0.0)],
}
)
for dof in combs:
data = self.dof_position.get(dof)
            # if defined, replace the default with the user's combinations
if data:
combs[dof] = self.get_dof_combinations(data)
return combs
def get_dof_combinations(self, input_options: dict) -> product:
"""[summary]
Args:
input_options (dict): [description]
Returns:
itertools.product: [description]
"""
return product(
aux.get_range_or_list(input_options["period"]),
aux.get_range_or_list(input_options["amplitude"]),
aux.get_range_or_list(input_options["phase"]),
)
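    # Example (illustrative): {"period": [5, 10], "amplitude": [1.0], "phase": [0]}
    # yields the combinations (5, 1.0, 0) and (10, 1.0, 0).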
class WaveSeed(BatchSimulations):
"""[summary]"""
def __init__(self, post) -> None:
"""[summary]
Args:
post (Post): [description]
"""
super().__init__(post)
# Input options
self.n_cases = IO.input_data["Batch"]["wave seed"]["number of cases"]
self.rng = aux.get_numpy_random_gen(
IO.input_data["Batch"]["wave seed"].get("seed generator", None)
)
def execute_batch(self, orca_model: OrcaflexModel, post) -> None:
"""[summary]
Args:
orca_model (OrcaflexModel): [description]
post (Post): [description]
"""
self.update_wave_seeds_and_run(orca_model, post)
if IO.actions["plot results"]:
post.plot.plot_batch(post, self)
def update_wave_seeds_and_run(self, orca_model: OrcaflexModel, post):
"""[summary]
Args:
orca_model (OrcaflexModel): [description]
post (Post): [description]
"""
orca_env = orca_model.model.environment
        for case in range(1, self.n_cases + 1):
orca_env.WaveSeed = aux.get_seed(self.rng)
# Run simulation
print(
f"\nRunning simulation {case}/{self.n_cases}:",
f"\twave seed={orca_env.WaveSeed}",
)
orca_model.model.RunSimulation()
# Get results
post.process_simulation_results(orca_model.orca_refs)
# Save
file_name = f"wave_seed_{case}_of_{self.n_cases}"
IO.save_step_from_batch(orca_model.model, file_name, post)
|
#
# For licensing see accompanying LICENSE.txt file.
# Copyright (C) 2020 Apple Inc. All Rights Reserved.
#
from pylab import *
import inspect
import os
import time
import vray
import path_utils
path_utils.add_path_to_sys_path("..", mode="relative_to_current_source_dir", frame=inspect.currentframe())
import _system_config
current_source_file_path = path_utils.get_current_source_file_path(frame=inspect.currentframe())
vrscene_file = os.path.abspath(os.path.join(current_source_file_path, "..", "..", "..", "examples", "00_empty_scene", "empty.vrscene"))
# create renderer
renderer = vray.VRayRenderer()
# create logging callback
fail_message = None
def log_msg(renderer, message, level, instant):
    global fail_message
    if message.startswith("Failed"):
        fail_message = message
renderer.setOnLogMessage(log_msg)
renderer.load(vrscene_file)
time.sleep(0.5)
renderer.close()
if fail_message is not None:
print("\n[HYPERSIM: CHECK_VRAY_APPSDK_INSTALL] The V-Ray AppSDK is not configured correctly on your system: " + fail_message + "\n")
exit(-1)
else:
current_source_file_path = path_utils.get_current_source_file_path(frame=inspect.currentframe())
cwd = os.getcwd()
os.chdir(current_source_file_path)
cmd = _system_config.python_bin + " _check_vray_appsdk_install.py"
retval = os.system(cmd)
assert retval == 0
os.chdir(cwd)
print("\n[HYPERSIM: CHECK_VRAY_APPSDK_INSTALL] The V-Ray AppSDK is configured correctly on your system.\n")
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef vm_JSAtom_inl_h
#define vm_JSAtom_inl_h
#include "vm/JSAtom.h"
#include "mozilla/FloatingPoint.h"
#include "mozilla/RangedPtr.h"
#include "jsnum.h"
#include "gc/MaybeRooted.h"
#include "vm/Runtime.h"
#include "vm/StringType.h"
namespace js {
MOZ_ALWAYS_INLINE jsid AtomToId(JSAtom* atom) {
static_assert(JSID_INT_MIN == 0);
uint32_t index;
if (atom->isIndex(&index) && index <= JSID_INT_MAX) {
return INT_TO_JSID(int32_t(index));
}
return JS::PropertyKey::fromNonIntAtom(atom);
}
// Use the NameToId method instead!
inline jsid AtomToId(PropertyName* name) = delete;
MOZ_ALWAYS_INLINE bool ValueToIntId(const Value& v, jsid* id) {
int32_t i;
if (v.isInt32()) {
i = v.toInt32();
} else if (!v.isDouble() || !mozilla::NumberEqualsInt32(v.toDouble(), &i)) {
return false;
}
if (!INT_FITS_IN_JSID(i)) {
return false;
}
*id = INT_TO_JSID(i);
return true;
}
inline bool ValueToIdPure(const Value& v, jsid* id) {
if (v.isString()) {
if (v.toString()->isAtom()) {
*id = AtomToId(&v.toString()->asAtom());
return true;
}
return false;
}
if (ValueToIntId(v, id)) {
return true;
}
if (v.isSymbol()) {
*id = SYMBOL_TO_JSID(v.toSymbol());
return true;
}
return false;
}
template <AllowGC allowGC>
inline bool PrimitiveValueToId(
JSContext* cx, typename MaybeRooted<Value, allowGC>::HandleType v,
typename MaybeRooted<jsid, allowGC>::MutableHandleType idp) {
// Non-primitive values should call ToPropertyKey.
MOZ_ASSERT(v.isPrimitive());
if (v.isString()) {
if (v.toString()->isAtom()) {
idp.set(AtomToId(&v.toString()->asAtom()));
return true;
}
} else {
if (ValueToIntId(v, idp.address())) {
return true;
}
if (v.isSymbol()) {
idp.set(SYMBOL_TO_JSID(v.toSymbol()));
return true;
}
}
JSAtom* atom = ToAtom<allowGC>(cx, v);
if (!atom) {
return false;
}
idp.set(AtomToId(atom));
return true;
}
/*
* Write out character representing |index| to the memory just before |end|.
* Thus |*end| is not touched, but |end[-1]| and earlier are modified as
* appropriate. There must be at least js::UINT32_CHAR_BUFFER_LENGTH elements
* before |end| to avoid buffer underflow. The start of the characters written
* is returned and is necessarily before |end|.
*/
template <typename T>
inline mozilla::RangedPtr<T> BackfillIndexInCharBuffer(
uint32_t index, mozilla::RangedPtr<T> end) {
#ifdef DEBUG
/*
* Assert that the buffer we're filling will hold as many characters as we
* could write out, by dereferencing the index that would hold the most
* significant digit.
*/
(void)*(end - UINT32_CHAR_BUFFER_LENGTH);
#endif
do {
uint32_t next = index / 10, digit = index % 10;
*--end = '0' + digit;
index = next;
} while (index > 0);
return end;
}
bool IndexToIdSlow(JSContext* cx, uint32_t index, MutableHandleId idp);
inline bool IndexToId(JSContext* cx, uint32_t index, MutableHandleId idp) {
if (index <= JSID_INT_MAX) {
idp.set(INT_TO_JSID(index));
return true;
}
return IndexToIdSlow(cx, index, idp);
}
static MOZ_ALWAYS_INLINE JSLinearString* IdToString(JSContext* cx, jsid id) {
if (JSID_IS_STRING(id)) {
return id.toAtom();
}
if (MOZ_LIKELY(JSID_IS_INT(id))) {
return Int32ToString<CanGC>(cx, JSID_TO_INT(id));
}
RootedValue idv(cx, IdToValue(id));
JSString* str = ToStringSlow<CanGC>(cx, idv);
if (!str) {
return nullptr;
}
return str->ensureLinear(cx);
}
inline Handle<PropertyName*> TypeName(JSType type, const JSAtomState& names) {
MOZ_ASSERT(type < JSTYPE_LIMIT);
static_assert(offsetof(JSAtomState, undefined) +
JSTYPE_LIMIT * sizeof(ImmutablePropertyNamePtr) <=
sizeof(JSAtomState));
static_assert(JSTYPE_UNDEFINED == 0);
return (&names.undefined)[type];
}
inline Handle<PropertyName*> ClassName(JSProtoKey key, JSAtomState& atomState) {
MOZ_ASSERT(key < JSProto_LIMIT);
static_assert(offsetof(JSAtomState, Null) +
JSProto_LIMIT * sizeof(ImmutablePropertyNamePtr) <=
sizeof(JSAtomState));
static_assert(JSProto_Null == 0);
return (&atomState.Null)[key];
}
} // namespace js
#endif /* vm_JSAtom_inl_h */
|
var path = require('path');
var webpack = require('webpack');
module.exports = {
entry: [
'react-hot-loader/patch',
// activate HMR for React
'webpack-dev-server/client?http://localhost:9700',
// bundle the client for webpack-dev-server
// and connect to the provided endpoint
'webpack/hot/only-dev-server',
// bundle the client for hot reloading
// only- means to only hot reload for successful updates
'./src/index.js',
// the entry point of our app
],
output: {
filename: 'bundle.js',
// the output bundle
path: path.resolve(__dirname, 'dist'),
publicPath: '/static/'
// necessary for HMR to know where to load the hot update chunks
},
devtool: 'inline-source-map',
module: {
rules: [
{
test: /\.jsx?$/,
use: [
'babel-loader',
],
exclude: /node_modules/,
},
{
test: /\.(css|scss)$/,
loader:"style-loader!css-loader!sass-loader"
// loader:"style-loader!css-loader!autoprefixer-loader!sass-loader"
},
{
test: /\.(png|jpg)$/,
use: 'url-loader?limit=8192'
}
]
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
// enable HMR globally
new webpack.NamedModulesPlugin(),
// prints more readable module names in the browser console on HMR updates
new webpack.NoEmitOnErrorsPlugin(),
// do not emit compiled assets that include errors
],
devServer: {
host: '0.0.0.0',
port: 9700,
historyApiFallback: true,
// respond to 404s with index.html
hot: true,
// enable HMR on the server
},
};
|
#ifndef __FREEGLUT_STD_H__
#define __FREEGLUT_STD_H__
/*
* freeglut_std.h
*
* The GLUT-compatible part of the freeglut library include file
*
* Copyright (c) 1999-2000 Pawel W. Olszta. All Rights Reserved.
* Written by Pawel W. Olszta, <olszta@sourceforge.net>
* Creation date: Thu Dec 2 1999
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* PAWEL W. OLSZTA BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifdef __cplusplus
extern "C" {
#endif
/*
* Under windows, we have to differentiate between static and dynamic libraries
*/
#ifdef _WIN32
/* #pragma may not be supported by some compilers.
* Discussion by FreeGLUT developers suggests that
* Visual C++ specific code involving pragmas may
* need to move to a separate header. 24th Dec 2003
*/
/* Define FREEGLUT_LIB_PRAGMAS to 1 to include library
* pragmas or to 0 to exclude library pragmas.
* The default behavior depends on the compiler/platform.
*/
# ifndef FREEGLUT_LIB_PRAGMAS
# if ( defined(_MSC_VER) || defined(__WATCOMC__) ) && !defined(_WIN32_WCE)
# define FREEGLUT_LIB_PRAGMAS 1
# else
# define FREEGLUT_LIB_PRAGMAS 0
# endif
# endif
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN 1
# endif
# ifndef NOMINMAX
# define NOMINMAX
# endif
# include <windows.h>
/* Windows static library */
# ifdef FREEGLUT_STATIC
# define FGAPI
# define FGAPIENTRY
/* Link with Win32 static freeglut lib */
# if FREEGLUT_LIB_PRAGMAS
# ifdef NDEBUG
/* NDEBUG is defined for release builds, so link the debug lib otherwise */
#            pragma comment (lib, "freeglut_static.lib")
#        else
#            pragma comment (lib, "freeglut_staticd.lib")
# endif
# endif
/* Windows shared library (DLL) */
# else
# define FGAPIENTRY __stdcall
# if defined(FREEGLUT_EXPORTS)
# define FGAPI __declspec(dllexport)
# else
# define FGAPI __declspec(dllimport)
/* Link with Win32 shared freeglut lib */
# if FREEGLUT_LIB_PRAGMAS
// # ifdef NDEBUG
// # pragma comment (lib, "freeglutd.lib")
// # else
# pragma comment (lib, "freeglut.lib")
// # endif
# endif
# endif
# endif
/* Drag in other Windows libraries as required by FreeGLUT */
# if FREEGLUT_LIB_PRAGMAS
# pragma comment (lib, "glu32.lib") /* link OpenGL Utility lib */
# pragma comment (lib, "opengl32.lib") /* link Microsoft OpenGL lib */
# pragma comment (lib, "gdi32.lib") /* link Windows GDI lib */
# pragma comment (lib, "winmm.lib") /* link Windows MultiMedia lib */
# pragma comment (lib, "user32.lib") /* link Windows user lib */
# endif
#else
/* Non-Windows definition of FGAPI and FGAPIENTRY */
# define FGAPI
# define FGAPIENTRY
#endif
/*
* The freeglut and GLUT API versions
*/
#define FREEGLUT 1
#define GLUT_API_VERSION 4
#define GLUT_XLIB_IMPLEMENTATION 13
/* Deprecated:
cf. http://sourceforge.net/mailarchive/forum.php?thread_name=CABcAi1hw7cr4xtigckaGXB5X8wddLfMcbA_rZ3NAuwMrX_zmsw%40mail.gmail.com&forum_name=freeglut-developer */
#define FREEGLUT_VERSION_2_0 1
/*
* Always include OpenGL and GLU headers
*/
/* Note: FREEGLUT_GLES is only used to cleanly bootstrap headers
inclusion here; use GLES constants directly
(e.g. GL_ES_VERSION_2_0) for all other needs */
#ifdef FREEGLUT_GLES
# include <EGL/egl.h>
# include <GLES/gl.h>
# include <GLES2/gl2.h>
#elif defined(__APPLE__)
# include <OpenGL/gl.h>
# include <OpenGL/glu.h>
#else
# include <GL/gl.h>
# include <GL/glu.h>
#endif
/*
* GLUT API macro definitions -- the special key codes:
*/
#define GLUT_KEY_F1 0x0001
#define GLUT_KEY_F2 0x0002
#define GLUT_KEY_F3 0x0003
#define GLUT_KEY_F4 0x0004
#define GLUT_KEY_F5 0x0005
#define GLUT_KEY_F6 0x0006
#define GLUT_KEY_F7 0x0007
#define GLUT_KEY_F8 0x0008
#define GLUT_KEY_F9 0x0009
#define GLUT_KEY_F10 0x000A
#define GLUT_KEY_F11 0x000B
#define GLUT_KEY_F12 0x000C
#define GLUT_KEY_LEFT 0x0064
#define GLUT_KEY_UP 0x0065
#define GLUT_KEY_RIGHT 0x0066
#define GLUT_KEY_DOWN 0x0067
#define GLUT_KEY_PAGE_UP 0x0068
#define GLUT_KEY_PAGE_DOWN 0x0069
#define GLUT_KEY_HOME 0x006A
#define GLUT_KEY_END 0x006B
#define GLUT_KEY_INSERT 0x006C
/*
* GLUT API macro definitions -- mouse state definitions
*/
#define GLUT_LEFT_BUTTON 0x0000
#define GLUT_MIDDLE_BUTTON 0x0001
#define GLUT_RIGHT_BUTTON 0x0002
#define GLUT_DOWN 0x0000
#define GLUT_UP 0x0001
#define GLUT_LEFT 0x0000
#define GLUT_ENTERED 0x0001
/*
* GLUT API macro definitions -- the display mode definitions
*/
#define GLUT_RGB 0x0000
#define GLUT_RGBA 0x0000
#define GLUT_INDEX 0x0001
#define GLUT_SINGLE 0x0000
#define GLUT_DOUBLE 0x0002
#define GLUT_ACCUM 0x0004
#define GLUT_ALPHA 0x0008
#define GLUT_DEPTH 0x0010
#define GLUT_STENCIL 0x0020
#define GLUT_MULTISAMPLE 0x0080
#define GLUT_STEREO 0x0100
#define GLUT_LUMINANCE 0x0200
/*
* GLUT API macro definitions -- windows and menu related definitions
*/
#define GLUT_MENU_NOT_IN_USE 0x0000
#define GLUT_MENU_IN_USE 0x0001
#define GLUT_NOT_VISIBLE 0x0000
#define GLUT_VISIBLE 0x0001
#define GLUT_HIDDEN 0x0000
#define GLUT_FULLY_RETAINED 0x0001
#define GLUT_PARTIALLY_RETAINED 0x0002
#define GLUT_FULLY_COVERED 0x0003
/*
* GLUT API macro definitions -- fonts definitions
*
* Steve Baker suggested to make it binary compatible with GLUT:
*/
#if defined(_MSC_VER) || defined(__CYGWIN__) || defined(__MINGW32__) || defined(__WATCOMC__)
# define GLUT_STROKE_ROMAN ((void *)0x0000)
# define GLUT_STROKE_MONO_ROMAN ((void *)0x0001)
# define GLUT_BITMAP_9_BY_15 ((void *)0x0002)
# define GLUT_BITMAP_8_BY_13 ((void *)0x0003)
# define GLUT_BITMAP_TIMES_ROMAN_10 ((void *)0x0004)
# define GLUT_BITMAP_TIMES_ROMAN_24 ((void *)0x0005)
# define GLUT_BITMAP_HELVETICA_10 ((void *)0x0006)
# define GLUT_BITMAP_HELVETICA_12 ((void *)0x0007)
# define GLUT_BITMAP_HELVETICA_18 ((void *)0x0008)
#else
/*
* I don't really know if it's a good idea... But here it goes:
*/
extern void* glutStrokeRoman;
extern void* glutStrokeMonoRoman;
extern void* glutBitmap9By15;
extern void* glutBitmap8By13;
extern void* glutBitmapTimesRoman10;
extern void* glutBitmapTimesRoman24;
extern void* glutBitmapHelvetica10;
extern void* glutBitmapHelvetica12;
extern void* glutBitmapHelvetica18;
/*
* Those pointers will be used by following definitions:
*/
# define GLUT_STROKE_ROMAN ((void *) &glutStrokeRoman)
# define GLUT_STROKE_MONO_ROMAN ((void *) &glutStrokeMonoRoman)
# define GLUT_BITMAP_9_BY_15 ((void *) &glutBitmap9By15)
# define GLUT_BITMAP_8_BY_13 ((void *) &glutBitmap8By13)
# define GLUT_BITMAP_TIMES_ROMAN_10 ((void *) &glutBitmapTimesRoman10)
# define GLUT_BITMAP_TIMES_ROMAN_24 ((void *) &glutBitmapTimesRoman24)
# define GLUT_BITMAP_HELVETICA_10 ((void *) &glutBitmapHelvetica10)
# define GLUT_BITMAP_HELVETICA_12 ((void *) &glutBitmapHelvetica12)
# define GLUT_BITMAP_HELVETICA_18 ((void *) &glutBitmapHelvetica18)
#endif
/*
* GLUT API macro definitions -- the glutGet parameters
*/
#define GLUT_WINDOW_X 0x0064
#define GLUT_WINDOW_Y 0x0065
#define GLUT_WINDOW_WIDTH 0x0066
#define GLUT_WINDOW_HEIGHT 0x0067
#define GLUT_WINDOW_BUFFER_SIZE 0x0068
#define GLUT_WINDOW_STENCIL_SIZE 0x0069
#define GLUT_WINDOW_DEPTH_SIZE 0x006A
#define GLUT_WINDOW_RED_SIZE 0x006B
#define GLUT_WINDOW_GREEN_SIZE 0x006C
#define GLUT_WINDOW_BLUE_SIZE 0x006D
#define GLUT_WINDOW_ALPHA_SIZE 0x006E
#define GLUT_WINDOW_ACCUM_RED_SIZE 0x006F
#define GLUT_WINDOW_ACCUM_GREEN_SIZE 0x0070
#define GLUT_WINDOW_ACCUM_BLUE_SIZE 0x0071
#define GLUT_WINDOW_ACCUM_ALPHA_SIZE 0x0072
#define GLUT_WINDOW_DOUBLEBUFFER 0x0073
#define GLUT_WINDOW_RGBA 0x0074
#define GLUT_WINDOW_PARENT 0x0075
#define GLUT_WINDOW_NUM_CHILDREN 0x0076
#define GLUT_WINDOW_COLORMAP_SIZE 0x0077
#define GLUT_WINDOW_NUM_SAMPLES 0x0078
#define GLUT_WINDOW_STEREO 0x0079
#define GLUT_WINDOW_CURSOR 0x007A
#define GLUT_SCREEN_WIDTH 0x00C8
#define GLUT_SCREEN_HEIGHT 0x00C9
#define GLUT_SCREEN_WIDTH_MM 0x00CA
#define GLUT_SCREEN_HEIGHT_MM 0x00CB
#define GLUT_MENU_NUM_ITEMS 0x012C
#define GLUT_DISPLAY_MODE_POSSIBLE 0x0190
#define GLUT_INIT_WINDOW_X 0x01F4
#define GLUT_INIT_WINDOW_Y 0x01F5
#define GLUT_INIT_WINDOW_WIDTH 0x01F6
#define GLUT_INIT_WINDOW_HEIGHT 0x01F7
#define GLUT_INIT_DISPLAY_MODE 0x01F8
#define GLUT_ELAPSED_TIME 0x02BC
#define GLUT_WINDOW_FORMAT_ID 0x007B
/*
* GLUT API macro definitions -- the glutDeviceGet parameters
*/
#define GLUT_HAS_KEYBOARD 0x0258
#define GLUT_HAS_MOUSE 0x0259
#define GLUT_HAS_SPACEBALL 0x025A
#define GLUT_HAS_DIAL_AND_BUTTON_BOX 0x025B
#define GLUT_HAS_TABLET 0x025C
#define GLUT_NUM_MOUSE_BUTTONS 0x025D
#define GLUT_NUM_SPACEBALL_BUTTONS 0x025E
#define GLUT_NUM_BUTTON_BOX_BUTTONS 0x025F
#define GLUT_NUM_DIALS 0x0260
#define GLUT_NUM_TABLET_BUTTONS 0x0261
#define GLUT_DEVICE_IGNORE_KEY_REPEAT 0x0262
#define GLUT_DEVICE_KEY_REPEAT 0x0263
#define GLUT_HAS_JOYSTICK 0x0264
#define GLUT_OWNS_JOYSTICK 0x0265
#define GLUT_JOYSTICK_BUTTONS 0x0266
#define GLUT_JOYSTICK_AXES 0x0267
#define GLUT_JOYSTICK_POLL_RATE 0x0268
/*
* GLUT API macro definitions -- the glutLayerGet parameters
*/
#define GLUT_OVERLAY_POSSIBLE 0x0320
#define GLUT_LAYER_IN_USE 0x0321
#define GLUT_HAS_OVERLAY 0x0322
#define GLUT_TRANSPARENT_INDEX 0x0323
#define GLUT_NORMAL_DAMAGED 0x0324
#define GLUT_OVERLAY_DAMAGED 0x0325
/*
* GLUT API macro definitions -- the glutVideoResizeGet parameters
*/
#define GLUT_VIDEO_RESIZE_POSSIBLE 0x0384
#define GLUT_VIDEO_RESIZE_IN_USE 0x0385
#define GLUT_VIDEO_RESIZE_X_DELTA 0x0386
#define GLUT_VIDEO_RESIZE_Y_DELTA 0x0387
#define GLUT_VIDEO_RESIZE_WIDTH_DELTA 0x0388
#define GLUT_VIDEO_RESIZE_HEIGHT_DELTA 0x0389
#define GLUT_VIDEO_RESIZE_X 0x038A
#define GLUT_VIDEO_RESIZE_Y 0x038B
#define GLUT_VIDEO_RESIZE_WIDTH 0x038C
#define GLUT_VIDEO_RESIZE_HEIGHT 0x038D
/*
* GLUT API macro definitions -- the glutUseLayer parameters
*/
#define GLUT_NORMAL 0x0000
#define GLUT_OVERLAY 0x0001
/*
* GLUT API macro definitions -- the glutGetModifiers parameters
*/
#define GLUT_ACTIVE_SHIFT 0x0001
#define GLUT_ACTIVE_CTRL 0x0002
#define GLUT_ACTIVE_ALT 0x0004
/*
* GLUT API macro definitions -- the glutSetCursor parameters
*/
#define GLUT_CURSOR_RIGHT_ARROW 0x0000
#define GLUT_CURSOR_LEFT_ARROW 0x0001
#define GLUT_CURSOR_INFO 0x0002
#define GLUT_CURSOR_DESTROY 0x0003
#define GLUT_CURSOR_HELP 0x0004
#define GLUT_CURSOR_CYCLE 0x0005
#define GLUT_CURSOR_SPRAY 0x0006
#define GLUT_CURSOR_WAIT 0x0007
#define GLUT_CURSOR_TEXT 0x0008
#define GLUT_CURSOR_CROSSHAIR 0x0009
#define GLUT_CURSOR_UP_DOWN 0x000A
#define GLUT_CURSOR_LEFT_RIGHT 0x000B
#define GLUT_CURSOR_TOP_SIDE 0x000C
#define GLUT_CURSOR_BOTTOM_SIDE 0x000D
#define GLUT_CURSOR_LEFT_SIDE 0x000E
#define GLUT_CURSOR_RIGHT_SIDE 0x000F
#define GLUT_CURSOR_TOP_LEFT_CORNER 0x0010
#define GLUT_CURSOR_TOP_RIGHT_CORNER 0x0011
#define GLUT_CURSOR_BOTTOM_RIGHT_CORNER 0x0012
#define GLUT_CURSOR_BOTTOM_LEFT_CORNER 0x0013
#define GLUT_CURSOR_INHERIT 0x0064
#define GLUT_CURSOR_NONE 0x0065
#define GLUT_CURSOR_FULL_CROSSHAIR 0x0066
/*
* GLUT API macro definitions -- RGB color component specification definitions
*/
#define GLUT_RED 0x0000
#define GLUT_GREEN 0x0001
#define GLUT_BLUE 0x0002
/*
* GLUT API macro definitions -- additional keyboard and joystick definitions
*/
#define GLUT_KEY_REPEAT_OFF 0x0000
#define GLUT_KEY_REPEAT_ON 0x0001
#define GLUT_KEY_REPEAT_DEFAULT 0x0002
#define GLUT_JOYSTICK_BUTTON_A 0x0001
#define GLUT_JOYSTICK_BUTTON_B 0x0002
#define GLUT_JOYSTICK_BUTTON_C 0x0004
#define GLUT_JOYSTICK_BUTTON_D 0x0008
/*
* GLUT API macro definitions -- game mode definitions
*/
#define GLUT_GAME_MODE_ACTIVE 0x0000
#define GLUT_GAME_MODE_POSSIBLE 0x0001
#define GLUT_GAME_MODE_WIDTH 0x0002
#define GLUT_GAME_MODE_HEIGHT 0x0003
#define GLUT_GAME_MODE_PIXEL_DEPTH 0x0004
#define GLUT_GAME_MODE_REFRESH_RATE 0x0005
#define GLUT_GAME_MODE_DISPLAY_CHANGED 0x0006
/*
* Initialization functions, see fglut_init.c
*/
FGAPI void FGAPIENTRY glutInit( int* pargc, char** argv );
FGAPI void FGAPIENTRY glutInitWindowPosition( int x, int y );
FGAPI void FGAPIENTRY glutInitWindowSize( int width, int height );
FGAPI void FGAPIENTRY glutInitDisplayMode( unsigned int displayMode );
FGAPI void FGAPIENTRY glutInitDisplayString( const char* displayMode );
/*
* Process loop function, see fg_main.c
*/
FGAPI void FGAPIENTRY glutMainLoop( void );
/*
* Window management functions, see fg_window.c
*/
FGAPI int FGAPIENTRY glutCreateWindow( const char* title );
FGAPI int FGAPIENTRY glutCreateSubWindow( int window, int x, int y, int width, int height );
FGAPI void FGAPIENTRY glutDestroyWindow( int window );
FGAPI void FGAPIENTRY glutSetWindow( int window );
FGAPI int FGAPIENTRY glutGetWindow( void );
FGAPI void FGAPIENTRY glutSetWindowTitle( const char* title );
FGAPI void FGAPIENTRY glutSetIconTitle( const char* title );
FGAPI void FGAPIENTRY glutReshapeWindow( int width, int height );
FGAPI void FGAPIENTRY glutPositionWindow( int x, int y );
FGAPI void FGAPIENTRY glutShowWindow( void );
FGAPI void FGAPIENTRY glutHideWindow( void );
FGAPI void FGAPIENTRY glutIconifyWindow( void );
FGAPI void FGAPIENTRY glutPushWindow( void );
FGAPI void FGAPIENTRY glutPopWindow( void );
FGAPI void FGAPIENTRY glutFullScreen( void );
/*
* Display-related functions, see fg_display.c
*/
FGAPI void FGAPIENTRY glutPostWindowRedisplay( int window );
FGAPI void FGAPIENTRY glutPostRedisplay( void );
FGAPI void FGAPIENTRY glutSwapBuffers( void );
/*
* Mouse cursor functions, see fg_cursor.c
*/
FGAPI void FGAPIENTRY glutWarpPointer( int x, int y );
FGAPI void FGAPIENTRY glutSetCursor( int cursor );
/*
* Overlay stuff, see fg_overlay.c
*/
FGAPI void FGAPIENTRY glutEstablishOverlay( void );
FGAPI void FGAPIENTRY glutRemoveOverlay( void );
FGAPI void FGAPIENTRY glutUseLayer( GLenum layer );
FGAPI void FGAPIENTRY glutPostOverlayRedisplay( void );
FGAPI void FGAPIENTRY glutPostWindowOverlayRedisplay( int window );
FGAPI void FGAPIENTRY glutShowOverlay( void );
FGAPI void FGAPIENTRY glutHideOverlay( void );
/*
* Menu stuff, see fg_menu.c
*/
FGAPI int FGAPIENTRY glutCreateMenu( void (* callback)( int menu ) );
FGAPI void FGAPIENTRY glutDestroyMenu( int menu );
FGAPI int FGAPIENTRY glutGetMenu( void );
FGAPI void FGAPIENTRY glutSetMenu( int menu );
FGAPI void FGAPIENTRY glutAddMenuEntry( const char* label, int value );
FGAPI void FGAPIENTRY glutAddSubMenu( const char* label, int subMenu );
FGAPI void FGAPIENTRY glutChangeToMenuEntry( int item, const char* label, int value );
FGAPI void FGAPIENTRY glutChangeToSubMenu( int item, const char* label, int subMenu );
FGAPI void FGAPIENTRY glutRemoveMenuItem( int item );
FGAPI void FGAPIENTRY glutAttachMenu( int button );
FGAPI void FGAPIENTRY glutDetachMenu( int button );
/*
* Global callback functions, see fg_callbacks.c
*/
FGAPI void FGAPIENTRY glutTimerFunc( unsigned int time, void (* callback)( int ), int value );
FGAPI void FGAPIENTRY glutIdleFunc( void (* callback)( void ) );
/*
* Window-specific callback functions, see fg_callbacks.c
*/
FGAPI void FGAPIENTRY glutKeyboardFunc( void (* callback)( unsigned char, int, int ) );
FGAPI void FGAPIENTRY glutSpecialFunc( void (* callback)( int, int, int ) );
FGAPI void FGAPIENTRY glutReshapeFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutVisibilityFunc( void (* callback)( int ) );
FGAPI void FGAPIENTRY glutDisplayFunc( void (* callback)( void ) );
FGAPI void FGAPIENTRY glutMouseFunc( void (* callback)( int, int, int, int ) );
FGAPI void FGAPIENTRY glutMotionFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutPassiveMotionFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutEntryFunc( void (* callback)( int ) );
FGAPI void FGAPIENTRY glutKeyboardUpFunc( void (* callback)( unsigned char, int, int ) );
FGAPI void FGAPIENTRY glutSpecialUpFunc( void (* callback)( int, int, int ) );
FGAPI void FGAPIENTRY glutJoystickFunc( void (* callback)( unsigned int, int, int, int ), int pollInterval );
FGAPI void FGAPIENTRY glutMenuStateFunc( void (* callback)( int ) );
FGAPI void FGAPIENTRY glutMenuStatusFunc( void (* callback)( int, int, int ) );
FGAPI void FGAPIENTRY glutOverlayDisplayFunc( void (* callback)( void ) );
FGAPI void FGAPIENTRY glutWindowStatusFunc( void (* callback)( int ) );
FGAPI void FGAPIENTRY glutSpaceballMotionFunc( void (* callback)( int, int, int ) );
FGAPI void FGAPIENTRY glutSpaceballRotateFunc( void (* callback)( int, int, int ) );
FGAPI void FGAPIENTRY glutSpaceballButtonFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutButtonBoxFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutDialsFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutTabletMotionFunc( void (* callback)( int, int ) );
FGAPI void FGAPIENTRY glutTabletButtonFunc( void (* callback)( int, int, int, int ) );
/*
* State setting and retrieval functions, see fg_state.c
*/
FGAPI int FGAPIENTRY glutGet( GLenum query );
FGAPI int FGAPIENTRY glutDeviceGet( GLenum query );
FGAPI int FGAPIENTRY glutGetModifiers( void );
FGAPI int FGAPIENTRY glutLayerGet( GLenum query );
/*
* Font stuff, see fg_font.c
*/
FGAPI void FGAPIENTRY glutBitmapCharacter( void* font, int character );
FGAPI int FGAPIENTRY glutBitmapWidth( void* font, int character );
FGAPI void FGAPIENTRY glutStrokeCharacter( void* font, int character );
FGAPI int FGAPIENTRY glutStrokeWidth( void* font, int character );
FGAPI GLfloat FGAPIENTRY glutStrokeWidthf( void* font, int character ); /* GLUT 3.8 */
FGAPI int FGAPIENTRY glutBitmapLength( void* font, const unsigned char* string );
FGAPI int FGAPIENTRY glutStrokeLength( void* font, const unsigned char* string );
FGAPI GLfloat FGAPIENTRY glutStrokeLengthf( void* font, const unsigned char *string ); /* GLUT 3.8 */
/*
* Geometry functions, see fg_geometry.c
*/
FGAPI void FGAPIENTRY glutWireCube( double size );
FGAPI void FGAPIENTRY glutSolidCube( double size );
FGAPI void FGAPIENTRY glutWireSphere( double radius, GLint slices, GLint stacks );
FGAPI void FGAPIENTRY glutSolidSphere( double radius, GLint slices, GLint stacks );
FGAPI void FGAPIENTRY glutWireCone( double base, double height, GLint slices, GLint stacks );
FGAPI void FGAPIENTRY glutSolidCone( double base, double height, GLint slices, GLint stacks );
FGAPI void FGAPIENTRY glutWireTorus( double innerRadius, double outerRadius, GLint sides, GLint rings );
FGAPI void FGAPIENTRY glutSolidTorus( double innerRadius, double outerRadius, GLint sides, GLint rings );
FGAPI void FGAPIENTRY glutWireDodecahedron( void );
FGAPI void FGAPIENTRY glutSolidDodecahedron( void );
FGAPI void FGAPIENTRY glutWireOctahedron( void );
FGAPI void FGAPIENTRY glutSolidOctahedron( void );
FGAPI void FGAPIENTRY glutWireTetrahedron( void );
FGAPI void FGAPIENTRY glutSolidTetrahedron( void );
FGAPI void FGAPIENTRY glutWireIcosahedron( void );
FGAPI void FGAPIENTRY glutSolidIcosahedron( void );
/*
* Teapot rendering functions, found in fg_teapot.c
* NB: front facing polygons have clockwise winding, not counter clockwise
*/
FGAPI void FGAPIENTRY glutWireTeapot( double size );
FGAPI void FGAPIENTRY glutSolidTeapot( double size );
/*
* Game mode functions, see fg_gamemode.c
*/
FGAPI void FGAPIENTRY glutGameModeString( const char* string );
FGAPI int FGAPIENTRY glutEnterGameMode( void );
FGAPI void FGAPIENTRY glutLeaveGameMode( void );
FGAPI int FGAPIENTRY glutGameModeGet( GLenum query );
/*
* Video resize functions, see fg_videoresize.c
*/
FGAPI int FGAPIENTRY glutVideoResizeGet( GLenum query );
FGAPI void FGAPIENTRY glutSetupVideoResizing( void );
FGAPI void FGAPIENTRY glutStopVideoResizing( void );
FGAPI void FGAPIENTRY glutVideoResize( int x, int y, int width, int height );
FGAPI void FGAPIENTRY glutVideoPan( int x, int y, int width, int height );
/*
* Colormap functions, see fg_misc.c
*/
FGAPI void FGAPIENTRY glutSetColor( int color, GLfloat red, GLfloat green, GLfloat blue );
FGAPI GLfloat FGAPIENTRY glutGetColor( int color, int component );
FGAPI void FGAPIENTRY glutCopyColormap( int window );
/*
* Misc keyboard and joystick functions, see fg_misc.c
*/
FGAPI void FGAPIENTRY glutIgnoreKeyRepeat( int ignore );
FGAPI void FGAPIENTRY glutSetKeyRepeat( int repeatMode );
FGAPI void FGAPIENTRY glutForceJoystickFunc( void );
/*
* Misc functions, see fg_misc.c
*/
FGAPI int FGAPIENTRY glutExtensionSupported( const char* extension );
FGAPI void FGAPIENTRY glutReportErrors( void );
/* Comment from glut.h of classic GLUT:
Win32 has an annoying issue where there are multiple C run-time
libraries (CRTs). If the executable is linked with a different CRT
from the GLUT DLL, the GLUT DLL will not share the same CRT static
data seen by the executable. In particular, atexit callbacks registered
in the executable will not be called if GLUT calls its (different)
exit routine). GLUT is typically built with the
"/MD" option (the CRT with multithreading DLL support), but the Visual
C++ linker default is "/ML" (the single threaded CRT).
One workaround to this issue is requiring users to always link with
the same CRT as GLUT is compiled with. That requires users supply a
non-standard option. GLUT 3.7 has its own built-in workaround where
the executable's "exit" function pointer is covertly passed to GLUT.
GLUT then calls the executable's exit function pointer to ensure that
any "atexit" calls registered by the application are called if GLUT
needs to exit.
Note that the __glut*WithExit routines should NEVER be called directly.
To avoid the atexit workaround, #define GLUT_DISABLE_ATEXIT_HACK. */
/* to get the prototype for exit() */
#include <stdlib.h>
#if defined(_WIN32) && !defined(GLUT_DISABLE_ATEXIT_HACK) && !defined(__WATCOMC__)
FGAPI void FGAPIENTRY __glutInitWithExit(int *argcp, char **argv, void (__cdecl *exitfunc)(int));
FGAPI int FGAPIENTRY __glutCreateWindowWithExit(const char *title, void (__cdecl *exitfunc)(int));
FGAPI int FGAPIENTRY __glutCreateMenuWithExit(void (* func)(int), void (__cdecl *exitfunc)(int));
#ifndef FREEGLUT_BUILDING_LIB
#if defined(__GNUC__)
#define FGUNUSED __attribute__((unused))
#else
#define FGUNUSED
#endif
static void FGAPIENTRY FGUNUSED glutInit_ATEXIT_HACK(int *argcp, char **argv) { __glutInitWithExit(argcp, argv, exit); }
#define glutInit glutInit_ATEXIT_HACK
static int FGAPIENTRY FGUNUSED glutCreateWindow_ATEXIT_HACK(const char *title) { return __glutCreateWindowWithExit(title, exit); }
#define glutCreateWindow glutCreateWindow_ATEXIT_HACK
static int FGAPIENTRY FGUNUSED glutCreateMenu_ATEXIT_HACK(void (* func)(int)) { return __glutCreateMenuWithExit(func, exit); }
#define glutCreateMenu glutCreateMenu_ATEXIT_HACK
#endif
#endif
#ifdef __cplusplus
}
#endif
/*** END OF FILE ***/
#endif /* __FREEGLUT_STD_H__ */
|
export class PrefetchLink {
constructor() {
this.prefetchedLinks = {};
}
addEventListener(dom) {
dom.addEventListener('mouseenter', (e) => {
const target = e.currentTarget || e.target || e.delegateTarget;
      // skip in-page anchors and javascript: links
if (target.href.indexOf('#') !== -1 || target.href.indexOf('javascript:') !== -1) {
return;
}
      // skip links that open in a new tab
if (target.target === 'blank' || target.target === '_blank') {
return;
}
      // skip links that were already prefetched
if (this.prefetchedLinks[target.href] === true) {
return;
}
      // mark as loaded and create the <link rel="prefetch"> tag
this.prefetchedLinks[target.href] = true;
const tag = document.createElement('link');
tag.rel = 'prefetch';
tag.href = target.href;
document.head.appendChild(tag);
});
}
}
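// Usage sketch (selector is illustrative):
//   const prefetcher = new PrefetchLink();
//   document.querySelectorAll('a[href]').forEach((a) => prefetcher.addEventListener(a));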
|
"""
Module for loading cross-correlation features.
"""
from __future__ import absolute_import
import re
import pandas as pd
from . import fileutils
from . import dataset
channel_pattern = re.compile(r'(?:[a-zA-Z0-9]*_)*(c[0-9]*|[A-Z]*_[0-9]*)$')
def convert_channel_name(name):
"""
Converts channel names to more compact versions, especially useful for the dogs since their channels have
really long names.
:param name: The name to convert.
:return: A shortened version of the name if possible, otherwise the original name.
"""
    match = re.match(channel_pattern, name)
    if match:
        # the pattern has a single capturing group holding the short name
        return match.group(1)
    else:
        return name
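# Example (illustrative): convert_channel_name("Dog_1_segment_c042") -> "c042",
# while names that do not match the pattern are returned unchanged.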
def old_load_and_pivot(dataframe):
"""Old version of load and pivot which uses the old, redundant version where channel_i and channel_j are columns"""
channel_i = dataframe['channel_i'].map(convert_channel_name)
channel_j = dataframe['channel_j'].map(convert_channel_name)
dataframe['channels'] = channel_i.str.cat(channel_j, sep=":")
dataframe.drop(['channel_i', 'channel_j', 'end_sample', 't_offset'], axis=1, inplace=True)
max_corrs = dataframe.groupby(['channels', 'start_sample'], as_index=False).max()
pivoted = max_corrs.pivot('start_sample', 'channels', 'correlation')
return pivoted
def new_load_and_pivot(dataframe):
"""New version which assumes the columns where the channel pairs are already columns"""
dataframe.drop(['end_sample', 't_offset'], axis=1, inplace=True)
max_corrs = dataframe.groupby('start_sample').max()
return max_corrs
def load_and_pivot(filename, frame_length=1, sliding_frames=True):
"""
Loads the cross correlation features from the given filename.
:param filename: The filename to load features from.
:param frame_length: The desired frame length in windows to use.
:param sliding_frames: If True, the data will be extended by using sliding frames of the feature windows.
:return: A DataFrame with the loaded features.
"""
with open(filename) as fp:
dataframe = pd.read_csv(fp, sep="\t")
    # Figure out if this file contains the old or new format
if 'channel_i' in dataframe.columns:
pivoted = old_load_and_pivot(dataframe)
else:
pivoted = new_load_and_pivot(dataframe)
if frame_length == 1:
return pivoted
else:
if sliding_frames:
return dataset.create_sliding_frames(pivoted, frame_length=frame_length)
else:
return dataset.reshape_frames(pivoted, frame_length=frame_length)
def load_data_frames(feature_folder,
**kwargs):
"""
Loads the DataFrames for the feature files in *feature_folder*.
:param feature_folder: The folder containing the feature files.
:param kwargs: keyword arguments to the load function
:return: A triplet of DataFrames (interictal, preictal, test)
"""
return dataset.load_data_frames(feature_folder,
load_function=load_and_pivot,
find_features_function=fileutils.find_feature_files,
**kwargs)
|
// flow-typed signature: 5cc118bf54f582e12c263a7f5411b691
// flow-typed version: <<STUB>>/make-error_v^1.0.4/flow_v0.37.0
/**
* This is an autogenerated libdef stub for:
*
* 'make-error'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
declare module 'make-error' {
declare module.exports: any;
}
/**
* We include stubs for each file inside this npm package in case you need to
* require those files directly. Feel free to delete any files that aren't
* needed.
*/
// Filename aliases
declare module 'make-error/index' {
declare module.exports: $Exports<'make-error'>;
}
declare module 'make-error/index.js' {
declare module.exports: $Exports<'make-error'>;
}
|
import argparse
import json
import os
from detectron2.config import get_cfg
from detectron2.data.datasets import register_coco_instances
from cvat.api import CVATAPI
from training.train_cvat import strip_annotation
from evaluate.performance import evaluate_on_dataset
def remap_annotation(coco_json, remap_map):
for ann in coco_json["annotations"]:
ann["category_id"] = remap_map[ann["category_id"]]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--job_id", action="append")
parser.add_argument("--config", required=True)
parser.add_argument("--cvat_host", default="http://localhost:8080")
parser.add_argument("--cvat_username")
parser.add_argument("--cvat_password")
parser.add_argument("--output_dir", default="./eval_output")
parser.add_argument("--cvat_base")
parser.add_argument("--weights")
parser.add_argument("--mapping", help="[old_id, new_id]")
args = parser.parse_args()
cfg = get_cfg()
cfg.merge_from_file(args.config)
api = CVATAPI(args.cvat_host)
api.login(args.cvat_username, args.cvat_password)
eval_tasks = []
for job_id in args.job_id:
job = api.get_job(job_id).json()
task_id = job["task_id"]
data = api.export_data(task_id).json()
if args.mapping is not None:
mapping = json.loads(args.mapping)
mapping = {mapping[0]: mapping[1]}
remap_annotation(data, mapping)
coco_json = f"datasets/eval_cvat_{task_id}.coco.json"
with open(coco_json, "w") as f:
json.dump(data, f)
eval_task = f"cvat/eval_{task_id}"
register_coco_instances(eval_task, {}, coco_json, args.cvat_base)
eval_tasks.append(eval_task)
    os.makedirs(args.output_dir, exist_ok=True)
    evaluate_on_dataset(args.config, ['MODEL.WEIGHTS', args.weights], eval_tasks)
|
import { combineReducers } from 'redux';
import loginReducer from './loginReducer';
import tokenReducer from './tokenReducer';
export default combineReducers({
login: loginReducer,
tokens: tokenReducer
});
|
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2017-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import unittest # NOQA
from edb.testbase import server as tb
class TestEdgeQLLinkproperties(tb.QueryTestCase):
'''The scope is to test link properties.'''
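    # Note: in EdgeQL, `@name` inside a shape selects a property of the link
    # itself rather than of the target object (e.g. `@count` on User.deck),
    # which is what the queries below exercise.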
SCHEMA = os.path.join(os.path.dirname(__file__), 'schemas',
'cards.esdl')
SETUP = os.path.join(os.path.dirname(__file__), 'schemas',
'cards_setup.edgeql')
async def test_edgeql_props_basic_01(self):
await self.assert_query_result(
r'''
WITH MODULE test
SELECT User {
name,
deck: {
name,
element,
cost,
@count
} ORDER BY @count DESC THEN .name ASC
} ORDER BY .name;
''',
[
{
'name': 'Alice',
'deck': [
{
'cost': 2,
'name': 'Bog monster',
'@count': 3,
'element': 'Water'
},
{
'cost': 3,
'name': 'Giant turtle',
'@count': 3,
'element': 'Water'
},
{
'cost': 5,
'name': 'Dragon',
'@count': 2,
'element': 'Fire'
},
{
'cost': 1,
'name': 'Imp',
'@count': 2,
'element': 'Fire'
},
],
},
{
'name': 'Bob',
'deck': [
{
'cost': 2,
'name': 'Bog monster',
'@count': 3,
'element': 'Water'
},
{
'cost': 1,
'name': 'Dwarf',
'@count': 3,
'element': 'Earth'
},
{
'cost': 3,
'name': 'Giant turtle',
'@count': 3,
'element': 'Water'
},
{
'cost': 3,
'name': 'Golem',
'@count': 3,
'element': 'Earth'
},
],
},
{
'name': 'Carol',
'deck': [
{
'cost': 1,
'name': 'Dwarf',
'@count': 4,
'element': 'Earth'
},
{
'cost': 1,
'name': 'Sprite',
'@count': 4,
'element': 'Air'
},
{
'cost': 2,
'name': 'Bog monster',
'@count': 3,
'element': 'Water'
},
{
'cost': 2,
'name': 'Giant eagle',
'@count': 3,
'element': 'Air'
},
{
'cost': 3,
'name': 'Giant turtle',
'@count': 2,
'element': 'Water'
},
{
'cost': 3,
'name': 'Golem',
'@count': 2,
'element': 'Earth'
},
{
'cost': 4,
'name': 'Djinn',
'@count': 1,
'element': 'Air'
},
],
},
{
'name': 'Dave',
'deck': [
{
'cost': 1,
'name': 'Sprite',
'@count': 4,
'element': 'Air'
},
{
'cost': 2,
'name': 'Bog monster',
'@count': 1,
'element': 'Water'
},
{
'cost': 4,
'name': 'Djinn',
'@count': 1,
'element': 'Air'
},
{
'cost': 5,
'name': 'Dragon',
'@count': 1,
'element': 'Fire'
},
{
'cost': 2,
'name': 'Giant eagle',
'@count': 1,
'element': 'Air'
},
{
'cost': 3,
'name': 'Giant turtle',
'@count': 1,
'element': 'Water'
},
{
'cost': 3,
'name': 'Golem',
'@count': 1,
'element': 'Earth'
},
],
}
]
)
async def test_edgeql_props_basic_02(self):
await self.assert_query_result(
r'''
# get users and only cards that have the same count and
# cost in the decks
WITH MODULE test
SELECT User {
name,
deck: {
name,
element,
cost,
@count
} FILTER .cost = @count
ORDER BY @count DESC THEN .name ASC
} ORDER BY .name;
''',
[
{
'name': 'Alice',
'deck': [
{
'cost': 3,
'name': 'Giant turtle',
'@count': 3,
'element': 'Water'
},
],
},
{
'name': 'Bob',
'deck': [
{
'cost': 3,
'name': 'Giant turtle',
'@count': 3,
'element': 'Water'
},
{
'cost': 3,
'name': 'Golem',
'@count': 3,
'element': 'Earth'
},
],
},
{
'name': 'Carol',
'deck': [],
},
{
'name': 'Dave',
'deck': [],
}
]
)
async def test_edgeql_props_basic_03(self):
await self.assert_query_result(
r'''
# get only users who have the same count and cost in the decks
WITH MODULE test
SELECT User {
name,
deck: {
name,
element,
cost,
@count
} ORDER BY @count DESC THEN .name ASC
} FILTER .deck.cost = .deck@count
ORDER BY .name;
''',
[
{
'name': 'Alice',
'deck': [
{
'cost': 2,
'name': 'Bog monster',
'@count': 3,
'element': 'Water'
},
{
'cost': 3,
'name': 'Giant turtle',
'@count': 3,
'element': 'Water'
},
{
'cost': 5,
'name': 'Dragon',
'@count': 2,
'element': 'Fire'
},
{
'cost': 1,
'name': 'Imp',
'@count': 2,
'element': 'Fire'
},
],
},
{
'name': 'Bob',
'deck': [
{
'cost': 2,
'name': 'Bog monster',
'@count': 3,
'element': 'Water'
},
{
'cost': 1,
'name': 'Dwarf',
'@count': 3,
'element': 'Earth'
},
{
'cost': 3,
'name': 'Giant turtle',
'@count': 3,
'element': 'Water'
},
{
'cost': 3,
'name': 'Golem',
'@count': 3,
'element': 'Earth'
},
],
},
]
)
async def test_edgeql_props_basic_04(self):
await self.assert_query_result(
r'''
# get all cards whose cost equals their count
# in at least one deck
WITH MODULE test
SELECT Card {
name,
element,
cost
}
FILTER
.cost = .<deck@count
ORDER BY .name;
''',
[
{
'cost': 3,
'name': 'Giant turtle',
'element': 'Water'
},
{
'cost': 3,
'name': 'Golem',
'element': 'Earth'
},
]
)
async def test_edgeql_props_basic_05(self):
await self.assert_query_result(
r'''
# get all the friends of Alice and their nicknames
WITH MODULE test
SELECT User {
name,
friends: {
name,
@nickname,
} ORDER BY .name,
}
FILTER .name = 'Alice';
''',
[
{
'name': 'Alice',
'friends': [
{'name': 'Bob', '@nickname': 'Swampy'},
{'name': 'Carol', '@nickname': 'Firefighter'},
{'name': 'Dave', '@nickname': 'Grumpy'},
]
}
]
)
async def test_edgeql_props_cross_01(self):
await self.assert_query_result(
r'''
# get cards that have the same count in some deck as their cost
WITH MODULE test
SELECT Card {
name,
}
FILTER .cost = .<deck@count
ORDER BY .name;
''',
[
{'name': 'Giant turtle'},
{'name': 'Golem'},
]
)
async def test_edgeql_props_cross_02(self):
await self.assert_query_result(
r'''
# get cards that have the same count in some deck as their cost
WITH MODULE test
SELECT Card {
name,
same := EXISTS (
SELECT User
FILTER
Card.cost = User.deck@count AND
Card = User.deck
)
}
ORDER BY .name;
''',
[
{'name': 'Bog monster', 'same': False},
{'name': 'Djinn', 'same': False},
{'name': 'Dragon', 'same': False},
{'name': 'Dwarf', 'same': False},
{'name': 'Giant eagle', 'same': False},
{'name': 'Giant turtle', 'same': True},
{'name': 'Golem', 'same': True},
{'name': 'Imp', 'same': False},
{'name': 'Sprite', 'same': False},
]
)
async def test_edgeql_props_cross_03(self):
await self.assert_query_result(
r'''
# get cards that have the same count in some deck as their cost
WITH MODULE test
SELECT Card {
name,
same := EXISTS (
SELECT
User
FILTER
Card.cost = User.deck@count AND
Card = User.deck
)
}
ORDER BY .name;
''',
[
{'name': 'Bog monster', 'same': False},
{'name': 'Djinn', 'same': False},
{'name': 'Dragon', 'same': False},
{'name': 'Dwarf', 'same': False},
{'name': 'Giant eagle', 'same': False},
{'name': 'Giant turtle', 'same': True},
{'name': 'Golem', 'same': True},
{'name': 'Imp', 'same': False},
{'name': 'Sprite', 'same': False},
]
)
async def test_edgeql_props_cross_04(self):
await self.assert_query_result(
r'''
# get cards that have the same count in some deck as their cost
WITH MODULE test
SELECT Card {
name,
same := (
SELECT _ := Card.cost = Card.<deck@count
ORDER BY _ DESC LIMIT 1
)
}
ORDER BY .name;
''',
[
{'name': 'Bog monster', 'same': False},
{'name': 'Djinn', 'same': False},
{'name': 'Dragon', 'same': False},
{'name': 'Dwarf', 'same': False},
{'name': 'Giant eagle', 'same': False},
{'name': 'Giant turtle', 'same': True},
{'name': 'Golem', 'same': True},
{'name': 'Imp', 'same': False},
{'name': 'Sprite', 'same': False},
]
)
async def test_edgeql_props_implication_01(self):
await self.assert_query_result(
r'''
# count of 1 in at least some deck implies 'Fire'
WITH MODULE test
SELECT Card {
name,
element,
count := (SELECT _ := Card.<deck@count ORDER BY _),
expr := (
SELECT _ := NOT EXISTS (SELECT Card
FILTER Card.<deck@count = 1) OR
Card.element = 'Fire'
ORDER BY _ DESC LIMIT 1
)
}
ORDER BY .name;
''',
[
{
'expr': False,
'name': 'Bog monster',
'count': [1, 3, 3, 3],
'element': 'Water',
},
{
'expr': False,
'name': 'Djinn',
'count': [1, 1],
'element': 'Air',
},
{
'expr': True,
'name': 'Dragon',
'count': [1, 2],
'element': 'Fire',
},
{
'expr': True,
'name': 'Dwarf',
'count': [3, 4],
'element': 'Earth',
},
{
'expr': False,
'name': 'Giant eagle',
'count': [1, 3],
'element': 'Air',
},
{
'expr': False,
'name': 'Giant turtle',
'count': [1, 2, 3, 3],
'element': 'Water',
},
{
'expr': False,
'name': 'Golem',
'count': [1, 2, 3],
'element': 'Earth',
},
{
'expr': True,
'name': 'Imp',
'count': [2],
'element': 'Fire',
},
{
'expr': True,
'name': 'Sprite',
'count': [4, 4],
'element': 'Air',
},
]
)
async def test_edgeql_props_implication_02(self):
await self.assert_query_result(
r'''
# FILTER by NOT (count of 1 implies 'Fire')
# in at least some deck
WITH MODULE test
SELECT Card {
name,
}
FILTER NOT (NOT .<deck@count = 1 OR .element = 'Fire')
ORDER BY .name;
''',
[
# all of these have count of 1 in some deck and are not 'Fire'
{'name': 'Bog monster'},
{'name': 'Djinn'},
{'name': 'Giant eagle'},
{'name': 'Giant turtle'},
{'name': 'Golem'},
]
)
async def test_edgeql_props_implication_03(self):
await self.assert_query_result(
r'''
# same as above, refactored
WITH MODULE test
SELECT Card {
name,
}
FILTER .<deck@count = 1 AND .element != 'Fire'
ORDER BY .name;
''',
[
# all of these have count of 1 and are not 'Fire' in some deck
{'name': 'Bog monster'},
{'name': 'Djinn'},
{'name': 'Giant eagle'},
{'name': 'Giant turtle'},
{'name': 'Golem'},
]
)
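# The two tests above are equivalent queries: by De Morgan's laws,
# NOT (NOT P OR Q) simplifies to P AND NOT Q, with
# P = (.<deck@count = 1) and Q = (.element = 'Fire').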
async def test_edgeql_props_implication_04(self):
await self.assert_query_result(
r'''
# count of 1 implies 'Fire' in the deck of Dave
WITH MODULE test
SELECT User {
name,
deck: {
name,
element,
@count,
expr :=
NOT User.deck@count = 1 OR
User.deck.element = 'Fire'
}
}
FILTER .name = 'Dave';
''',
[
{
'name': 'Dave',
'deck': [
{
'name': 'Dragon',
'expr': True,
'@count': 1,
'element': 'Fire',
},
{
'name': 'Bog monster',
'expr': False,
'@count': 1,
'element': 'Water',
},
{
'name': 'Giant turtle',
'expr': False,
'@count': 1,
'element': 'Water',
},
{
'name': 'Golem',
'expr': False,
'@count': 1,
'element': 'Earth',
},
{
'name': 'Sprite',
'expr': True,
'@count': 4,
'element': 'Air',
},
{
'name': 'Giant eagle',
'expr': False,
'@count': 1,
'element': 'Air',
},
{
'name': 'Djinn',
'expr': False,
'@count': 1,
'element': 'Air',
},
],
}
]
)
async def test_edgeql_props_setops_01(self):
await self.assert_query_result(
r'''
WITH MODULE test
SELECT DISTINCT User.deck@count;
''',
{1, 2, 3, 4},
)
await self.assert_query_result(
r'''
WITH MODULE test
SELECT DISTINCT (
SELECT User.deck@count FILTER User.deck.element = 'Fire'
);
''',
{1, 2},
)
await self.assert_query_result(
r'''
WITH MODULE test
SELECT DISTINCT (
SELECT User.deck@count FILTER User.deck.element = 'Water'
);
''',
{1, 2, 3},
)
await self.assert_query_result(
r'''
WITH MODULE test
SELECT DISTINCT (
SELECT (
SELECT Card FILTER Card.element = 'Water').<deck@count
);
''',
{1, 2, 3},
)
async def test_edgeql_props_setops_02(self):
await self.assert_query_result(
r'''
WITH
MODULE test,
C := (
SELECT User FILTER User.name = 'Carol').deck.name,
D := (
SELECT User FILTER User.name = 'Dave').deck.name
SELECT _ := C UNION D
ORDER BY _;
''',
[
'Bog monster',
'Bog monster',
'Djinn',
'Djinn',
'Dragon',
'Dwarf',
'Giant eagle',
'Giant eagle',
'Giant turtle',
'Giant turtle',
'Golem',
'Golem',
'Sprite',
'Sprite'
],
)
await self.assert_query_result(
r'''
WITH
MODULE test,
C := (
SELECT User FILTER User.name = 'Carol').deck.name,
D := (
SELECT User FILTER User.name = 'Dave').deck.name
SELECT _ := DISTINCT (C UNION D)
ORDER BY _;
''',
[
'Bog monster',
'Djinn',
'Dragon',
'Dwarf',
'Giant eagle',
'Giant turtle',
'Golem',
'Sprite'
],
)
async def test_edgeql_props_setops_03(self):
await self.assert_query_result(
r'''
WITH MODULE test
SELECT _ := {
# this is equivalent to UNION
User.name,
User.friends@nickname,
{'Foo', 'Bob'}
}
ORDER BY _;
''',
[
'Alice', 'Bob', 'Bob', 'Carol', 'Dave', 'Firefighter',
'Foo', 'Grumpy', 'Swampy'
],
)
await self.assert_query_result(
r'''
WITH MODULE test
SELECT _ := DISTINCT {
User.name,
User.friends@nickname,
{'Foo', 'Bob'}
}
ORDER BY _;
''',
[
'Alice', 'Bob', 'Carol', 'Dave', 'Firefighter', 'Foo',
'Grumpy', 'Swampy',
],
)
async def test_edgeql_props_setops_04(self):
await self.assert_query_result(
r'''
WITH
MODULE test,
A := (SELECT User FILTER User.name = 'Alice')
# the set of distinct values of card counts in
# the deck of Alice is {2, 3}
SELECT _ := (DISTINCT A.deck@count, A.name)
ORDER BY _;
''',
[
[2, 'Alice'],
[3, 'Alice'],
]
)
async def test_edgeql_props_setops_05(self):
await self.assert_query_result(
r'''
WITH
MODULE test
SELECT DISTINCT
(
SELECT User FILTER User.name = 'Alice'
).deck@count;
''',
{2, 3},
)
async def test_edgeql_props_computable_01(self):
await self.assert_query_result(
r'''
WITH MODULE test
SELECT User {
name,
my_deck := (SELECT Card { @foo := Card.name }
FILTER .name = 'Djinn')
}
FILTER User.name = 'Alice';
''',
[{
'name': 'Alice',
'my_deck': {
'@foo': 'Djinn'
}
}],
)
async def test_edgeql_props_computable_02(self):
await self.assert_query_result(
r'''
WITH
MODULE test,
MyUser := (
SELECT
User {
my_deck := (SELECT Card { @foo := Card.name }
FILTER .name = 'Djinn')
}
FILTER User.name = 'Alice'
)
SELECT MyUser {
name,
my_deck: {
@foo
}
};
''',
[{
'name': 'Alice',
'my_deck': {
'@foo': 'Djinn'
}
}],
)
async def test_edgeql_props_abbrev(self):
await self.assert_query_result(
r'''
WITH
MODULE test
SELECT User {
name,
my_deck := (SELECT .deck {
name,
num_cards := @count
} ORDER BY .name)
} FILTER .name = 'Alice';
''',
[{
'name': 'Alice',
'my_deck': [
{"name": "Bog monster", "num_cards": 3},
{"name": "Dragon", "num_cards": 2},
{"name": "Giant turtle", "num_cards": 3},
{"name": "Imp", "num_cards": 2},
],
}],
)
async def test_edgeql_props_agg_01(self):
await self.assert_query_result(
r'''
WITH MODULE test
SELECT sum(User.deck@count);
''',
[51],
)
await self.assert_query_result(
r'''
WITH MODULE test
SELECT _ := (sum(User.deck@count), User.name)
ORDER BY _;
''',
[
[10, 'Alice'], [10, 'Dave'], [12, 'Bob'], [19, 'Carol'],
],
)
async def test_edgeql_props_link_shadow_01(self):
await self.assert_query_result(
r'''
WITH MODULE test
SELECT User {
name,
deck := (SELECT x := User.deck.name
ORDER BY x ASC
LIMIT 2)
} ORDER BY .name;
''',
[
{"deck": ["Bog monster", "Dragon"], "name": "Alice"},
{"deck": ["Bog monster", "Dwarf"], "name": "Bob"},
{"deck": ["Bog monster", "Djinn"], "name": "Carol"},
{"deck": ["Bog monster", "Djinn"], "name": "Dave"}
]
)
async def test_edgeql_props_link_shadow_02(self):
await self.assert_query_result(
r'''
WITH
MODULE test,
AliasedUser := User {
name,
deck := (SELECT User.deck ORDER BY .name LIMIT 2)
}
SELECT
AliasedUser {
name,
deck: {
@count
}
}
ORDER BY .name;
''',
[
{"deck": [{"@count": 3}, {"@count": 2}], "name": "Alice"},
{"deck": [{"@count": 3}, {"@count": 3}], "name": "Bob"},
{"deck": [{"@count": 3}, {"@count": 1}], "name": "Carol"},
{"deck": [{"@count": 1}, {"@count": 1}], "name": "Dave"},
]
)
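# For reference, a minimal sketch of the link-property declarations these tests
# assume in cards.esdl (shapes inferred from the queries above, not copied from
# the actual schema file):
#
#     type User {
#         required property name -> str;
#         multi link deck -> Card {
#             property count -> int64;
#         }
#         multi link friends -> User {
#             property nickname -> str;
#         }
#     }
#
# Link properties are read with '@' (deck@count) and, through a backlink, with
# the '.<link@prop' form (Card.<deck@count).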
|
class linkedListNode:
def __init__(self, value, nextNode=None):
self.value = value
self.nextNode = nextNode
class linkedList:
def __init__(self, head=None):
self.head = head
def printList(self):
currentNode = self.head
while currentNode is not None:
print(currentNode.value, '->', end='')
currentNode = currentNode.nextNode
print('None')
def insertAtBeginning(self, value):
node = linkedListNode(value)
if self.head is None:
self.head = node
return
node.nextNode = self.head
self.head = node
def shiftLinkedList(self, k):
    # Rotate right k times; each rotation moves the tail node to the head.
    for _ in range(k):
        self.rotation()
def rotation(self):
    # Empty and single-node lists need no work; without this guard the
    # walk below would dereference prevNode while it is still None.
    if self.head is None or self.head.nextNode is None:
        return
    currentNode = self.head
    prevNode = None
    while currentNode.nextNode is not None:
        prevNode = currentNode
        currentNode = currentNode.nextNode
    # Detach the tail and reattach it in front of the current head.
    prevNode.nextNode = None
    currentNode.nextNode = self.head
    self.head = currentNode
if __name__ == '__main__':
nodeCreation = linkedList()
nodeCreation.insertAtBeginning('1')
nodeCreation.insertAtBeginning('2')
nodeCreation.insertAtBeginning('3')
nodeCreation.insertAtBeginning('4')
nodeCreation.insertAtBeginning('5')
nodeCreation.insertAtBeginning('6')
nodeCreation.printList()
nodeCreation.shiftLinkedList(2)
nodeCreation.printList()
nodeCreation.shiftLinkedList(0)
nodeCreation.printList()
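# The shift above is O(n*k) because every rotation walks the whole list. A
# single-pass O(n) alternative (an illustrative sketch reusing the classes
# above; shiftLinkedListFast is a hypothetical helper, not part of the
# original) finds the new tail once and relinks the list:
def shiftLinkedListFast(lst, k):
    # Count the nodes.
    length, node = 0, lst.head
    while node is not None:
        length, node = length + 1, node.nextNode
    if length == 0 or k % length == 0:
        return
    k = k % length
    # After a right-rotation by k, the node at 0-based index length-k-1 is the new tail.
    newTail = lst.head
    for _ in range(length - k - 1):
        newTail = newTail.nextNode
    oldHead, newHead = lst.head, newTail.nextNode
    newTail.nextNode = None
    # Walk to the old tail and attach the old head after it.
    node = newHead
    while node.nextNode is not None:
        node = node.nextNode
    node.nextNode = oldHead
    lst.head = newHead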
|
var searchData=
[
['enabledynamicobjects',['enableDynamicObjects',['../../../../../doc/html/classchai3d_1_1c_generic_tool.html#ac7f76357b7d878293d2d610808e9df29',1,'chai3d::cGenericTool']]],
['enableforces',['enableForces',['../../../../../doc/html/classchai3d_1_1c_delta_device.html#ab07c0e2688bd5c566eac77b7c8117971',1,'chai3d::cDeltaDevice']]],
['enablelightsourcerendering',['enableLightSourceRendering',['../../../../../doc/html/classchai3d_1_1c_world.html#abb41d51d2bf89eee6c8a1a215faa47c1',1,'chai3d::cWorld']]],
['enclose',['enclose',['../../../../../doc/html/structchai3d_1_1c_collision_a_a_b_b_box.html#a03298d8a870416037dfd832df732e64f',1,'chai3d::cCollisionAABBBox::enclose(const cCollisionAABBBox &a_boxA, const cCollisionAABBBox &a_boxB)'],['../../../../../doc/html/structchai3d_1_1c_collision_a_a_b_b_box.html#a486a668eb9faab81032f6a36252d028d',1,'chai3d::cCollisionAABBBox::enclose(const cVector3d &a_point)'],['../../../../../doc/html/structchai3d_1_1c_collision_a_a_b_b_box.html#a1e3c02917ddf6393e28237b39c2b7d07',1,'chai3d::cCollisionAABBBox::enclose(const cCollisionAABBBox &a_box)']]],
['end',['end',['../../../../../doc/html/classchai3d_1_1c_display_list.html#a59c78a7e964b1f1dfaf6b07eb67556fb',1,'chai3d::cDisplayList']]],
['equals',['equals',['../../../../../doc/html/structchai3d_1_1c_matrix3d.html#a6e570688808747e8c7bd5dc33aefb8ea',1,'chai3d::cMatrix3d::equals()'],['../../../../../doc/html/structchai3d_1_1c_vector3d.html#aa1fa640f44a8b2c6b0562be5beb20c28',1,'chai3d::cVector3d::equals()']]],
['erase',['erase',['../../../../../doc/html/classchai3d_1_1c_image.html#ad0df46aae2be9e57a925777c91e324db',1,'chai3d::cImage::erase()'],['../../../../../doc/html/classchai3d_1_1c_multi_image.html#aa41d184c06e1efbb83a5d0318d895209',1,'chai3d::cMultiImage::erase()'],['../../../../../doc/html/classchai3d_1_1c_video.html#a7d4d10c10acd1ea88c183cd315626b4a',1,'chai3d::cVideo::erase()']]],
['estimateangularvelocity',['estimateAngularVelocity',['../../../../../doc/html/classchai3d_1_1c_generic_haptic_device.html#a7401bb308eed519116ba6b19e7adae07',1,'chai3d::cGenericHapticDevice']]],
['estimategrippervelocity',['estimateGripperVelocity',['../../../../../doc/html/classchai3d_1_1c_generic_haptic_device.html#ac7de0441a1478ece0b82c86cbe72f2a5',1,'chai3d::cGenericHapticDevice']]],
['estimateinertia',['estimateInertia',['../classchai3d_1_1c_bullet_generic_object.html#ac34fcc773d500ba3d61d92e987848da0',1,'chai3d::cBulletGenericObject']]],
['estimatelinearvelocity',['estimateLinearVelocity',['../../../../../doc/html/classchai3d_1_1c_generic_haptic_device.html#a45aeb65232808b5c75c2b360f8ca3dfe',1,'chai3d::cGenericHapticDevice']]]
];
|
"""
@generated
cargo-raze generated Bazel file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") # buildifier: disable=load
def raze_fetch_remote_crates():
"""This function defines a collection of repos and should be called in a WORKSPACE file"""
maybe(
http_archive,
name = "raze__ahash__0_7_4",
url = "https://crates.io/api/v1/crates/ahash/0.7.4/download",
type = "tar.gz",
sha256 = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98",
strip_prefix = "ahash-0.7.4",
build_file = Label("//bazel/cargo/remote:BUILD.ahash-0.7.4.bazel"),
)
maybe(
http_archive,
name = "raze__autocfg__1_0_1",
url = "https://crates.io/api/v1/crates/autocfg/1.0.1/download",
type = "tar.gz",
sha256 = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a",
strip_prefix = "autocfg-1.0.1",
build_file = Label("//bazel/cargo/remote:BUILD.autocfg-1.0.1.bazel"),
)
maybe(
http_archive,
name = "raze__cfg_if__1_0_0",
url = "https://crates.io/api/v1/crates/cfg-if/1.0.0/download",
type = "tar.gz",
sha256 = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd",
strip_prefix = "cfg-if-1.0.0",
build_file = Label("//bazel/cargo/remote:BUILD.cfg-if-1.0.0.bazel"),
)
maybe(
http_archive,
name = "raze__chrono__0_4_19",
url = "https://crates.io/api/v1/crates/chrono/0.4.19/download",
type = "tar.gz",
sha256 = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73",
strip_prefix = "chrono-0.4.19",
build_file = Label("//bazel/cargo/remote:BUILD.chrono-0.4.19.bazel"),
)
maybe(
http_archive,
name = "raze__getrandom__0_2_3",
url = "https://crates.io/api/v1/crates/getrandom/0.2.3/download",
type = "tar.gz",
sha256 = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753",
strip_prefix = "getrandom-0.2.3",
build_file = Label("//bazel/cargo/remote:BUILD.getrandom-0.2.3.bazel"),
)
maybe(
http_archive,
name = "raze__hashbrown__0_11_2",
url = "https://crates.io/api/v1/crates/hashbrown/0.11.2/download",
type = "tar.gz",
sha256 = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e",
strip_prefix = "hashbrown-0.11.2",
build_file = Label("//bazel/cargo/remote:BUILD.hashbrown-0.11.2.bazel"),
)
maybe(
http_archive,
name = "raze__libc__0_2_97",
url = "https://crates.io/api/v1/crates/libc/0.2.97/download",
type = "tar.gz",
sha256 = "12b8adadd720df158f4d70dfe7ccc6adb0472d7c55ca83445f6a5ab3e36f8fb6",
strip_prefix = "libc-0.2.97",
build_file = Label("//bazel/cargo/remote:BUILD.libc-0.2.97.bazel"),
)
maybe(
http_archive,
name = "raze__log__0_4_14",
url = "https://crates.io/api/v1/crates/log/0.4.14/download",
type = "tar.gz",
sha256 = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710",
strip_prefix = "log-0.4.14",
build_file = Label("//bazel/cargo/remote:BUILD.log-0.4.14.bazel"),
)
maybe(
http_archive,
name = "raze__num_integer__0_1_44",
url = "https://crates.io/api/v1/crates/num-integer/0.1.44/download",
type = "tar.gz",
sha256 = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db",
strip_prefix = "num-integer-0.1.44",
build_file = Label("//bazel/cargo/remote:BUILD.num-integer-0.1.44.bazel"),
)
maybe(
http_archive,
name = "raze__num_traits__0_2_14",
url = "https://crates.io/api/v1/crates/num-traits/0.2.14/download",
type = "tar.gz",
sha256 = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290",
strip_prefix = "num-traits-0.2.14",
build_file = Label("//bazel/cargo/remote:BUILD.num-traits-0.2.14.bazel"),
)
maybe(
http_archive,
name = "raze__once_cell__1_8_0",
url = "https://crates.io/api/v1/crates/once_cell/1.8.0/download",
type = "tar.gz",
sha256 = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56",
strip_prefix = "once_cell-1.8.0",
build_file = Label("//bazel/cargo/remote:BUILD.once_cell-1.8.0.bazel"),
)
maybe(
http_archive,
name = "raze__time__0_1_43",
url = "https://crates.io/api/v1/crates/time/0.1.43/download",
type = "tar.gz",
sha256 = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438",
strip_prefix = "time-0.1.43",
build_file = Label("//bazel/cargo/remote:BUILD.time-0.1.43.bazel"),
)
maybe(
http_archive,
name = "raze__version_check__0_9_3",
url = "https://crates.io/api/v1/crates/version_check/0.9.3/download",
type = "tar.gz",
sha256 = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe",
strip_prefix = "version_check-0.9.3",
build_file = Label("//bazel/cargo/remote:BUILD.version_check-0.9.3.bazel"),
)
maybe(
http_archive,
name = "raze__wasi__0_10_2_wasi_snapshot_preview1",
url = "https://crates.io/api/v1/crates/wasi/0.10.2+wasi-snapshot-preview1/download",
type = "tar.gz",
sha256 = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6",
strip_prefix = "wasi-0.10.2+wasi-snapshot-preview1",
build_file = Label("//bazel/cargo/remote:BUILD.wasi-0.10.2+wasi-snapshot-preview1.bazel"),
)
maybe(
http_archive,
name = "raze__winapi__0_3_9",
url = "https://crates.io/api/v1/crates/winapi/0.3.9/download",
type = "tar.gz",
sha256 = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419",
strip_prefix = "winapi-0.3.9",
build_file = Label("//bazel/cargo/remote:BUILD.winapi-0.3.9.bazel"),
)
maybe(
http_archive,
name = "raze__winapi_i686_pc_windows_gnu__0_4_0",
url = "https://crates.io/api/v1/crates/winapi-i686-pc-windows-gnu/0.4.0/download",
type = "tar.gz",
sha256 = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6",
strip_prefix = "winapi-i686-pc-windows-gnu-0.4.0",
build_file = Label("//bazel/cargo/remote:BUILD.winapi-i686-pc-windows-gnu-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__winapi_x86_64_pc_windows_gnu__0_4_0",
url = "https://crates.io/api/v1/crates/winapi-x86_64-pc-windows-gnu/0.4.0/download",
type = "tar.gz",
sha256 = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f",
strip_prefix = "winapi-x86_64-pc-windows-gnu-0.4.0",
build_file = Label("//bazel/cargo/remote:BUILD.winapi-x86_64-pc-windows-gnu-0.4.0.bazel"),
)
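# Consumption sketch (not part of the generated file): a WORKSPACE typically
# loads and calls the macro above, after which BUILD targets can depend on the
# fetched crates. The .bzl path below follows the usual cargo-raze convention
# and is an assumption, not taken from this repository:
#
#   load("//bazel/cargo:crates.bzl", "raze_fetch_remote_crates")
#   raze_fetch_remote_crates()
#
# A BUILD file could then reference, e.g., "@raze__log__0_4_14//:log".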
|
window.onload = function(){
var game = document.getElementById('demo1');
var c = game.getContext("2d");
//load images
var bird = document.createElement("img");
var Northpipe = document.createElement("img");
var Southpipe = document.createElement("img");
var bg = document.createElement("img");
var fg = document.createElement("img");
if(game.width>window.innerWidth){
game.width = window.innerWidth;
}
if(game.height>window.innerHeight){
game.height = window.innerHeight;
}
bird.src = "bird.png";
Northpipe.src = "pipeNorth.png";
Southpipe.src = "pipeSouth.png";
fg.src = "fg.png";
bg.src = "bg.png";
var gap = game.height*(13.8/100);
var constant = game.height*(51.72/100)+gap;
var bx = 0;
var g = 9.8;
var bX = 20+bx;
var bY = game.height/2;
document.onkeydown = function(e){
if(e.key=="ArrowUp"){
bY-=10;
}
if(e.key=="ArrowDown"){
bY+=2;
}
if(e.key=="Space"){
clearInterval(x);
}
}
var pipe = [];
pipe[0] = {
x:game.width,
y:0
};
let l = 1; // score counter, shown in the #demo element
//draw
bg.addEventListener('load',myfunc);
var x;
function myfunc(){
x = setInterval(function(){
c.drawImage(bg,0,0,game.width,game.height);
for(let i=0;i<pipe.length;i++){
c.drawImage(Northpipe,pipe[i].x,pipe[i].y);
c.drawImage(Southpipe,pipe[i].x,pipe[i].y+constant);
pipe[i].x -=20;
if(pipe[i].x<=bX && bX<=pipe[i].x+Northpipe.width){
if(bY-bird.height/2<=pipe[i].y+Northpipe.height||bY+bird.height/2>=pipe[i].y+constant){
location.reload();
}
document.getElementById('demo').innerHTML = l;
l++;
}
if(bY+bird.height>=game.height-fg.height){
location.reload();
}
if(pipe[i].x==game.width*(81.81/100)){
pipe.push({
x:game.width,
y:Math.floor(Math.random()*Northpipe.height)-Northpipe.height
});
}
}
c.drawImage(fg,0,game.height- fg.height,game.width,game.height);
c.drawImage(bird,bX,bY);
bY+=g;
// gravity applied above; collision checks run inside the pipe loop
},200)
}
}
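// Timing note: setInterval at 200 ms caps the game at roughly 5 FPS. A
// smoother, hedged variant would drive the same drawing callback with
// requestAnimationFrame instead, e.g.:
//
//   function loop() {
//       draw(); // the body of the setInterval callback above
//       rafId = requestAnimationFrame(loop);
//   }
//   var rafId = requestAnimationFrame(loop);
//   // pause with cancelAnimationFrame(rafId) instead of clearInterval(x)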
|
# Josephus problem
# sample input: "41 4" (n=41 people, every k=4th eliminated)
def f(n, k):
if n == 1:
return 1
return ((f(n-1, k)+k-1) % n) + 1
(n, k) = (int(x) for x in input().split())
survivor = f(n, k)
print(survivor)
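# Cross-check (sketch): the recurrence above is the standard 1-indexed Josephus
# solution. An iterative version avoids Python's recursion limit for large n
# and returns the same survivor:
def josephus_iterative(n, k):
    survivor = 1
    for m in range(2, n + 1):
        survivor = (survivor + k - 1) % m + 1
    return survivor
assert josephus_iterative(n, k) == survivor  # both recurrences agree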
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
/**
* Base class for endorsement handling
* @class
*/
class EndorsementHandler {
/**
* @typedef {Object} EndorsementHandlerParameters
* @property {Object} request - {@link ChaincodeInvokeRequest}
* @property {Object} signed_proposal - the encoded protobuf "SignedProposal"
* created by the sendTransactionProposal method before calling the
* handler. Will be the object to be endorsed by the target peers.
* @property {Number} timeout - the timeout setting passed on sendTransactionProposal
* method.
*/
/**
* This method will process the request object to calculate the target peers.
* Once the targets have been determined, the channel is used to send the
* endorsement proposal to all of them. The results are analyzed to see
* whether enough successful endorsements have been received.
*
* @param {EndorsementHandlerParameters} params - A {@link EndorsementHandlerParameters}
* that contains enough information to determine the targets and contains
* a {@link ChaincodeInvokeRequest} to be sent using the included channel
* with the {@link Channel} 'sendTransactionProposal' method.
* @returns {Promise} A Promise for the {@link ProposalResponseObject}, the
* same results as calling the {@link Channel#sendTransactionProposal}
* method directly.
*/
endorse(params) {
if (params) {
throw new Error('The "endorse" method must be implemented');
}
throw new Error('The "endorse" method must be implemented');
}
/**
* This method will be called by the channel when the channel is initialized.
*/
initialize() {
throw new Error('The "initialize" method must be implemented');
}
/**
* This static method will be called by the channel to create an instance of
* this handler. It will be passed the channel object this handler is working
* with.
*/
static create(channel) {
if (channel) {
throw new Error('The "create" method must be implemented');
}
throw new Error('The "create" method must be implemented');
}
}
module.exports = EndorsementHandler;
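/*
 * Concrete-handler sketch (illustrative only; this class and its `_channel`
 * field are hypothetical, not part of the SDK). It relies only on the
 * Channel#sendTransactionProposal method referenced in the docs above:
 */
class PassThroughEndorsementHandler extends EndorsementHandler {
    static create(channel) {
        const handler = new PassThroughEndorsementHandler();
        handler._channel = channel;
        return handler;
    }
    initialize() {
        // nothing to pre-compute in this sketch
    }
    endorse(params) {
        // Delegate straight to the channel with the caller's request and timeout.
        return this._channel.sendTransactionProposal(params.request, params.timeout);
    }
}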
|
/* Kakao map integration API (Daum postcode search) */
function sample4_execDaumPostcode() {
new daum.Postcode({
oncomplete: function(data) {
// Code that runs when the user clicks a search result in the popup.
// Display the address according to the road-name address display rules.
// Fields that come back empty hold '' (empty string), so branch on that.
var roadAddr = data.roadAddress; // road-name address
var extraRoadAddr = ''; // extra reference info
// Append the legal dong (district) name when present (legal ri is excluded).
// Legal dong names end in "dong/ro/ga".
if(data.bname !== '' && /[동|로|가]$/g.test(data.bname)){
extraRoadAddr += data.bname;
}
// Append the building name for apartment complexes.
if(data.buildingName !== '' && data.apartment === 'Y'){
extraRoadAddr += (extraRoadAddr !== '' ? ', ' + data.buildingName : data.buildingName);
}
// If there is extra info to show, wrap it in parentheses for the final string.
if(extraRoadAddr !== ''){
extraRoadAddr = ' (' + extraRoadAddr + ')';
}
// Put the postcode and address into the corresponding fields; prefer the road
// address and fall back to the lot-number (jibun) address when it is empty.
document.getElementById('orderRecipientPostcodeInput').value = data.zonecode;
document.getElementById("orderRecipientAddress1Input").value = roadAddr !== '' ? roadAddr : data.jibunAddress;
var guideTextBox = document.getElementById("guide");
// When the user clicked "no selection", flag the address as a predicted one.
if(data.autoRoadAddress) {
var expRoadAddr = data.autoRoadAddress + extraRoadAddr;
guideTextBox.innerHTML = '(예상 도로명 주소 : ' + expRoadAddr + ')';
guideTextBox.style.display = 'block';
} else if(data.autoJibunAddress) {
var expJibunAddr = data.autoJibunAddress;
guideTextBox.innerHTML = '(예상 지번 주소 : ' + expJibunAddr + ')';
guideTextBox.style.display = 'block';
} else {
guideTextBox.innerHTML = '';
guideTextBox.style.display = 'none';
}
// Clear whatever was previously entered in address2 (the detailed address).
let detailedAddress = document.querySelector('#orderRecipientAddress2Input');
detailedAddress.setAttribute("value", "");
detailedAddress.setAttribute("placeholder", "상세주소를 입력해주세요.");
}
}
).open();
}
/* Clears any text already present in the shipping-info inputs */
let removeInputText = function () {
// clear the recipient fields
let removeOrderRecipientNameInput = document.querySelector('#orderRecipientNameInput');
removeOrderRecipientNameInput.setAttribute("value","");
let removeOrderRecipientPostcode = document.querySelector('#orderRecipientPostcodeInput');
removeOrderRecipientPostcode.setAttribute("value","");
let removeOrderRecipientAddress1 = document.querySelector('#orderRecipientAddress1Input');
removeOrderRecipientAddress1.setAttribute("value","");
let removeOrderRecipientAddress2 = document.querySelector('#orderRecipientAddress2Input');
removeOrderRecipientAddress2.setAttribute("value","");
let removeOrderRecipientPhoneInput = document.querySelector('#orderRecipientPhoneInput');
removeOrderRecipientPhoneInput.setAttribute("value","");
};
/* "Use all mileage" / "use no mileage" toggle */
let useAllMileage = function() {
let btnName = document.getElementById("btnUseAllMileage");
if (btnName.value === "모두사용"){
btnName.value = "사용안함";
} else {
btnName.value = "모두사용";
}
let jsUseMileageInput = document.getElementById("orderMemberUseMileage");
let jsHasMileage = document.getElementById("orderMemberHasMileage");
let jsTotalPayment = document.getElementById("orderTotalPayment");
let jsTotalProductPrice = document.getElementById("orderTotalProductPrice");
let jsTotalShippingPrice = document.getElementById("orderTotalShippingPrice");
let jsUseMileageInputValue = jsUseMileageInput.value;
let jsHasMileageValue = jsHasMileage.innerText;
let jsTotalProductPriceValue = parseInt(jsTotalProductPrice.innerText);
let jsTotalShippingPriceValue = parseInt(jsTotalShippingPrice.innerText);
jsUseMileageInput.value = jsHasMileageValue;
jsHasMileage.innerText = jsUseMileageInputValue;
document.getElementById("orderTotalUseMileage").innerText = jsHasMileageValue;
jsTotalPayment.innerText = jsTotalProductPriceValue + jsTotalShippingPriceValue - parseInt(jsUseMileageInput.value);
};
/* Partial mileage use */
let RecalcuSubTotal = function(source) {
let jsUseMileageInput = document.getElementById("orderMemberUseMileage");
// immutable total mileage
let jsHiddenMileage = document.getElementById("hiddenMileage");
// changing total mileage
let jsHasMileage = document.getElementById("orderMemberHasMileage");
let jsTotalUseMileage = document.getElementById("orderTotalUseMileage");
// immutable total payment
let jsHiddenTotalPayment = document.getElementById("hiddenTotalPayment");
// changing total payment
let jsTotalPayment = document.getElementById("orderTotalPayment");
// input values must always parse as integers
let jsUseMileageInputValue = parseInt(jsUseMileageInput.value);
let jsOriginHasMileageValue = parseInt(jsHiddenMileage.innerText);
let jsHiddenTotalPaymentValue = parseInt(jsHiddenTotalPayment.innerText);
if(isNaN(jsUseMileageInput.value)){
alert("nan이 싫어요 ㅠ");
jsUseMileageInput.setAttribute("value", "0");
}
if (jsUseMileageInputValue > jsOriginHasMileageValue){
jsUseMileageInput.value = jsOriginHasMileageValue;
jsTotalUseMileage.innerText = jsHiddenMileage.innerText;
jsTotalPayment.innerText = jsHiddenTotalPaymentValue - jsOriginHasMileageValue;
} else {
// changing mileage = immutable mileage minus the entered amount
jsHasMileage.innerText = (jsOriginHasMileageValue - jsUseMileageInputValue);
jsTotalUseMileage.innerText = jsUseMileageInputValue;
jsTotalPayment.innerText = jsHiddenTotalPaymentValue - jsUseMileageInputValue;
}
};
|
import warnings
import numpy as np
import pandas as pd
import streamlit as st
import tensorflow as tf
import tensorflow_hub as hub
from joblib import load
from tqdm import tqdm
warnings.filterwarnings('ignore')
st.title('Sarcasm Detection')
st.text('Please upload the Data to detect Sarcasm. Data should have following fields(features).')
st.text("['comment', 'author', 'subreddit', 'score', 'ups', 'downs', 'date','created_utc', 'parent_comment']")
data_load_state = st.text('Loading data...')
data = st.file_uploader("SARCASM DATA")
if data is not None:
df = pd.read_csv(data)
#df.to_csv('data.csv')
st.subheader('SARCASM DATA UPLOAD DONE!')
st.write(df)
data_load_state.text('Loaded data...')
loading_universal_Sentence_encoder=st.text('loading universal sentence encoder')
@st.cache
def load_universal_sentence_encoder():
    # The Universal Sentence Encoder module is assumed to live in the local "5" directory.
    embed = hub.load("5")
    return embed
embed = load_universal_sentence_encoder()
loading_universal_Sentence_encoder.text('loaded universal sentence encoder')
def SarcasmDetection(df,embed):
'''Takes the dataframe and the Universal Sentence Encoder and returns a
sarcastic / not-sarcastic prediction for each row.'''
print('\nnull values in comment:-',df['comment'].isna().sum())
df['comment']=df['comment'].fillna(' ')
print('after filling null comments:',df['comment'].isna().sum())
print('null values in parent_comment:-',df['parent_comment'].isna().sum())
df['parent_comment']=df['parent_comment'].fillna(' ')
print('after filling null parent_comment:',df['parent_comment'].isna().sum())
##Response Encoding of categorical features
# fit and transform functions for response encoding of categorical variable
def fit_response_encoding(df,feature):
'''Takes df and a feature name; returns a dict mapping each category of the
feature to P(label == 1 | category).'''
CountOfFeatures_given_yes=df[df['label']==1][feature].value_counts()
CountOfFeatures=df[feature].value_counts()
# A category present in CountOfFeatures but missing from CountOfFeatures_given_yes
# yields NaN in the ratio; such a category has only non-sarcastic comments, so its
# probability of sarcasm is 0 and the NaN is filled with 0 below.
dict_of_probability=(CountOfFeatures_given_yes/CountOfFeatures)
dict_of_probability=dict_of_probability.fillna(0).to_dict()
return dict_of_probability
def transform_response_encoding(df,feature,dict_fit):
'''Takes df, the feature to response-encode, and the dict learned by the fit
function; returns df with a new feature_response column holding the encoding.'''
df[feature+'_response']=df[feature]
df[feature+'_response']=df[feature+'_response'].map(dict_fit)
# Laplace-style fallback: a category unseen in training gets probability 0.5
df[feature+'_response']=df[feature+'_response'].fillna(0.5)
return df
#response coding of categorical features
response_DictSubreddit=load('response_DictSubreddit.joblib')
response_DictAuthor=load('response_DictAuthor.joblib')
df=transform_response_encoding(df,'author',response_DictAuthor)
df=transform_response_encoding(df,'subreddit',response_DictSubreddit)
print('\nResponse coding is done!')
#scaling numerical features
scaler=load('scaler.joblib')
scaled_train=scaler.transform(df[['score','ups','downs']].values)
df[['scoreScaled','upsScaled','downsScaled']]=scaled_train
print('\nScaling of Numerical features is Done!')
print('loading Universal Sentence Encoder...')
# using Universal Sentence Encoder
def embedding_using_universalSentenceEncoder(x_train,feat,embed,chunk_size=1):
'''Takes a dataframe, a feature name, and the Universal Sentence Encoder and
embeds the rows in chunks. If embedding a chunk fails, its rows are embedded
one at a time; any rows left over after the last full chunk are embedded last.'''
start=np.arange(0,len(x_train),chunk_size)[0] # chunk start; the end of each chunk becomes the start of the next
t1=embed([x_train[feat].iloc[0]]) # seed embedding; chunk embeddings are concatenated onto this to build the matrix
for steps in tqdm(np.arange(0,len(x_train),chunk_size)[1:]):
### process in chunks
try:
t2=embed(x_train[feat].iloc[start:steps]) # embedding a set of points at a time to save time
start=steps # end point of last set becomes start of next set
t1=tf.concat([t1, t2], 0) # append this chunk's embeddings to the matrix
#print('start:',start,'steps:',steps)
###process single point
except Exception: # if embedding the chunk fails, fall back to one point at a time
for OneStep in range(start,steps): # embed the points of this chunk individually
t2=embed([x_train[feat].iloc[OneStep]])
t1=tf.concat([t1, t2], 0)
print('start of chunk:',start,'end of chunk:',steps)
start=steps #end point of last set becomes start of next set
### process the points remaining after the last full chunk
print('last step end:',steps)
if steps==len(x_train):
print(True)
else:
print('remaining points:',len(x_train)-steps)
for remainingSteps in tqdm(range(steps,len(x_train))):
t2=embed([x_train[feat].iloc[remainingSteps]])
t1=tf.concat([t1, t2], 0)
print('last point:',remainingSteps)
return t1
#embedding of comment
print('\nEmbedding of comment is being computed...')
comment_embeddings=embedding_using_universalSentenceEncoder(df,'comment',embed)
#embdedding of paret_comment
print('\nEmbedding of parent_comment is being computed...')
parent_comment_embeddings=embedding_using_universalSentenceEncoder(df,'parent_comment',embed,chunk_size=1)
print('\nEmbedding using universal sentence encoder is done!')
# numpy array of the engineered numeric features, used to build the dataset
NumericalFeaturesTrain=df[['scoreScaled','upsScaled','downsScaled','author_response','subreddit_response']].values
#Creating Dataset
dataset = tf.data.Dataset.from_tensor_slices(({"InputComment":comment_embeddings[1:], # [1:] drops the seed embedding used to start the matrix
"InputParentComment":parent_comment_embeddings[1:],
"inputNumerical":NumericalFeaturesTrain}))
dataset = dataset.shuffle(2)
dataset = dataset.batch(10, drop_remainder=False)
dataset=dataset.prefetch(tf.data.AUTOTUNE)
print('\nCreated Dataset!')
#loading model
model = tf.keras.models.load_model('UniversalSentenceEncoderbest_model_13_8_2021SGD1.hdf5')
print('\nloaded model')
print('\npredicting...')
# predictions
return [np.argmax(i) for i in model.predict(dataset)]
if st.checkbox('uploaded all files '):
predictions=SarcasmDetection(df,embed)
print('Prediction is Done!')
dict1={1:'Sarcastic',0:'Non Sarcastic'}
labels=[]
for i in predictions:
labels.append(dict1[i])
st.subheader('Labels:-')
st.write(labels)
st.subheader('Prediction Done!')
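# Standalone illustration (toy data, independent of the app above) of what the
# response-encoding fit step produces for a categorical feature:
#
#   toy = pd.DataFrame({'author': ['a', 'a', 'b', 'b', 'b'],
#                       'label': [1, 0, 1, 1, 0]})
#   counts_given_yes = toy[toy['label'] == 1]['author'].value_counts()
#   counts = toy['author'].value_counts()
#   print((counts_given_yes / counts).fillna(0).to_dict())
#   # -> {'a': 0.5, 'b': 0.6666666666666666}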
|
from __future__ import absolute_import
from talon.quotations import register_xpath_extensions
try:
from talon import signature
ML_ENABLED = True
except ImportError:
ML_ENABLED = False
__version__ = "1.5.2"
def init():
register_xpath_extensions()
if ML_ENABLED:
signature.initialize()
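# Usage sketch (hedged): callers are expected to run init() once before using
# the quotation/signature extractors, e.g.:
#
#   import talon
#   talon.init()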
|
import tensorflow as tf
class CustomLoss():
def __init__(self, class_names, class_weights):
self.class_names = class_names
self.class_weights = class_weights
def loss_fn(self, y_true, y_pred):
    # Weighted multi-label binary cross-entropy over raw logits.
    loss = 0
    y_pred = tf.keras.activations.sigmoid(y_pred)
    y_pred = tf.clip_by_value(y_pred, 1e-7, 1 - 1e-7)
    for i in range(len(self.class_names)):
        y_t = tf.transpose(y_true)[i]
        y_p = tf.transpose(y_pred)[i]
        w_pos, w_neg = self.class_weights[i][1], self.class_weights[i][0]
        loss -= w_pos * y_t * tf.math.log(y_p) + w_neg * (1 - y_t) * tf.math.log(1 - y_p)
    return loss / len(self.class_names)
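# Usage sketch (hypothetical class names, weights, and model shape; the model
# must output raw logits because loss_fn applies the sigmoid itself):
if __name__ == '__main__':
    class_names = ['cat', 'dog', 'bird']
    # class_weights[i] = (negative_weight, positive_weight) for class i
    class_weights = [(0.4, 0.6), (0.3, 0.7), (0.5, 0.5)]
    custom = CustomLoss(class_names, class_weights)
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(8,)),
        tf.keras.layers.Dense(len(class_names))  # logits: no output activation
    ])
    model.compile(optimizer='adam', loss=custom.loss_fn)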
|
# coding: utf-8
import json
import pytest
from mock import Mock
from boxsdk.object.legal_hold_policy import LegalHoldPolicy
from boxsdk.object.legal_hold_policy_assignment import LegalHoldPolicyAssignment
from boxsdk.config import API
from boxsdk.network.default_network import DefaultNetworkResponse
@pytest.fixture(scope='module')
def policy_id_1():
return 101
@pytest.fixture(scope='module')
def policy_id_2():
return 202
@pytest.fixture(scope='module')
def legal_hold_id_1():
return 101
@pytest.fixture(scope='module')
def legal_hold_id_2():
return 202
@pytest.fixture(scope='module')
def policies_response(policy_id_1, policy_id_2):
# pylint:disable=redefined-outer-name
mock_network_response = Mock(DefaultNetworkResponse)
mock_network_response.json.return_value = {
'entries': [
{'type': 'legal_hold_policy', 'id': policy_id_1, 'name': 'Test Policy 1'},
{'type': 'legal_hold_policy', 'id': policy_id_2, 'name': 'Test Policy 2'}
],
'limit': 5,
}
return mock_network_response
@pytest.fixture(scope='module')
def legal_hold_response(legal_hold_id_1, legal_hold_id_2):
# pylint:disable=redefined-outer-name
mock_network_response = Mock(DefaultNetworkResponse)
mock_network_response.json.return_value = {
'entries': [
{'type': 'legal_hold', 'id': legal_hold_id_1, 'name': 'Test Legal Hold 1'},
{'type': 'legal_hold', 'id': legal_hold_id_2, 'name': 'Test Legal Hold 2'}
],
'limit': 5,
}
return mock_network_response
def test_assign(test_legal_hold_policy, mock_box_session, test_file):
assignment_id = '12345'
assigned_at = '2016-05-18T17:38:03-07:00'
expected_url = '{0}/legal_hold_policy_assignments'.format(API.BASE_API_URL)
expected_body = {
'policy_id': test_legal_hold_policy.object_id,
'assign_to': {
'type': 'file',
'id': test_file.object_id
}
}
mock_box_session.post.return_value.json.return_value = {
'type': 'legal_hold_policy_assignment',
'id': assignment_id,
'assigned_at': assigned_at,
}
new_legal_hold_assignment = test_legal_hold_policy.assign(test_file)
mock_box_session.post.assert_called_once_with(expected_url, data=json.dumps(expected_body))
assert isinstance(new_legal_hold_assignment, LegalHoldPolicyAssignment)
assert new_legal_hold_assignment.assigned_at == assigned_at
assert new_legal_hold_assignment.id == assignment_id
def test_get(test_legal_hold_policy, mock_box_session):
created_at = '2016-05-18T17:38:03-07:00'
expected_url = '{0}/legal_hold_policies/{1}'.format(API.BASE_API_URL, test_legal_hold_policy.object_id)
mock_box_session.get.return_value.json.return_value = {
'type': 'legal_hold_policy',
'id': test_legal_hold_policy.object_id,
'created_at': created_at
}
legal_hold_policy = test_legal_hold_policy.get()
mock_box_session.get.assert_called_once_with(expected_url, headers=None, params=None)
assert isinstance(legal_hold_policy, LegalHoldPolicy)
assert legal_hold_policy.created_at == created_at
@pytest.fixture(scope='module')
def delete_policy_response():
# pylint:disable=redefined-outer-name
mock_network_response = Mock(DefaultNetworkResponse)
mock_network_response.ok = True
return mock_network_response
def test_delete_policy_return_the_correct_response(
test_legal_hold_policy,
mock_box_session,
delete_policy_response,
):
# pylint:disable=redefined-outer-name
mock_box_session.delete.return_value = delete_policy_response
response = test_legal_hold_policy.delete()
# pylint:disable=protected-access
expected_url = test_legal_hold_policy.get_url()
# pylint:enable=protected-access
mock_box_session.delete.assert_called_once_with(expected_url, params={}, expect_json_response=False, headers=None)
assert response is True
@pytest.mark.parametrize('assign_to_type, assign_to_id, params', [
(None, None, {}),
('file', None, {'assign_to_type': 'file'}),
('folder', '22222', {'assign_to_type': 'folder', 'assign_to_id': '22222'})
])
def test_get_assignments(
test_legal_hold_policy,
mock_box_session,
policies_response,
policy_id_1,
policy_id_2,
assign_to_type,
assign_to_id,
params,
):
# pylint:disable=redefined-outer-name
expected_url = '{0}/legal_hold_policy_assignments'.format(API.BASE_API_URL)
expected_params = {'policy_id': test_legal_hold_policy.object_id}
expected_params.update(params)
mock_box_session.get.return_value = policies_response
assignments = test_legal_hold_policy.get_assignments(assign_to_type=assign_to_type, assign_to_id=assign_to_id)
for assignment, expected_id in zip(assignments, [policy_id_1, policy_id_2]):
assert assignment.object_id == expected_id
# pylint:disable=protected-access
assert assignment._session == mock_box_session
mock_box_session.get.assert_called_once_with(expected_url, params=expected_params)
def test_get_file_version_legal_holds(
test_legal_hold_policy,
mock_box_session,
legal_hold_response,
legal_hold_id_1,
legal_hold_id_2
):
# pylint:disable=redefined-outer-name
mock_box_session.get.return_value = legal_hold_response
legal_holds = test_legal_hold_policy.get_file_version_legal_holds()
for legal_hold, expected_id in zip(legal_holds, [legal_hold_id_1, legal_hold_id_2]):
assert legal_hold.object_id == expected_id
# pylint:disable=protected-access
assert legal_hold._session == mock_box_session
|
// COPYRIGHT © 2021 Esri
//
// All rights reserved under the copyright laws of the United States
// and applicable international laws, treaties, and conventions.
//
// This material is licensed for use under the Esri Master License
// Agreement (MLA), and is bound by the terms of that agreement.
// You may redistribute and use this code without modification,
// provided you adhere to the terms of the MLA and include this
// copyright notice.
//
// See use restrictions at http://www.esri.com/legal/pdfs/mla_e204_e300/english
//
// For additional information, contact:
// Environmental Systems Research Institute, Inc.
// Attn: Contracts and Legal Services Department
// 380 New York Street
// Redlands, California, USA 92373
// USA
//
// email: contracts@esri.com
//
// See http://js.arcgis.com/3.37/esri/copyright.txt for details.
define({inputRegionRasterOrFeatures:"Kies regioraster of object",inputCostRaster:"Kies kostenraster (optioneel)",inputBarrierRasterOrFeatures:"Kies barrièreraster of object (optioneel)",distanceMethod:"Afstandmethode (optioneel)",connectionsWithinRegions:"Verbindingen binnen regio's (optioneel)",generateConnections:"Verbindingen genereren",noConnections:"Geen verbindingen",planar:"Planair",geoDesic:"Geodetisch",outputOptimalLinesName:"Resultaat optimale connectiviteitslijnen naam feature layer",outputNeighborConnectionsName:"Resultaat naam feature layer van de naburige verbindingen (optioneel)",outputLayerName:"Optimale regioverbindingen van ${layername}",drawSourcePointLayerName:"Bronpunten",itemDescription:"Feature layer gegenereerd door uitvoering van de Optimale regioverbindingen voor ${layername}. ",itemTags:"Analyseresultaat, Optimale regioverbindingen, ${layername} ${fieldname}",itemSnippet:"Feature layer gegenereerd door de Optimale regioverbindingen.",noValueMessage:"Deze waarde is vereist"});
|
description = "a recurrent scrapper of tweets related to 'elecciones 2020 peru'"
scrapingtime = "each 30 seconds"
about = "you can see the main repository here https://github.com/minskylab/pe2020ru"
endpoints = {
"base": "https://peru2020scrapper.minsky.cc",
"about": "/about",
"frequency_count": "/freqs",
"dataframes": []
}
|
/**
* @license
* Copyright 2021 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
foam.CLASS({
package: 'foam.u2.tag',
name: 'Button',
extends: 'foam.u2.View',
documentation: 'Basic button view. Should be extended to add functionality',
requires: [
'foam.net.HTTPRequest',
'foam.u2.ButtonSize',
'foam.u2.ButtonStyle',
'foam.u2.HTMLView',
'foam.u2.tag.CircleIndicator'
],
imports: [ 'theme?' ],
css: `
^ {
font: inherit;
align-items: center;
border: 1px solid transparent;
border-radius: 4px;
box-sizing: border-box;
display: inline-flex;
gap: 8px;
justify-content: center;
margin: 0;
outline: none;
text-align: center;
}
^iconAfter {
flex-direction: row-reverse;
}
^ + ^ {
margin-left: 8px;
}
^:hover:not(:disabled) {
cursor: pointer;
}
^unavailable {
display: none;
}
^ img {
vertical-align: middle;
}
^ svg {
width: 100%;
max-height: 100%;
vertical-align: middle;
}
^.material-icons {
cursor: pointer;
}
/* Unstyled */
^unstyled {
background: none;
border: none;
color: inherit;
}
/* Primary */
^primary, ^primary svg {
background-color: /*%PRIMARY3%*/ #406dea;
box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.06), 0px 1px 3px rgba(0, 0, 0, 0.1);
color: /*%WHITE%*/ white;
fill: /*%WHITE%*/ white;
}
^primary:hover:not(:disabled) {
background-color: /*%PRIMARY2%*/ #144794;
}
^primary:focus {
background-color: /*%PRIMARY2%*/ #144794;
border-color: /*%PRIMARY1%*/ #202341;
}
^primary:disabled {
background-color: /*%PRIMARY4%*/ #C6D2FF;
}
/* Primary destructive */
^primary-destructive,^primary-destructive svg {
background-color: /*%DESTRUCTIVE3%*/ #d9170e;
color: /*%WHITE%*/ white;
fill: /*%WHITE%*/ white;
}
^primary-destructive:hover:not(:disabled) {
background-color: /*%DESTRUCTIVE2%*/ #a61414;
}
^primary-destructive:focus {
background-color: /*%DESTRUCTIVE2%*/ #a61414;
border: 1px solid /*%DESTRUCTIVE1%*/ #631414;
box-shadow: inset 0px 2px 4px rgba(0, 0, 0, 0.06);
}
^primary-destructive:disabled {
background-color: /*%DESTRUCTIVE5%*/ #E5D2D0;
}
/* Secondary */
^secondary{
background-color: /*%WHITE%*/ white;
border: 1px solid /*%GREY3%*/ #B2B6BD;
color: /*%GREY1%*/ #494F59;
}
^secondary svg { fill: /*%GREY1%*/ #494F59; }
^secondary:hover:not(:disabled) {
background-color: /*%GREY5%*/ #B2B6BD;
}
^secondary:focus {
background-color: /*%GREY5%*/ #B2B6BD;
border: 1px solid /*%PRIMARY3%*/ #406DEA;
}
^secondary:disabled{
background-color: /*%GREY5%*/ #F5F7FA;
border-color: /*%GREY4%*/ #DADDE2;
color: /*%GREY4%*/ #DADDE2;
}
^secondary:disabled svg { fill: /*%GREY4%*/ #DADDE2; }
/* Secondary destructive */
^secondary-destructive{
background-color: white;
border: 1px solid /*%GREY3%*/ #B2B6BD;
color: /*%DESTRUCTIVE2%*/ #a61414;
}
^secondary-destructive svg { fill: /*%DESTRUCTIVE2%*/ #a61414; }
^secondary-destructive:hover {
background-color: /*%GREY5%*/ #B2B6BD;
}
^secondary-destructive:focus {
background-color: /*%GREY5%*/ #B2B6BD;
border-color: /*%DESTRUCTIVE2%*/ #a61414;
}
^secondary-destructive:disabled {
background-color: /*%GREY5%*/ #F5F7FA;
border-color: /*%GREY4%*/ #DADDE2;
color: /*%DESTRUCTIVE5%*/ #E5D2D0;
}
^secondary-destructive:disabled svg { fill: /*%DESTRUCTIVE5%*/ #E5D2D0; }
/* Tertiary */
^tertiary{
background: none;
border: 1px solid transparent;
color: /*%GREY1%*/ #5E6061;
}
^tertiary svg { fill: /*%GREY1%*/ #5E6061; }
^tertiary:hover:not(:disabled) {
background-color: /*%GREY5%*/ #F5F7FA;
}
^tertiary:focus,^tertiary:focus svg {
background-color: /*%GREY5%*/ #F5F7FA;
color: /*%PRIMARY3%*/ #494F59;
fill: /*%PRIMARY3%*/ #494F59;
}
^tertiary:disabled,^tertiary:disabled svg {
color: /*%GREY4%*/ #DADDE2;
fill: /*%GREY4%*/ #DADDE2;
}
/* Tertiary destructive */
^tertiary-destructive{
background-color: transparent;
border-color: transparent;
color: /*%DESTRUCTIVE3%*/ #D9170E;
}
^tertiary-destructive svg { fill: /*%DESTRUCTIVE3%*/ #D9170E; }
^tertiary-destructive:hover:not(:disabled) {
background-color: /*%GREY5%*/ #F5F7FA;
}
^tertiary-destructive:focus,^tertiary-destructive:focus svg {
background-color: /*%GREY5%*/ #F5F7FA;
color: /*%DESTRUCTIVE3%*/ #494F59;
fill: /*%DESTRUCTIVE3%*/ #494F59;
}
^tertiary-destructive:disabled,^tertiary-destructive:disabled svg {
color: /*%GREY4%*/ #DADDE2;
fill: /*%GREY4%*/ #DADDE2;
}
/* Link */
^link,^link svg {
background: none;
color: /*%GREY1%*/ #5E6061;
fill: /*%GREY1%*/ #5E6061;
}
^link:hover:not(:disabled),^link:hover svg {
text-decoration: underline;
color: /*%GREY2%*/ #6B778C;
fill: /*%GREY2%*/ #6B778C;
}
^link:focus,^link:focus svg {
color: /*%PRIMARY3%*/ #406DEA;
fill: /*%PRIMARY3%*/ #406DEA;
}
/* Sizes */
^small {
padding: 6px 10px;
}
^medium {
padding: 8px 12px;
max-height: 34px;
}
^large {
min-width: 100px;
padding: 12px 12px;
}
^iconOnly{
padding: 8px;
max-height: inherit;
}
^link^small,
^link^medium,
^link^large {
padding-left: 0;
padding-right: 0;
}
^link > .foam-u2-HTMLView{
height: 1em;
}
^svgIcon {
max-height: 100%;
max-width: 100%;
object-fit: contain;
}
^svgIcon svg {
height: 100%;
}
/* SVGs outside themeGlyphs may have their own heights and widths,
these ensure those are respected rather than imposing new dimensions */
^imgSVGIcon {
display: flex;
align-items: center;
justify-content: center;
}
^imgSVGIcon svg {
height: initial;
}
^small svg,
^small img {
width: 1.15em;
height: 1.15em;
}
^medium svg,
^medium img {
width: 1.71em;
height: 1.71em;
}
^large svg,
^large img {
width: 2.25em;
height: 2.25em;
}
^link svg, ^link img {
width: 1em;
height: 1em;
}
`,
properties: [
'name',
{
class: 'GlyphProperty',
name: 'themeIcon'
},
{
class: 'URL',
name: 'icon'
},
{
class: 'Boolean',
name: 'isIconAfter'
},
{
class: 'String',
name: 'iconFontFamily'
},
{
class: 'String',
name: 'iconFontClass'
},
{
class: 'String',
name: 'iconFontName'
},
[ 'nodeName', 'button' ],
{
name: 'label'
},
{
class: 'String',
name: 'ariaLabel'
},
{
class: 'Enum',
of: 'foam.u2.ButtonStyle',
name: 'buttonStyle',
value: 'SECONDARY'
},
{
class: 'Boolean',
name: 'isDestructive',
documentation: `
When set to true, this action should be styled in a way that indicates
that data is deleted in some way.
`,
factory: function() {
return false;
}
},
{
class: 'Enum',
of: 'foam.u2.ButtonSize',
name: 'size',
value: 'MEDIUM'
},
{
class: 'String',
name: 'styleClass_',
expression: function(isDestructive, buttonStyle) {
var s = buttonStyle.name.toLowerCase();
return isDestructive ? s + '-destructive' : s;
}
}
],
methods: [
function render() {
this.SUPER();
this.initCls();
this.on('click', this.click);
this.addContent();
this.attrs({ name: this.name || '', 'aria-label': this.ariaLabel });
this.addClass(this.slot(function(styleClass_) {
return this.myClass(styleClass_);
}));
this.addClass(this.myClass(this.size.label.toLowerCase()));
this.enableClass(this.myClass('iconOnly'), ! (this.contents || this.label));
this.enableClass(this.myClass('iconAfter'), this.isIconAfter$);
},
function initCls() {
this.addClass();
},
async function addContent() {
/** Add text or icon to button. **/
var self = this;
if ( ( this.themeIcon && this.theme ) ) {
this
.start({ class: 'foam.u2.tag.Image', glyph: this.themeIcon, role: 'presentation' })
.addClass(this.myClass('svgIcon')) // matches the ^svgIcon rule in the css block
.end();
} else if ( this.icon ) {
this
.start({ class: 'foam.u2.tag.Image', data: this.icon, role: 'presentation', embedSVG: true })
.addClasses([this.myClass('svgIcon'), this.myClass('imgSVGIcon')])
.end();
} else if ( this.iconFontName ) {
this.nodeName = 'i';
this.addClass(this.action.name);
this.addClass(this.iconFontClass); // required by font package
this.attr('role', 'presentation');
this.style({ 'font-family': this.iconFontFamily });
this.add(this.iconFontName);
}
if ( this.label ) {
if ( foam.String.isInstance(this.label) ) {
if ( this.buttonStyle == 'LINK' || this.buttonStyle == 'UNSTYLED' ) {
this.start().addClass('p').add(this.label$).end();
} else {
this.start().addClass('h600').add(this.label$).end();
}
} else {
this.add(this.label$);
}
}
}
],
listeners: [
function click(e) {
// Implemented by subclasses
e.preventDefault();
e.stopPropagation();
}
]
});
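/*
 * Usage sketch (hedged; values follow the enums declared above):
 *
 *   foam.u2.tag.Button.create({
 *     label: 'Save',
 *     buttonStyle: 'PRIMARY',   // foam.u2.ButtonStyle
 *     size: 'LARGE',            // foam.u2.ButtonSize
 *     isDestructive: false
 *   });
 *
 * Subclasses typically override the click() listener to attach real behavior.
 */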
|
# -*- coding: utf-8 -*-
# Author: Eric Larson <larson.eric.d@gmail.com>
#
# License: BSD (3-clause)
from distutils.version import LooseVersion
import gc
import os
import os.path as op
from pathlib import Path
import shutil
import sys
import warnings
import pytest
# For some unknown reason, on Travis-xenial there are segfaults caused on
# the line pytest -> pdb.Pdb.__init__ -> "import readline". Forcing an
# import here seems to prevent them (!?). This suggests a potential problem
# with some other library stepping on memory where it shouldn't. It only
# seems to happen on the Linux runs that install Mayavi. Anecdotally,
# @larsoner has had problems a couple of years ago where a mayavi import
# seemed to corrupt SciPy linalg function results (!), likely due to the
# associated VTK import, so this could be another manifestation of that.
try:
import readline # noqa
except Exception:
pass
import numpy as np
import mne
from mne.datasets import testing
from mne.utils import _pl, _assert_no_instances
test_path = testing.data_path(download=False)
s_path = op.join(test_path, 'MEG', 'sample')
fname_evoked = op.join(s_path, 'sample_audvis_trunc-ave.fif')
fname_cov = op.join(s_path, 'sample_audvis_trunc-cov.fif')
fname_fwd = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
bem_path = op.join(test_path, 'subjects', 'sample', 'bem')
fname_bem = op.join(bem_path, 'sample-1280-bem.fif')
fname_aseg = op.join(test_path, 'subjects', 'sample', 'mri', 'aseg.mgz')
subjects_dir = op.join(test_path, 'subjects')
fname_src = op.join(bem_path, 'sample-oct-4-src.fif')
fname_trans = op.join(s_path, 'sample_audvis_trunc-trans.fif')
def pytest_configure(config):
"""Configure pytest options."""
# Markers
for marker in ('slowtest', 'ultraslowtest'):
config.addinivalue_line('markers', marker)
# Fixtures
for fixture in ('matplotlib_config',):
config.addinivalue_line('usefixtures', fixture)
# Warnings
# - Once SciPy updates not to have non-integer and non-tuple errors (1.2.0)
# we should remove them from here.
# - This list should also be considered alongside reset_warnings in
# doc/conf.py.
warning_lines = r"""
error::
ignore::ImportWarning
ignore:the matrix subclass:PendingDeprecationWarning
ignore:numpy.dtype size changed:RuntimeWarning
ignore:.*HasTraits.trait_.*:DeprecationWarning
ignore:.*takes no parameters:DeprecationWarning
ignore:joblib not installed:RuntimeWarning
ignore:Using a non-tuple sequence for multidimensional indexing:FutureWarning
ignore:using a non-integer number instead of an integer will result in an error:DeprecationWarning
ignore:Importing from numpy.testing.decorators is deprecated:DeprecationWarning
ignore:np.loads is deprecated, use pickle.loads instead:DeprecationWarning
ignore:The oldnumeric module will be dropped:DeprecationWarning
ignore:Collection picker None could not be converted to float:UserWarning
ignore:covariance is not positive-semidefinite:RuntimeWarning
ignore:Can only plot ICA components:RuntimeWarning
ignore:Matplotlib is building the font cache using fc-list:UserWarning
ignore:Using or importing the ABCs from 'collections':DeprecationWarning
ignore:`formatargspec` is deprecated:DeprecationWarning
# This is only necessary until sklearn updates their wheels for NumPy 1.16
ignore:numpy.ufunc size changed:RuntimeWarning
ignore:.*mne-realtime.*:DeprecationWarning
ignore:.*imp.*:DeprecationWarning
ignore:Exception creating Regex for oneOf.*:SyntaxWarning
ignore:scipy\.gradient is deprecated.*:DeprecationWarning
ignore:sklearn\.externals\.joblib is deprecated.*:FutureWarning
ignore:The sklearn.*module.*deprecated.*:FutureWarning
ignore:.*trait.*handler.*deprecated.*:DeprecationWarning
ignore:.*rich_compare.*metadata.*deprecated.*:DeprecationWarning
ignore:.*In future, it will be an error for 'np.bool_'.*:DeprecationWarning
ignore:.*`np.bool` is a deprecated alias.*:DeprecationWarning
ignore:.*`np.int` is a deprecated alias.*:DeprecationWarning
ignore:.*`np.float` is a deprecated alias.*:DeprecationWarning
ignore:.*`np.object` is a deprecated alias.*:DeprecationWarning
ignore:.*`np.long` is a deprecated alias:DeprecationWarning
ignore:.*Converting `np\.character` to a dtype is deprecated.*:DeprecationWarning
ignore:.*sphinx\.util\.smartypants is deprecated.*:
ignore:.*pandas\.util\.testing is deprecated.*:
ignore:.*tostring.*is deprecated.*:DeprecationWarning
ignore:.*QDesktopWidget\.availableGeometry.*:DeprecationWarning
ignore:Unable to enable faulthandler.*:UserWarning
always:.*get_data.* is deprecated in favor of.*:DeprecationWarning
always::ResourceWarning
""" # noqa: E501
for warning_line in warning_lines.split('\n'):
warning_line = warning_line.strip()
if warning_line and not warning_line.startswith('#'):
config.addinivalue_line('filterwarnings', warning_line)
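# For reference, the filterwarnings loop above is equivalent to listing each
# entry under ``filterwarnings`` in a pytest ini file, e.g. (hypothetical):
#
#     [pytest]
#     filterwarnings =
#         error::
#         ignore::ImportWarning
#
# Registering them in pytest_configure keeps the list in one place here
# instead of duplicating it in an ini file.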
# Have to be careful with autouse=True, but this is just an int comparison
# so it shouldn't really add appreciable overhead
@pytest.fixture(autouse=True)
def check_verbose(request):
"""Set to the default logging level to ensure it's tested properly."""
starting_level = mne.utils.logger.level
yield
# ensures that no tests break the global state
try:
assert mne.utils.logger.level == starting_level
except AssertionError:
pytest.fail('.'.join([request.module.__name__,
request.function.__name__]) +
' modifies logger.level')
@pytest.fixture(autouse=True)
def close_all():
"""Close all matplotlib plots, regardless of test status."""
    # This adds < 1 µs in local testing, and we have ~2500 tests, so ~2.5 ms max
import matplotlib.pyplot as plt
yield
plt.close('all')
@pytest.fixture(scope='function')
def verbose_debug():
"""Run a test with debug verbosity."""
with mne.utils.use_log_level('debug'):
yield
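# Example usage (hypothetical test): requesting the fixture by name runs the
# whole test body at debug verbosity, e.g.:
#
#     def test_something(verbose_debug):
#         mne.read_cov(fname_cov)  # log output here is at the 'debug' level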
@pytest.fixture(scope='session')
def matplotlib_config():
"""Configure matplotlib for viz tests."""
import matplotlib
from matplotlib import cbook
# Allow for easy interactive debugging with a call like:
#
# $ MNE_MPL_TESTING_BACKEND=Qt5Agg pytest mne/viz/tests/test_raw.py -k annotation -x --pdb # noqa: E501
#
try:
want = os.environ['MNE_MPL_TESTING_BACKEND']
except KeyError:
want = 'agg' # don't pop up windows
with warnings.catch_warnings(record=True): # ignore warning
warnings.filterwarnings('ignore')
matplotlib.use(want, force=True)
import matplotlib.pyplot as plt
assert plt.get_backend() == want
# overwrite some params that can horribly slow down tests that
# users might have changed locally (but should not otherwise affect
# functionality)
plt.ioff()
plt.rcParams['figure.dpi'] = 100
try:
from traits.etsconfig.api import ETSConfig
except Exception:
pass
else:
ETSConfig.toolkit = 'qt4'
# Make sure that we always reraise exceptions in handlers
orig = cbook.CallbackRegistry
class CallbackRegistryReraise(orig):
def __init__(self, exception_handler=None):
args = ()
if LooseVersion(matplotlib.__version__) >= LooseVersion('2.1'):
args += (exception_handler,)
super(CallbackRegistryReraise, self).__init__(*args)
cbook.CallbackRegistry = CallbackRegistryReraise
@pytest.fixture(scope='session')
def ci_macos():
"""Determine if running on MacOS CI."""
return (os.getenv('CI', 'false').lower() == 'true' and
sys.platform == 'darwin')
@pytest.fixture(scope='session')
def azure_windows():
"""Determine if running on Azure Windows."""
return (os.getenv('AZURE_CI_WINDOWS', 'false').lower() == 'true' and
sys.platform.startswith('win'))
@pytest.fixture()
def check_gui_ci(ci_macos, azure_windows):
"""Skip tests that are not reliable on CIs."""
if azure_windows or ci_macos:
pytest.skip('Skipping GUI tests on MacOS CIs and Azure Windows')
@pytest.fixture(scope='session', params=[testing._pytest_param()])
def _evoked():
# This one is session scoped, so be sure not to modify it (use evoked
# instead)
evoked = mne.read_evokeds(fname_evoked, condition='Left Auditory',
baseline=(None, 0))
evoked.crop(0, 0.2)
return evoked
@pytest.fixture()
def evoked(_evoked):
"""Get evoked data."""
return _evoked.copy()
@pytest.fixture(scope='function', params=[testing._pytest_param()])
def noise_cov():
"""Get a noise cov from the testing dataset."""
return mne.read_cov(fname_cov)
@pytest.fixture(scope='function')
def bias_params_free(evoked, noise_cov):
"""Provide inputs for free bias functions."""
fwd = mne.read_forward_solution(fname_fwd)
return _bias_params(evoked, noise_cov, fwd)
@pytest.fixture(scope='function')
def bias_params_fixed(evoked, noise_cov):
"""Provide inputs for fixed bias functions."""
fwd = mne.read_forward_solution(fname_fwd)
fwd = mne.convert_forward_solution(fwd, force_fixed=True, surf_ori=True)
return _bias_params(evoked, noise_cov, fwd)
def _bias_params(evoked, noise_cov, fwd):
evoked.pick_types(meg=True, eeg=True, exclude=())
# restrict to limited set of verts (small src here) and one hemi for speed
vertices = [fwd['src'][0]['vertno'].copy(), []]
stc = mne.SourceEstimate(np.zeros((sum(len(v) for v in vertices), 1)),
vertices, 0., 1.)
fwd = mne.forward.restrict_forward_to_stc(fwd, stc)
assert fwd['sol']['row_names'] == noise_cov['names']
assert noise_cov['names'] == evoked.ch_names
evoked = mne.EvokedArray(fwd['sol']['data'].copy(), evoked.info)
data_cov = noise_cov.copy()
data_cov['data'] = np.dot(fwd['sol']['data'], fwd['sol']['data'].T)
assert data_cov['data'].shape[0] == len(noise_cov['names'])
want = np.arange(fwd['sol']['data'].shape[1])
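    # With free orientation the forward has 3 columns (x/y/z) per source, so
    # map column indices back to source indices.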
if not mne.forward.is_fixed_orient(fwd):
want //= 3
return evoked, fwd, noise_cov, data_cov, want
@pytest.fixture(scope="module", params=[
"mayavi",
"pyvista",
])
def backend_name(request):
"""Get the backend name."""
yield request.param
@pytest.yield_fixture
def renderer(backend_name, garbage_collect):
"""Yield the 3D backends."""
from mne.viz.backends.renderer import _use_test_3d_backend
_check_skip_backend(backend_name)
with _use_test_3d_backend(backend_name):
from mne.viz.backends import renderer
yield renderer
renderer.backend._close_all()
@pytest.yield_fixture
def garbage_collect():
"""Garbage collect on exit."""
yield
gc.collect()
@pytest.fixture(scope="module", params=[
"pyvista",
"mayavi",
])
def backend_name_interactive(request):
"""Get the backend name."""
yield request.param
@pytest.yield_fixture
def renderer_interactive(backend_name_interactive):
"""Yield the 3D backends."""
from mne.viz.backends.renderer import _use_test_3d_backend
_check_skip_backend(backend_name_interactive)
with _use_test_3d_backend(backend_name_interactive, interactive=True):
from mne.viz.backends import renderer
yield renderer
renderer.backend._close_all()
def _check_skip_backend(name):
from mne.viz.backends.tests._utils import (has_mayavi, has_pyvista,
has_pyqt5, has_imageio_ffmpeg)
if name == 'mayavi':
if not has_mayavi():
pytest.skip("Test skipped, requires mayavi.")
elif name == 'pyvista':
if not has_pyvista():
pytest.skip("Test skipped, requires pyvista.")
if not has_imageio_ffmpeg():
pytest.skip("Test skipped, requires imageio-ffmpeg")
if not has_pyqt5():
pytest.skip("Test skipped, requires PyQt5.")
@pytest.fixture()
def renderer_notebook():
"""Verify that pytest_notebook is installed."""
from mne.viz.backends import renderer
with renderer._use_test_3d_backend('notebook'):
yield renderer
@pytest.fixture(scope='session')
def pixel_ratio():
"""Get the pixel ratio."""
from mne.viz.backends.tests._utils import (has_mayavi, has_pyvista,
has_pyqt5)
if not (has_mayavi() or has_pyvista()) or not has_pyqt5():
return 1.
from PyQt5.QtWidgets import QApplication, QMainWindow
_ = QApplication.instance() or QApplication([])
window = QMainWindow()
ratio = float(window.devicePixelRatio())
window.close()
return ratio
@pytest.fixture(scope='function', params=[testing._pytest_param()])
def subjects_dir_tmp(tmpdir):
"""Copy MNE-testing-data subjects_dir to a temp dir for manipulation."""
for key in ('sample', 'fsaverage'):
shutil.copytree(op.join(subjects_dir, key), str(tmpdir.join(key)))
return str(tmpdir)
# Scoping these as session will make things faster, but need to make sure
# not to modify them in-place in the tests, so keep them private
@pytest.fixture(scope='session', params=[testing._pytest_param()])
def _evoked_cov_sphere(_evoked):
"""Compute a small evoked/cov/sphere combo for use with forwards."""
evoked = _evoked.copy().pick_types(meg=True)
evoked.pick_channels(evoked.ch_names[::4])
assert len(evoked.ch_names) == 77
cov = mne.read_cov(fname_cov)
sphere = mne.make_sphere_model('auto', 'auto', evoked.info)
return evoked, cov, sphere
@pytest.fixture(scope='session')
def _fwd_surf(_evoked_cov_sphere):
"""Compute a forward for a surface source space."""
evoked, cov, sphere = _evoked_cov_sphere
src_surf = mne.read_source_spaces(fname_src)
return mne.make_forward_solution(
evoked.info, fname_trans, src_surf, sphere, mindist=5.0)
@pytest.fixture(scope='session')
def _fwd_subvolume(_evoked_cov_sphere):
"""Compute a forward for a surface source space."""
pytest.importorskip('nibabel')
evoked, cov, sphere = _evoked_cov_sphere
volume_labels = ['Left-Cerebellum-Cortex', 'right-Cerebellum-Cortex']
with pytest.raises(ValueError,
match=r"Did you mean one of \['Right-Cere"):
mne.setup_volume_source_space(
'sample', pos=20., volume_label=volume_labels,
subjects_dir=subjects_dir)
volume_labels[1] = 'R' + volume_labels[1][1:]
src_vol = mne.setup_volume_source_space(
'sample', pos=20., volume_label=volume_labels,
subjects_dir=subjects_dir, add_interpolator=False)
return mne.make_forward_solution(
evoked.info, fname_trans, src_vol, sphere, mindist=5.0)
@pytest.fixture(scope='session')
def _all_src_types_fwd(_fwd_surf, _fwd_subvolume):
"""Create all three forward types (surf, vol, mixed)."""
fwds = dict(surface=_fwd_surf, volume=_fwd_subvolume)
with pytest.raises(RuntimeError,
match='Invalid source space with kinds'):
fwds['volume']['src'] + fwds['surface']['src']
# mixed (4)
fwd = fwds['surface'].copy()
f2 = fwds['volume']
for keys, axis in [(('source_rr',), 0),
(('source_nn',), 0),
(('sol', 'data'), 1),
(('_orig_sol',), 1)]:
a, b = fwd, f2
key = keys[0]
if len(keys) > 1:
a, b = a[key], b[key]
key = keys[1]
a[key] = np.concatenate([a[key], b[key]], axis=axis)
fwd['sol']['ncol'] = fwd['sol']['data'].shape[1]
fwd['nsource'] = fwd['sol']['ncol'] // 3
fwd['src'] = fwd['src'] + f2['src']
fwds['mixed'] = fwd
return fwds
@pytest.fixture(scope='session')
def _all_src_types_inv_evoked(_evoked_cov_sphere, _all_src_types_fwd):
"""Compute inverses for all source types."""
evoked, cov, _ = _evoked_cov_sphere
invs = dict()
for kind, fwd in _all_src_types_fwd.items():
assert fwd['src'].kind == kind
with pytest.warns(RuntimeWarning, match='has magnitude'):
invs[kind] = mne.minimum_norm.make_inverse_operator(
evoked.info, fwd, cov)
return invs, evoked
@pytest.fixture(scope='function')
def all_src_types_inv_evoked(_all_src_types_inv_evoked):
"""All source types of inverses, allowing for possible modification."""
invs, evoked = _all_src_types_inv_evoked
invs = {key: val.copy() for key, val in invs.items()}
evoked = evoked.copy()
return invs, evoked
@pytest.fixture(scope='function')
def mixed_fwd_cov_evoked(_evoked_cov_sphere, _all_src_types_fwd):
"""Compute inverses for all source types."""
evoked, cov, _ = _evoked_cov_sphere
return _all_src_types_fwd['mixed'].copy(), cov.copy(), evoked.copy()
@pytest.fixture(scope='session', params=[testing._pytest_param()])
def src_volume_labels():
"""Create a 7mm source space with labels."""
pytest.importorskip('nibabel')
volume_labels = mne.get_volume_labels_from_aseg(fname_aseg)
src = mne.setup_volume_source_space(
'sample', 7., mri='aseg.mgz', volume_label=volume_labels,
add_interpolator=False, bem=fname_bem,
subjects_dir=subjects_dir)
lut, _ = mne.read_freesurfer_lut()
assert len(volume_labels) == 46
assert volume_labels[0] == 'Unknown'
assert lut['Unknown'] == 0 # it will be excluded during label gen
return src, tuple(volume_labels), lut
def _fail(*args, **kwargs):
raise AssertionError('Test should not download')
@pytest.fixture(scope='function')
def download_is_error(monkeypatch):
"""Prevent downloading by raising an error when it's attempted."""
monkeypatch.setattr(mne.utils.fetching, '_get_http', _fail)
@pytest.fixture()
def brain_gc(request):
"""Ensure that brain can be properly garbage collected."""
keys = ('renderer_interactive', 'renderer', 'renderer_notebook')
assert set(request.fixturenames) & set(keys) != set()
for key in keys:
if key in request.fixturenames:
is_pv = request.getfixturevalue(key)._get_3d_backend() == 'pyvista'
close_func = request.getfixturevalue(key).backend._close_all
break
if not is_pv:
yield
return
import pyvista
if LooseVersion(pyvista.__version__) <= LooseVersion('0.26.1'):
yield
return
from mne.viz import Brain
_assert_no_instances(Brain, 'before')
ignore = set(id(o) for o in gc.get_objects())
yield
close_func()
_assert_no_instances(Brain, 'after')
# We only check VTK for PyVista -- Mayavi/PySurfer is not as strict
objs = gc.get_objects()
bad = list()
for o in objs:
try:
name = o.__class__.__name__
except Exception: # old Python, probably
pass
else:
if name.startswith('vtk') and id(o) not in ignore:
bad.append(name)
del o
del objs, ignore, Brain
assert len(bad) == 0, 'VTK objects linger:\n' + '\n'.join(bad)
def pytest_sessionfinish(session, exitstatus):
"""Handle the end of the session."""
n = session.config.option.durations
if n is None:
return
print('\n')
try:
import pytest_harvest
except ImportError:
print('Module-level timings require pytest-harvest')
return
from py.io import TerminalWriter
# get the number to print
res = pytest_harvest.get_session_synthesis_dct(session)
files = dict()
for key, val in res.items():
parts = Path(key.split(':')[0]).parts
# split mne/tests/test_whatever.py into separate categories since these
# are essentially submodule-level tests. Keeping just [:3] works,
        # except for mne/viz where we want level-4 granularity
parts = parts[:4 if parts[:2] == ('mne', 'viz') else 3]
if not parts[-1].endswith('.py'):
parts = parts + ('',)
file_key = '/'.join(parts)
files[file_key] = files.get(file_key, 0) + val['pytest_duration_s']
files = sorted(list(files.items()), key=lambda x: x[1])[::-1]
# print
files = files[:n]
if len(files):
writer = TerminalWriter()
writer.line() # newline
writer.sep('=', f'slowest {n} test module{_pl(n)}')
names, timings = zip(*files)
timings = [f'{timing:0.2f}s total' for timing in timings]
rjust = max(len(timing) for timing in timings)
timings = [timing.rjust(rjust) for timing in timings]
for name, timing in zip(names, timings):
writer.line(f'{timing.ljust(15)}{name}')
|
# Solve a 4x4 sudoku with a QP solver, via a continuous relaxation of the
# 0/1 assignment problem.
import numpy as np
from qpsolvers import solve_qp
# sudoku setup
sudoku_dim = 4
box_row_n = box_col_n = 2
# Encode the table as a flattened 3-D tensor x[row, col, num]:
# ideally x[i, j, k] = 1 iff cell (i, j) holds number k + 1; the QP relaxes
# each entry to the interval [0, 1].
# convert 3d to 1d
def conversion_3d_1d(row_i, col_i, num_i):
return sudoku_dim ** 2 * row_i + sudoku_dim * col_i + num_i
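# e.g. (row 1, col 2, number index 3) -> 4**2 * 1 + 4 * 2 + 3 = 27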
# coefficient matrix A for Ax = b
mat_A_dim = sudoku_dim ** 3
# construct matrix A as a list
mat_A = []
vec_b = []
# each cell (i, j) contains exactly one number
for i in range(sudoku_dim):
    for j in range(sudoku_dim):
        row = [0] * mat_A_dim  # initialize a row
        for k in range(sudoku_dim):
            row[conversion_3d_1d(i, j, k)] = 1.
        mat_A += [row]
        vec_b += [1.]
# each number k appears exactly once in each row i
for i in range(sudoku_dim):
    for k in range(sudoku_dim):
        row = [0] * mat_A_dim  # initialize a row
        for j in range(sudoku_dim):
            row[conversion_3d_1d(i, j, k)] = 1.
        mat_A += [row]
        vec_b += [1.]
# each number k appears exactly once in each column j
for j in range(sudoku_dim):
    for k in range(sudoku_dim):
        row = [0] * mat_A_dim  # initialize a row
        for i in range(sudoku_dim):
            row[conversion_3d_1d(i, j, k)] = 1.
        mat_A += [row]
        vec_b += [1.]
# box constraints: each number k appears exactly once per 2x2 box
for ll in range(sudoku_dim // box_row_n):
    for m in range(sudoku_dim // box_col_n):
        for k in range(sudoku_dim):
            row = [0] * mat_A_dim  # initialize a row
            for lp in range(box_row_n):
                for mp in range(box_col_n):
                    row[conversion_3d_1d(box_row_n * ll + lp,
                                         box_col_n * m + mp, k)] = 1.
            mat_A += [row]
            vec_b += [1.]
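# sanity check: 4 constraint groups x 16 rows each = 64 equality constraints
assert len(mat_A) == 4 * sudoku_dim ** 2 and len(vec_b) == len(mat_A)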
# a list with given members [row_index, col_index, value]
given_nums = [[0, 0, 1],
[0, 1, 2],
[3, 2, 2],
[3, 3, 4]]
# Set up P and q: qpsolvers minimizes 1/2 x^T P x + q^T x, so setting
# P[ind, ind] = 1 and q[ind] = -2 pushes each given cell's entry toward
# its upper bound of 1.
mat_P = np.zeros((mat_A_dim, mat_A_dim))
vec_q = np.zeros((mat_A_dim, 1))
for a_given_num in given_nums:
    ind = conversion_3d_1d(a_given_num[0], a_given_num[1], a_given_num[2] - 1)
mat_P[ind, ind] = 1.
vec_q[ind, 0] = -2.
# bounds
lb = np.zeros((mat_A_dim, 1))
ub = lb + 1
x = solve_qp(P=mat_P, q=vec_q, A=np.array(mat_A), b=np.array(vec_b), lb=lb, ub=ub)
print(f"QP solution: x = {x}")
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Version 2 of class Optimizer."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.training import distribution_strategy_context
from tensorflow.python.training import optimizer as optimizer_v1
from tensorflow.python.training import slot_creator
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.util import nest
class _OptimizableVariable(object):
"""Interface for abstracting over variables in the optimizers."""
@abc.abstractmethod
def target(self):
"""Returns the optimization target for this variable."""
raise NotImplementedError("Calling an abstract method.")
@abc.abstractmethod
def update_op(self, optimizer, g, *args):
"""Returns the update ops for updating the variable."""
raise NotImplementedError("Calling an abstract method.")
class _RefVariableProcessor(_OptimizableVariable):
"""Processor for Variable."""
def __init__(self, v):
self._v = v
def target(self):
return self._v._ref() # pylint: disable=protected-access
def update_op(self, optimizer, g, *args):
if isinstance(g, ops.Tensor):
update_op = optimizer._apply_dense(g, self._v, *args) # pylint: disable=protected-access
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
else:
assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a "
"tensor nor IndexedSlices.")
if self._v.constraint is not None:
raise RuntimeError(
"Cannot use a constraint function on a sparse variable.")
# pylint: disable=protected-access
return optimizer._apply_sparse_duplicate_indices(g, self._v, *args)
class _DenseReadResourceVariableProcessor(_OptimizableVariable):
"""Processor for dense ResourceVariables."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g, *args):
# pylint: disable=protected-access
update_op = optimizer._resource_apply_dense(g, self._v.op.inputs[0], *args)
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
class _DenseResourceVariableProcessor(_OptimizableVariable):
"""Processor for dense ResourceVariables."""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g, *args):
# pylint: disable=protected-access
if isinstance(g, ops.IndexedSlices):
if self._v.constraint is not None:
raise RuntimeError(
"Cannot use a constraint function on a sparse variable.")
return optimizer._resource_apply_sparse_duplicate_indices(
g.values, self._v, g.indices, *args)
update_op = optimizer._resource_apply_dense(g, self._v, *args)
if self._v.constraint is not None:
with ops.control_dependencies([update_op]):
return self._v.assign(self._v.constraint(self._v))
else:
return update_op
class _TensorProcessor(_OptimizableVariable):
"""Processor for ordinary Tensors.
Even though a Tensor can't really be updated, sometimes it is useful to
compute the gradients with respect to a Tensor using the optimizer. Updating
the Tensor is, of course, unsupported.
"""
def __init__(self, v):
self._v = v
def target(self):
return self._v
def update_op(self, optimizer, g, *args):
raise NotImplementedError("Trying to update a Tensor ", self._v)
def _get_processor(v):
"""The processor of v."""
if context.executing_eagerly():
if isinstance(v, ops.Tensor):
return _TensorProcessor(v)
else:
return _DenseResourceVariableProcessor(v)
if v.op.type == "VarHandleOp":
return _DenseResourceVariableProcessor(v)
if isinstance(v, variables.Variable):
return _RefVariableProcessor(v)
if isinstance(v, ops.Tensor):
return _TensorProcessor(v)
raise NotImplementedError("Trying to optimize unsupported type ", v)
def _var_key_v2(var):
"""Key for representing a primary variable, for looking up slots."""
# pylint: disable=protected-access
if hasattr(var, "_distributed_container"):
distributed_container = var._distributed_container()
assert distributed_container is not None
if context.executing_eagerly():
return distributed_container._unique_id
return distributed_container._shared_name
if context.executing_eagerly():
return var._unique_id
return var.op.name
def _resolve(value, name):
if callable(value):
value = value()
return ops.convert_to_tensor(value, name=name)
def _is_dynamic(value):
"""Returns true if __init__ arg `value` should be re-evaluated each step."""
if callable(value): return True
# Don't need to do anything special in graph mode, since dynamic values
# will propagate correctly automatically.
# TODO(josh11b): Add per-device caching across steps using variables for
# truly static values once we add distributed support.
if context.executing_eagerly() and isinstance(
value, resource_variable_ops.ResourceVariable):
return True
return False
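# For example, a callable hyper parameter such as ``lambda: base_lr * decay``
# is dynamic (re-evaluated every step), while a plain float like 0.1 is
# static and converted to a constant Tensor once per graph.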
class _OptimizerV2State(object):
"""Holds per-graph and per-step optimizer state.
Use _init_with_static_hyper() to create the state for a graph, and then
_copy_with_dynamic_hyper() to convert that to state for a particular step.
The difference between the two is that the former only has hyper
parameter values that are static and the latter also has values that
can change every step (according to _is_dynamic()).
"""
def __init__(self, op_name):
self._op_name = op_name
def _init_with_static_hyper(self, hyper):
"""Initialize a fresh state object from hyper dict."""
# self._hyper contains a dict from name to a dict with the Tensor values.
# This dict starts with a single item with key "None" with the hyper
# parameter value converted to a Tensor. Other items have dtype keys
# with that Tensor cast to that dtype.
with ops.init_scope():
self._hyper = {name: {None: ops.convert_to_tensor(value, name=name)}
for name, (dynamic, value) in sorted(hyper.items())
if not dynamic}
self._slots = {}
self._non_slot_dict = {}
# Extra state to help Optimizers implement Checkpointable. Holds information
# about variables which will be restored as soon as they're created.
self._deferred_dependencies = {} # Non-slot variables
self._deferred_slot_restorations = {} # Slot variables
def _copy_with_dynamic_hyper(self, hyper, distribution, non_slot_devices):
"""Create a new state object for a particular step."""
ret = _OptimizerV2State(self._op_name)
# pylint: disable=protected-access
ret._slots = self._slots
ret._non_slot_dict = self._non_slot_dict
ret._deferred_dependencies = self._deferred_dependencies
ret._deferred_slot_restorations = self._deferred_slot_restorations
ret._hyper = {name: {None: _resolve(value, name)}
for name, (dynamic, value) in sorted(hyper.items())
if dynamic}
ret._hyper.update(self._hyper)
ret._non_slot_devices = non_slot_devices
ret._distribution = distribution
return ret
def _variables(self):
"""Returns a list of all variables held by self."""
optimizer_variables = list(self._non_slot_dict.values())
for variable_dict in self._slots.values():
for slot_for_variable in variable_dict.values():
optimizer_variables.append(slot_for_variable)
# Sort variables by name so that the return is deterministic.
return sorted(optimizer_variables, key=lambda v: v.name)
def _slot_dict(self, slot_name):
"""Returns a dict for caching slots created under the given name.
Args:
slot_name: Name for the slot.
Returns:
A dict that maps primary `Variable` objects to the slot created
for that variable, under the given slot name.
"""
named_slots = self._slots.get(slot_name, None)
if named_slots is None:
named_slots = {}
self._slots[slot_name] = named_slots
return named_slots
def create_slot(self, var, val, slot_name, optional_op_name=None):
"""Find or create a slot for a variable.
Args:
var: A `Variable` object.
val: A `Tensor`. The initial value of the slot.
slot_name: Name for the slot.
optional_op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
var_key = _var_key_v2(var)
if var_key not in named_slots:
new_slot_variable = slot_creator.create_slot(
var, val, optional_op_name or self._op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[var_key] = new_slot_variable
return named_slots[var_key]
def create_slot_with_initializer(self, var, initializer, shape, dtype,
slot_name, optional_op_name=None):
"""Find or create a slot for a variable, using an Initializer.
Args:
var: A `Variable` object.
initializer: An `Initializer`. The initial value of the slot.
shape: Shape of the initial value of the slot.
dtype: Type of the value of the slot.
slot_name: Name for the slot.
optional_op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
var_key = _var_key_v2(var)
if var_key not in named_slots:
new_slot_variable = slot_creator.create_slot_with_initializer(
var, initializer, shape, dtype, optional_op_name or self._op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[var_key] = new_slot_variable
return named_slots[var_key]
def zeros_slot(self, var, slot_name, optional_op_name=None):
"""Find or create a slot initialized with 0.0.
Args:
var: A `Variable` object.
slot_name: Name for the slot.
optional_op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
var_key = _var_key_v2(var)
if var_key not in named_slots:
new_slot_variable = slot_creator.create_zeros_slot(
var, optional_op_name or self._op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[var_key] = new_slot_variable
return named_slots[var_key]
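  # Note on the slot helpers above: a hypothetical momentum-style optimizer
  # would call state.zeros_slot(var, "momentum") from _create_vars() and read
  # the accumulator back with state.get_slot(var, "momentum") when applying
  # gradients.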
def _create_or_restore_slot_variable(
self, slot_variable_position, slot_name, variable,
optional_op_name=None):
"""Restore a slot variable's value, possibly creating it.
Called when a variable which has an associated slot variable is created or
restored. When executing eagerly, we create the slot variable with a
restoring initializer.
No new variables are created when graph building. Instead,
_restore_slot_variable catches these after normal creation and adds restore
ops to the graph. This method is nonetheless important when graph building
for the case when a slot variable has already been created but `variable`
has just been added to a dependency graph (causing us to realize that the
slot variable needs to be restored).
Args:
slot_variable_position: A `checkpointable._CheckpointPosition` object
indicating the slot variable `Checkpointable` object to be restored.
slot_name: The name of this `Optimizer`'s slot to restore into.
variable: The variable object this slot is being created for.
optional_op_name: Name to use when scoping the Variable that
needs to be created for the slot.
"""
slot_variable = self.get_slot(var=variable, name=slot_name)
if (slot_variable is None and context.executing_eagerly() and
slot_variable_position.is_simple_variable()
# Defer slot variable creation if there is an active variable creator
# scope. Generally we'd like to eagerly create/restore slot variables
# when possible, but this may mean that scopes intended to catch
# `variable` also catch its eagerly created slot variable
# unintentionally (specifically make_template would add a dependency on
# a slot variable if not for this case). Deferring is mostly harmless
# (aside from double initialization), and makes variable creator scopes
# behave the same way they do when graph building.
and not ops.get_default_graph()._variable_creator_stack): # pylint: disable=protected-access
initializer = checkpointable.CheckpointInitialValue(
checkpoint_position=slot_variable_position)
slot_variable = self.create_slot(
var=variable,
val=initializer,
slot_name=slot_name,
optional_op_name=optional_op_name)
# Optimizers do not have unconditional dependencies on their slot
# variables (nor do any other objects). They are only saved if the
# variables they were created for are also saved.
if slot_variable is not None:
# If we've either made this slot variable, or if we've pulled out an
# existing slot variable, we should restore it.
slot_variable_position.restore(slot_variable)
else:
# We didn't make the slot variable. Defer restoring until it gets created
# normally. We keep a list rather than the one with the highest restore
# UID in case slot variables have their own dependencies, in which case
# those could differ between restores.
variable_key = _var_key_v2(variable)
self._deferred_slot_restorations.setdefault(
slot_name, {}).setdefault(variable_key, []).append(
slot_variable_position)
def get_slot(self, var, name):
"""Return a slot named `name` created for `var` by the Optimizer.
Some `Optimizer` subclasses use additional variables. For example
`Momentum` and `Adagrad` use variables to accumulate updates. This method
gives access to these `Variable` objects if for some reason you need them.
Use `get_slot_names()` to get the list of slot names created by the
`Optimizer`.
Args:
var: A variable passed to `minimize()` or `apply_gradients()`.
name: A string.
Returns:
The `Variable` for the slot if it was created, `None` otherwise.
"""
named_slots = self._slots.get(name, None)
if not named_slots:
return None
return named_slots.get(_var_key_v2(var), None)
def get_slot_names(self):
"""Return a list of the names of slots created by the `Optimizer`.
See `get_slot()`.
Returns:
A list of strings.
"""
return sorted(self._slots.keys())
def create_non_slot(self, initial_value, name, colocate_with=None):
"""Add an extra variable, not associated with a slot."""
v = self._non_slot_dict.get(name, None)
if v is None:
if colocate_with is None: colocate_with = self._non_slot_devices
with self._distribution.colocate_vars_with(colocate_with):
# TODO(josh11b): Use get_variable() except for the legacy Adam use case.
v = variable_scope.variable(initial_value, name=name, trainable=False)
self._non_slot_dict[name] = v
deferred_dependencies_list = self._deferred_dependencies.pop(name, ())
for checkpoint_position in sorted(
deferred_dependencies_list,
key=lambda restore: restore.checkpoint.restore_uid,
reverse=True):
checkpoint_position.restore(v)
return v
def _restore_slot_variable(self, slot_name, variable, slot_variable):
"""Restore a newly created slot variable's value."""
variable_key = _var_key_v2(variable)
deferred_restorations = self._deferred_slot_restorations.get(
slot_name, {}).pop(variable_key, [])
# Iterate over restores, highest restore UID first to minimize the number
# of assignments.
deferred_restorations.sort(key=lambda position: position.restore_uid,
reverse=True)
for checkpoint_position in deferred_restorations:
checkpoint_position.restore(slot_variable)
def get_non_slot(self, name):
"""Returns the non-slot variable identified by `name`."""
return self._non_slot_dict.get(name, None)
def get_hyper(self, name, dtype=None):
"""Returns the `name` hyper parameter, optionally cast to `dtype`."""
dtype_dict = self._hyper[name]
# Do we have the value cast to dtype already cached? This should always
# succeed when dtype is None.
if dtype in dtype_dict:
return dtype_dict[dtype]
# Not cached, cast to dtype and save the result in the cache.
result = math_ops.cast(dtype_dict[None], dtype)
dtype_dict[dtype] = result
return result
class OptimizerV2(optimizer_v1.Optimizer):
"""Updated base class for optimizers.
This class defines the API to add Ops to train a model. You never use this
class directly, but instead instantiate one of its subclasses such as
`GradientDescentOptimizer`, `AdagradOptimizer`, or `MomentumOptimizer`.
### Usage
```python
# Create an optimizer with the desired parameters.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Add Ops to the graph to minimize a cost by updating a list of variables.
# "cost" is a Tensor, and the list of variables contains tf.Variable
# objects.
opt_op = opt.minimize(cost, var_list=<list of variables>)
```
In the training program you will just have to run the returned Op.
```python
# Execute opt_op to do one step of training:
opt_op.run()
```
### Processing gradients before applying them.
Calling `minimize()` takes care of both computing the gradients and
applying them to the variables. If you want to process the gradients
before applying them you can instead use the optimizer in three steps:
1. Compute the gradients with `compute_gradients()`.
2. Process the gradients as you wish.
3. Apply the processed gradients with `apply_gradients()`.
Example:
```python
# Create an optimizer.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Compute the gradients for a list of variables.
grads_and_vars = opt.compute_gradients(loss, <list of variables>)
# grads_and_vars is a list of tuples (gradient, variable). Do whatever you
# need to the 'gradient' part, for example cap them, etc.
capped_grads_and_vars = [(MyCapper(gv[0]), gv[1]) for gv in grads_and_vars]
# Ask the optimizer to apply the capped gradients.
opt.apply_gradients(capped_grads_and_vars)
```
### Gating Gradients
Both `minimize()` and `compute_gradients()` accept a `gate_gradients`
argument that controls the degree of parallelism during the application of
the gradients.
The possible values are: `GATE_NONE`, `GATE_OP`, and `GATE_GRAPH`.
<b>`GATE_NONE`</b>: Compute and apply gradients in parallel. This provides
the maximum parallelism in execution, at the cost of some non-reproducibility
in the results. For example the two gradients of `matmul` depend on the input
values: With `GATE_NONE` one of the gradients could be applied to one of the
  inputs _before_ the other gradient is computed, resulting in non-reproducible
results.
<b>`GATE_OP`</b>: For each Op, make sure all gradients are computed before
they are used. This prevents race conditions for Ops that generate gradients
for multiple inputs where the gradients depend on the inputs.
<b>`GATE_GRAPH`</b>: Make sure all gradients for all variables are computed
before any one of them is used. This provides the least parallelism but can
be useful if you want to process all gradients before applying any of them.
### Slots
Some optimizer subclasses, such as `MomentumOptimizer` and `AdagradOptimizer`
allocate and manage additional variables associated with the variables to
train. These are called <i>Slots</i>. Slots have names and you can ask the
optimizer for the names of the slots that it uses. Once you have a slot name
you can ask the optimizer for the variable it created to hold the slot value.
  This can be useful if you want to debug a training algorithm, report stats
about the slots, etc.
### Non-slot variables
Some optimizer subclasses, such as `AdamOptimizer` have variables that
are not associated with the variables to train, just the step itself.
### Hyper parameters
These are arguments passed to the optimizer subclass constructor
(the `__init__` method), and then passed to `self._set_hyper()`.
They can be either regular Python values (like 1.0), tensors, or
callables. If they are callable, the callable will be called during
`apply_gradients()` to get the value for the hyper parameter.
### State
Internal methods are passed a `state` argument with the correct
values to use for the slot and non-slot variables, and the hyper
parameters.
"""
# Values for gate_gradients.
GATE_NONE = 0
GATE_OP = 1
GATE_GRAPH = 2
def __init__(self, name):
"""Create a new Optimizer.
This must be called by the constructors of subclasses.
Note that Optimizer instances should not bind to a single graph,
and so shouldn't keep Tensors as member variables. Generally
you should be able to use the _set_hyper()/state.get_hyper()
facility instead.
Args:
name: A non-empty string. The name to use for accumulators created
for the optimizer.
Raises:
ValueError: If name is malformed.
RuntimeError: If _create_slots has been overridden instead of
_create_vars.
"""
# Note: We intentionally don't call parent __init__.
# Optimizer._create_slots was replaced by _create_vars in OptimizerV2.
if (self.__class__._create_slots.__code__ is not # pylint: disable=protected-access
OptimizerV2._create_slots.__code__):
raise RuntimeError("Override _create_vars instead of _create_slots when "
"descending from OptimizerV2 (class %s)" %
self.__class__.__name__)
if not name:
raise ValueError("Must specify the optimizer name")
self._use_locking = False
self._name = name
# Map from graph_key to state for that graph. We use the graph_key
# since it works in both eager and graph mode, and gives the outer
# graph inside functions.
tower_context = distribution_strategy_context.get_tower_context()
if tower_context is None:
# In a cross-tower context for a DistributionStrategy, which means
# only one Optimizer will be created, not one per tower.
self._per_graph_state = {}
else:
# We use get_tower_context().merge_call() to get a single dict
# shared across all model replicas when running with a
# DistributionStrategy.
self._per_graph_state = tower_context.merge_call(lambda _: {})
# Hyper parameters, and whether they should be re-evaluated every step.
self._hyper = {}
def _set_hyper(self, name, value):
self._hyper[name] = (_is_dynamic(value), value)
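  # A hypothetical subclass would register its hyper parameters through
  # _set_hyper() in its own constructor, e.g.:
  #
  #   def __init__(self, learning_rate=0.1, name="MySGD"):
  #     super(MySGD, self).__init__(name)
  #     self._set_hyper("learning_rate", learning_rate)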
def minimize(self, loss, global_step=None, var_list=None,
gate_gradients=GATE_OP, aggregation_method=None,
colocate_gradients_with_ops=False, name=None,
grad_loss=None, stop_gradients=None,
scale_loss_by_num_towers=None):
"""Add operations to minimize `loss` by updating `var_list`.
This method simply combines calls `compute_gradients()` and
`apply_gradients()`. If you want to process the gradient before applying
them call `compute_gradients()` and `apply_gradients()` explicitly instead
of using this function.
Args:
loss: A `Tensor` containing the value to minimize.
global_step: Optional `Variable` to increment by one after the
variables have been updated.
var_list: Optional list or tuple of `Variable` objects to update to
minimize `loss`. Defaults to the list of variables collected in
the graph under the key `GraphKeys.TRAINABLE_VARIABLES`.
gate_gradients: How to gate the computation of gradients. Can be
`GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
aggregation_method: Specifies the method used to combine gradient terms.
Valid values are defined in the class `AggregationMethod`.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
name: Optional name for the returned operation.
grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
stop_gradients: Optional. A Tensor or list of tensors not to differentiate
through.
scale_loss_by_num_towers: Optional boolean. If true, scale the loss
down by the number of towers. By default, auto-detects whether this
is needed.
Returns:
An Operation that updates the variables in `var_list`. If `global_step`
was not `None`, that operation also increments `global_step`.
Raises:
ValueError: If some of the variables are not `Variable` objects.
@compatibility(eager)
When eager execution is enabled, `loss` should be a Python function that
takes elements of `var_list` as arguments and computes the value to be
minimized. If `var_list` is None, `loss` should take no arguments.
Minimization (and gradient computation) is done with respect to the
elements of `var_list` if not None, else with respect to any trainable
variables created during the execution of the `loss` function.
`gate_gradients`, `aggregation_method`, `colocate_gradients_with_ops` and
`grad_loss` are ignored when eager execution is enabled.
@end_compatibility
"""
grads_and_vars = self.compute_gradients(
loss, var_list=var_list, gate_gradients=gate_gradients,
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops,
grad_loss=grad_loss, stop_gradients=stop_gradients,
scale_loss_by_num_towers=scale_loss_by_num_towers)
vars_with_grad = [v for g, v in grads_and_vars if g is not None]
if not vars_with_grad:
raise ValueError(
"No gradients provided for any variable, check your graph for ops"
" that do not support gradients, between variables %s and loss %s." %
([str(v) for _, v in grads_and_vars], loss))
return self.apply_gradients(grads_and_vars, global_step=global_step,
name=name)
def compute_gradients(self, loss, var_list=None,
gate_gradients=GATE_OP,
aggregation_method=None,
colocate_gradients_with_ops=False,
grad_loss=None, stop_gradients=None,
scale_loss_by_num_towers=None):
"""Compute gradients of `loss` for the variables in `var_list`.
This is the first part of `minimize()`. It returns a list
of (gradient, variable) pairs where "gradient" is the gradient
for "variable". Note that "gradient" can be a `Tensor`, an
`IndexedSlices`, or `None` if there is no gradient for the
given variable.
Args:
loss: A Tensor containing the value to minimize or a callable taking
no arguments which returns the value to minimize. When eager execution
is enabled it must be a callable.
var_list: Optional list or tuple of `tf.Variable` to update to minimize
`loss`. Defaults to the list of variables collected in the graph
under the key `GraphKeys.TRAINABLE_VARIABLES`.
gate_gradients: How to gate the computation of gradients. Can be
`GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
aggregation_method: Specifies the method used to combine gradient terms.
Valid values are defined in the class `AggregationMethod`.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
stop_gradients: Optional. A Tensor or list of tensors not to differentiate
through.
scale_loss_by_num_towers: Optional boolean. If true, scale the loss
down by the number of towers. By default, auto-detects whether this
is needed.
Returns:
A list of (gradient, variable) pairs. Variable is always present, but
gradient can be `None`.
Raises:
TypeError: If `var_list` contains anything else than `Variable` objects.
ValueError: If some arguments are invalid.
RuntimeError: If called with eager execution enabled and `loss` is
not callable.
@compatibility(eager)
When eager execution is enabled, `gate_gradients`, `aggregation_method`,
and `colocate_gradients_with_ops` are ignored.
@end_compatibility
"""
# TODO(josh11b): Test that we handle weight decay in a reasonable way.
if callable(loss):
with backprop.GradientTape() as tape:
if var_list is not None:
tape.watch(var_list)
loss_value = loss()
# Scale loss for number of towers (callable-loss case). In this case,
# we have to be careful to call distribute_lib.get_loss_reduction()
# *after* loss() is evaluated, so we know what loss reduction it uses.
if scale_loss_by_num_towers is None:
scale_loss_by_num_towers = (
distribute_lib.get_loss_reduction() ==
variable_scope.VariableAggregation.MEAN)
if scale_loss_by_num_towers:
num_towers = distribution_strategy_context.get_distribution_strategy(
).num_towers
if num_towers > 1:
loss_value *= 1. / num_towers
if var_list is None:
var_list = tape.watched_variables()
grads = tape.gradient(loss_value, var_list, grad_loss)
return list(zip(grads, var_list))
if context.executing_eagerly():
raise RuntimeError(
"`loss` passed to Optimizer.compute_gradients should "
"be a function when eager execution is enabled.")
# Scale loss for number of towers (non-callable-loss case).
if scale_loss_by_num_towers is None:
scale_loss_by_num_towers = (
distribute_lib.get_loss_reduction() ==
variable_scope.VariableAggregation.MEAN)
if scale_loss_by_num_towers:
num_towers = distribution_strategy_context.get_distribution_strategy(
).num_towers
if num_towers > 1:
loss *= 1. / num_towers
if gate_gradients not in [optimizer_v1.Optimizer.GATE_NONE,
optimizer_v1.Optimizer.GATE_OP,
optimizer_v1.Optimizer.GATE_GRAPH]:
raise ValueError("gate_gradients must be one of: Optimizer.GATE_NONE, "
"Optimizer.GATE_OP, Optimizer.GATE_GRAPH. Not %s" %
gate_gradients)
self._assert_valid_dtypes([loss])
if grad_loss is not None:
self._assert_valid_dtypes([grad_loss])
if var_list is None:
var_list = (
variables.trainable_variables() +
ops.get_collection(ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES))
else:
var_list = nest.flatten(var_list)
# pylint: disable=protected-access
var_list += ops.get_collection(ops.GraphKeys._STREAMING_MODEL_PORTS)
# pylint: enable=protected-access
processors = [_get_processor(v) for v in var_list]
if not var_list:
raise ValueError("No variables to optimize.")
var_refs = [p.target() for p in processors]
grads = gradients.gradients(
loss, var_refs, grad_ys=grad_loss,
gate_gradients=(gate_gradients == optimizer_v1.Optimizer.GATE_OP),
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops,
stop_gradients=stop_gradients)
if gate_gradients == optimizer_v1.Optimizer.GATE_GRAPH:
grads = control_flow_ops.tuple(grads)
grads_and_vars = list(zip(grads, var_list))
self._assert_valid_dtypes(
[v for g, v in grads_and_vars
if g is not None and v.dtype != dtypes.resource])
return grads_and_vars
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""Apply gradients to variables.
This is the second part of `minimize()`. It returns an `Operation` that
applies gradients.
Args:
grads_and_vars: List of (gradient, variable) pairs as returned by
`compute_gradients()`.
global_step: Optional `Variable` to increment by one after the
variables have been updated.
name: Optional name for the returned operation. Default to the
name passed to the `Optimizer` constructor.
Returns:
An `Operation` that applies the specified gradients. If `global_step`
was not None, that operation also increments `global_step`.
Raises:
TypeError: If `grads_and_vars` is malformed.
ValueError: If none of the variables have gradients.
"""
# This is a default implementation of apply_gradients() that can be shared
# by most optimizers. It relies on the subclass implementing the following
# methods: _create_vars(), _prepare(), _apply_dense(), and _apply_sparse().
# Filter out variables with gradients of `None`.
grads_and_vars = tuple(grads_and_vars) # Make sure repeat iteration works.
if not grads_and_vars:
raise ValueError("No variables provided.")
filtered = tuple((g, v) for (g, v) in grads_and_vars if g is not None)
if not filtered:
raise ValueError("No gradients provided for any variable: %s." %
([str(v) for _, v in grads_and_vars],))
return distribution_strategy_context.get_tower_context().merge_call(
self._distributed_apply, filtered, global_step=global_step, name=name)
def _get_or_create_state(self, var_list=None):
"""Either looks up or creates `_OptimizerV2State`.
If any variables are available, they should be passed via the `var_list`
argument, and these will be used to determine the graph to create/retrieve
state for. Otherwise the returned state is for the current default graph.
Args:
var_list: A list of variables to extract a graph from.
Returns:
An `_OptimizerV2State` object.
"""
# Determine the graph_key from the current graph.
eager_execution = context.executing_eagerly()
if eager_execution or var_list is None:
graph = ops.get_default_graph()
else:
graph = ops._get_graph_from_inputs(var_list) # pylint: disable=protected-access
assert graph is not None
graph_key = graph._graph_key # pylint: disable=protected-access
# Get the per graph state by looking up the graph_key.
if graph_key in self._per_graph_state:
per_graph_state = self._per_graph_state[graph_key]
else:
per_graph_state = _OptimizerV2State(self._name)
per_graph_state._init_with_static_hyper(self._hyper) # pylint: disable=protected-access
self._per_graph_state[graph_key] = per_graph_state
return per_graph_state
def _distributed_apply(self, distribution, grads_and_vars, global_step, name):
"""`apply_gradients` for use with a `DistributionStrategy`."""
reduced_grads = distribution.batch_reduce(
variable_scope.VariableAggregation.SUM, grads_and_vars)
var_list = [v for _, v in grads_and_vars]
grads_and_vars = zip(reduced_grads, var_list)
unwrapped_var_list = [x for v in var_list for x in distribution.unwrap(v)]
eager_execution = context.executing_eagerly()
if eager_execution:
# Give a clear error in this case instead of "name not supported
# for Eager Tensors" when we compute non_slot_devices.
for v in unwrapped_var_list:
if isinstance(v, ops.Tensor):
raise NotImplementedError("Trying to update a Tensor ", v)
with ops.name_scope(name, self._name) as name:
per_graph_state = self._get_or_create_state(var_list=unwrapped_var_list)
# Include the current value of any dynamic hyper parameters in `state`.
non_slot_devices = distribution.non_slot_devices(var_list)
state = per_graph_state._copy_with_dynamic_hyper( # pylint: disable=protected-access
self._hyper, distribution, non_slot_devices)
# Create any slot and non-slot variables we need in `state`.
with ops.init_scope():
self._create_vars(var_list, state)
with ops.name_scope(name): # Re-enter name_scope created above
# Give the child class a chance to do something before we start
# applying gradients.
self._prepare(state)
def update(v, g):
"""Update variable `v` using gradient `g`."""
assert v is not None
# Convert the grad to Tensor or IndexedSlices if necessary, and
# look up a processor for each variable's type.
try:
g = ops.convert_to_tensor_or_indexed_slices(g)
except TypeError:
raise TypeError(
"Gradient must be convertible to a Tensor"
" or IndexedSlices, or None: %s" % g)
if not isinstance(g, (ops.Tensor, ops.IndexedSlices)):
raise TypeError(
"Gradient must be a Tensor, IndexedSlices, or None: %s" % g)
processor = _get_processor(v)
# We colocate all ops created in _apply_dense or _apply_sparse
# on the same device as the variable.
# TODO(apassos): figure out how to get the variable name here.
scope_name = "" if eager_execution else v.op.name
# device_policy is set because non-mirrored tensors will be read in
# `update_op`.
# TODO(josh11b): Make different state objects for each device to
# avoid needing to set the device_policy.
with ops.name_scope("update_" + scope_name), \
context.context().device_policy(context.DEVICE_PLACEMENT_SILENT):
return processor.update_op(self, g, state)
# Use the processors to update the variables.
update_ops = []
for grad, var in grads_and_vars:
update_ops.extend(distribution.update(var, update, grad, grouped=False))
# Give the child class a chance to do something after applying
# gradients
def finish():
# TODO(josh11b): Make different state objects for each device to
# avoid needing to set the device_policy.
with context.context().device_policy(context.DEVICE_PLACEMENT_SILENT):
return self._finish(state)
update_ops = control_flow_ops.group(update_ops)
with ops.control_dependencies([update_ops]):
finish_updates = distribution.update_non_slot(
non_slot_devices, finish, grouped=False)
# We said grouped=False, which means finish_updates is always a list.
# It will be [None] when finish() returns None.
if finish_updates == [None]:
finish_updates = [update_ops]
# Update `global_step` (if any).
if global_step is None:
apply_updates = distribution.group(finish_updates, name=name)
else:
with ops.control_dependencies(finish_updates):
def update_global_step(global_step, name):
return global_step.assign_add(1, read_value=False, name=name)
apply_updates = distribution.update(global_step, update_global_step,
name)
# Add the training op to the TRAIN_OP graph collection in graph mode.
if not eager_execution:
if isinstance(apply_updates, ops.Tensor):
apply_updates = apply_updates.op
train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
if apply_updates not in train_op:
train_op.append(apply_updates)
return apply_updates
def get_slot(self, var, name):
"""Return a slot named `name` created for `var` by the Optimizer.
Some `Optimizer` subclasses use additional variables. For example
`Momentum` and `Adagrad` use variables to accumulate updates. This method
gives access to these `Variable` objects if for some reason you need them.
Use `get_slot_names()` to get the list of slot names created by the
`Optimizer`.
Args:
var: A variable passed to `minimize()` or `apply_gradients()`.
name: A string.
Returns:
The `Variable` for the slot if it was created, `None` otherwise.
"""
state = self._get_state_for_var(var)
return state.get_slot(var, name) if state is not None else None
def get_slot_names(self):
"""Return a list of the names of slots created by the `Optimizer`.
See `get_slot()`.
Returns:
A list of strings.
"""
state = self._get_per_graph_state()
return state.get_slot_names() if state is not None else []
def variables(self):
"""A list of variables which encode the current state of `Optimizer`.
Includes slot variables and additional global variables created by the
optimizer in the current default graph.
Returns:
A list of variables.
"""
state = self._get_per_graph_state()
return state._variables() if state is not None else [] # pylint: disable=protected-access
# --------------
# Methods to be implemented by subclasses if they want to use the
# inherited implementation of apply_gradients() or compute_gradients().
# --------------
def _create_vars(self, var_list, state):
"""Create all slots needed by the variables and any non-slot variables.
Args:
var_list: A list of `Variable` objects.
state: An object with these methods:
`create_slot(var, val, slot_name, optional_op_name)`,
`create_slot_with_initializer(`
`var, initializer, shape, dtype, slot_name, optional_op_name)`,
`zeros_slot(var, slot_name, optional_op_name)`,
`create_non_slot_variable(initial_value, name, colocate_with)`,
`get_hyper(name)`
"""
# No slots needed by default
pass
def _prepare(self, state):
"""Code to execute before applying gradients.
Note that most uses of _prepare() in Optimizer have been subsumed
    by explicit support for hyper parameters in OptimizerV2.
Args:
state: An object with a `get_hyper(name)` method.
Returns:
Return value will be ignored.
"""
pass
def _apply_dense(self, grad, var, state):
"""Add ops to apply dense gradients to `var`.
Args:
grad: A `Tensor`.
var: A `Variable` object.
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _resource_apply_dense(self, grad, handle, state):
"""Add ops to apply dense gradients to the variable `handle`.
Args:
grad: a `Tensor` representing the gradient.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _resource_apply_sparse_duplicate_indices(
self, grad, handle, indices, state):
"""Add ops to apply sparse gradients to `handle`, with repeated indices.
Optimizers which override this method must deal with repeated indices. See
the docstring of `_apply_sparse_duplicate_indices` for details. By default
the correct behavior, to sum non-unique indices and their associated
gradients, is enforced by first pre-processing `grad` and `indices` and
passing them on to `_resource_apply_sparse`. Optimizers which deal correctly
with duplicate indices may instead override this method to avoid the
overhead of summing.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices may be repeated.
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
An `Operation` which updates the value of the variable.
"""
# pylint: disable=protected-access
summed_grad, unique_indices = optimizer_v1._deduplicate_indexed_slices(
values=grad, indices=indices)
# pylint: enable=protected-access
return self._resource_apply_sparse(
summed_grad, handle, unique_indices, state)
def _resource_apply_sparse(self, grad, handle, indices, state):
"""Add ops to apply sparse gradients to the variable `handle`.
Similar to `_apply_sparse`, the `indices` argument to this method has been
de-duplicated. Optimizers which deal correctly with non-unique indices may
instead override `_resource_apply_sparse_duplicate_indices` to avoid this
overhead.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices are unique.
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _apply_sparse_duplicate_indices(self, grad, var, state):
"""Add ops to apply sparse gradients to `var`, with repeated sparse indices.
Optimizers which override this method must deal with IndexedSlices objects
such as the following:
IndexedSlicesValue(values=[1, 1], indices=[0, 0], dense_shape=[1])
The correct interpretation is:
IndexedSlicesValue(values=[2], indices=[0], dense_shape=[1])
Many optimizers deal incorrectly with repeated indices when updating based
on sparse gradients (e.g. summing squares rather than squaring the sum, or
applying momentum terms multiple times). Adding first is always the correct
behavior, so this is enforced here by reconstructing the IndexedSlices to
have only unique indices, then calling _apply_sparse.
Optimizers which deal correctly with repeated indices may instead override
this method to avoid the overhead of summing indices.
Args:
grad: `IndexedSlices`.
var: A `Variable` object.
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
An `Operation`.
"""
# pylint: disable=protected-access
summed_values, unique_indices = optimizer_v1._deduplicate_indexed_slices(
values=grad.values, indices=grad.indices)
# pylint: enable=protected-access
gradient_no_duplicate_indices = ops.IndexedSlices(
indices=unique_indices,
values=summed_values,
dense_shape=grad.dense_shape)
return self._apply_sparse(gradient_no_duplicate_indices, var, state)
def _apply_sparse(self, grad, var, state):
"""Add ops to apply sparse gradients to `var`.
The IndexedSlices object passed to `grad` in this function is by default
pre-processed in `_apply_sparse_duplicate_indices` to remove duplicate
indices (see its docstring for details). Optimizers which can tolerate or
have correct special cases for duplicate sparse indices may override
`_apply_sparse_duplicate_indices` instead of this function, avoiding that
overhead.
Args:
grad: `IndexedSlices`, with no repeated indices.
var: A `Variable` object.
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _finish(self, state):
"""Do what is needed to finish the update.
This is called inside a scope colocated with any non-slot variables.
Args:
state: An object with `get_slot(var, name)`, `get_non_slot(self, name)`,
and `get_hyper(name)` methods.
Returns:
The operation to apply updates, or None if no updates.
"""
return None
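# For orientation, a dense-only subclass might look like this sketch
# (modeled on TF's GradientDescentOptimizer from the optimizer_v2 package;
# assumes `training_ops` is imported from tensorflow.python.training and
# that _set_hyper registers the 'learning_rate' hyper parameter):
#
#   class MyGradientDescent(OptimizerV2):
#     def __init__(self, learning_rate, use_locking=False, name='MyGD'):
#       super(MyGradientDescent, self).__init__(use_locking, name)
#       self._set_hyper('learning_rate', learning_rate)
#
#     def _apply_dense(self, grad, var, state):
#       return training_ops.apply_gradient_descent(
#           var, state.get_hyper('learning_rate', var.dtype.base_dtype),
#           grad, use_locking=self._use_locking).op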
# --------------
# Utility methods for subclasses.
# --------------
def _get_per_graph_state(self):
# pylint: disable=protected-access
return self._per_graph_state.get(ops.get_default_graph()._graph_key, None)
def _get_state_for_var(self, var):
# pylint: disable=protected-access
return self._per_graph_state.get(var._graph_key, None)
# --------------
# Overridden methods from Checkpointable.
# --------------
def _track_checkpointable(self, *args, **kwargs):
"""Optimizers may not track dependencies. Raises an error."""
raise NotImplementedError(
"Optimizers may not have dependencies. File a feature request if this "
"limitation bothers you.")
@property
def _checkpoint_dependencies(self):
"""From Checkpointable. Gather graph-specific non-slot variables to save."""
current_graph_non_slot_variables = []
state = self._get_per_graph_state()
if state is not None:
for name, variable_object in sorted(
state._non_slot_dict.items(), # pylint: disable=protected-access
# Avoid comparing variables
key=lambda item: item[0]):
current_graph_non_slot_variables.append(
checkpointable.CheckpointableReference(
name=name, ref=variable_object))
# Note: ignores super(); Optimizers may not have any dependencies outside of
# state objects.
return current_graph_non_slot_variables
def _lookup_dependency(self, name):
"""From Checkpointable. Find a non-slot variable in the current graph."""
state = self._get_per_graph_state()
if state is None:
return None
else:
return state.get_non_slot(name)
@property
def _deferred_dependencies(self):
"""Lets Checkpointable know where non-slot variables are created.
If necessary, creates a new state object for the current default graph.
Checkpointable will then add entries to that state's deferred dependency
dictionary. The state object will check that dictionary when creating
non-slot variables, restoring their value if an entry is found.
Returns:
A dictionary which holds deferred dependencies for the current default
graph.
"""
state = self._get_or_create_state()
return state._deferred_dependencies # pylint: disable=protected-access
def _create_or_restore_slot_variable(
self, slot_variable_position, slot_name, variable):
"""Checkpointable: Restore a slot variable's value, possibly creating it.
Called when a variable which has an associated slot variable is created or
restored.
Args:
slot_variable_position: A `checkpointable._CheckpointPosition` object
indicating the slot variable `Checkpointable` object to be restored.
slot_name: The name of this `Optimizer`'s slot to restore into.
variable: The variable object this slot is being created for.
"""
state = self._get_or_create_state(var_list=[variable])
state._create_or_restore_slot_variable( # pylint: disable=protected-access
slot_variable_position=slot_variable_position,
slot_name=slot_name,
variable=variable,
optional_op_name=self._name)
# --------------
# Unsupported parent methods
# --------------
def _slot_dict(self, slot_name):
raise NotImplementedError(
"_slot_dict() method unsupported in OptimizerV2")
def _get_or_make_slot(self, var, val, slot_name, op_name):
raise NotImplementedError(
"_get_or_make_slot() method unsupported in OptimizerV2")
def _get_or_make_slot_with_initializer(self, var, initializer, shape, dtype,
slot_name, op_name):
raise NotImplementedError(
"_get_or_make_slot_with_initializer() method unsupported in "
"OptimizerV2")
def _create_non_slot_variable(self, initial_value, name, colocate_with):
raise NotImplementedError(
"_create_non_slot_variable() method unsupported in OptimizerV2")
def _get_non_slot_variable(self, name, graph=None):
raise NotImplementedError(
"_get_non_slot_variable() method unsupported in OptimizerV2")
def _non_slot_variables(self):
raise NotImplementedError(
"_non_slot_variables() method unsupported in OptimizerV2")
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libmodbus(AutotoolsPackage):
"""libmodbus is a free software library to send/receive data
according to the Modbus protocol.This library is written in C
and supports RTU (serial) and TCP (Ethernet) communications."""
homepage = "https://libmodbus.org/"
url = "https://libmodbus.org/releases/libmodbus-3.0.8.tar.gz"
version('3.1.6', sha256='d7d9fa94a16edb094e5fdf5d87ae17a0dc3f3e3d687fead81835d9572cf87c16')
version('3.1.5', sha256='f7a9538f23a8786b1ee62a4b75879b5c0e194e728350de1b741ce7d595970f06')
version('3.1.4', sha256='c8c862b0e9a7ba699a49bc98f62bdffdfafd53a5716c0e162696b4bf108d3637')
version('3.1.3', sha256='9e02d79d715522e03b61c313c7278fcf80860816718587819318b8ad9c3fd0ce')
version('3.1.2', sha256='661e14f9dc904f3f1b034464ddaa5fd4b8472f8f5d1ea10a1148af85591b7ee9')
version('3.1.1', sha256='76d93aff749d6029f81dcf1fb3fd6abe10c9b48d376f3a03a4f41c5197c95c99')
version('3.0.8', sha256='022f0691d920b8aee3ee49d7af0f69b7ef80fc3c849a8e0281d5bc27db7a24ea')
version('3.0.7', sha256='6c26850cd5dedcf5dad40977ac7f2ee990a3667f6959a1e05e22959bdf537961')
version('3.0.6', sha256='046d63f10f755e2160dc56ef681e5f5ad3862a57c1955fd82e0ce036b69471b6')
version('3.0.5', sha256='19aad5d55fa315602d6e836a858a3802f1608f9d824afba05fa12a58a1b1e656')
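# Usage sketch (standard Spack CLI, shown here for convenience):
#
#   $ spack install libmodbus@3.1.6
#   $ spack load libmodbus
#
# No phase overrides are needed: AutotoolsPackage supplies the default
# configure/build/install stages for this package.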
|
require("dotenv").config();
//const io = require('@pm2/io')
const { createBluetooth } = require("./src");
var { Timer } = require("easytimer.js");
var timerInstance = new Timer();
const axios = require('axios');
const { POLAR_MAC_ADRESSE, USERS_ENDPOINT, PULSESENSORS_ENDPOINT, ID } = process.env;
/*const state = io.metric({
name: 'Scanning state',
})
const polarBPM = io.metric({
name: 'Polar BPM',
})
const doneBPM = io.metric({
name: 'User BPM after scan',
})
const lanternSelected = io.metric({
name: 'The current selected lantern',
})
const timer = io.metric({
name: 'The timer when the BPM is stable',
})*/
let _USERBPM;
let _USER;
let _HEARTRATE = null;
async function init() {
console.clear();
const { bluetooth, destroy } = createBluetooth();
const adapter = await bluetooth.defaultAdapter();
if (!(await adapter.isDiscovering()))
await adapter.startDiscovery();
console.log("Discovering device...");
const device = await adapter.waitDevice("A0:9E:1A:9F:0E:B4");
console.log("got device", await device.getAddress(), await device.getName());
await device.connect();
console.log("Connected!");
const gattServer = await device.gatt();
var services = await gattServer.services();
const service = await gattServer.getPrimaryService(
"0000180d-0000-1000-8000-00805f9b34fb"
);
const heartrate = await service.getCharacteristic(
"00002a37-0000-1000-8000-00805f9b34fb"
);
_HEARTRATE = heartrate;
// Use the configured endpoints, consistent with the rest of this file
// (the hard-coded http://192.168.1.15:8080/... URLs duplicated them).
_USER = await axios.get(USERS_ENDPOINT + 'randomUser').catch(async function (error) {
console.log(error.response.data.message)
await axios.put(PULSESENSORS_ENDPOINT + ID, { 'state': 4 })
// state.set('No lantern!'); // pm2 metric (commented out above)
process.exit(1);
});
// console.log(_USER.data._id);
//lanternSelected.set(_USER);
await _HEARTRATE.startNotifications();
_HEARTRATE.on("valuechanged", async (buffer) => {
let json = JSON.stringify(buffer);
let bpm = Math.max.apply(null, JSON.parse(json).data);
polarBPM.set(bpm);
})
await axios.put(PULSESENSORS_ENDPOINT + ID, { 'state': 0 })
//state.set('Loading');
//const currentBPM = await getCurrentBPM();
//console.log(currentBpm);
//polarBPM.set(currentBPM);
const readyToScan = await getScanState();
if (readyToScan) {
//clearInterval(twirlTimer)
process.stdout.write("\r\x1b[K")
process.stdout.write('Ready!')
await axios.put(PULSESENSORS_ENDPOINT + ID, { 'state': 1 })
// state.set('Ready');
//set a presence detection to start notification
_USERBPM = await scan();
// userBPM.set(_USERBPM);
//console.log('_USERBPM', _USERBPM);
await axios.put(USERS_ENDPOINT + _USER.data._id, { 'pulse': _USERBPM })
await axios.put(PULSESENSORS_ENDPOINT + ID, { 'state': 3 })
// state.set('done');
}
}
/**
* Read the heart rate once, at its current state
* @return {Promise<number>} resolves with the current BPM value
*/
async function getCurrentBPM() {
return new Promise((resolve) => {
_HEARTRATE.on("valuechanged", async (buffer) => {
let json = JSON.stringify(buffer);
let bpm = Math.max.apply(null, JSON.parse(json).data);
resolve(bpm);
})
})
}
/**
* Wait until the sensor reports a BPM of 0 (i.e. ready for a fresh scan)
* @return {Promise<boolean>} resolves true once the BPM reads 0
*/
async function getScanState() {
return new Promise((resolve) => {
_HEARTRATE.on("valuechanged", async (buffer) => {
let json = JSON.stringify(buffer);
let bpm = Math.max.apply(null, JSON.parse(json).data);
if (bpm == 0) {
resolve(true)
}
})
})
}
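/**
 * Note: getCurrentBPM() and getScanState() above attach a new
 * "valuechanged" listener on every call and never detach it, so repeated
 * calls leak listeners. A one-shot variant could look like this sketch
 * (assumes the characteristic behaves as a Node EventEmitter, which
 * node-ble's GattCharacteristic does):
 *
 *   function onceBPM(characteristic, predicate) {
 *       return new Promise((resolve) => {
 *           const handler = (buffer) => {
 *               const bpm = Math.max.apply(null, Array.from(buffer));
 *               if (predicate(bpm)) {
 *                   characteristic.removeListener("valuechanged", handler);
 *                   resolve(bpm);
 *               }
 *           };
 *           characteristic.on("valuechanged", handler);
 *       });
 *   }
 */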
/**
* Start the BPM scan. Once a non-zero value arrives, a 15-second countdown
* starts and the last value read before it expires is returned.
* @return {Promise<number>} the last BPM read when the countdown ends
*/
async function scan() {
return new Promise((resolve) => {
let scanBPM;
timerInstance.addEventListener("secondsUpdated", function (e) {
// timer.set(timerInstance.getTimeValues().toString())
//console.log(timerInstance.getTimeValues().toString());
});
timerInstance.addEventListener("targetAchieved", async function (e) {
resolve(scanBPM);
});
_HEARTRATE.on("valuechanged", async (buffer) => {
let json = JSON.stringify(buffer);
let bpm = Math.max.apply(null, JSON.parse(json).data);
if (bpm != 0) {
scanBPM = bpm;
await axios.put(PULSESENSORS_ENDPOINT + ID, { 'state': 2 })
// state.set('Scanning'); // pm2 metric (commented out above)
timerInstance.start({ countdown: true, startValues: { seconds: 15 } });
}
})
});
}
init().then(console.log).catch(console.error);
|
// - Import react components
import React, { Component } from 'react'
import _ from 'lodash'
import { connect } from 'react-redux'
import { ScrollView, View, Text, Image, KeyboardAvoidingView } from 'react-native'
import { Card, CardSection, Button, TextField } from './../../layouts'
// - Import component styles
import styles from './styles'
// - Import Actions
import * as authorizeActions from './../../actions/authorizeActions'
/**
* Create component class
*
* @export
* @class Signup
* @extends {Component}
*/
export class Signup extends Component {
static navigationOptions = ({ navigation }) => {
const { params = {} } = navigation.state
const { navigate } = navigation
return {
title: 'Signup',
headerTintColor: "#616161",
headerStyle: styles.header
}
}
constructor(props) {
super(props)
// Default state
this.state = {
fullNameInput: '',
fullNameInputError: '',
emailInput: '',
emailInputError: '',
passwordInput: '',
passwordInputError: '',
confirmPasswordInput: '',
confirmPasswordInputError: '',
loading: false
}
}
/**
* On full name input change
*
* @param {any} text
* @memberof Signup
*/
onfullNameChange(text) {
this.setState({
fullNameInput: text,
fullNameInputError: ''
})
}
/**
* On email input change
*
* @param {any} text
* @memberof Signup
*/
onEmailChange(text) {
this.setState({
emailInput: text,
emailInputError: ''
})
}
/**
* On password input change event
*
* @param {any} text
* @memberof Signup
*/
onPasswordChange(text) {
this.setState({
passwordInput: text,
passwordInputError: ''
})
}
/**
* On confirm password input change event
*
* @param {any} text
* @memberof Signup
*/
onConfirmPasswordChange(text) {
this.setState({
confirmPasswordInput: text,
confirmPasswordInputError: ''
})
}
/**
* On signup button pressed
*
* @memberof Signup
*/
onSignupButton() {
const { register } = this.props
const { fullNameInput, emailInput, passwordInput, confirmPasswordInput } = this.state;
if (_.trim(fullNameInput) === '') {
this.setState({
fullNameInputError: 'Field is required.'
})
return
}
if (_.trim(emailInput) === '') {
this.setState({
emailInputError: 'Field is required.'
})
return
}
if (_.trim(passwordInput) === '') {
this.setState({
passwordInputError: 'Field is required.'
})
return
}
if (_.trim(confirmPasswordInput) === '') {
this.setState({
confirmPasswordInputError: 'Field is required.'
})
return
}
if (confirmPasswordInput !== passwordInput) {
this.setState({
confirmPasswordInputError: 'Should be equal to password.',
passwordInputError: 'Should be equal to confirm password.'
})
return
}
register({
fullName: fullNameInput,
email: emailInput,
password: passwordInput
})
}
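// Note: the checks above only guard against empty fields. A minimal email
// format check could be added before calling register(), e.g. (sketch):
//
//   if (!/^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(_.trim(emailInput))) {
//     this.setState({ emailInputError: 'Please enter a valid email.' })
//     return
//   }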
renderButton() {
if (this.props.loading) {
return (
<Button textStyle={styles.buttonText} buttonStyle={styles.button}>
Loading ...
</Button>
)
}
return (
<CardSection style={styles.buttons}>
<Button onPress={this.onSignupButton.bind(this)}>
Signup
</Button>
</CardSection>
)
}
render() {
const {
fullNameInput,
fullNameInputError,
emailInput,
emailInputError,
passwordInput,
passwordInputError,
confirmPasswordInput,
confirmPasswordInputError } = this.state
return (
<KeyboardAvoidingView
behavior="padding"
style={{ flex: 1 }}
>
<ScrollView>
<Card>
<CardSection style={styles.logo}>
<Image
style={styles.logoImage}
source={{ uri: 'https://raw.githubusercontent.com/Qolzam/react-social-network/master/docs/app/logo.png' }}
/>
</CardSection>
<Text style={{ alignSelf: 'center', fontSize: 30, color: '#eeeeee' }}>Green</Text>
<View style={{ height: 20 }} />
<View style={{ padding: 20 }}>
<TextField
label="Full Name"
onChangeText={this.onfullNameChange.bind(this)}
value={fullNameInput}
error={fullNameInputError !== ''}
helperText={fullNameInputError}
/>
<View style={{ height: 20 }} />
<TextField
label="Email"
keyboardType='email-address'
onChangeText={this.onEmailChange.bind(this)}
value={emailInput}
error={emailInputError !== ''}
helperText={emailInputError}
/>
<View style={{ height: 20 }} />
<TextField
secureTextEntry
label="Password"
onChangeText={this.onPasswordChange.bind(this)}
value={passwordInput}
error={passwordInputError !== ''}
helperText={passwordInputError}
/>
<View style={{ height: 20 }} />
<TextField
secureTextEntry
label="Confirm Password"
onChangeText={this.onConfirmPasswordChange.bind(this)}
value={confirmPasswordInput}
error={confirmPasswordInputError !== ''}
helperText={confirmPasswordInputError}
/>
</View>
<View style={{ height: 20 }} />
<Text style={styles.errorTextStyle}>
{this.props.error}
</Text>
{this.renderButton()}
</Card>
</ScrollView>
</KeyboardAvoidingView>
)
}
}
/**
* Map dispatch to props
* @param {func} dispatch is the function used to dispatch actions to reducers
* @param {object} ownProps is the props belonging to the component
* @return {object} props of component
*/
const mapDispatchToProps = (dispatch, ownProps) => {
return {
register: (data) => dispatch(authorizeActions.dbSignup(data))
}
}
/**
* Map state to props
* @param {object} param0
*/
const mapStateToProps = ({ global }) => {
const { error, loading } = global
return { error, loading }
}
/**
* Connect component to redux store
*/
export default connect(mapStateToProps, mapDispatchToProps)(Signup)
|
function ShowPerformPage(res) {
const _movies = res.movies;
const _theatres = res.theatres;
var movies_opts = "";
var theatres_opts = "";
for (var key in _movies)
movies_opts += `<option value="${_movies[key].uuid}">${_movies[key].title}</option>`;
for (var key in _theatres)
theatres_opts += `<option value="${_theatres[key].uuid}">${_theatres[key].name}</option>`;
App.innerHTML = `
<div class="perform wrap">
<div class="perform wrapb">
<h3>New Performance</h3>
<div class="dff">
<label for="theatres">Theatre </label>
<select id="theatres">
${theatres_opts}
</select>
</div>
<div class="dff">
<label for="movies">Movie </label>
<select id="movies">
${movies_opts}
</select>
</div>
<p>Start Date and Time</p>
<input type="date" id="p_date">
<input type="time" id="p_time">
<input type="number" id="duration" placeholder="Duration">
<input type="number" id="price" placeholder="Price">
<i class="fa fa-plus-circle" onclick="AddPerformance()"></i>
</div>
<div class="perform wrapb">
<h3>Add New Theatre</h3>
<input type="text" id="theatre" placeholder="Threatre Name">
<input type="number" id="noseats" placeholder="No of Seats">
<i class="fa fa-plus-circle" onclick="AddTheatre()"></i>
</div>
</div>
`;
Topbar.SideBar(false);
}
function Perfomance() {
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'getforshow');
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
ShowPerformPage(response);
});
}
function AddPerformance() {
const _theatre = document.getElementById("theatres").value;
const _movie = document.getElementById("movies").value;
const date = document.getElementById("p_date").value;
const time = document.getElementById("p_time").value;
const duration = document.getElementById("duration").value;
const price = document.getElementById("price").value;
const timestamp = Date.parse(date + " " + time);
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'regnewshow');
data.append('uuid', user.get().uuid);
data.append('theatre_id', _theatre);
data.append('movie_id', _movie);
data.append('starttime', timestamp);
data.append('duration', duration);
data.append('price', price);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
Perfomance();
});
}
function AddTheatre() {
const theatre = document.getElementById("theatre").value;
const noseats = Number(document.getElementById("noseats").value);
if (theatre.length < 1 || Number.isNaN(noseats)) // (noseats == NaN) was always false
return;
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'addnewtheatre');
data.append('uuid', user.get().uuid);
data.append('name', theatre);
data.append('noseats', noseats);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
Perfomance();
});
document.getElementById("theatre").value = "";
document.getElementById("noseats").value = 0;
}
function gettheatres(_movie, movie_name) {
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'gettheatres');
data.append('movie_id', _movie);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
ShowPopUpSelectTheatre(response, movie_name);
});
}
function ShowPopUpSelectTheatre(theatres, movie_name) {
if (theatres.length == 0) {
PopUP.show(
`
<div class="wrap">
<h6><i class="fa fa-times-circle" aria-hidden="true" onclick="PopUP.hide()"></i></h6>
<h3>Movie : ${movie_name}</h3>
<h3>No Shows Going on</h3>
<div class="book" onclick="PopUP.hide()">OK</div>
</div>
`
);
return;
}
var theatre_opts = "";
for (var key in theatres) {
var starttime = new Date(Number(theatres[key].starttime));
starttime = starttime.toLocaleDateString() + " " + starttime.toLocaleTimeString();
theatre_opts += `<option value='${theatres[key].perfomance_id}'>${theatres[key].name} (${starttime})</option>`;
movie_id = theatres[key].movie_id;
}
PopUP.show(
`
<div class="wrap">
<h6><i class="fa fa-times-circle" aria-hidden="true" onclick="PopUP.hide()"></i></h6>
<h3>Movie : ${movie_name}</h3>
<div class="ele">
<label for="popup_theatres">Choose Theatre</label>
<select id="popup_theatres">
${theatre_opts}
</select>
</div>
<div class="book" onclick="BookShow()">Book</div>
</div>
`
);
}
function BookShow() {
const selected = document.getElementById("popup_theatres").value;
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'getshowdetails');
data.append('perfomance_id', selected);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
ShowPage(response);
PopUP.hide();
});
}
var lastRoomInformation;
function BookTicketsWarn() {
var _count = 0;
for (var i in SEATARRAY) {
if (SEATARRAY[i] == 2)
_count++;
}
if (_count == 0)
return;
PopUP.show(
`
<div class="wrap">
<h6><i class="fa fa-times-circle" aria-hidden="true" onclick="PopUP.hide()"></i></h6>
<h3>Confirm</h3>
<h3>You have selected ${_count} ticket${_count==1?"":"s"}</h3>
<div class="book" onclick="BookTickets()">Pay ${_count*lastroominforamtion.perfomance.price}</div>
</div>
`
);
}
function BookTickets() {
PopUP.hide();
var _seatdata = "";
for (var i in SEATARRAY) {
if (SEATARRAY[i] == 2) {
if (_seatdata == "") {
_seatdata = i.split("_")[1];
} else {
_seatdata += "-" + i.split("_")[1];
}
}
}
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'bookshow');
data.append('perfomance_id', lastRoomInformation.perfomance.uuid);
data.append('seats', _seatdata);
data.append('user_id', user.get().uuid);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'getshowdetails');
data.append('perfomance_id', lastRoomInformation.perfomance.uuid);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
ShowPage(response);
});
});
}
function ShowPage(data) {
lastRoomInformation = data;
const _theatre = data.theatre;
const _perfomance = data.perfomance;
const _movie = data.movie;
const _seats = data.seats;
var _date = new Date(Number(_perfomance.starttime));
var totalseats = _theatre.noseats;
var seatshtml = "";
var _available = totalseats;
var _bookedseats = 0;
var _yourbookedseats = 0;
for (var i = 0; i < totalseats; i++) {
SEATARRAY["seat_" + (i + 1)] = -1;
}
for (var i in _seats) {
console.log(i, _seats[i]);
var _ar = _seats[i].seats.split("-");
for (var j in _ar) {
_available--;
_bookedseats++;
var _index = _ar[j];
SEATARRAY["seat_" + _index] = 0;
if (user.get().uuid == _seats[i].user_id) {
_yourbookedseats++;
SEATARRAY["seat_" + _index] = 1;
}
}
}
for (var i = 0; i < totalseats; i++) {
var _color = "#fff";
var _index = "seat_" + (i + 1);
if (SEATARRAY[_index] == 0)
_color = "#f00";
if (SEATARRAY[_index] == 1)
_color = "#0f0";
seatshtml += `<div class='seat' id="seat_${i+1}" style="border-color:${_color}"${SEATARRAY[_index]==-1?`onclick="MarkORUNMark(${i+1})"`:""}>${i+1}</div>`;
}
//return html;
App.innerHTML = `
<div class="home">
<div class="smovie" style="background-image: url(${_movie.imgurl});" onclick="getmoviedata(${_movie.uuid})">
<div class="dum">.</div>
<div class="title">${_movie.title}</div>
</div>
<div class="smovie">
<div class="title">${_theatre.name}</div>
<div class="title s">Show on ${_date.toLocaleDateString()} , ${_date.toLocaleTimeString()}</div>
<div class="title s"><i class="fa fa-ticket" aria-hidden="true"></i> ${_perfomance.price}rs</div>
</div>
<div class="seats">
<div class="seat" style="border-color:#f00">${_bookedseats}</div> Total Bookings<br>
<div class="seat" style="border-color:#0f0">${_yourbookedseats}</div> Yours Booking<br>
<div class="seat" style="border-color:#fff">${_available}</div> Remaing slots
</div>
<div class="seats">
${seatshtml}
</div>
<div class="book" onclick="BookTicketsWarn()">Book Tickets <i class="fa fa-ticket" aria-hidden="true"></i></div>
</div>
`;
}
var SEATARRAY = {}; // keyed by "seat_<n>", so a plain object fits better than an array
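// Seat state encoding used by ShowPage() above and the Mark*/UNMark helpers below:
//   -1 = free (white border)       0 = booked by someone else (red)
//    1 = booked by you (green)     2 = currently selected (blue)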
function MarkORUNMark(index) {
var val = SEATARRAY["seat_" + index];
if (val == -1)
MarkSeatBLUE(index);
if (val == 2)
UNMarkSeat(index);
}
function MarkSeatRED(index) {
document.getElementById("seat_" + index).style.borderColor = "#f00";
SEATARRAY["seat_" + index] = 0;
}
function MarkSeatBLUE(index) {
if (SEATARRAY["seat_" + index] != -1)
return;
document.getElementById("seat_" + index).style.borderColor = "#00f";
SEATARRAY["seat_" + index] = 2;
}
function MarkSeatGREEN(index) {
document.getElementById("seat_" + index).style.borderColor = "#0f0";
SEATARRAY["seat_" + index] = 1;
}
function UNMarkSeat(index) {
if (SEATARRAY["seat_" + index] != 2)
return;
document.getElementById("seat_" + index).style.borderColor = "#fff";
SEATARRAY["seat_" + index] = -1;
}
function GetAllBookings(){
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'getallbookings');
data.append('uuid', user.get().uuid);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
BookPage(response);
Topbar.SideBar(false);
});
}
function BookPage(data){
var elehtml="";
for(var i in data){
var _data=data[i];
console.log(_data);
var _tickets=0;
var seats=_data.bookings.seats.split("-");
// Use a fresh loop variable (the outer loop already uses `i`), and note
// that `x != NaN` is always true, so the old check never filtered anything.
for(var j in seats){
if(!Number.isNaN(Number(seats[j])))
_tickets++;
}
var _date=new Date(Number(_data.perfomance.starttime));
_date=_date.toLocaleDateString()+" , "+_date.toLocaleTimeString();
elehtml+=`
<div class="bmovie">
<div class="ele">
<img src=${_data.movie.imgurl} onclick="getmoviedata(${_data.movie.uuid})">
</div>
<div class="ele">
<div class="title">${_data.movie.title}</div>
<div class="title">${_data.theatre.name}</div>
<div class="title">${_date}</div>
<div class="title">${_tickets} tickets booked</div>
</div>
</div>
`;
}
App.innerHTML=`
<div class="bookp">
<div class="tit">Your Bookings</div>
${elehtml}
</div>
`;
}
function DeleteUser(uuid){
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'deleteuser');
data.append('uuid', user.get().uuid);
data.append('user_id',uuid);
Post(url, data, (response) => {
console.log(response);
response = JSON.parse(response);
console.log(response);
});
}
function DeleteMovie(uuid,callback){
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'deletemovie');
data.append('uuid', user.get().uuid);
data.append('movie_id',uuid);
Post(url, data, (response) => {
response = JSON.parse(response);
callback(response);
});
}
function DeletePerson(uuid,callback){
var url = baseurl + "php/links.php";
var data = new FormData();
data.append('status', 'deleteperson');
data.append('uuid', user.get().uuid);
data.append('person_id',uuid);
Post(url, data, (response) => {
response = JSON.parse(response);
callback(response);
});
}
|
'use strict';
const Customers = (sequelize, DataTypes) => {
return sequelize.define('Customers', {
name: {
type: DataTypes.STRING,
allowNull: false, // `required` is not a Sequelize column option; allowNull is the equivalent
},
})
}
module.exports = Customers;
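// Usage sketch (assumes a configured Sequelize instance; the connection
// string and file path are illustrative):
//
//   const Sequelize = require('sequelize');
//   const sequelize = new Sequelize('sqlite::memory:');
//   const Customers = require('./customers')(sequelize, Sequelize.DataTypes);
//   sequelize.sync()
//       .then(() => Customers.create({ name: 'Ada' }))
//       .then((row) => console.log(row.toJSON()));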
|
# Copyright 2012-2017 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from collections import defaultdict, OrderedDict
from dataclasses import dataclass, field
from functools import lru_cache
import copy
import hashlib
import itertools, pathlib
import os
import pickle
import re
import textwrap
import typing as T
from . import environment
from . import dependencies
from . import mlog
from . import programs
from .mesonlib import (
HoldableObject, SecondLevelHolder,
File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
extract_as_list, typeslistify, stringlistify, classify_unity_sources,
get_filenames_templates_dict, substitute_values, has_path_sep,
OptionKey, PerMachineDefaultable,
MesonBugException,
)
from .compilers import (
Compiler, is_object, clink_langs, sort_clink, lang_suffixes,
is_known_suffix, detect_static_linker, detect_compiler_for
)
from .linkers import StaticLinker
from .interpreterbase import FeatureNew, FeatureDeprecated
if T.TYPE_CHECKING:
from typing_extensions import Literal
from ._typing import ImmutableListProtocol, ImmutableSetProtocol
from .backend.backends import Backend, ExecutableSerialisation
from .interpreter.interpreter import Test, SourceOutputs, Interpreter
from .interpreterbase import SubProject
from .mesonlib import FileMode, FileOrString
from .modules import ModuleState
from .mparser import BaseNode
GeneratedTypes = T.Union['CustomTarget', 'CustomTargetIndex', 'GeneratedList']
pch_kwargs = {'c_pch', 'cpp_pch'}
lang_arg_kwargs = {
'c_args',
'cpp_args',
'cuda_args',
'd_args',
'd_import_dirs',
'd_unittest',
'd_module_versions',
'd_debug',
'fortran_args',
'java_args',
'objc_args',
'objcpp_args',
'rust_args',
'vala_args',
'cs_args',
'cython_args',
}
vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'}
rust_kwargs = {'rust_crate_type'}
cs_kwargs = {'resources', 'cs_args'}
buildtarget_kwargs = {
'build_by_default',
'build_rpath',
'dependencies',
'extra_files',
'gui_app',
'link_with',
'link_whole',
'link_args',
'link_depends',
'implicit_include_directories',
'include_directories',
'install',
'install_rpath',
'install_dir',
'install_mode',
'install_tag',
'name_prefix',
'name_suffix',
'native',
'objects',
'override_options',
'sources',
'gnu_symbol_visibility',
'link_language',
'win_subsystem',
}
known_build_target_kwargs = (
buildtarget_kwargs |
lang_arg_kwargs |
pch_kwargs |
vala_kwargs |
rust_kwargs |
cs_kwargs)
known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'}
known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'}
known_stlib_kwargs = known_build_target_kwargs | {'pic', 'prelink'}
known_jar_kwargs = known_exe_kwargs | {'main_class'}
@lru_cache(maxsize=None)
def get_target_macos_dylib_install_name(ld) -> str:
name = ['@rpath/', ld.prefix, ld.name]
if ld.soversion is not None:
name.append('.' + ld.soversion)
name.append('.dylib')
return ''.join(name)
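# Example: with prefix 'lib', name 'foo' and soversion '2' this returns
# '@rpath/libfoo.2.dylib'; without a soversion it would be
# '@rpath/libfoo.dylib'.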
class InvalidArguments(MesonException):
pass
@dataclass(eq=False)
class DependencyOverride(HoldableObject):
dep: dependencies.Dependency
node: 'BaseNode'
explicit: bool = True
@dataclass(eq=False)
class Headers(HoldableObject):
sources: T.List[File]
install_subdir: T.Optional[str]
custom_install_dir: T.Optional[str]
custom_install_mode: 'FileMode'
subproject: str
# TODO: we really don't need any of these methods, but they're preserved to
# keep APIs relying on them working.
def set_install_subdir(self, subdir: str) -> None:
self.install_subdir = subdir
def get_install_subdir(self) -> T.Optional[str]:
return self.install_subdir
def get_sources(self) -> T.List[File]:
return self.sources
def get_custom_install_dir(self) -> T.Optional[str]:
return self.custom_install_dir
def get_custom_install_mode(self) -> 'FileMode':
return self.custom_install_mode
@dataclass(eq=False)
class Man(HoldableObject):
sources: T.List[File]
custom_install_dir: T.Optional[str]
custom_install_mode: 'FileMode'
subproject: str
locale: T.Optional[str]
def get_custom_install_dir(self) -> T.Optional[str]:
return self.custom_install_dir
def get_custom_install_mode(self) -> 'FileMode':
return self.custom_install_mode
def get_sources(self) -> T.List['File']:
return self.sources
@dataclass(eq=False)
class EmptyDir(HoldableObject):
path: str
install_mode: 'FileMode'
subproject: str
install_tag: T.Optional[str] = None
@dataclass(eq=False)
class InstallDir(HoldableObject):
source_subdir: str
installable_subdir: str
install_dir: str
install_mode: 'FileMode'
exclude: T.Tuple[T.Set[str], T.Set[str]]
strip_directory: bool
subproject: str
from_source_dir: bool = True
install_tag: T.Optional[str] = None
@dataclass(eq=False)
class DepManifest:
version: str
license: T.List[str]
def to_json(self) -> T.Dict[str, T.Union[str, T.List[str]]]:
return {
'version': self.version,
'license': self.license,
}
# literally everything isn't dataclass stuff
class Build:
"""A class that holds the status of one build including
all dependencies and so on.
"""
def __init__(self, environment: environment.Environment):
self.project_name = 'name of master project'
self.project_version = None
self.environment = environment
self.projects = {}
self.targets: 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]' = OrderedDict()
self.run_target_names: T.Set[T.Tuple[str, str]] = set()
self.global_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
self.global_link_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
self.projects_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {})
self.projects_link_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {})
self.tests: T.List['Test'] = []
self.benchmarks: T.List['Test'] = []
self.headers: T.List[Headers] = []
self.man: T.List[Man] = []
self.emptydir: T.List[EmptyDir] = []
self.data: T.List[Data] = []
self.symlinks: T.List[SymlinkData] = []
self.static_linker: PerMachine[StaticLinker] = PerMachine(None, None)
self.subprojects = {}
self.subproject_dir = ''
self.install_scripts: T.List['ExecutableSerialisation'] = []
self.postconf_scripts: T.List['ExecutableSerialisation'] = []
self.dist_scripts: T.List['ExecutableSerialisation'] = []
self.install_dirs: T.List[InstallDir] = []
self.dep_manifest_name: T.Optional[str] = None
self.dep_manifest: T.Dict[str, DepManifest] = {}
self.stdlibs = PerMachine({}, {})
self.test_setups: T.Dict[str, TestSetup] = {}
self.test_setup_default_name = None
self.find_overrides: T.Dict[str, T.Union['Executable', programs.ExternalProgram, programs.OverrideProgram]] = {}
self.searched_programs = set() # The list of all programs that have been searched for.
# If we are doing a cross build we need two caches, if we're doing a
# build == host compilation the both caches should point to the same place.
self.dependency_overrides: PerMachine[T.Dict[T.Tuple, DependencyOverride]] = PerMachineDefaultable.default(
environment.is_cross_build(), {}, {})
self.devenv: T.List[EnvironmentVariables] = []
self.modules: T.List[str] = []
self.need_vsenv = False
def get_build_targets(self):
build_targets = OrderedDict()
for name, t in self.targets.items():
if isinstance(t, BuildTarget):
build_targets[name] = t
return build_targets
def get_custom_targets(self):
custom_targets = OrderedDict()
for name, t in self.targets.items():
if isinstance(t, CustomTarget):
custom_targets[name] = t
return custom_targets
def copy(self):
other = Build(self.environment)
for k, v in self.__dict__.items():
if isinstance(v, (list, dict, set, OrderedDict)):
other.__dict__[k] = v.copy()
else:
other.__dict__[k] = v
return other
def merge(self, other):
for k, v in other.__dict__.items():
self.__dict__[k] = v
def ensure_static_linker(self, compiler):
if self.static_linker[compiler.for_machine] is None and compiler.needs_static_linker():
self.static_linker[compiler.for_machine] = detect_static_linker(self.environment, compiler)
def get_project(self):
return self.projects['']
def get_subproject_dir(self):
return self.subproject_dir
def get_targets(self) -> 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]':
return self.targets
def get_tests(self) -> T.List['Test']:
return self.tests
def get_benchmarks(self) -> T.List['Test']:
return self.benchmarks
def get_headers(self) -> T.List['Headers']:
return self.headers
def get_man(self) -> T.List['Man']:
return self.man
def get_data(self) -> T.List['Data']:
return self.data
def get_symlinks(self) -> T.List['SymlinkData']:
return self.symlinks
def get_emptydir(self) -> T.List['EmptyDir']:
return self.emptydir
def get_install_subdirs(self) -> T.List['InstallDir']:
return self.install_dirs
def get_global_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]:
d = self.global_args[for_machine]
return d.get(compiler.get_language(), [])
def get_project_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]:
d = self.projects_args[for_machine]
args = d.get(project)
if not args:
return []
return args.get(compiler.get_language(), [])
def get_global_link_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]:
d = self.global_link_args[for_machine]
return d.get(compiler.get_language(), [])
def get_project_link_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]:
d = self.projects_link_args[for_machine]
link_args = d.get(project)
if not link_args:
return []
return link_args.get(compiler.get_language(), [])
@dataclass(eq=False)
class IncludeDirs(HoldableObject):
"""Internal representation of an include_directories call."""
curdir: str
incdirs: T.List[str]
is_system: bool
# Interpreter has validated that all given directories
# actually exist.
extra_build_dirs: T.List[str] = field(default_factory=list)
def __repr__(self) -> str:
r = '<{} {}/{}>'
return r.format(self.__class__.__name__, self.curdir, self.incdirs)
def get_curdir(self) -> str:
return self.curdir
def get_incdirs(self) -> T.List[str]:
return self.incdirs
def get_extra_build_dirs(self) -> T.List[str]:
return self.extra_build_dirs
def to_string_list(self, sourcedir: str, builddir: T.Optional[str] = None) -> T.List[str]:
"""Convert IncludeDirs object to a list of strings.
:param sourcedir: The absolute source directory
:param builddir: The absolute build directory; optional, the build dir will
not be added if this is unset
:returns: A list of strings (without compiler argument)
"""
strlist: T.List[str] = []
for idir in self.incdirs:
strlist.append(os.path.join(sourcedir, self.curdir, idir))
if builddir:
strlist.append(os.path.join(builddir, self.curdir, idir))
return strlist
@dataclass(eq=False)
class ExtractedObjects(HoldableObject):
'''
Holds a list of sources for which the objects must be extracted
'''
target: 'BuildTarget'
srclist: T.List[File] = field(default_factory=list)
genlist: T.List['GeneratedTypes'] = field(default_factory=list)
objlist: T.List[T.Union[str, 'File', 'ExtractedObjects']] = field(default_factory=list)
recursive: bool = True
def __post_init__(self) -> None:
if self.target.is_unity:
self.check_unity_compatible()
def __repr__(self) -> str:
r = '<{0} {1!r}: {2}>'
return r.format(self.__class__.__name__, self.target.name, self.srclist)
@staticmethod
def get_sources(sources: T.Sequence['FileOrString'], generated_sources: T.Sequence['GeneratedTypes']) -> T.List['FileOrString']:
# Merge sources and generated sources
sources = list(sources)
for gensrc in generated_sources:
for s in gensrc.get_outputs():
# We cannot know the path where this source will be generated,
# but all we need here is the file extension to determine the
# compiler.
sources.append(s)
# Filter out headers and all non-source files
return [s for s in sources if environment.is_source(s) and not environment.is_header(s)]
def classify_all_sources(self, sources: T.List[str], generated_sources: T.Sequence['GeneratedTypes']) -> T.Dict['Compiler', T.List['FileOrString']]:
sources_ = self.get_sources(sources, generated_sources)
return classify_unity_sources(self.target.compilers.values(), sources_)
def check_unity_compatible(self) -> None:
# Figure out if the extracted object list is compatible with a Unity
# build. When we're doing a Unified build, we go through the sources,
# and create a single source file from each subset of the sources that
# can be compiled with a specific compiler. Then we create one object
# from each unified source file. So for each compiler we can either
# extract all its sources or none.
cmpsrcs = self.classify_all_sources(self.target.sources, self.target.generated)
extracted_cmpsrcs = self.classify_all_sources(self.srclist, self.genlist)
for comp, srcs in extracted_cmpsrcs.items():
if set(srcs) != set(cmpsrcs[comp]):
raise MesonException('Single object files can not be extracted '
'in Unity builds. You can only extract all '
'the object files for each compiler at once.')
def get_outputs(self, backend: 'Backend') -> T.List[str]:
return [
backend.object_filename_from_source(self.target, source)
for source in self.get_sources(self.srclist, self.genlist)
]
EnvInitValueType = T.Dict[str, T.Union[str, T.List[str]]]
class EnvironmentVariables(HoldableObject):
def __init__(self, values: T.Optional[EnvInitValueType] = None,
init_method: Literal['set', 'prepend', 'append'] = 'set', separator: str = os.pathsep) -> None:
self.envvars: T.List[T.Tuple[T.Callable[[T.Dict[str, str], str, T.List[str], str], str], str, T.List[str], str]] = []
# The set of all env vars we have operations for. Only used for self.has_name()
self.varnames: T.Set[str] = set()
if values:
init_func = getattr(self, init_method)
for name, value in values.items():
init_func(name, listify(value), separator)
def __repr__(self) -> str:
repr_str = "<{0}: {1}>"
return repr_str.format(self.__class__.__name__, self.envvars)
def hash(self, hasher: T.Any):
myenv = self.get_env({})
for key in sorted(myenv.keys()):
hasher.update(bytes(key, encoding='utf-8'))
hasher.update(b',')
hasher.update(bytes(myenv[key], encoding='utf-8'))
hasher.update(b';')
def has_name(self, name: str) -> bool:
return name in self.varnames
def get_names(self) -> T.Set[str]:
return self.varnames
def set(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
self.varnames.add(name)
self.envvars.append((self._set, name, values, separator))
def append(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
self.varnames.add(name)
self.envvars.append((self._append, name, values, separator))
def prepend(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
self.varnames.add(name)
self.envvars.append((self._prepend, name, values, separator))
@staticmethod
def _set(env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
return separator.join(values)
@staticmethod
def _append(env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
curr = env.get(name)
return separator.join(values if curr is None else [curr] + values)
@staticmethod
def _prepend(env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
curr = env.get(name)
return separator.join(values if curr is None else values + [curr])
def get_env(self, full_env: T.Dict[str, str]) -> T.Dict[str, str]:
env = full_env.copy()
for method, name, values, separator in self.envvars:
env[name] = method(env, name, values, separator)
return env
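# Usage sketch (illustrative values):
#
#   env = EnvironmentVariables()
#   env.set('FOO', ['bar'])
#   env.append('PATH', ['/opt/tool/bin'])
#   env.get_env({'PATH': '/usr/bin'})
#   # -> {'PATH': '/usr/bin' + os.pathsep + '/opt/tool/bin', 'FOO': 'bar'}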
@dataclass(eq=False)
class Target(HoldableObject):
# TODO: should Target be an abc.ABCMeta?
name: str
subdir: str
subproject: 'SubProject'
build_by_default: bool
for_machine: MachineChoice
def __post_init__(self) -> None:
if has_path_sep(self.name):
# Fix failing test 53 when this becomes an error.
mlog.warning(textwrap.dedent(f'''\
Target "{self.name}" has a path separator in its name.
This is not supported, it can cause unexpected failures and will become
a hard error in the future.
'''))
self.install = False
self.build_always_stale = False
self.option_overrides_base: T.Dict[OptionKey, str] = {}
self.option_overrides_compiler: T.Dict[OptionKey, str] = {}
self.extra_files = [] # type: T.List[File]
if not hasattr(self, 'typename'):
raise RuntimeError(f'Target type is not set for target class "{type(self).__name__}". This is a bug')
# dataclass comparators?
def __lt__(self, other: object) -> bool:
if not isinstance(other, Target):
return NotImplemented
return self.get_id() < other.get_id()
def __le__(self, other: object) -> bool:
if not isinstance(other, Target):
return NotImplemented
return self.get_id() <= other.get_id()
def __gt__(self, other: object) -> bool:
if not isinstance(other, Target):
return NotImplemented
return self.get_id() > other.get_id()
def __ge__(self, other: object) -> bool:
if not isinstance(other, Target):
return NotImplemented
return self.get_id() >= other.get_id()
def get_default_install_dir(self, env: environment.Environment) -> T.Tuple[str, str]:
raise NotImplementedError
def get_custom_install_dir(self) -> T.List[T.Union[str, bool]]:
raise NotImplementedError
def get_install_dir(self, environment: environment.Environment) -> T.Tuple[T.Any, str, bool]:
# Find the installation directory.
default_install_dir, install_dir_name = self.get_default_install_dir(environment)
outdirs = self.get_custom_install_dir()
if outdirs and outdirs[0] != default_install_dir and outdirs[0] is not True:
# Either the value is set to a non-default value, or is set to
# False (which means we want this specific output out of many
# outputs to not be installed).
custom_install_dir = True
else:
custom_install_dir = False
# if outdirs is empty we need to set to something, otherwise we set
# only the first value to the default
if outdirs:
outdirs[0] = default_install_dir
else:
outdirs = [default_install_dir]
return outdirs, install_dir_name, custom_install_dir
def get_basename(self) -> str:
return self.name
def get_subdir(self) -> str:
return self.subdir
def get_typename(self) -> str:
return self.typename
@staticmethod
def _get_id_hash(target_id):
# We don't really need cryptographic security here.
# Small-digest hash function with unlikely collision is good enough.
h = hashlib.sha256()
h.update(target_id.encode(encoding='utf-8', errors='replace'))
# This ID should be case-insensitive and should work in Visual Studio,
# e.g. it should not start with leading '-'.
return h.hexdigest()[:7]
@staticmethod
def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
"""Construct target ID from subdir, name and type suffix.
This helper function is made public mostly for tests."""
# This ID must also be a valid file name on all OSs.
# It should also avoid shell metacharacters for obvious
# reasons. '@' is not used as often as '_' in source code names.
# In case of collisions consider using checksums.
# FIXME replace with assert when slash in names is prohibited
name_part = name.replace('/', '@').replace('\\', '@')
assert not has_path_sep(type_suffix)
my_id = name_part + type_suffix
if subdir:
subdir_part = Target._get_id_hash(subdir)
# preserve myid for better debuggability
return subdir_part + '@@' + my_id
return my_id
def get_id(self) -> str:
return self.construct_id_from_path(
self.subdir, self.name, self.type_suffix())
def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
if 'build_by_default' in kwargs:
self.build_by_default = kwargs['build_by_default']
if not isinstance(self.build_by_default, bool):
raise InvalidArguments('build_by_default must be a boolean value.')
elif kwargs.get('install', False):
# For backward compatibility, if build_by_default is not explicitly
# set, use the value of 'install' if it's enabled.
self.build_by_default = True
option_overrides = self.parse_overrides(kwargs)
for k, v in option_overrides.items():
if k.lang:
self.option_overrides_compiler[k.evolve(machine=self.for_machine)] = v
continue
self.option_overrides_base[k] = v
@staticmethod
def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]:
opts = kwargs.get('override_options', [])
# In this case we have an already parsed and ready to go dictionary
# provided by typed_kwargs
if isinstance(opts, dict):
return T.cast(T.Dict[OptionKey, str], opts)
result: T.Dict[OptionKey, str] = {}
overrides = stringlistify(opts)
for o in overrides:
if '=' not in o:
raise InvalidArguments('Overrides must be of form "key=value"')
k, v = o.split('=', 1)
key = OptionKey.from_string(k.strip())
v = v.strip()
result[key] = v
return result
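# Example: ['cpp_std=c++17', 'b_lto=true'] parses into a dict mapping the
# corresponding OptionKeys to the strings 'c++17' and 'true'; values stay
# strings here, and type coercion happens later in option handling.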
def is_linkable_target(self) -> bool:
return False
def get_outputs(self) -> T.List[str]:
return []
def should_install(self) -> bool:
return False
class BuildTarget(Target):
known_kwargs = known_build_target_kwargs
install_dir: T.List[T.Union[str, bool]]
def __init__(self, name: str, subdir: str, subproject: 'SubProject', for_machine: MachineChoice,
sources: T.List['SourceOutputs'], objects, environment: environment.Environment, kwargs):
super().__init__(name, subdir, subproject, True, for_machine)
unity_opt = environment.coredata.get_option(OptionKey('unity'))
self.is_unity = unity_opt == 'on' or (unity_opt == 'subprojects' and subproject != '')
self.environment = environment
self.compilers = OrderedDict() # type: OrderedDict[str, Compiler]
self.objects: T.List[T.Union[str, 'File', 'ExtractedObjects']] = []
self.external_deps: T.List[dependencies.Dependency] = []
self.include_dirs: T.List['IncludeDirs'] = []
self.link_language = kwargs.get('link_language')
self.link_targets: T.List[T.Union['BuildTarget', 'CustomTarget', 'CustomTargetIndex']] = []
self.link_whole_targets = []
self.link_depends = []
self.added_deps = set()
self.name_prefix_set = False
self.name_suffix_set = False
self.filename = 'no_name'
# The list of all files outputted by this target. Useful in cases such
# as Vala which generates .vapi and .h besides the compiled output.
self.outputs = [self.filename]
self.need_install = False
self.pch: T.Dict[str, T.List[str]] = {}
self.extra_args: T.Dict[str, T.List['FileOrString']] = {}
self.sources: T.List[File] = []
self.generated: T.List['GeneratedTypes'] = []
self.d_features = defaultdict(list)
self.pic = False
self.pie = False
# Track build_rpath entries so we can remove them at install time
self.rpath_dirs_to_remove: T.Set[bytes] = set()
self.process_sourcelist(sources)
# Objects can be:
# 1. Pre-existing objects provided by the user with the `objects:` kwarg
# 2. Compiled objects created by and extracted from another target
self.process_objectlist(objects)
self.process_kwargs(kwargs, environment)
self.check_unknown_kwargs(kwargs)
self.process_compilers()
if not any([self.sources, self.generated, self.objects, self.link_whole_targets]):
raise InvalidArguments(f'Build target {name} has no sources.')
self.process_compilers_late()
self.validate_sources()
self.validate_install(environment)
self.check_module_linking()
def __repr__(self):
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.filename)
def __str__(self):
return f"{self.name}"
def validate_install(self, environment):
if self.for_machine is MachineChoice.BUILD and self.need_install:
if environment.is_cross_build():
raise InvalidArguments('Tried to install a target for the build machine in a cross build.')
else:
mlog.warning('Installing target build for the build machine. This will fail in a cross build.')
def check_unknown_kwargs(self, kwargs):
# Override this method in derived classes that have more
# keywords.
self.check_unknown_kwargs_int(kwargs, self.known_kwargs)
def check_unknown_kwargs_int(self, kwargs, known_kwargs):
unknowns = []
for k in kwargs:
if k not in known_kwargs:
unknowns.append(k)
if len(unknowns) > 0:
mlog.warning('Unknown keyword argument(s) in target {}: {}.'.format(self.name, ', '.join(unknowns)))
def process_objectlist(self, objects):
assert isinstance(objects, list)
for s in objects:
if isinstance(s, (str, File, ExtractedObjects)):
self.objects.append(s)
elif isinstance(s, (GeneratedList, CustomTarget)):
msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \
f'for target {self.name!r}.\nIt is meant only for ' + \
'pre-built object files that are shipped with the\nsource ' + \
'tree. Try adding it in the list of sources.'
raise InvalidArguments(msg)
else:
raise InvalidArguments(f'Bad object of type {type(s).__name__!r} in target {self.name!r}.')
def process_sourcelist(self, sources: T.List['SourceOutputs']) -> None:
"""Split sources into generated and static sources.
Sources can be:
1. Pre-existing source files in the source tree (static)
2. Pre-existing sources generated by configure_file in the build tree.
(static as they are only regenerated if meson itself is regenerated)
3. Sources files generated by another target or a Generator (generated)
"""
added_sources: T.Set[File] = set() # If the same source is defined multiple times, use it only once.
for s in sources:
if isinstance(s, File):
if s not in added_sources:
self.sources.append(s)
added_sources.add(s)
elif isinstance(s, (CustomTarget, CustomTargetIndex, GeneratedList)):
self.generated.append(s)
@staticmethod
def can_compile_remove_sources(compiler: 'Compiler', sources: T.List['FileOrString']) -> bool:
removed = False
for s in sources[:]:
if compiler.can_compile(s):
sources.remove(s)
removed = True
return removed
def process_compilers_late(self):
"""Processes additional compilers after kwargs have been evaluated.
This can add extra compilers that might be required by keyword
arguments, such as link_with or dependencies. It will also try to guess
which compiler to use if one hasn't been selected already.
"""
# Populate list of compilers
compilers = self.environment.coredata.compilers[self.for_machine]
# did user override clink_langs for this target?
link_langs = [self.link_language] if self.link_language else clink_langs
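        # e.g. a target built purely from prebuilt objects but linked against
        # a C++ library picks up that library's 'cpp' compiler in the loop
        # below, so a suitable linker can be chosen later.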
# If this library is linked against another library we need to consider
# the languages of those libraries as well.
if self.link_targets or self.link_whole_targets:
extra = set()
for t in itertools.chain(self.link_targets, self.link_whole_targets):
if isinstance(t, CustomTarget) or isinstance(t, CustomTargetIndex):
continue # We can't know anything about these.
for name, compiler in t.compilers.items():
if name in link_langs:
extra.add((name, compiler))
for name, compiler in sorted(extra, key=lambda p: sort_clink(p[0])):
self.compilers[name] = compiler
if not self.compilers:
# No source files or parent targets, target consists of only object
# files of unknown origin. Just add the first clink compiler
# that we have and hope that it can link these objects
for lang in link_langs:
if lang in compilers:
self.compilers[lang] = compilers[lang]
break
def process_compilers(self):
'''
Populate self.compilers, which is the list of compilers that this
target will use for compiling all its sources.
We also add compilers that were used by extracted objects to simplify
dynamic linker determination.
'''
if not self.sources and not self.generated and not self.objects:
return
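        # e.g. a target with sources ['main.c', 'util.cpp'] ends up with both
        # 'c' and 'cpp' entries in self.compilers.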
# Populate list of compilers
compilers = self.environment.coredata.compilers[self.for_machine]
# Pre-existing sources
sources = list(self.sources)
# All generated sources
for gensrc in self.generated:
for s in gensrc.get_outputs():
# Generated objects can't be compiled, so don't use them for
# compiler detection. If our target only has generated objects,
# we will fall back to using the first c-like compiler we find,
# which is what we need.
if not is_object(s):
sources.append(s)
for d in self.external_deps:
for s in d.sources:
if isinstance(s, (str, File)):
sources.append(s)
# Sources that were used to create our extracted objects
for o in self.objects:
if not isinstance(o, ExtractedObjects):
continue
for s in o.srclist:
# Don't add Vala sources since that will pull in the Vala
# compiler even though we will never use it since we are
# dealing with compiled C code.
if not s.endswith(lang_suffixes['vala']):
sources.append(s)
if sources:
# For each source, try to add one compiler that can compile it.
#
# If it has a suffix that belongs to a known language, we must have
# a compiler for that language.
#
# Otherwise, it's ok if no compilers can compile it, because users
# are expected to be able to add arbitrary non-source files to the
# sources list
for s in sources:
for lang, compiler in compilers.items():
if compiler.can_compile(s):
if lang not in self.compilers:
self.compilers[lang] = compiler
break
else:
if is_known_suffix(s):
raise MesonException('No {} machine compiler for "{}"'.
format(self.for_machine.get_lower_case_name(), s))
# Re-sort according to clink_langs
self.compilers = OrderedDict(sorted(self.compilers.items(),
key=lambda t: sort_clink(t[0])))
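            # With the default clink_langs priority this places e.g. 'cpp'
            # ahead of 'c', so the highest-priority language drives linking.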
# If all our sources are Vala, our target also needs the C compiler but
# it won't get added above.
if 'vala' in self.compilers and 'c' not in self.compilers:
self.compilers['c'] = compilers['c']
if 'cython' in self.compilers:
key = OptionKey('language', machine=self.for_machine, lang='cython')
if key in self.option_overrides_compiler:
value = self.option_overrides_compiler[key]
else:
value = self.environment.coredata.options[key].value
try:
self.compilers[value] = compilers[value]
except KeyError:
# TODO: it would be nice to not have to do this here, but we
# have two problems to work around:
# 1. If this is set via an override we have no way to know
# before now that we need a compiler for the non-default language
# 2. Because Cython itself initializes the `cython_language`
# option, we have no good place to insert that you need it
# before now, so we just have to do it here.
                comp = detect_compiler_for(self.environment, value, self.for_machine)
                if comp is None:
                    raise MesonException(f'Cannot find required compiler {value}')
                # This is copied verbatim from the interpreter
                if self.for_machine == MachineChoice.HOST or self.environment.is_cross_build():
                    logger_fun = mlog.log
                else:
                    logger_fun = mlog.debug
                logger_fun(comp.get_display_language(), 'compiler for the', self.for_machine.get_lower_case_name(), 'machine:',
                           mlog.bold(' '.join(comp.get_exelist())), comp.get_version_string())
                if comp.linker is not None:
                    logger_fun(comp.get_display_language(), 'linker for the', self.for_machine.get_lower_case_name(), 'machine:',
                               mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
                self.compilers[value] = comp
def validate_sources(self):
if not self.sources:
return
for lang in ('cs', 'java'):
if lang in self.compilers:
check_sources = list(self.sources)
compiler = self.compilers[lang]
if not self.can_compile_remove_sources(compiler, check_sources):
raise InvalidArguments(f'No {lang} sources found in target {self.name!r}')
if check_sources:
m = '{0} targets can only contain {0} files:\n'.format(lang.capitalize())
m += '\n'.join([repr(c) for c in check_sources])
raise InvalidArguments(m)
# CSharp and Java targets can't contain any other file types
assert len(self.compilers) == 1
return
def process_link_depends(self, sources, environment):
"""Process the link_depends keyword argument.
This is designed to handle strings, Files, and the output of Custom
Targets. Notably it doesn't handle generator() returned objects, since
adding them as a link depends would inherently cause them to be
generated twice, since the output needs to be passed to the ld_args and
link_depends.
"""
sources = listify(sources)
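        # e.g. a plain 'symbols.map' string is resolved relative to this
        # target's source subdir, while File and custom target inputs are
        # recorded as-is.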
for s in sources:
if isinstance(s, File):
self.link_depends.append(s)
elif isinstance(s, str):
self.link_depends.append(
File.from_source_file(environment.source_dir, self.subdir, s))
elif hasattr(s, 'get_outputs'):
self.link_depends.append(s)
else:
raise InvalidArguments(
'Link_depends arguments must be strings, Files, '
'or a Custom Target, or lists thereof.')
def get_original_kwargs(self):
return self.kwargs
def copy_kwargs(self, kwargs):
self.kwargs = copy.copy(kwargs)
for k, v in self.kwargs.items():
if isinstance(v, list):
self.kwargs[k] = listify(v, flatten=True)
for t in ['dependencies', 'link_with', 'include_directories', 'sources']:
if t in self.kwargs:
self.kwargs[t] = listify(self.kwargs[t], flatten=True)
def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedTypes']]) -> ExtractedObjects:
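        # Accepts e.g. 'foo.c' (a source of this target, by name or as a
        # File) or one of its generated sources; the result references the
        # object files those sources compile to.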
sources_set = set(self.sources)
generated_set = set(self.generated)
obj_src: T.List['File'] = []
obj_gen: T.List['GeneratedTypes'] = []
for src in srclist:
if isinstance(src, (str, File)):
if isinstance(src, str):
src = File(False, self.subdir, src)
else:
FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject)
if src not in sources_set:
raise MesonException(f'Tried to extract unknown source {src}.')
obj_src.append(src)
elif isinstance(src, (CustomTarget, CustomTargetIndex, GeneratedList)):
FeatureNew.single_use('Generated sources for extract_objects', '0.61.0', self.subproject)
target = src.target if isinstance(src, CustomTargetIndex) else src
if src not in generated_set and target not in generated_set:
raise MesonException(f'Tried to extract unknown source {target.get_basename()}.')
obj_gen.append(src)
else:
raise MesonException(f'Object extraction arguments must be strings, Files or targets (got {type(src).__name__}).')
return ExtractedObjects(self, obj_src, obj_gen)
def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects:
return ExtractedObjects(self, self.sources, self.generated, self.objects,
recursive)
def get_all_link_deps(self) -> 'ImmutableListProtocol[T.Union[BuildTarget, CustomTarget, CustomTargetIndex]]':
return self.get_transitive_link_deps()
@lru_cache(maxsize=None)
def get_transitive_link_deps(self) -> 'ImmutableListProtocol[T.Union[BuildTarget, CustomTarget, CustomTargetIndex]]':
result: T.List[Target] = []
for i in self.link_targets:
result += i.get_all_link_deps()
return result
def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
return self.get_transitive_link_deps_mapping(prefix, environment)
@lru_cache(maxsize=None)
def get_transitive_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
result: T.Dict[str, str] = {}
for i in self.link_targets:
mapping = i.get_link_deps_mapping(prefix, environment)
            # We are merging two dictionaries while keeping the earlier one dominant.
result_tmp = mapping.copy()
result_tmp.update(result)
result = result_tmp
return result
@lru_cache(maxsize=None)
def get_link_dep_subdirs(self) -> 'ImmutableSetProtocol[str]':
result: OrderedSet[str] = OrderedSet()
for i in self.link_targets:
if not isinstance(i, StaticLibrary):
result.add(i.get_subdir())
result.update(i.get_link_dep_subdirs())
return result
def get_default_install_dir(self, environment: environment.Environment) -> T.Tuple[str, str]:
return environment.get_libdir(), '{libdir}'
def get_custom_install_dir(self) -> T.List[T.Union[str, bool]]:
return self.install_dir
def get_custom_install_mode(self) -> T.Optional['FileMode']:
return self.install_mode
def process_kwargs(self, kwargs, environment):
self.process_kwargs_base(kwargs)
self.copy_kwargs(kwargs)
kwargs.get('modules', [])
self.need_install = kwargs.get('install', self.need_install)
llist = extract_as_list(kwargs, 'link_with')
for linktarget in llist:
if isinstance(linktarget, dependencies.ExternalLibrary):
raise MesonException(textwrap.dedent('''\
An external library was used in link_with keyword argument, which
is reserved for libraries built as part of this project. External
libraries must be passed using the dependencies keyword argument
instead, because they are conceptually "external dependencies",
just like those detected with the dependency() function.
'''))
self.link(linktarget)
lwhole = extract_as_list(kwargs, 'link_whole')
for linktarget in lwhole:
self.link_whole(linktarget)
c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
= (extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args'])
self.add_pch('c', c_pchlist)
self.add_pch('cpp', cpp_pchlist)
compiler_args = {'c': clist, 'cpp': cpplist, 'cuda': cudalist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
'fortran': fortranlist, 'rust': rustlist
}
for key, value in compiler_args.items():
self.add_compiler_args(key, value)
if not isinstance(self, Executable) or 'export_dynamic' in kwargs:
self.vala_header = kwargs.get('vala_header', self.name + '.h')
self.vala_vapi = kwargs.get('vala_vapi', self.name + '.vapi')
self.vala_gir = kwargs.get('vala_gir', None)
dlist = stringlistify(kwargs.get('d_args', []))
self.add_compiler_args('d', dlist)
dfeatures = defaultdict(list)
dfeature_unittest = kwargs.get('d_unittest', False)
if dfeature_unittest:
dfeatures['unittest'] = dfeature_unittest
dfeature_versions = kwargs.get('d_module_versions', [])
if dfeature_versions:
dfeatures['versions'] = dfeature_versions
dfeature_debug = kwargs.get('d_debug', [])
if dfeature_debug:
dfeatures['debug'] = dfeature_debug
if 'd_import_dirs' in kwargs:
dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs')
for d in dfeature_import_dirs:
if not isinstance(d, IncludeDirs):
raise InvalidArguments('Arguments to d_import_dirs must be include_directories.')
dfeatures['import_dirs'] = dfeature_import_dirs
if dfeatures:
self.d_features = dfeatures
self.link_args = extract_as_list(kwargs, 'link_args')
for i in self.link_args:
if not isinstance(i, str):
raise InvalidArguments('Link_args arguments must be strings.')
for l in self.link_args:
if '-Wl,-rpath' in l or l.startswith('-rpath'):
mlog.warning(textwrap.dedent('''\
Please do not define rpath with a linker argument, use install_rpath
or build_rpath properties instead.
This will become a hard error in a future Meson release.
'''))
self.process_link_depends(kwargs.get('link_depends', []), environment)
# Target-specific include dirs must be added BEFORE include dirs from
# internal deps (added inside self.add_deps()) to override them.
inclist = extract_as_list(kwargs, 'include_directories')
self.add_include_dirs(inclist)
# Add dependencies (which also have include_directories)
deplist = extract_as_list(kwargs, 'dependencies')
self.add_deps(deplist)
# If an item in this list is False, the output corresponding to
# the list index of that item will not be installed
self.install_dir = typeslistify(kwargs.get('install_dir', []),
(str, bool))
self.install_mode = kwargs.get('install_mode', None)
self.install_tag = stringlistify(kwargs.get('install_tag', [None]))
main_class = kwargs.get('main_class', '')
if not isinstance(main_class, str):
raise InvalidArguments('Main class must be a string')
self.main_class = main_class
if isinstance(self, Executable):
# This kwarg is deprecated. The value of "none" means that the kwarg
# was not specified and win_subsystem should be used instead.
self.gui_app = None
if 'gui_app' in kwargs:
if 'win_subsystem' in kwargs:
raise InvalidArguments('Can specify only gui_app or win_subsystem for a target, not both.')
self.gui_app = kwargs['gui_app']
if not isinstance(self.gui_app, bool):
raise InvalidArguments('Argument gui_app must be boolean.')
self.win_subsystem = self.validate_win_subsystem(kwargs.get('win_subsystem', 'console'))
elif 'gui_app' in kwargs:
raise InvalidArguments('Argument gui_app can only be used on executables.')
elif 'win_subsystem' in kwargs:
raise InvalidArguments('Argument win_subsystem can only be used on executables.')
extra_files = extract_as_list(kwargs, 'extra_files')
for i in extra_files:
assert isinstance(i, File)
trial = os.path.join(environment.get_source_dir(), i.subdir, i.fname)
if not os.path.isfile(trial):
raise InvalidArguments(f'Tried to add non-existing extra file {i}.')
self.extra_files = extra_files
self.install_rpath: str = kwargs.get('install_rpath', '')
if not isinstance(self.install_rpath, str):
raise InvalidArguments('Install_rpath is not a string.')
self.build_rpath = kwargs.get('build_rpath', '')
if not isinstance(self.build_rpath, str):
raise InvalidArguments('Build_rpath is not a string.')
resources = extract_as_list(kwargs, 'resources')
for r in resources:
if not isinstance(r, str):
raise InvalidArguments('Resource argument is not a string.')
trial = os.path.join(environment.get_source_dir(), self.subdir, r)
if not os.path.isfile(trial):
raise InvalidArguments(f'Tried to add non-existing resource {r}.')
self.resources = resources
if 'name_prefix' in kwargs:
name_prefix = kwargs['name_prefix']
if isinstance(name_prefix, list):
if name_prefix:
raise InvalidArguments('name_prefix array must be empty to signify default.')
else:
if not isinstance(name_prefix, str):
raise InvalidArguments('name_prefix must be a string.')
self.prefix = name_prefix
self.name_prefix_set = True
if 'name_suffix' in kwargs:
name_suffix = kwargs['name_suffix']
if isinstance(name_suffix, list):
if name_suffix:
raise InvalidArguments('name_suffix array must be empty to signify default.')
else:
if not isinstance(name_suffix, str):
raise InvalidArguments('name_suffix must be a string.')
if name_suffix == '':
raise InvalidArguments('name_suffix should not be an empty string. '
'If you want meson to use the default behaviour '
'for each platform pass `[]` (empty array)')
self.suffix = name_suffix
self.name_suffix_set = True
if isinstance(self, StaticLibrary):
# You can't disable PIC on OS X. The compiler ignores -fno-PIC.
# PIC is always on for Windows (all code is position-independent
# since library loading is done differently)
m = self.environment.machines[self.for_machine]
if m.is_darwin() or m.is_windows():
self.pic = True
else:
self.pic = self._extract_pic_pie(kwargs, 'pic', environment, 'b_staticpic')
if isinstance(self, Executable) or (isinstance(self, StaticLibrary) and not self.pic):
# Executables must be PIE on Android
if self.environment.machines[self.for_machine].is_android():
self.pie = True
else:
self.pie = self._extract_pic_pie(kwargs, 'pie', environment, 'b_pie')
self.implicit_include_directories = kwargs.get('implicit_include_directories', True)
if not isinstance(self.implicit_include_directories, bool):
raise InvalidArguments('Implicit_include_directories must be a boolean.')
self.gnu_symbol_visibility = kwargs.get('gnu_symbol_visibility', '')
if not isinstance(self.gnu_symbol_visibility, str):
raise InvalidArguments('GNU symbol visibility must be a string.')
if self.gnu_symbol_visibility != '':
permitted = ['default', 'internal', 'hidden', 'protected', 'inlineshidden']
if self.gnu_symbol_visibility not in permitted:
                raise InvalidArguments('GNU symbol visibility arg {} not one of: {}'.format(self.gnu_symbol_visibility, ', '.join(permitted)))
def validate_win_subsystem(self, value: str) -> str:
value = value.lower()
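        # Accepts a subsystem name optionally followed by ',major[.minor]',
        # e.g. 'windows' or 'console,6.0'.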
if re.fullmatch(r'(boot_application|console|efi_application|efi_boot_service_driver|efi_rom|efi_runtime_driver|native|posix|windows)(,\d+(\.\d+)?)?', value) is None:
raise InvalidArguments(f'Invalid value for win_subsystem: {value}.')
return value
def _extract_pic_pie(self, kwargs, arg: str, environment, option: str):
# Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags
all_flags = self.extra_args['c'] + self.extra_args['cpp']
if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags:
mlog.warning(f"Use the '{arg}' kwarg instead of passing '-f{arg}' manually to {self.name!r}")
return True
k = OptionKey(option)
if arg in kwargs:
val = kwargs[arg]
elif k in environment.coredata.options:
val = environment.coredata.options[k].value
else:
val = False
if not isinstance(val, bool):
raise InvalidArguments(f'Argument {arg} to {self.name!r} must be boolean')
return val
def get_filename(self) -> str:
return self.filename
def get_outputs(self) -> T.List[str]:
return self.outputs
def get_extra_args(self, language):
return self.extra_args.get(language, [])
def get_dependencies(self, exclude=None):
transitive_deps = []
if exclude is None:
exclude = []
for t in itertools.chain(self.link_targets, self.link_whole_targets):
if t in transitive_deps or t in exclude:
continue
transitive_deps.append(t)
if isinstance(t, StaticLibrary):
transitive_deps += t.get_dependencies(transitive_deps + exclude)
return transitive_deps
def get_source_subdir(self):
return self.subdir
def get_sources(self):
return self.sources
def get_objects(self) -> T.List[T.Union[str, 'File', 'ExtractedObjects']]:
return self.objects
def get_generated_sources(self) -> T.List['GeneratedTypes']:
return self.generated
def should_install(self) -> bool:
return self.need_install
def has_pch(self) -> bool:
return bool(self.pch)
def get_pch(self, language: str) -> T.List[str]:
return self.pch.get(language, [])
def get_include_dirs(self) -> T.List['IncludeDirs']:
return self.include_dirs
def add_deps(self, deps):
deps = listify(deps)
for dep in deps:
if dep in self.added_deps:
continue
dep_d_features = dep.d_features
for feature in ('versions', 'import_dirs'):
if feature in dep_d_features:
self.d_features[feature].extend(dep_d_features[feature])
if isinstance(dep, dependencies.InternalDependency):
# Those parts that are internal.
self.process_sourcelist(dep.sources)
self.add_include_dirs(dep.include_directories, dep.get_include_type())
for l in dep.libraries:
self.link(l)
for l in dep.whole_libraries:
self.link_whole(l)
if dep.get_compile_args() or dep.get_link_args():
# Those parts that are external.
extpart = dependencies.InternalDependency('undefined',
[],
dep.get_compile_args(),
dep.get_link_args(),
[], [], [], [], {}, [], [])
self.external_deps.append(extpart)
# Deps of deps.
self.add_deps(dep.ext_deps)
elif isinstance(dep, dependencies.Dependency):
if dep not in self.external_deps:
self.external_deps.append(dep)
self.process_sourcelist(dep.get_sources())
self.add_deps(dep.ext_deps)
elif isinstance(dep, BuildTarget):
raise InvalidArguments('''Tried to use a build target as a dependency.
You probably should put it in link_with instead.''')
else:
# This is a bit of a hack. We do not want Build to know anything
# about the interpreter so we can't import it and use isinstance.
# This should be reliable enough.
if hasattr(dep, 'project_args_frozen') or hasattr(dep, 'global_args_frozen'):
raise InvalidArguments('Tried to use subproject object as a dependency.\n'
'You probably wanted to use a dependency declared in it instead.\n'
'Access it by calling get_variable() on the subproject object.')
raise InvalidArguments(f'Argument is of an unacceptable type {type(dep).__name__!r}.\nMust be '
'either an external dependency (returned by find_library() or '
'dependency()) or an internal dependency (returned by '
'declare_dependency()).')
self.added_deps.add(dep)
def get_external_deps(self) -> T.List[dependencies.Dependency]:
return self.external_deps
def is_internal(self) -> bool:
return isinstance(self, StaticLibrary) and not self.need_install
def link(self, target):
for t in listify(target):
if isinstance(self, StaticLibrary) and self.need_install:
if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.should_install():
                        mlog.warning(f'Trying to link an installed static library target {self.name} with a '
                                     'custom target that is not installed; this might cause problems '
                                     'when you try to use this static library.')
elif t.is_internal():
# When we're a static library and we link_with to an
# internal/convenience library, promote to link_whole.
return self.link_whole(t)
if not isinstance(t, (Target, CustomTargetIndex)):
raise InvalidArguments(f'{t!r} is not a target.')
if not t.is_linkable_target():
raise InvalidArguments(f"Link target '{t!s}' is not linkable.")
if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
msg += "Use the 'pic' option to static_library to build with PIC."
raise InvalidArguments(msg)
if self.for_machine is not t.for_machine:
msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}'
if self.environment.is_cross_build():
raise InvalidArguments(msg + ' This is not possible in a cross build.')
else:
mlog.warning(msg + ' This will fail in cross build.')
self.link_targets.append(t)
def link_whole(self, target):
for t in listify(target):
if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.is_linkable_target():
raise InvalidArguments(f'Custom target {t!r} is not linkable.')
if not t.get_filename().endswith('.a'):
raise InvalidArguments('Can only link_whole custom targets that are .a archives.')
if isinstance(self, StaticLibrary):
# FIXME: We could extract the .a archive to get object files
raise InvalidArguments('Cannot link_whole a custom target into a static library')
elif not isinstance(t, StaticLibrary):
raise InvalidArguments(f'{t!r} is not a static library.')
elif isinstance(self, SharedLibrary) and not t.pic:
msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
msg += "Use the 'pic' option to static_library to build with PIC."
raise InvalidArguments(msg)
if self.for_machine is not t.for_machine:
msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}'
if self.environment.is_cross_build():
raise InvalidArguments(msg + ' This is not possible in a cross build.')
else:
mlog.warning(msg + ' This will fail in cross build.')
if isinstance(self, StaticLibrary):
# When we're a static library and we link_whole: to another static
# library, we need to add that target's objects to ourselves.
self.objects += t.extract_all_objects_recurse()
self.link_whole_targets.append(t)
def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
objs = [self.extract_all_objects()]
for t in self.link_targets:
if t.is_internal():
objs += t.extract_all_objects_recurse()
return objs
def add_pch(self, language: str, pchlist: T.List[str]) -> None:
if not pchlist:
return
elif len(pchlist) == 1:
if not environment.is_header(pchlist[0]):
raise InvalidArguments(f'PCH argument {pchlist[0]} is not a header.')
elif len(pchlist) == 2:
if environment.is_header(pchlist[0]):
if not environment.is_source(pchlist[1]):
raise InvalidArguments('PCH definition must contain one header and at most one source.')
elif environment.is_source(pchlist[0]):
if not environment.is_header(pchlist[1]):
raise InvalidArguments('PCH definition must contain one header and at most one source.')
pchlist = [pchlist[1], pchlist[0]]
else:
raise InvalidArguments(f'PCH argument {pchlist[0]} is of unknown type.')
if os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1]):
raise InvalidArguments('PCH files must be stored in the same folder.')
FeatureDeprecated.single_use('PCH source files', '0.50.0', self.subproject,
'Only a single header file should be used.')
elif len(pchlist) > 2:
raise InvalidArguments('PCH definition may have a maximum of 2 files.')
for f in pchlist:
if not isinstance(f, str):
raise MesonException('PCH arguments must be strings.')
if not os.path.isfile(os.path.join(self.environment.source_dir, self.subdir, f)):
raise MesonException(f'File {f} does not exist.')
self.pch[language] = pchlist
def add_include_dirs(self, args: T.Sequence['IncludeDirs'], set_is_system: T.Optional[str] = None) -> None:
ids: T.List['IncludeDirs'] = []
for a in args:
if not isinstance(a, IncludeDirs):
raise InvalidArguments('Include directory to be added is not an include directory object.')
ids.append(a)
if set_is_system is None:
set_is_system = 'preserve'
if set_is_system != 'preserve':
is_system = set_is_system == 'system'
ids = [IncludeDirs(x.get_curdir(), x.get_incdirs(), is_system, x.get_extra_build_dirs()) for x in ids]
self.include_dirs += ids
def add_compiler_args(self, language: str, args: T.List['FileOrString']) -> None:
args = listify(args)
for a in args:
if not isinstance(a, (str, File)):
raise InvalidArguments('A non-string passed to compiler args.')
if language in self.extra_args:
self.extra_args[language] += args
else:
self.extra_args[language] = args
def get_aliases(self) -> T.Dict[str, str]:
return {}
def get_langs_used_by_deps(self) -> T.List[str]:
'''
Sometimes you want to link to a C++ library that exports C API, which
means the linker must link in the C++ stdlib, and we must use a C++
compiler for linking. The same is also applicable for objc/objc++, etc,
so we can keep using clink_langs for the priority order.
See: https://github.com/mesonbuild/meson/issues/1653
'''
langs = [] # type: T.List[str]
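        # e.g. an executable with only C sources that links against a C++
        # static library reports 'cpp' here, forcing a C++ link step.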
# Check if any of the external libraries were written in this language
for dep in self.external_deps:
if dep.language is None:
continue
if dep.language not in langs:
langs.append(dep.language)
# Check if any of the internal libraries this target links to were
# written in this language
for link_target in itertools.chain(self.link_targets, self.link_whole_targets):
if isinstance(link_target, (CustomTarget, CustomTargetIndex)):
continue
for language in link_target.compilers:
if language not in langs:
langs.append(language)
return langs
def get_prelinker(self):
all_compilers = self.environment.coredata.compilers[self.for_machine]
if self.link_language:
comp = all_compilers[self.link_language]
return comp
for l in clink_langs:
if l in self.compilers:
try:
prelinker = all_compilers[l]
except KeyError:
raise MesonException(
                        f'Could not get a prelinker for build target {self.name!r}. '
f'Requires a compiler for language "{l}", but that is not '
'a project language.')
return prelinker
raise MesonException(f'Could not determine prelinker for {self.name!r}.')
def get_clink_dynamic_linker_and_stdlibs(self) -> T.Tuple['Compiler', T.List[str]]:
'''
We use the order of languages in `clink_langs` to determine which
linker to use in case the target has sources compiled with multiple
compilers. All languages other than those in this list have their own
linker.
Note that Vala outputs C code, so Vala sources can use any linker
that can link compiled C. We don't actually need to add an exception
for Vala here because of that.
'''
# Populate list of all compilers, not just those being used to compile
# sources in this target
all_compilers = self.environment.coredata.compilers[self.for_machine]
# If the user set the link_language, just return that.
if self.link_language:
comp = all_compilers[self.link_language]
return comp, comp.language_stdlib_only_link_flags(self.environment)
# Languages used by dependencies
dep_langs = self.get_langs_used_by_deps()
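        # e.g. for a target mixing C and C++ sources, 'cpp' wins the scan
        # below and the C++ compiler is returned as the linker, with the
        # other languages' stdlib link flags appended.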
# Pick a compiler based on the language priority-order
for l in clink_langs:
if l in self.compilers or l in dep_langs:
try:
linker = all_compilers[l]
except KeyError:
raise MesonException(
f'Could not get a dynamic linker for build target {self.name!r}. '
f'Requires a linker for language "{l}", but that is not '
'a project language.')
stdlib_args: T.List[str] = []
added_languages: T.Set[str] = set()
for dl in itertools.chain(self.compilers, dep_langs):
if dl != linker.language:
stdlib_args += all_compilers[dl].language_stdlib_only_link_flags(self.environment)
added_languages.add(dl)
# Type of var 'linker' is Compiler.
# Pretty hard to fix because the return value is passed everywhere
return linker, stdlib_args
raise AssertionError(f'Could not get a dynamic linker for build target {self.name!r}')
def uses_rust(self) -> bool:
"""Is this target a rust target."""
if self.sources:
first_file = self.sources[0]
if first_file.fname.endswith('.rs'):
return True
elif self.generated:
if self.generated[0].get_outputs()[0].endswith('.rs'):
return True
return False
def get_using_msvc(self) -> bool:
'''
Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary,
and SharedLibrary for deciding when to use MSVC-specific file naming
and debug filenames.
If at least some code is built with MSVC and the final library is
linked with MSVC, we can be sure that some debug info will be
generated. We only check the dynamic linker here because the static
linker is guaranteed to be of the same type.
Interesting cases:
1. The Vala compiler outputs C code to be compiled by whatever
C compiler we're using, so all objects will still be created by the
MSVC compiler.
2. If the target contains only objects, process_compilers guesses and
picks the first compiler that smells right.
'''
# Rustc can use msvc style linkers
if self.uses_rust():
compiler = self.environment.coredata.compilers[self.for_machine]['rust']
else:
compiler, _ = self.get_clink_dynamic_linker_and_stdlibs()
# Mixing many languages with MSVC is not supported yet so ignore stdlibs.
return compiler and compiler.get_linker_id() in {'link', 'lld-link', 'xilink', 'optlink'}
def check_module_linking(self):
'''
Warn if shared modules are linked with target: (link_with) #2865
'''
for link_target in self.link_targets:
if isinstance(link_target, SharedModule) and not link_target.force_soname:
if self.environment.machines[self.for_machine].is_darwin():
raise MesonException(
f'target {self.name} links against shared module {link_target.name}. This is not permitted on OSX')
elif self.environment.machines[self.for_machine].is_android() and isinstance(self, SharedModule):
# Android requires shared modules that use symbols from other shared modules to
# be linked before they can be dlopen()ed in the correct order. Not doing so
# leads to a missing symbol error: https://github.com/android/ndk/issues/201
link_target.force_soname = True
else:
mlog.deprecation(f'target {self.name} links against shared module {link_target.name}, which is incorrect.'
'\n '
f'This will be an error in the future, so please use shared_library() for {link_target.name} instead.'
'\n '
f'If shared_module() was used for {link_target.name} because it has references to undefined symbols,'
'\n '
                                     'use shared_library() with `override_options: [\'b_lundef=false\']` instead.')
link_target.force_soname = True
class Generator(HoldableObject):
def __init__(self, exe: T.Union['Executable', programs.ExternalProgram],
arguments: T.List[str],
output: T.List[str],
# how2dataclass
*,
depfile: T.Optional[str] = None,
capture: bool = False,
depends: T.Optional[T.List[T.Union[BuildTarget, 'CustomTarget']]] = None,
name: str = 'Generator'):
self.exe = exe
self.depfile = depfile
self.capture = capture
self.depends: T.List[T.Union[BuildTarget, 'CustomTarget']] = depends or []
self.arglist = arguments
self.outputs = output
self.name = name
def __repr__(self) -> str:
repr_str = "<{0}: {1}>"
return repr_str.format(self.__class__.__name__, self.exe)
def get_exe(self) -> T.Union['Executable', programs.ExternalProgram]:
return self.exe
def get_base_outnames(self, inname: str) -> T.List[str]:
plainname = os.path.basename(inname)
basename = os.path.splitext(plainname)[0]
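        # e.g. inname 'src/foo.c.in' yields plainname 'foo.c.in' and basename
        # 'foo.c', which replace @PLAINNAME@ and @BASENAME@ respectively.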
bases = [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.outputs]
return bases
def get_dep_outname(self, inname: str) -> T.List[str]:
if self.depfile is None:
raise InvalidArguments('Tried to get dep name for rule that does not have dependency file defined.')
plainname = os.path.basename(inname)
basename = os.path.splitext(plainname)[0]
return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
def get_arglist(self, inname: str) -> T.List[str]:
plainname = os.path.basename(inname)
basename = os.path.splitext(plainname)[0]
return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.arglist]
@staticmethod
def is_parent_path(parent: str, trial: str) -> bool:
relpath = pathlib.PurePath(trial).relative_to(parent)
return relpath.parts[0] != '..' # For subdirs we can only go "down".
def process_files(self, files: T.Iterable[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList']],
state: T.Union['Interpreter', 'ModuleState'],
preserve_path_from: T.Optional[str] = None,
extra_args: T.Optional[T.List[str]] = None) -> 'GeneratedList':
output = GeneratedList(self, state.subdir, preserve_path_from, extra_args=extra_args if extra_args is not None else [])
for e in files:
if isinstance(e, CustomTarget):
output.depends.add(e)
if isinstance(e, CustomTargetIndex):
output.depends.add(e.target)
if isinstance(e, (CustomTarget, CustomTargetIndex, GeneratedList)):
self.depends.append(e) # BUG: this should go in the GeneratedList object, not this object.
fs = [File.from_built_file(state.subdir, f) for f in e.get_outputs()]
elif isinstance(e, str):
fs = [File.from_source_file(state.environment.source_dir, state.subdir, e)]
else:
fs = [e]
for f in fs:
if preserve_path_from:
abs_f = f.absolute_path(state.environment.source_dir, state.environment.build_dir)
if not self.is_parent_path(preserve_path_from, abs_f):
raise InvalidArguments('generator.process: When using preserve_path_from, all input files must be in a subdirectory of the given dir.')
output.add_file(f, state)
return output
@dataclass(eq=False)
class GeneratedList(HoldableObject):
"""The output of generator.process."""
generator: Generator
subdir: str
preserve_path_from: T.Optional[str]
extra_args: T.List[str]
def __post_init__(self) -> None:
self.name = self.generator.exe
self.depends: T.Set['CustomTarget'] = set() # Things this target depends on (because e.g. a custom target was used as input)
self.infilelist: T.List['File'] = []
self.outfilelist: T.List[str] = []
self.outmap: T.Dict[File, T.List[str]] = {}
self.extra_depends = [] # XXX: Doesn't seem to be used?
self.depend_files: T.List[File] = []
if self.extra_args is None:
self.extra_args: T.List[str] = []
if isinstance(self.generator.exe, programs.ExternalProgram):
if not self.generator.exe.found():
                raise InvalidArguments('Tried to use a not-found external program as a generator')
path = self.generator.exe.get_path()
if os.path.isabs(path):
# Can only add a dependency on an external program which we
# know the absolute path of
self.depend_files.append(File.from_absolute_file(path))
def add_preserved_path_segment(self, infile: File, outfiles: T.List[str], state: T.Union['Interpreter', 'ModuleState']) -> T.List[str]:
result: T.List[str] = []
in_abs = infile.absolute_path(state.environment.source_dir, state.environment.build_dir)
assert os.path.isabs(self.preserve_path_from)
rel = os.path.relpath(in_abs, self.preserve_path_from)
path_segment = os.path.dirname(rel)
for of in outfiles:
result.append(os.path.join(path_segment, of))
return result
def add_file(self, newfile: File, state: T.Union['Interpreter', 'ModuleState']) -> None:
self.infilelist.append(newfile)
outfiles = self.generator.get_base_outnames(newfile.fname)
if self.preserve_path_from:
outfiles = self.add_preserved_path_segment(newfile, outfiles, state)
self.outfilelist += outfiles
self.outmap[newfile] = outfiles
def get_inputs(self) -> T.List['File']:
return self.infilelist
def get_outputs(self) -> T.List[str]:
return self.outfilelist
def get_outputs_for(self, filename: 'File') -> T.List[str]:
return self.outmap[filename]
def get_generator(self) -> 'Generator':
return self.generator
def get_extra_args(self) -> T.List[str]:
return self.extra_args
def get_subdir(self) -> str:
return self.subdir
class Executable(BuildTarget):
known_kwargs = known_exe_kwargs
def __init__(self, name: str, subdir: str, subproject: str, for_machine: MachineChoice,
sources: T.List[File], objects, environment: environment.Environment, kwargs):
self.typename = 'executable'
key = OptionKey('b_pie')
if 'pie' not in kwargs and key in environment.coredata.options:
kwargs['pie'] = environment.coredata.options[key].value
super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
# Unless overridden, executables have no suffix or prefix. Except on
# Windows and with C#/Mono executables where the suffix is 'exe'
if not hasattr(self, 'prefix'):
self.prefix = ''
if not hasattr(self, 'suffix'):
machine = environment.machines[for_machine]
# Executable for Windows or C#/Mono
if machine.is_windows() or machine.is_cygwin() or 'cs' in self.compilers:
self.suffix = 'exe'
elif machine.system.startswith('wasm') or machine.system == 'emscripten':
self.suffix = 'js'
elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('armclang') or
'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('armclang')):
self.suffix = 'axf'
elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or
'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')):
self.suffix = 'abs'
elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('xc16')):
self.suffix = 'elf'
elif ('c' in self.compilers and self.compilers['c'].get_id() in ('ti', 'c2000') or
'cpp' in self.compilers and self.compilers['cpp'].get_id() in ('ti', 'c2000')):
self.suffix = 'out'
else:
self.suffix = environment.machines[for_machine].get_exe_suffix()
self.filename = self.name
if self.suffix:
self.filename += '.' + self.suffix
self.outputs = [self.filename]
# The import library this target will generate
self.import_filename = None
# The import library that Visual Studio would generate (and accept)
self.vs_import_filename = None
# The import library that GCC would generate (and prefer)
self.gcc_import_filename = None
# The debugging information file this target will generate
self.debug_filename = None
# Check for export_dynamic
self.export_dynamic = False
if kwargs.get('export_dynamic'):
if not isinstance(kwargs['export_dynamic'], bool):
raise InvalidArguments('"export_dynamic" keyword argument must be a boolean')
self.export_dynamic = True
if kwargs.get('implib'):
self.export_dynamic = True
if self.export_dynamic and kwargs.get('implib') is False:
            raise InvalidArguments('"implib" keyword argument must not be false if "export_dynamic" is true')
m = environment.machines[for_machine]
# If using export_dynamic, set the import library name
if self.export_dynamic:
implib_basename = self.name + '.exe'
if not isinstance(kwargs.get('implib', False), bool):
implib_basename = kwargs['implib']
if m.is_windows() or m.is_cygwin():
self.vs_import_filename = f'{implib_basename}.lib'
self.gcc_import_filename = f'lib{implib_basename}.a'
if self.get_using_msvc():
self.import_filename = self.vs_import_filename
else:
self.import_filename = self.gcc_import_filename
if m.is_windows() and ('cs' in self.compilers or
self.uses_rust() or
self.get_using_msvc()):
self.debug_filename = self.name + '.pdb'
# Only linkwithable if using export_dynamic
self.is_linkwithable = self.export_dynamic
# Remember that this exe was returned by `find_program()` through an override
self.was_returned_by_find_program = False
def get_default_install_dir(self, environment: environment.Environment) -> T.Tuple[str, str]:
return environment.get_bindir(), '{bindir}'
def description(self):
'''Human friendly description of the executable'''
return self.name
def type_suffix(self):
return "@exe"
def get_import_filename(self) -> T.Optional[str]:
"""
        The name of the import library that will be generated by the compiler.
        Returns None if there is no import library required for this platform.
"""
return self.import_filename
def get_import_filenameslist(self):
if self.import_filename:
return [self.vs_import_filename, self.gcc_import_filename]
return []
def get_debug_filename(self) -> T.Optional[str]:
"""
        The name of the debuginfo file that will be created by the compiler.
        Returns None if the build won't create any debuginfo file.
"""
return self.debug_filename
def is_linkable_target(self):
return self.is_linkwithable
def get_command(self) -> 'ImmutableListProtocol[str]':
"""Provides compatibility with ExternalProgram.
Since you can override ExternalProgram instances with Executables.
"""
return self.outputs
class StaticLibrary(BuildTarget):
known_kwargs = known_stlib_kwargs
def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
self.typename = 'static library'
super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
if 'cs' in self.compilers:
raise InvalidArguments('Static libraries not supported for C#.')
if 'rust' in self.compilers:
# If no crate type is specified, or it's the generic lib type, use rlib
if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib':
mlog.debug('Defaulting Rust static library target crate type to rlib')
self.rust_crate_type = 'rlib'
# Don't let configuration proceed with a non-static crate type
elif self.rust_crate_type not in ['rlib', 'staticlib']:
raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for static libraries; must be "rlib" or "staticlib"')
# By default a static library is named libfoo.a even on Windows because
# MSVC does not have a consistent convention for what static libraries
# are called. The MSVC CRT uses libfoo.lib syntax but nothing else uses
# it and GCC only looks for static libraries called foo.lib and
# libfoo.a. However, we cannot use foo.lib because that's the same as
# the import library. Using libfoo.a is ok because people using MSVC
# always pass the library filename while linking anyway.
if not hasattr(self, 'prefix'):
self.prefix = 'lib'
if not hasattr(self, 'suffix'):
if 'rust' in self.compilers:
if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'rlib':
# default Rust static library suffix
self.suffix = 'rlib'
elif self.rust_crate_type == 'staticlib':
self.suffix = 'a'
else:
self.suffix = 'a'
self.filename = self.prefix + self.name + '.' + self.suffix
self.outputs = [self.filename]
self.prelink = kwargs.get('prelink', False)
if not isinstance(self.prelink, bool):
raise InvalidArguments('Prelink keyword argument must be a boolean.')
def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
return {}
def get_default_install_dir(self, environment) -> T.Tuple[str, str]:
return environment.get_static_lib_dir(), '{libdir_static}'
def type_suffix(self):
return "@sta"
def process_kwargs(self, kwargs, environment):
super().process_kwargs(kwargs, environment)
if 'rust_crate_type' in kwargs:
rust_crate_type = kwargs['rust_crate_type']
if isinstance(rust_crate_type, str):
self.rust_crate_type = rust_crate_type
else:
raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.')
def is_linkable_target(self):
return True
class SharedLibrary(BuildTarget):
known_kwargs = known_shlib_kwargs
def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
self.typename = 'shared library'
self.soversion = None
self.ltversion = None
# Max length 2, first element is compatibility_version, second is current_version
self.darwin_versions = []
self.vs_module_defs = None
# The import library this target will generate
self.import_filename = None
# The import library that Visual Studio would generate (and accept)
self.vs_import_filename = None
# The import library that GCC would generate (and prefer)
self.gcc_import_filename = None
# The debugging information file this target will generate
self.debug_filename = None
        # Used by the pkgconfig module
self.shared_library_only = False
super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
if 'rust' in self.compilers:
# If no crate type is specified, or it's the generic lib type, use dylib
if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib':
mlog.debug('Defaulting Rust dynamic library target crate type to "dylib"')
self.rust_crate_type = 'dylib'
# Don't let configuration proceed with a non-dynamic crate type
elif self.rust_crate_type not in ['dylib', 'cdylib']:
raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for dynamic libraries; must be "dylib" or "cdylib"')
if not hasattr(self, 'prefix'):
self.prefix = None
if not hasattr(self, 'suffix'):
self.suffix = None
self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
self.determine_filenames(environment)
def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
result: T.Dict[str, str] = {}
mappings = self.get_transitive_link_deps_mapping(prefix, environment)
old = get_target_macos_dylib_install_name(self)
if old not in mappings:
fname = self.get_filename()
outdirs, _, _ = self.get_install_dir(self.environment)
new = os.path.join(prefix, outdirs[0], fname)
result.update({old: new})
mappings.update(result)
return mappings
def get_default_install_dir(self, environment) -> T.Tuple[str, str]:
return environment.get_shared_lib_dir(), '{libdir_shared}'
def determine_filenames(self, env):
"""
See https://github.com/mesonbuild/meson/pull/417 for details.
First we determine the filename template (self.filename_tpl), then we
set the output filename (self.filename).
The template is needed while creating aliases (self.get_aliases),
which are needed while generating .so shared libraries for Linux.
Besides this, there's also the import library name, which is only used
on Windows since on that platform the linker uses a separate library
called the "import library" during linking instead of the shared
library (DLL). The toolchain will output an import library in one of
two formats: GCC or Visual Studio.
When we're building with Visual Studio, the import library that will be
generated by the toolchain is self.vs_import_filename, and with
MinGW/GCC, it's self.gcc_import_filename. self.import_filename will
always contain the import library name this target will generate.
"""
prefix = ''
suffix = ''
create_debug_file = False
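        # Typical results with default prefix/suffix: 'libfoo.so.1.2.3' on
        # Linux, 'libfoo.1.dylib' on macOS, and with MSVC 'foo.dll' plus the
        # 'foo.lib' import library on Windows.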
self.filename_tpl = self.basic_filename_tpl
# NOTE: manual prefix/suffix override is currently only tested for C/C++
# C# and Mono
if 'cs' in self.compilers:
prefix = ''
suffix = 'dll'
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
create_debug_file = True
# C, C++, Swift, Vala
# Only Windows uses a separate import library for linking
# For all other targets/platforms import_filename stays None
elif env.machines[self.for_machine].is_windows():
suffix = 'dll'
self.vs_import_filename = '{}{}.lib'.format(self.prefix if self.prefix is not None else '', self.name)
self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
if self.uses_rust():
# Shared library is of the form foo.dll
prefix = ''
# Import library is called foo.dll.lib
self.import_filename = f'{self.name}.dll.lib'
create_debug_file = True
elif self.get_using_msvc():
# Shared library is of the form foo.dll
prefix = ''
# Import library is called foo.lib
self.import_filename = self.vs_import_filename
create_debug_file = True
# Assume GCC-compatible naming
else:
# Shared library is of the form libfoo.dll
prefix = 'lib'
# Import library is called libfoo.dll.a
self.import_filename = self.gcc_import_filename
# Shared library has the soversion if it is defined
if self.soversion:
self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
else:
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
elif env.machines[self.for_machine].is_cygwin():
suffix = 'dll'
self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
# Shared library is of the form cygfoo.dll
# (ld --dll-search-prefix=cyg is the default)
prefix = 'cyg'
# Import library is called libfoo.dll.a
self.import_filename = self.gcc_import_filename
if self.soversion:
self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
else:
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
elif env.machines[self.for_machine].is_darwin():
prefix = 'lib'
suffix = 'dylib'
# On macOS, the filename can only contain the major version
if self.soversion:
# libfoo.X.dylib
self.filename_tpl = '{0.prefix}{0.name}.{0.soversion}.{0.suffix}'
else:
# libfoo.dylib
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
elif env.machines[self.for_machine].is_android():
prefix = 'lib'
suffix = 'so'
# Android doesn't support shared_library versioning
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
else:
prefix = 'lib'
suffix = 'so'
if self.ltversion:
# libfoo.so.X[.Y[.Z]] (.Y and .Z are optional)
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.ltversion}'
elif self.soversion:
# libfoo.so.X
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.soversion}'
else:
# No versioning, libfoo.so
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
if self.prefix is None:
self.prefix = prefix
if self.suffix is None:
self.suffix = suffix
self.filename = self.filename_tpl.format(self)
self.outputs = [self.filename]
if create_debug_file:
self.debug_filename = os.path.splitext(self.filename)[0] + '.pdb'
@staticmethod
def _validate_darwin_versions(darwin_versions):
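        # Normalizes the input, e.g. 7 -> ['7', '7'] and '1.2' -> ['1.2', '1.2'];
        # a two-element list is kept as [compatibility_version, current_version].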
try:
if isinstance(darwin_versions, int):
darwin_versions = str(darwin_versions)
if isinstance(darwin_versions, str):
darwin_versions = 2 * [darwin_versions]
if not isinstance(darwin_versions, list):
                raise InvalidArguments('Shared library darwin_versions: must be a string, integer, '
                                       f'or a list, not {darwin_versions!r}')
if len(darwin_versions) > 2:
raise InvalidArguments('Shared library darwin_versions: list must contain 2 or fewer elements')
if len(darwin_versions) == 1:
darwin_versions = 2 * darwin_versions
for i, v in enumerate(darwin_versions[:]):
if isinstance(v, int):
v = str(v)
if not isinstance(v, str):
raise InvalidArguments('Shared library darwin_versions: list elements '
f'must be strings or integers, not {v!r}')
if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', v):
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z where '
'X, Y, Z are numbers, and Y and Z are optional')
parts = v.split('.')
if len(parts) in (1, 2, 3) and int(parts[0]) > 65535:
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
'where X is [0, 65535] and Y, Z are optional')
if len(parts) in (2, 3) and int(parts[1]) > 255:
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
'where Y is [0, 255] and Y, Z are optional')
if len(parts) == 3 and int(parts[2]) > 255:
raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
'where Z is [0, 255] and Y, Z are optional')
darwin_versions[i] = v
except ValueError:
raise InvalidArguments('Shared library darwin_versions: value is invalid')
return darwin_versions
def process_kwargs(self, kwargs, environment):
super().process_kwargs(kwargs, environment)
if not self.environment.machines[self.for_machine].is_android():
supports_versioning = True
else:
supports_versioning = False
if supports_versioning:
# Shared library version
if 'version' in kwargs:
self.ltversion = kwargs['version']
if not isinstance(self.ltversion, str):
raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__)
if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion):
raise InvalidArguments(f'Invalid Shared library version "{self.ltversion}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.')
# Try to extract/deduce the soversion
if 'soversion' in kwargs:
self.soversion = kwargs['soversion']
if isinstance(self.soversion, int):
self.soversion = str(self.soversion)
if not isinstance(self.soversion, str):
raise InvalidArguments('Shared library soversion is not a string or integer.')
elif self.ltversion:
# library version is defined, get the soversion from that
# We replicate what Autotools does here and take the first
# number of the version by default.
self.soversion = self.ltversion.split('.')[0]
# macOS, iOS and tvOS dylib compatibility_version and current_version
if 'darwin_versions' in kwargs:
self.darwin_versions = self._validate_darwin_versions(kwargs['darwin_versions'])
elif self.soversion:
# If unspecified, pick the soversion
self.darwin_versions = 2 * [self.soversion]
# Visual Studio module-definitions file
if 'vs_module_defs' in kwargs:
path = kwargs['vs_module_defs']
if isinstance(path, str):
if os.path.isabs(path):
self.vs_module_defs = File.from_absolute_file(path)
else:
self.vs_module_defs = File.from_source_file(environment.source_dir, self.subdir, path)
elif isinstance(path, File):
# When passing a generated file.
self.vs_module_defs = path
elif hasattr(path, 'get_filename'):
# When passing output of a Custom Target
self.vs_module_defs = File.from_built_file(path.subdir, path.get_filename())
else:
raise InvalidArguments(
'Shared library vs_module_defs must be either a string, '
'a file object or a Custom Target')
self.process_link_depends(path, environment)
if 'rust_crate_type' in kwargs:
rust_crate_type = kwargs['rust_crate_type']
if isinstance(rust_crate_type, str):
self.rust_crate_type = rust_crate_type
else:
raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.')
def get_import_filename(self) -> T.Optional[str]:
"""
        The name of the import library that will be generated by the compiler.
        Returns None if there is no import library required for this platform.
"""
return self.import_filename
def get_debug_filename(self) -> T.Optional[str]:
"""
        The name of the debuginfo file that will be created by the compiler.
        Returns None if the build won't create any debuginfo file.
"""
return self.debug_filename
def get_import_filenameslist(self):
if self.import_filename:
return [self.vs_import_filename, self.gcc_import_filename]
return []
def get_all_link_deps(self):
return [self] + self.get_transitive_link_deps()
def get_aliases(self) -> T.Dict[str, str]:
"""
If the versioned library name is libfoo.so.0.100.0, aliases are:
* libfoo.so.0 (soversion) -> libfoo.so.0.100.0
* libfoo.so (unversioned; for linking) -> libfoo.so.0
Same for dylib:
* libfoo.dylib (unversioned; for linking) -> libfoo.0.dylib
"""
aliases: T.Dict[str, str] = {}
# Aliases are only useful with .so and .dylib libraries. Also if
# there's no self.soversion (no versioning), we don't need aliases.
if self.suffix not in ('so', 'dylib') or not self.soversion:
return aliases
# With .so libraries, the minor and micro versions are also in the
# filename. If ltversion != soversion we create an soversion alias:
# libfoo.so.0 -> libfoo.so.0.100.0
# Where libfoo.so.0.100.0 is the actual library
if self.suffix == 'so' and self.ltversion and self.ltversion != self.soversion:
alias_tpl = self.filename_tpl.replace('ltversion', 'soversion')
ltversion_filename = alias_tpl.format(self)
aliases[ltversion_filename] = self.filename
# libfoo.so.0/libfoo.0.dylib is the actual library
else:
ltversion_filename = self.filename
# Unversioned alias:
# libfoo.so -> libfoo.so.0
# libfoo.dylib -> libfoo.0.dylib
aliases[self.basic_filename_tpl.format(self)] = ltversion_filename
return aliases
def type_suffix(self):
return "@sha"
def is_linkable_target(self):
return True
# A shared library that is meant to be used with dlopen rather than linking
# into something else.
class SharedModule(SharedLibrary):
known_kwargs = known_shmod_kwargs
def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
if 'version' in kwargs:
raise MesonException('Shared modules must not specify the version kwarg.')
if 'soversion' in kwargs:
raise MesonException('Shared modules must not specify the soversion kwarg.')
super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
self.typename = 'shared module'
# We need to set the soname in cases where build files link the module
# to build targets, see: https://github.com/mesonbuild/meson/issues/9492
self.force_soname = False
def get_default_install_dir(self, environment) -> T.Tuple[str, str]:
return environment.get_shared_module_dir(), '{moduledir_shared}'
class BothLibraries(SecondLevelHolder):
def __init__(self, shared: SharedLibrary, static: StaticLibrary) -> None:
self._preferred_library = 'shared'
self.shared = shared
self.static = static
self.subproject = self.shared.subproject
def __repr__(self) -> str:
return f'<BothLibraries: static={repr(self.static)}; shared={repr(self.shared)}>'
def get_default_object(self) -> BuildTarget:
if self._preferred_library == 'shared':
return self.shared
elif self._preferred_library == 'static':
return self.static
raise MesonBugException(f'self._preferred_library == "{self._preferred_library}" is neither "shared" nor "static".')
class CommandBase:
depend_files: T.List[File]
dependencies: T.List[T.Union[BuildTarget, 'CustomTarget']]
subproject: str
def flatten_command(self, cmd: T.Sequence[T.Union[str, File, programs.ExternalProgram, 'BuildTarget', 'CustomTarget', 'CustomTargetIndex']]) -> \
T.List[T.Union[str, File, BuildTarget, 'CustomTarget']]:
cmd = listify(cmd)
final_cmd: T.List[T.Union[str, File, BuildTarget, 'CustomTarget']] = []
for c in cmd:
if isinstance(c, str):
final_cmd.append(c)
elif isinstance(c, File):
self.depend_files.append(c)
final_cmd.append(c)
elif isinstance(c, programs.ExternalProgram):
if not c.found():
raise InvalidArguments('Tried to use not-found external program in "command"')
path = c.get_path()
if os.path.isabs(path):
# Can only add a dependency on an external program which we
# know the absolute path of
self.depend_files.append(File.from_absolute_file(path))
final_cmd += c.get_command()
elif isinstance(c, (BuildTarget, CustomTarget)):
self.dependencies.append(c)
final_cmd.append(c)
elif isinstance(c, CustomTargetIndex):
FeatureNew.single_use('CustomTargetIndex for command argument', '0.60', self.subproject)
self.dependencies.append(c.target)
final_cmd += self.flatten_command(File.from_built_file(c.get_subdir(), c.get_filename()))
elif isinstance(c, list):
final_cmd += self.flatten_command(c)
else:
raise InvalidArguments(f'Argument {c!r} in "command" is invalid')
return final_cmd
class CustomTarget(Target, CommandBase):
typename = 'custom'
def __init__(self,
name: T.Optional[str],
subdir: str,
subproject: str,
command: T.Sequence[T.Union[
str, BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList, programs.ExternalProgram, File]],
sources: T.Sequence[T.Union[
str, File, BuildTarget, CustomTarget, CustomTargetIndex,
ExtractedObjects, GeneratedList, programs.ExternalProgram]],
outputs: T.List[str],
*,
build_always_stale: bool = False,
build_by_default: T.Optional[bool] = None,
capture: bool = False,
console: bool = False,
depend_files: T.Optional[T.Sequence[FileOrString]] = None,
extra_depends: T.Optional[T.Sequence[T.Union[str, SourceOutputs]]] = None,
depfile: T.Optional[str] = None,
env: T.Optional[EnvironmentVariables] = None,
feed: bool = False,
install: bool = False,
install_dir: T.Optional[T.Sequence[T.Union[str, bool]]] = None,
install_mode: T.Optional[FileMode] = None,
install_tag: T.Optional[T.Sequence[T.Optional[str]]] = None,
override_options: T.Optional[T.Dict[OptionKey, str]] = None,
absolute_paths: bool = False,
backend: T.Optional['Backend'] = None,
):
# TODO expose keyword arg to make MachineChoice.HOST configurable
super().__init__(name, subdir, subproject, False, MachineChoice.HOST)
self.sources = list(sources)
self.outputs = substitute_values(
outputs, get_filenames_templates_dict(
get_sources_string_names(sources, backend),
[]))
self.build_by_default = build_by_default if build_by_default is not None else install
self.build_always_stale = build_always_stale
self.capture = capture
self.console = console
self.depend_files = list(depend_files or [])
self.dependencies: T.List[T.Union[CustomTarget, BuildTarget]] = []
# must be after depend_files and dependencies
self.command = self.flatten_command(command)
self.depfile = depfile
self.env = env or EnvironmentVariables()
self.extra_depends = list(extra_depends or [])
self.feed = feed
self.install = install
self.install_dir = list(install_dir or [])
self.install_mode = install_mode
_install_tag: T.List[T.Optional[str]]
if not install_tag:
_install_tag = [None] * len(self.outputs)
elif len(install_tag) == 1:
_install_tag = list(install_tag) * len(self.outputs)
else:
_install_tag = list(install_tag)
self.install_tag = _install_tag
self.name = name if name else self.outputs[0]
if override_options:
for k, v in override_options.items():
if k.lang:
self.option_overrides_compiler[k.evolve(machine=self.for_machine)] = v
else:
self.option_overrides_base[k] = v
# Whether to use absolute paths for all files on the commandline
self.absolute_paths = absolute_paths
def get_default_install_dir(self, environment) -> T.Tuple[str, str]:
return None, None
def __repr__(self):
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.command)
def get_target_dependencies(self) -> T.List[T.Union['BuildTarget', 'CustomTarget']]:
deps = self.dependencies[:]
deps += self.extra_depends
for c in self.sources:
if isinstance(c, (BuildTarget, CustomTarget)):
deps.append(c)
if isinstance(c, CustomTargetIndex):
deps.append(c.target)
return deps
def get_transitive_build_target_deps(self) -> T.Set[T.Union[BuildTarget, 'CustomTarget']]:
'''
Recursively fetch the build targets that this custom target depends on,
whether through `command:`, `depends:`, or `sources:`. The recursion is
only performed on custom targets.
This is useful for setting PATH on Windows for finding required DLLs.
For example, a Python script may load a C module that links against
other DLLs in your project.
'''
bdeps: T.Set[T.Union[BuildTarget, 'CustomTarget']] = set()
deps = self.get_target_dependencies()
for d in deps:
if isinstance(d, BuildTarget):
bdeps.add(d)
elif isinstance(d, CustomTarget):
bdeps.update(d.get_transitive_build_target_deps())
return bdeps
def get_dependencies(self):
return self.dependencies
def should_install(self) -> bool:
return self.install
def get_custom_install_dir(self) -> T.List[T.Union[str, bool]]:
return self.install_dir
def get_custom_install_mode(self) -> T.Optional['FileMode']:
return self.install_mode
def get_outputs(self) -> T.List[str]:
return self.outputs
def get_filename(self) -> str:
return self.outputs[0]
def get_sources(self) -> T.List[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList', 'ExtractedObjects']]:
return self.sources
def get_generated_lists(self) -> T.List[GeneratedList]:
genlists: T.List[GeneratedList] = []
for c in self.sources:
if isinstance(c, GeneratedList):
genlists.append(c)
return genlists
def get_generated_sources(self) -> T.List[GeneratedList]:
return self.get_generated_lists()
def get_dep_outname(self, infilenames):
if self.depfile is None:
raise InvalidArguments('Tried to get depfile name for custom_target that does not have depfile defined.')
if infilenames:
plainname = os.path.basename(infilenames[0])
basename = os.path.splitext(plainname)[0]
return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
else:
if '@BASENAME@' in self.depfile or '@PLAINNAME@' in self.depfile:
raise InvalidArguments('Substitution in depfile for custom_target that does not have an input file.')
return self.depfile
def is_linkable_target(self) -> bool:
if len(self.outputs) != 1:
return False
suf = os.path.splitext(self.outputs[0])[-1]
return suf in {'.a', '.dll', '.lib', '.so', '.dylib'}
def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
return {}
def get_link_dep_subdirs(self):
return OrderedSet()
def get_all_link_deps(self):
return []
def is_internal(self) -> bool:
'''
Returns True if and only if this is a static library that is not installed.
'''
if len(self.outputs) != 1:
return False
return CustomTargetIndex(self, self.outputs[0]).is_internal()
def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
return self.get_outputs()
def type_suffix(self):
return "@cus"
def __getitem__(self, index: int) -> 'CustomTargetIndex':
return CustomTargetIndex(self, self.outputs[index])
def __setitem__(self, index, value):
raise NotImplementedError
def __delitem__(self, index):
raise NotImplementedError
def __iter__(self):
for i in self.outputs:
yield CustomTargetIndex(self, i)
def __len__(self) -> int:
return len(self.outputs)
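# --- Illustrative sketch (not part of upstream Meson) ---------------------------
# Indexing a CustomTarget yields CustomTargetIndex proxies, one per output, and
# iterating walks the outputs in declaration order. This helper only exercises
# the dunder methods defined above; the target argument is assumed to be an
# already-constructed CustomTarget.
def _custom_target_indexing_example(ct: 'CustomTarget') -> None:
first = ct[0]  # CustomTargetIndex wrapping ct.outputs[0]
assert first.get_filename() == ct.get_outputs()[0]
assert len(ct) == len(ct.get_outputs())
for index in ct:  # one CustomTargetIndex per output
assert index.target is ct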
class RunTarget(Target, CommandBase):
def __init__(self, name: str,
command: T.Sequence[T.Union[str, File, BuildTarget, 'CustomTarget', 'CustomTargetIndex', programs.ExternalProgram]],
dependencies: T.Sequence[Target],
subdir: str,
subproject: str,
env: T.Optional['EnvironmentVariables'] = None):
self.typename = 'run'
# These don't produce output artifacts
super().__init__(name, subdir, subproject, False, MachineChoice.BUILD)
self.dependencies = dependencies
self.depend_files = []
self.command = self.flatten_command(command)
self.absolute_paths = False
self.env = env
def __repr__(self) -> str:
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.command[0])
def get_dependencies(self) -> T.List[T.Union[BuildTarget, 'CustomTarget']]:
return self.dependencies
def get_generated_sources(self) -> T.List['GeneratedTypes']:
return []
def get_sources(self) -> T.List[File]:
return []
def should_install(self) -> bool:
return False
def get_filename(self) -> str:
return self.name
def get_outputs(self) -> T.List[str]:
if isinstance(self.name, str):
return [self.name]
elif isinstance(self.name, list):
return self.name
else:
raise RuntimeError('RunTarget: self.name is neither a list nor a string. This is a bug')
def type_suffix(self) -> str:
return "@run"
class AliasTarget(RunTarget):
def __init__(self, name: str, dependencies: T.Sequence['Target'],
subdir: str, subproject: str):
super().__init__(name, [], dependencies, subdir, subproject)
def __repr__(self):
repr_str = "<{0} {1}>"
return repr_str.format(self.__class__.__name__, self.get_id())
class Jar(BuildTarget):
known_kwargs = known_jar_kwargs
def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
self.typename = 'jar'
super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
for s in self.sources:
if not s.endswith('.java'):
raise InvalidArguments(f'Jar source {s} is not a java file.')
for t in self.link_targets:
if not isinstance(t, Jar):
raise InvalidArguments(f'Link target {t} is not a jar target.')
self.filename = self.name + '.jar'
self.outputs = [self.filename]
self.java_args = kwargs.get('java_args', [])
def get_main_class(self):
return self.main_class
def type_suffix(self):
return "@jar"
def get_java_args(self):
return self.java_args
def validate_install(self, environment):
# All jar targets are installable.
pass
def is_linkable_target(self):
return True
def get_classpath_args(self):
cp_paths = [os.path.join(l.get_subdir(), l.get_filename()) for l in self.link_targets]
cp_string = os.pathsep.join(cp_paths)
if cp_string:
return ['-cp', cp_string]
return []
def get_default_install_dir(self, environment: environment.Environment) -> T.Tuple[str, str]:
return environment.get_jar_dir(), '{jardir}'
@dataclass(eq=False)
class CustomTargetIndex(HoldableObject):
"""A special opaque object returned by indexing a CustomTarget. This object
exists in Meson, but acts as a proxy in the backends, making targets depend
on the CustomTarget it's derived from, but only adding one source file to
the sources.
"""
target: CustomTarget
output: str
def __post_init__(self) -> None:
self.typename = 'custom'
self.for_machine = self.target.for_machine
@property
def name(self) -> str:
return f'{self.target.name}[{self.output}]'
def __repr__(self):
return '<CustomTargetIndex: {!r}[{}]>'.format(
self.target, self.target.get_outputs().index(self.output))
def get_outputs(self) -> T.List[str]:
return [self.output]
def get_subdir(self) -> str:
return self.target.get_subdir()
def get_filename(self) -> str:
return self.output
def get_id(self) -> str:
return self.target.get_id()
def get_all_link_deps(self):
return self.target.get_all_link_deps()
def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
return self.target.get_link_deps_mapping(prefix, environment)
def get_link_dep_subdirs(self):
return self.target.get_link_dep_subdirs()
def is_linkable_target(self) -> bool:
suf = os.path.splitext(self.output)[-1]
return suf in {'.a', '.dll', '.lib', '.so'}
def should_install(self) -> bool:
return self.target.should_install()
def is_internal(self) -> bool:
'''
Returns True if and only if this is a static library that is not installed
'''
suf = os.path.splitext(self.output)[-1]
return suf in {'.a', '.lib'} and not self.should_install()
def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
return self.target.extract_all_objects_recurse()
def get_custom_install_dir(self) -> T.List[T.Union[str, bool]]:
return self.target.get_custom_install_dir()
class ConfigurationData(HoldableObject):
def __init__(self, initial_values: T.Optional[T.Union[
T.Dict[str, T.Tuple[T.Union[str, int, bool], T.Optional[str]]],
T.Dict[str, T.Union[str, int, bool]]]
] = None):
super().__init__()
self.values: T.Dict[str, T.Tuple[T.Union[str, int, bool], T.Optional[str]]] = \
{k: v if isinstance(v, tuple) else (v, None) for k, v in initial_values.items()} if initial_values else {}
self.used: bool = False
def __repr__(self) -> str:
return repr(self.values)
def __contains__(self, value: str) -> bool:
return value in self.values
def __bool__(self) -> bool:
return bool(self.values)
def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
return self.values[name] # (val, desc)
def keys(self) -> T.Iterator[str]:
return self.values.keys()
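# --- Illustrative sketch (not part of upstream Meson) ---------------------------
# ConfigurationData stores every entry as a (value, description) tuple; plain
# values passed to the constructor get a None description.
def _configuration_data_example() -> None:
cd = ConfigurationData({'HAVE_FOO': True, 'VERSION': ('1.2', 'project version')})
assert cd.get('HAVE_FOO') == (True, None)  # plain value -> (value, None)
assert cd.get('VERSION') == ('1.2', 'project version')
assert 'HAVE_FOO' in cd and bool(cd)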
# A bit poorly named, but this represents plain data files to copy
# during install.
@dataclass(eq=False)
class Data(HoldableObject):
sources: T.List[File]
install_dir: str
install_dir_name: str
install_mode: 'FileMode'
subproject: str
rename: T.Optional[T.List[str]] = None
install_tag: T.Optional[str] = None
data_type: T.Optional[str] = None
def __post_init__(self) -> None:
if self.rename is None:
self.rename = [os.path.basename(f.fname) for f in self.sources]
@dataclass(eq=False)
class SymlinkData(HoldableObject):
target: str
name: str
install_dir: str
subproject: str
install_tag: T.Optional[str] = None
def __post_init__(self) -> None:
if self.name != os.path.basename(self.name):
raise InvalidArguments(f'Link name is "{self.name}", but link names cannot contain path separators. '
'The dir part should be in install_dir.')
@dataclass(eq=False)
class TestSetup:
exe_wrapper: T.List[str]
gdb: bool
timeout_multiplier: int
env: EnvironmentVariables
exclude_suites: T.List[str]
def get_sources_string_names(sources, backend):
'''
For the specified list of @sources which can be strings, Files, or targets,
get all the output basenames.
'''
names = []
for s in sources:
if isinstance(s, str):
names.append(s)
elif isinstance(s, (BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList)):
names += s.get_outputs()
elif isinstance(s, ExtractedObjects):
names += s.get_outputs(backend)
elif isinstance(s, File):
names.append(s.fname)
else:
raise AssertionError(f'Unknown source type: {s!r}')
return names
def load(build_dir: str) -> Build:
filename = os.path.join(build_dir, 'meson-private', 'build.dat')
load_fail_msg = f'Build data file {filename!r} is corrupted. Try with a fresh build tree.'
nonexisting_fail_msg = f'No such build data file as {filename!r}.'
try:
with open(filename, 'rb') as f:
obj = pickle.load(f)
except FileNotFoundError:
raise MesonException(nonexisting_fail_msg)
except (pickle.UnpicklingError, EOFError):
raise MesonException(load_fail_msg)
except AttributeError:
raise MesonException(
f"Build data file {filename!r} references functions or classes that don't "
"exist. This probably means that it was generated with an old "
"version of meson. Try running from the source directory "
f"meson {build_dir} --wipe")
if not isinstance(obj, Build):
raise MesonException(load_fail_msg)
return obj
def save(obj: Build, filename: str) -> None:
with open(filename, 'wb') as f:
pickle.dump(obj, f)
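# --- Illustrative sketch (not part of upstream Meson) ---------------------------
# load() unpickles meson-private/build.dat from a build directory, raising
# MesonException on missing or corrupt data; save() writes a Build back out.
# The directory path below is a hypothetical placeholder.
def _build_data_roundtrip_example() -> None:
build = load('/path/to/builddir')
save(build, '/path/to/builddir/meson-private/build.dat')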
|
//=================================================================================================
/*!
// \file blazemark/blaze/SVecDVecCross.h
// \brief Header file for the Blaze sparse vector/dense vector cross product kernel
//
// Copyright (C) 2013 Klaus Iglberger - All Rights Reserved
//
// This file is part of the Blaze library. You can redistribute it and/or modify it under
// the terms of the New (Revised) BSD License. Redistribution and use in source and binary
// forms, with or without modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other materials
// provided with the distribution.
// 3. Neither the names of the Blaze development group nor the names of its contributors
// may be used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
// SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
// BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
*/
//=================================================================================================
#ifndef _BLAZEMARK_BLAZE_SVECDVECCROSS_H_
#define _BLAZEMARK_BLAZE_SVECDVECCROSS_H_
//*************************************************************************************************
// Includes
//*************************************************************************************************
#include <blazemark/system/Types.h>
namespace blazemark {
namespace blaze {
//=================================================================================================
//
// KERNEL FUNCTIONS
//
//=================================================================================================
//*************************************************************************************************
/*!\name Blaze kernel functions */
//@{
double svecdveccross( size_t N, size_t F, size_t steps );
//@}
//*************************************************************************************************
} // namespace blaze
} // namespace blazemark
#endif
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module is part of the opsi PackageBuilder
see: https://forum.opsi.org/viewforum.php?f=22
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
__author__ = 'Holger Pandel'
__copyright__ = "Copyright 2013-2015, Holger Pandel"
__license__ = "MIT"
__maintainer__ = "Holger Pandel"
__email__ = "holger.pandel@googlemail.com"
__status__ = "Production"
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import QObject
import oPB
from oPB.controller.base import BaseController
from oPB.gui.deployagent import DeployAgentDialog
translate = QtCore.QCoreApplication.translate
class DeployAgentComponent(BaseController, QObject):
def __init__(self, parent):
super().__init__(self)
self._parent = parent
print("controller/DeployAgentComponent parent: ", self._parent, " -> self: ", self) if oPB.PRINTHIER else None
self.ui = None
self.ui = DeployAgentDialog(self)
self.connect_signals()
def connect_signals(self):
self.ui.dialogOpened.connect(self._parent.startup.hide_)
self.ui.dialogClosed.connect(self._parent.startup.show_)
def start_deploy(self, destination: list, options: dict):
"""
Starts deployment via backend routine.
The ``options`` parameter is a dictionary, mainly holding parameters for
the *opsi-deploy-client-agent* script:
options = {
"pre_action": <command string to execute before deployment>,
"user": <(domain) username>,
"pass": <password>,
"usefqdn": <true/false: use FQDN as client address>,
"ignoreping": <true/false: ignore ping check>,
"skipexisting": <true/false: ignore existing clients>,
"post_action": <post install action string: (do nothing), "startclient", "reboot", "shutdown">
"proceed": <true/false: (don't) wait for process to return, start via nohup>}
:param destination: list of client fqdn/ip addresses
:param options: see description
"""
self._parent.do_deploy(clientlist = destination, options = options,
dest = self._parent.query_depot(parent = self.ui, with_all = False, with_repo = False))
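# --- Illustrative sketch (not part of opsi PackageBuilder) ----------------------
# An ``options`` dictionary shaped as documented in start_deploy() above; all
# values are hypothetical placeholders.
_example_deploy_options = {
"pre_action": "",
"user": "DOMAIN\\administrator",
"pass": "secret",
"usefqdn": True,
"ignoreping": False,
"skipexisting": True,
"post_action": "startclient",
"proceed": True,
}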
|
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var msalCommon = require('@azure/msal-common');
var axios = _interopDefault(require('axios'));
var uuid = require('uuid');
var crypto = _interopDefault(require('crypto'));
var jsonwebtoken = require('jsonwebtoken');
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
try {
var info = gen[key](arg);
var value = info.value;
} catch (error) {
reject(error);
return;
}
if (info.done) {
resolve(value);
} else {
Promise.resolve(value).then(_next, _throw);
}
}
function _asyncToGenerator(fn) {
return function () {
var self = this,
args = arguments;
return new Promise(function (resolve, reject) {
var gen = fn.apply(self, args);
function _next(value) {
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
}
function _throw(err) {
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
}
_next(undefined);
});
};
}
function _extends() {
_extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
return _extends.apply(this, arguments);
}
function _inheritsLoose(subClass, superClass) {
subClass.prototype = Object.create(superClass.prototype);
subClass.prototype.constructor = subClass;
subClass.__proto__ = superClass;
}
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
var runtime_1 = createCommonjsModule(function (module) {
/**
* Copyright (c) 2014-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
var runtime = (function (exports) {
var Op = Object.prototype;
var hasOwn = Op.hasOwnProperty;
var undefined$1; // More compressible than void 0.
var $Symbol = typeof Symbol === "function" ? Symbol : {};
var iteratorSymbol = $Symbol.iterator || "@@iterator";
var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator";
var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag";
function define(obj, key, value) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
return obj[key];
}
try {
// IE 8 has a broken Object.defineProperty that only works on DOM objects.
define({}, "");
} catch (err) {
define = function(obj, key, value) {
return obj[key] = value;
};
}
function wrap(innerFn, outerFn, self, tryLocsList) {
// If outerFn provided and outerFn.prototype is a Generator, then outerFn.prototype instanceof Generator.
var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator;
var generator = Object.create(protoGenerator.prototype);
var context = new Context(tryLocsList || []);
// The ._invoke method unifies the implementations of the .next,
// .throw, and .return methods.
generator._invoke = makeInvokeMethod(innerFn, self, context);
return generator;
}
exports.wrap = wrap;
// Try/catch helper to minimize deoptimizations. Returns a completion
// record like context.tryEntries[i].completion. This interface could
// have been (and was previously) designed to take a closure to be
// invoked without arguments, but in all the cases we care about we
// already have an existing method we want to call, so there's no need
// to create a new function object. We can even get away with assuming
// the method takes exactly one argument, since that happens to be true
// in every case, so we don't have to touch the arguments object. The
// only additional allocation required is the completion record, which
// has a stable shape and so hopefully should be cheap to allocate.
function tryCatch(fn, obj, arg) {
try {
return { type: "normal", arg: fn.call(obj, arg) };
} catch (err) {
return { type: "throw", arg: err };
}
}
var GenStateSuspendedStart = "suspendedStart";
var GenStateSuspendedYield = "suspendedYield";
var GenStateExecuting = "executing";
var GenStateCompleted = "completed";
// Returning this object from the innerFn has the same effect as
// breaking out of the dispatch switch statement.
var ContinueSentinel = {};
// Dummy constructor functions that we use as the .constructor and
// .constructor.prototype properties for functions that return Generator
// objects. For full spec compliance, you may wish to configure your
// minifier not to mangle the names of these two functions.
function Generator() {}
function GeneratorFunction() {}
function GeneratorFunctionPrototype() {}
// This is a polyfill for %IteratorPrototype% for environments that
// don't natively support it.
var IteratorPrototype = {};
IteratorPrototype[iteratorSymbol] = function () {
return this;
};
var getProto = Object.getPrototypeOf;
var NativeIteratorPrototype = getProto && getProto(getProto(values([])));
if (NativeIteratorPrototype &&
NativeIteratorPrototype !== Op &&
hasOwn.call(NativeIteratorPrototype, iteratorSymbol)) {
// This environment has a native %IteratorPrototype%; use it instead
// of the polyfill.
IteratorPrototype = NativeIteratorPrototype;
}
var Gp = GeneratorFunctionPrototype.prototype =
Generator.prototype = Object.create(IteratorPrototype);
GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype;
GeneratorFunctionPrototype.constructor = GeneratorFunction;
GeneratorFunction.displayName = define(
GeneratorFunctionPrototype,
toStringTagSymbol,
"GeneratorFunction"
);
// Helper for defining the .next, .throw, and .return methods of the
// Iterator interface in terms of a single ._invoke method.
function defineIteratorMethods(prototype) {
["next", "throw", "return"].forEach(function(method) {
define(prototype, method, function(arg) {
return this._invoke(method, arg);
});
});
}
exports.isGeneratorFunction = function(genFun) {
var ctor = typeof genFun === "function" && genFun.constructor;
return ctor
? ctor === GeneratorFunction ||
// For the native GeneratorFunction constructor, the best we can
// do is to check its .name property.
(ctor.displayName || ctor.name) === "GeneratorFunction"
: false;
};
exports.mark = function(genFun) {
if (Object.setPrototypeOf) {
Object.setPrototypeOf(genFun, GeneratorFunctionPrototype);
} else {
genFun.__proto__ = GeneratorFunctionPrototype;
define(genFun, toStringTagSymbol, "GeneratorFunction");
}
genFun.prototype = Object.create(Gp);
return genFun;
};
// Within the body of any async function, `await x` is transformed to
// `yield regeneratorRuntime.awrap(x)`, so that the runtime can test
// `hasOwn.call(value, "__await")` to determine if the yielded value is
// meant to be awaited.
exports.awrap = function(arg) {
return { __await: arg };
};
function AsyncIterator(generator, PromiseImpl) {
function invoke(method, arg, resolve, reject) {
var record = tryCatch(generator[method], generator, arg);
if (record.type === "throw") {
reject(record.arg);
} else {
var result = record.arg;
var value = result.value;
if (value &&
typeof value === "object" &&
hasOwn.call(value, "__await")) {
return PromiseImpl.resolve(value.__await).then(function(value) {
invoke("next", value, resolve, reject);
}, function(err) {
invoke("throw", err, resolve, reject);
});
}
return PromiseImpl.resolve(value).then(function(unwrapped) {
// When a yielded Promise is resolved, its final value becomes
// the .value of the Promise<{value,done}> result for the
// current iteration.
result.value = unwrapped;
resolve(result);
}, function(error) {
// If a rejected Promise was yielded, throw the rejection back
// into the async generator function so it can be handled there.
return invoke("throw", error, resolve, reject);
});
}
}
var previousPromise;
function enqueue(method, arg) {
function callInvokeWithMethodAndArg() {
return new PromiseImpl(function(resolve, reject) {
invoke(method, arg, resolve, reject);
});
}
return previousPromise =
// If enqueue has been called before, then we want to wait until
// all previous Promises have been resolved before calling invoke,
// so that results are always delivered in the correct order. If
// enqueue has not been called before, then it is important to
// call invoke immediately, without waiting on a callback to fire,
// so that the async generator function has the opportunity to do
// any necessary setup in a predictable way. This predictability
// is why the Promise constructor synchronously invokes its
// executor callback, and why async functions synchronously
// execute code before the first await. Since we implement simple
// async functions in terms of async generators, it is especially
// important to get this right, even though it requires care.
previousPromise ? previousPromise.then(
callInvokeWithMethodAndArg,
// Avoid propagating failures to Promises returned by later
// invocations of the iterator.
callInvokeWithMethodAndArg
) : callInvokeWithMethodAndArg();
}
// Define the unified helper method that is used to implement .next,
// .throw, and .return (see defineIteratorMethods).
this._invoke = enqueue;
}
defineIteratorMethods(AsyncIterator.prototype);
AsyncIterator.prototype[asyncIteratorSymbol] = function () {
return this;
};
exports.AsyncIterator = AsyncIterator;
// Note that simple async functions are implemented on top of
// AsyncIterator objects; they just return a Promise for the value of
// the final result produced by the iterator.
exports.async = function(innerFn, outerFn, self, tryLocsList, PromiseImpl) {
if (PromiseImpl === void 0) PromiseImpl = Promise;
var iter = new AsyncIterator(
wrap(innerFn, outerFn, self, tryLocsList),
PromiseImpl
);
return exports.isGeneratorFunction(outerFn)
? iter // If outerFn is a generator, return the full iterator.
: iter.next().then(function(result) {
return result.done ? result.value : iter.next();
});
};
function makeInvokeMethod(innerFn, self, context) {
var state = GenStateSuspendedStart;
return function invoke(method, arg) {
if (state === GenStateExecuting) {
throw new Error("Generator is already running");
}
if (state === GenStateCompleted) {
if (method === "throw") {
throw arg;
}
// Be forgiving, per 25.3.3.3.3 of the spec:
// https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume
return doneResult();
}
context.method = method;
context.arg = arg;
while (true) {
var delegate = context.delegate;
if (delegate) {
var delegateResult = maybeInvokeDelegate(delegate, context);
if (delegateResult) {
if (delegateResult === ContinueSentinel) continue;
return delegateResult;
}
}
if (context.method === "next") {
// Setting context._sent for legacy support of Babel's
// function.sent implementation.
context.sent = context._sent = context.arg;
} else if (context.method === "throw") {
if (state === GenStateSuspendedStart) {
state = GenStateCompleted;
throw context.arg;
}
context.dispatchException(context.arg);
} else if (context.method === "return") {
context.abrupt("return", context.arg);
}
state = GenStateExecuting;
var record = tryCatch(innerFn, self, context);
if (record.type === "normal") {
// If an exception is thrown from innerFn, we leave state ===
// GenStateExecuting and loop back for another invocation.
state = context.done
? GenStateCompleted
: GenStateSuspendedYield;
if (record.arg === ContinueSentinel) {
continue;
}
return {
value: record.arg,
done: context.done
};
} else if (record.type === "throw") {
state = GenStateCompleted;
// Dispatch the exception by looping back around to the
// context.dispatchException(context.arg) call above.
context.method = "throw";
context.arg = record.arg;
}
}
};
}
// Call delegate.iterator[context.method](context.arg) and handle the
// result, either by returning a { value, done } result from the
// delegate iterator, or by modifying context.method and context.arg,
// setting context.delegate to null, and returning the ContinueSentinel.
function maybeInvokeDelegate(delegate, context) {
var method = delegate.iterator[context.method];
if (method === undefined$1) {
// A .throw or .return when the delegate iterator has no .throw
// method always terminates the yield* loop.
context.delegate = null;
if (context.method === "throw") {
// Note: ["return"] must be used for ES3 parsing compatibility.
if (delegate.iterator["return"]) {
// If the delegate iterator has a return method, give it a
// chance to clean up.
context.method = "return";
context.arg = undefined$1;
maybeInvokeDelegate(delegate, context);
if (context.method === "throw") {
// If maybeInvokeDelegate(context) changed context.method from
// "return" to "throw", let that override the TypeError below.
return ContinueSentinel;
}
}
context.method = "throw";
context.arg = new TypeError(
"The iterator does not provide a 'throw' method");
}
return ContinueSentinel;
}
var record = tryCatch(method, delegate.iterator, context.arg);
if (record.type === "throw") {
context.method = "throw";
context.arg = record.arg;
context.delegate = null;
return ContinueSentinel;
}
var info = record.arg;
if (! info) {
context.method = "throw";
context.arg = new TypeError("iterator result is not an object");
context.delegate = null;
return ContinueSentinel;
}
if (info.done) {
// Assign the result of the finished delegate to the temporary
// variable specified by delegate.resultName (see delegateYield).
context[delegate.resultName] = info.value;
// Resume execution at the desired location (see delegateYield).
context.next = delegate.nextLoc;
// If context.method was "throw" but the delegate handled the
// exception, let the outer generator proceed normally. If
// context.method was "next", forget context.arg since it has been
// "consumed" by the delegate iterator. If context.method was
// "return", allow the original .return call to continue in the
// outer generator.
if (context.method !== "return") {
context.method = "next";
context.arg = undefined$1;
}
} else {
// Re-yield the result returned by the delegate method.
return info;
}
// The delegate iterator is finished, so forget it and continue with
// the outer generator.
context.delegate = null;
return ContinueSentinel;
}
// Define Generator.prototype.{next,throw,return} in terms of the
// unified ._invoke helper method.
defineIteratorMethods(Gp);
define(Gp, toStringTagSymbol, "Generator");
// A Generator should always return itself as the iterator object when the
// @@iterator function is called on it. Some browsers' implementations of the
// iterator prototype chain incorrectly implement this, causing the Generator
// object to not be returned from this call. This ensures that doesn't happen.
// See https://github.com/facebook/regenerator/issues/274 for more details.
Gp[iteratorSymbol] = function() {
return this;
};
Gp.toString = function() {
return "[object Generator]";
};
function pushTryEntry(locs) {
var entry = { tryLoc: locs[0] };
if (1 in locs) {
entry.catchLoc = locs[1];
}
if (2 in locs) {
entry.finallyLoc = locs[2];
entry.afterLoc = locs[3];
}
this.tryEntries.push(entry);
}
function resetTryEntry(entry) {
var record = entry.completion || {};
record.type = "normal";
delete record.arg;
entry.completion = record;
}
function Context(tryLocsList) {
// The root entry object (effectively a try statement without a catch
// or a finally block) gives us a place to store values thrown from
// locations where there is no enclosing try statement.
this.tryEntries = [{ tryLoc: "root" }];
tryLocsList.forEach(pushTryEntry, this);
this.reset(true);
}
exports.keys = function(object) {
var keys = [];
for (var key in object) {
keys.push(key);
}
keys.reverse();
// Rather than returning an object with a next method, we keep
// things simple and return the next function itself.
return function next() {
while (keys.length) {
var key = keys.pop();
if (key in object) {
next.value = key;
next.done = false;
return next;
}
}
// To avoid creating an additional object, we just hang the .value
// and .done properties off the next function object itself. This
// also ensures that the minifier will not anonymize the function.
next.done = true;
return next;
};
};
function values(iterable) {
if (iterable) {
var iteratorMethod = iterable[iteratorSymbol];
if (iteratorMethod) {
return iteratorMethod.call(iterable);
}
if (typeof iterable.next === "function") {
return iterable;
}
if (!isNaN(iterable.length)) {
var i = -1, next = function next() {
while (++i < iterable.length) {
if (hasOwn.call(iterable, i)) {
next.value = iterable[i];
next.done = false;
return next;
}
}
next.value = undefined$1;
next.done = true;
return next;
};
return next.next = next;
}
}
// Return an iterator with no values.
return { next: doneResult };
}
exports.values = values;
function doneResult() {
return { value: undefined$1, done: true };
}
Context.prototype = {
constructor: Context,
reset: function(skipTempReset) {
this.prev = 0;
this.next = 0;
// Resetting context._sent for legacy support of Babel's
// function.sent implementation.
this.sent = this._sent = undefined$1;
this.done = false;
this.delegate = null;
this.method = "next";
this.arg = undefined$1;
this.tryEntries.forEach(resetTryEntry);
if (!skipTempReset) {
for (var name in this) {
// Not sure about the optimal order of these conditions:
if (name.charAt(0) === "t" &&
hasOwn.call(this, name) &&
!isNaN(+name.slice(1))) {
this[name] = undefined$1;
}
}
}
},
stop: function() {
this.done = true;
var rootEntry = this.tryEntries[0];
var rootRecord = rootEntry.completion;
if (rootRecord.type === "throw") {
throw rootRecord.arg;
}
return this.rval;
},
dispatchException: function(exception) {
if (this.done) {
throw exception;
}
var context = this;
function handle(loc, caught) {
record.type = "throw";
record.arg = exception;
context.next = loc;
if (caught) {
// If the dispatched exception was caught by a catch block,
// then let that catch block handle the exception normally.
context.method = "next";
context.arg = undefined$1;
}
return !! caught;
}
for (var i = this.tryEntries.length - 1; i >= 0; --i) {
var entry = this.tryEntries[i];
var record = entry.completion;
if (entry.tryLoc === "root") {
// Exception thrown outside of any try block that could handle
// it, so set the completion value of the entire function to
// throw the exception.
return handle("end");
}
if (entry.tryLoc <= this.prev) {
var hasCatch = hasOwn.call(entry, "catchLoc");
var hasFinally = hasOwn.call(entry, "finallyLoc");
if (hasCatch && hasFinally) {
if (this.prev < entry.catchLoc) {
return handle(entry.catchLoc, true);
} else if (this.prev < entry.finallyLoc) {
return handle(entry.finallyLoc);
}
} else if (hasCatch) {
if (this.prev < entry.catchLoc) {
return handle(entry.catchLoc, true);
}
} else if (hasFinally) {
if (this.prev < entry.finallyLoc) {
return handle(entry.finallyLoc);
}
} else {
throw new Error("try statement without catch or finally");
}
}
}
},
abrupt: function(type, arg) {
for (var i = this.tryEntries.length - 1; i >= 0; --i) {
var entry = this.tryEntries[i];
if (entry.tryLoc <= this.prev &&
hasOwn.call(entry, "finallyLoc") &&
this.prev < entry.finallyLoc) {
var finallyEntry = entry;
break;
}
}
if (finallyEntry &&
(type === "break" ||
type === "continue") &&
finallyEntry.tryLoc <= arg &&
arg <= finallyEntry.finallyLoc) {
// Ignore the finally entry if control is not jumping to a
// location outside the try/catch block.
finallyEntry = null;
}
var record = finallyEntry ? finallyEntry.completion : {};
record.type = type;
record.arg = arg;
if (finallyEntry) {
this.method = "next";
this.next = finallyEntry.finallyLoc;
return ContinueSentinel;
}
return this.complete(record);
},
complete: function(record, afterLoc) {
if (record.type === "throw") {
throw record.arg;
}
if (record.type === "break" ||
record.type === "continue") {
this.next = record.arg;
} else if (record.type === "return") {
this.rval = this.arg = record.arg;
this.method = "return";
this.next = "end";
} else if (record.type === "normal" && afterLoc) {
this.next = afterLoc;
}
return ContinueSentinel;
},
finish: function(finallyLoc) {
for (var i = this.tryEntries.length - 1; i >= 0; --i) {
var entry = this.tryEntries[i];
if (entry.finallyLoc === finallyLoc) {
this.complete(entry.completion, entry.afterLoc);
resetTryEntry(entry);
return ContinueSentinel;
}
}
},
"catch": function(tryLoc) {
for (var i = this.tryEntries.length - 1; i >= 0; --i) {
var entry = this.tryEntries[i];
if (entry.tryLoc === tryLoc) {
var record = entry.completion;
if (record.type === "throw") {
var thrown = record.arg;
resetTryEntry(entry);
}
return thrown;
}
}
// The context.catch method must only be called with a location
// argument that corresponds to a known catch block.
throw new Error("illegal catch attempt");
},
delegateYield: function(iterable, resultName, nextLoc) {
this.delegate = {
iterator: values(iterable),
resultName: resultName,
nextLoc: nextLoc
};
if (this.method === "next") {
// Deliberately forget the last sent value so that we don't
// accidentally pass it on to the delegate.
this.arg = undefined$1;
}
return ContinueSentinel;
}
};
// Regardless of whether this script is executing as a CommonJS module
// or not, return the runtime object so that we can declare the variable
// regeneratorRuntime in the outer scope, which allows this module to be
// injected easily by `bin/regenerator --include-runtime script.js`.
return exports;
}(
// If this script is executing as a CommonJS module, use module.exports
// as the regeneratorRuntime namespace. Otherwise create a new empty
// object. Either way, the resulting object will be used to initialize
// the regeneratorRuntime variable at the top of this file.
module.exports
));
try {
regeneratorRuntime = runtime;
} catch (accidentalStrictMode) {
// This module should not be running in strict mode, so the above
// assignment should always work unless something is misconfigured. Just
// in case runtime.js accidentally runs in strict mode, we can escape
// strict mode using a global Function call. This could conceivably fail
// if a Content Security Policy forbids using Function, but in that case
// the proper solution is to fix the accidental strict mode problem. If
// you've misconfigured your bundler to force strict mode and applied a
// CSP to forbid Function, and you're not willing to fix either of those
// problems, please detail your unique predicament in a GitHub issue.
Function("r", "regeneratorRuntime = r")(runtime);
}
});
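/*
 * Illustrative sketch (not part of the msal-node bundle): the transpiled
 * equivalent of `async function identity(x) { return await x; }` using the
 * helpers above. runtime_1.mark() tags the inner function as a generator,
 * runtime_1.wrap() drives its state machine, and _asyncToGenerator() turns
 * the whole thing into a Promise-returning function.
 */
var _exampleIdentity = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _exampleCallee(x) {
var result;
return runtime_1.wrap(function _exampleCallee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.next = 2;
return x; // `await x`
case 2:
result = _context.sent;
return _context.abrupt("return", result);
case 4:
case "end":
return _context.stop();
}
}
}, _exampleCallee);
}));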
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
/**
* http methods
*/
var HttpMethod;
(function (HttpMethod) {
HttpMethod["GET"] = "get";
HttpMethod["POST"] = "post";
})(HttpMethod || (HttpMethod = {}));
/**
* Constant used for PKCE
*/
var RANDOM_OCTET_SIZE = 32;
/**
* Constants used in PKCE
*/
var Hash = {
SHA256: "sha256"
};
/**
* Constants for encoding schemes
*/
var CharSet = {
CV_CHARSET: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
};
/**
* Constants
*/
var Constants = {
MSAL_SKU: "msal.js.node",
JWT_BEARER_ASSERTION_TYPE: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
};
/**
* API Codes for Telemetry purposes.
* Before adding a new code you must claim it in the MSAL Telemetry tracker as these number spaces are shared across all MSALs
* 0-99 Silent Flow
* 600-699 Device Code Flow
* 800-899 Auth Code Flow
*/
var ApiId;
(function (ApiId) {
ApiId[ApiId["acquireTokenSilent"] = 62] = "acquireTokenSilent";
ApiId[ApiId["acquireTokenByUsernamePassword"] = 371] = "acquireTokenByUsernamePassword";
ApiId[ApiId["acquireTokenByDeviceCode"] = 671] = "acquireTokenByDeviceCode";
ApiId[ApiId["acquireTokenByClientCredential"] = 771] = "acquireTokenByClientCredential";
ApiId[ApiId["acquireTokenByCode"] = 871] = "acquireTokenByCode";
ApiId[ApiId["acquireTokenByRefreshToken"] = 872] = "acquireTokenByRefreshToken";
})(ApiId || (ApiId = {}));
/**
* JWT constants
*/
var JwtConstants = {
ALGORITHM: "alg",
RSA_256: "RS256",
X5T: "x5t",
X5C: "x5c",
AUDIENCE: "aud",
EXPIRATION_TIME: "exp",
ISSUER: "iss",
SUBJECT: "sub",
NOT_BEFORE: "nbf",
JWT_ID: "jti"
};
/**
* This class implements the API for network requests.
*/
var HttpClient = /*#__PURE__*/function () {
function HttpClient() {
axios.defaults.validateStatus = function () {
return true;
};
}
/**
* Http Get request
* @param url
* @param options
*/
var _proto = HttpClient.prototype;
_proto.sendGetRequestAsync =
/*#__PURE__*/
function () {
var _sendGetRequestAsync = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee(url, options) {
var request, response;
return runtime_1.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
request = {
method: HttpMethod.GET,
url: url,
headers: options && options.headers
};
_context.next = 3;
return axios(request);
case 3:
response = _context.sent;
return _context.abrupt("return", {
headers: response.headers,
body: response.data,
status: response.status
});
case 5:
case "end":
return _context.stop();
}
}
}, _callee);
}));
function sendGetRequestAsync(_x, _x2) {
return _sendGetRequestAsync.apply(this, arguments);
}
return sendGetRequestAsync;
}()
/**
* Http Post request
* @param url
* @param options
*/
;
_proto.sendPostRequestAsync =
/*#__PURE__*/
function () {
var _sendPostRequestAsync = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee2(url, options) {
var request, response;
return runtime_1.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
request = {
method: HttpMethod.POST,
url: url,
data: options && options.body || "",
headers: options && options.headers
};
_context2.next = 3;
return axios(request);
case 3:
response = _context2.sent;
return _context2.abrupt("return", {
headers: response.headers,
body: response.data,
status: response.status
});
case 5:
case "end":
return _context2.stop();
}
}
}, _callee2);
}));
function sendPostRequestAsync(_x3, _x4) {
return _sendPostRequestAsync.apply(this, arguments);
}
return sendPostRequestAsync;
}();
return HttpClient;
}();
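/*
 * Illustrative sketch (not part of the msal-node bundle): both request helpers
 * resolve to a { headers, body, status } object for any HTTP status, because
 * the constructor disables axios status validation. The URL is a hypothetical
 * placeholder.
 */
function _exampleHttpGet() {
var client = new HttpClient();
return client.sendGetRequestAsync("https://login.example.invalid/.well-known/openid-configuration", {
headers: { Accept: "application/json" }
});
}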
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
var NetworkUtils = /*#__PURE__*/function () {
function NetworkUtils() {}
/**
* Returns best compatible network client object.
*/
NetworkUtils.getNetworkClient = function getNetworkClient() {
return new HttpClient();
};
return NetworkUtils;
}();
var DEFAULT_AUTH_OPTIONS = {
clientId: "",
authority: msalCommon.Constants.DEFAULT_AUTHORITY,
clientSecret: "",
clientAssertion: "",
clientCertificate: {
thumbprint: "",
privateKey: "",
x5c: ""
},
knownAuthorities: [],
cloudDiscoveryMetadata: "",
authorityMetadata: "",
clientCapabilities: [],
protocolMode: msalCommon.ProtocolMode.AAD
};
var DEFAULT_CACHE_OPTIONS = {};
var DEFAULT_LOGGER_OPTIONS = {
loggerCallback: function loggerCallback() { // allow users to not set a logger callback
},
piiLoggingEnabled: false,
logLevel: msalCommon.LogLevel.Info
};
var DEFAULT_SYSTEM_OPTIONS = {
loggerOptions: DEFAULT_LOGGER_OPTIONS,
networkClient: /*#__PURE__*/NetworkUtils.getNetworkClient()
};
/**
* Sets the default options when not explicitly configured from app developer
*
* @param auth
* @param cache
* @param system
*
* @returns Configuration
*/
function buildAppConfiguration(_ref) {
var auth = _ref.auth,
cache = _ref.cache,
system = _ref.system;
return {
auth: _extends({}, DEFAULT_AUTH_OPTIONS, auth),
cache: _extends({}, DEFAULT_CACHE_OPTIONS, cache),
system: _extends({}, DEFAULT_SYSTEM_OPTIONS, system)
};
}
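/*
 * Illustrative sketch (not part of the msal-node bundle): supplying only
 * auth.clientId (a hypothetical value) lets every other option fall back to
 * the DEFAULT_* objects via _extends().
 */
function _exampleBuildConfiguration() {
return buildAppConfiguration({
auth: { clientId: "00000000-0000-0000-0000-000000000000" }
});
}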
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
var GuidGenerator = /*#__PURE__*/function () {
function GuidGenerator() {}
/**
*
* RFC4122: The version 4 UUID is meant for generating UUIDs from truly-random or pseudo-random numbers.
* uuid.v4 generates GUIDs from cryptographically strong random values.
*/
GuidGenerator.generateGuid = function generateGuid() {
return uuid.v4();
}
/**
* verifies whether a string is a GUID
* @param guid
*/
;
GuidGenerator.isGuid = function isGuid(guid) {
var regexGuid = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
return regexGuid.test(guid);
};
return GuidGenerator;
}();
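/*
 * Illustrative sketch (not part of the msal-node bundle): generateGuid()
 * produces a v4 UUID, which isGuid() then accepts.
 */
function _exampleGuidUsage() {
var guid = GuidGenerator.generateGuid();
return GuidGenerator.isGuid(guid); // true
}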
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
var EncodingUtils = /*#__PURE__*/function () {
function EncodingUtils() {}
/**
* 'utf8': Multibyte encoded Unicode characters. Many web pages and other document formats use UTF-8.
* 'base64': Base64 encoding.
*
* @param str text
*/
EncodingUtils.base64Encode = function base64Encode(str, encoding) {
return Buffer.from(str, encoding).toString("base64");
}
/**
* encode a URL
* @param str
*/
;
EncodingUtils.base64EncodeUrl = function base64EncodeUrl(str, encoding) {
return EncodingUtils.base64Encode(str, encoding).replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_");
}
/**
* 'utf8': Multibyte encoded Unicode characters. Many web pages and other document formats use UTF-8.
* 'base64': Base64 encoding.
*
* @param base64Str Base64 encoded text
*/
;
EncodingUtils.base64Decode = function base64Decode(base64Str) {
return Buffer.from(base64Str, "base64").toString("utf8");
}
/**
* @param base64Str Base64 encoded Url
*/
;
EncodingUtils.base64DecodeUrl = function base64DecodeUrl(base64Str) {
var str = base64Str.replace(/-/g, "+").replace(/_/g, "/");
while (str.length % 4) {
str += "=";
}
return EncodingUtils.base64Decode(str);
};
return EncodingUtils;
}();
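/*
 * Illustrative sketch (not part of the msal-node bundle): base64EncodeUrl()
 * strips '=' padding and swaps '+'/'/' for '-'/'_', and base64DecodeUrl()
 * restores the padding first, so the pair round-trips arbitrary UTF-8 text.
 */
function _exampleBase64UrlRoundTrip(text) {
var encoded = EncodingUtils.base64EncodeUrl(text, "utf8"); // URL-safe, unpadded
return EncodingUtils.base64DecodeUrl(encoded) === text; // true
}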
/**
* https://tools.ietf.org/html/rfc7636#page-8
*/
var PkceGenerator = /*#__PURE__*/function () {
function PkceGenerator() {}
var _proto = PkceGenerator.prototype;
/**
* generates the codeVerifier and the challenge from the codeVerifier
* reference: https://tools.ietf.org/html/rfc7636#section-4.1 and https://tools.ietf.org/html/rfc7636#section-4.2
*/
_proto.generatePkceCodes =
/*#__PURE__*/
function () {
var _generatePkceCodes = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee() {
var verifier, challenge;
return runtime_1.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
verifier = this.generateCodeVerifier();
challenge = this.generateCodeChallengeFromVerifier(verifier);
return _context.abrupt("return", {
verifier: verifier,
challenge: challenge
});
case 3:
case "end":
return _context.stop();
}
}
}, _callee, this);
}));
function generatePkceCodes() {
return _generatePkceCodes.apply(this, arguments);
}
return generatePkceCodes;
}()
/**
* generates the codeVerifier; reference: https://tools.ietf.org/html/rfc7636#section-4.1
*/
;
_proto.generateCodeVerifier = function generateCodeVerifier() {
var buffer = crypto.randomBytes(RANDOM_OCTET_SIZE);
var verifier = this.bufferToCVString(buffer);
return EncodingUtils.base64EncodeUrl(verifier);
}
/**
* generate the challenge from the codeVerifier; reference: https://tools.ietf.org/html/rfc7636#section-4.2
* @param codeVerifier
*/
;
_proto.generateCodeChallengeFromVerifier = function generateCodeChallengeFromVerifier(codeVerifier) {
return EncodingUtils.base64EncodeUrl(this.sha256(codeVerifier).toString("base64"), "base64");
}
/**
* generate 'SHA256' hash
* @param buffer
*/
;
_proto.sha256 = function sha256(buffer) {
return crypto.createHash(Hash.SHA256).update(buffer).digest();
}
/**
* Accepted characters; reference: https://tools.ietf.org/html/rfc7636#section-4.1
* @param buffer
*/
;
_proto.bufferToCVString = function bufferToCVString(buffer) {
var charArr = [];
for (var i = 0; i < buffer.byteLength; i += 1) {
var index = buffer[i] % CharSet.CV_CHARSET.length;
charArr.push(CharSet.CV_CHARSET[index]);
}
return charArr.join("");
};
return PkceGenerator;
}();
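/*
 * Illustrative sketch (not part of the msal-node bundle): generatePkceCodes()
 * resolves to { verifier, challenge }, where the challenge is
 * BASE64URL(SHA256(verifier)) as required by RFC 7636.
 */
function _examplePkceUsage() {
return new PkceGenerator().generatePkceCodes().then(function (codes) {
// codes.verifier: 32 random octets mapped onto CV_CHARSET, base64url-encoded
// codes.challenge: base64url(SHA256(codes.verifier))
return codes;
});
}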
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
/**
* This class implements MSAL node's crypto interface, which allows it to perform base64 encoding and decoding, generating cryptographically random GUIDs and
* implementing Proof Key for Code Exchange specs for the OAuth Authorization Code Flow using PKCE (rfc here: https://tools.ietf.org/html/rfc7636).
*/
var CryptoProvider = /*#__PURE__*/function () {
function CryptoProvider() {
// Browser crypto needs to be validated first before any other classes can be set.
this.pkceGenerator = new PkceGenerator();
}
/**
* Creates a new random GUID - used to populate state and nonce.
* @returns string (GUID)
*/
var _proto = CryptoProvider.prototype;
_proto.createNewGuid = function createNewGuid() {
return GuidGenerator.generateGuid();
}
/**
* Encodes input string to base64.
* @param input
*/
;
_proto.base64Encode = function base64Encode(input) {
return EncodingUtils.base64Encode(input);
}
/**
* Decodes input string from base64.
* @param input
*/
;
_proto.base64Decode = function base64Decode(input) {
return EncodingUtils.base64Decode(input);
}
/**
* Generates PKCE codes used in Authorization Code Flow.
*/
;
_proto.generatePkceCodes = function generatePkceCodes() {
return this.pkceGenerator.generatePkceCodes();
};
_proto.getPublicKeyThumbprint = function getPublicKeyThumbprint() {
throw new Error("Method not implemented.");
};
_proto.signJwt = function signJwt() {
throw new Error("Method not implemented.");
};
return CryptoProvider;
}();
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
/**
* This class deserializes cache entities read from the file into in memory object types defined internally
*/
var Deserializer = /*#__PURE__*/function () {
function Deserializer() {}
/**
* Parse the JSON blob in memory and deserialize the content
* @param cachedJson
*/
Deserializer.deserializeJSONBlob = function deserializeJSONBlob(jsonFile) {
var deserializedCache = msalCommon.StringUtils.isEmpty(jsonFile) ? {} : JSON.parse(jsonFile);
return deserializedCache;
}
/**
* Deserializes accounts to AccountEntity objects
* @param accounts
*/
;
Deserializer.deserializeAccounts = function deserializeAccounts(accounts) {
var accountObjects = {};
if (accounts) {
Object.keys(accounts).map(function (key) {
var serializedAcc = accounts[key];
var mappedAcc = {
homeAccountId: serializedAcc.home_account_id,
environment: serializedAcc.environment,
realm: serializedAcc.realm,
localAccountId: serializedAcc.local_account_id,
username: serializedAcc.username,
authorityType: serializedAcc.authority_type,
name: serializedAcc.name,
clientInfo: serializedAcc.client_info,
lastModificationTime: serializedAcc.last_modification_time,
lastModificationApp: serializedAcc.last_modification_app
};
var account = new msalCommon.AccountEntity();
msalCommon.CacheManager.toObject(account, mappedAcc);
accountObjects[key] = account;
});
}
return accountObjects;
}
/**
* Deserializes id tokens to IdTokenEntity objects
* @param idTokens
*/
;
Deserializer.deserializeIdTokens = function deserializeIdTokens(idTokens) {
var idObjects = {};
if (idTokens) {
Object.keys(idTokens).map(function (key) {
var serializedIdT = idTokens[key];
var mappedIdT = {
homeAccountId: serializedIdT.home_account_id,
environment: serializedIdT.environment,
credentialType: serializedIdT.credential_type,
clientId: serializedIdT.client_id,
secret: serializedIdT.secret,
realm: serializedIdT.realm
};
var idToken = new msalCommon.IdTokenEntity();
msalCommon.CacheManager.toObject(idToken, mappedIdT);
idObjects[key] = idToken;
});
}
return idObjects;
}
/**
* Deserializes access tokens to AccessTokenEntity objects
* @param accessTokens
*/
;
Deserializer.deserializeAccessTokens = function deserializeAccessTokens(accessTokens) {
var atObjects = {};
if (accessTokens) {
Object.keys(accessTokens).map(function (key) {
var serializedAT = accessTokens[key];
var mappedAT = {
homeAccountId: serializedAT.home_account_id,
environment: serializedAT.environment,
credentialType: serializedAT.credential_type,
clientId: serializedAT.client_id,
secret: serializedAT.secret,
realm: serializedAT.realm,
target: serializedAT.target,
cachedAt: serializedAT.cached_at,
expiresOn: serializedAT.expires_on,
extendedExpiresOn: serializedAT.extended_expires_on,
refreshOn: serializedAT.refresh_on,
keyId: serializedAT.key_id,
tokenType: serializedAT.token_type
};
var accessToken = new msalCommon.AccessTokenEntity();
msalCommon.CacheManager.toObject(accessToken, mappedAT);
atObjects[key] = accessToken;
});
}
return atObjects;
}
/**
* Deserializes refresh tokens to RefreshTokenEntity objects
* @param refreshTokens
*/
;
Deserializer.deserializeRefreshTokens = function deserializeRefreshTokens(refreshTokens) {
var rtObjects = {};
if (refreshTokens) {
Object.keys(refreshTokens).map(function (key) {
var serializedRT = refreshTokens[key];
var mappedRT = {
homeAccountId: serializedRT.home_account_id,
environment: serializedRT.environment,
credentialType: serializedRT.credential_type,
clientId: serializedRT.client_id,
secret: serializedRT.secret,
familyId: serializedRT.family_id,
target: serializedRT.target,
realm: serializedRT.realm
};
var refreshToken = new msalCommon.RefreshTokenEntity();
msalCommon.CacheManager.toObject(refreshToken, mappedRT);
rtObjects[key] = refreshToken;
});
}
return rtObjects;
}
/**
* Deserializes appMetadata to AppMetaData objects
* @param appMetadata
*/
;
Deserializer.deserializeAppMetadata = function deserializeAppMetadata(appMetadata) {
var appMetadataObjects = {};
if (appMetadata) {
Object.keys(appMetadata).map(function (key) {
var serializedAmdt = appMetadata[key];
var mappedAmd = {
clientId: serializedAmdt.client_id,
environment: serializedAmdt.environment,
familyId: serializedAmdt.family_id
};
var amd = new msalCommon.AppMetadataEntity();
msalCommon.CacheManager.toObject(amd, mappedAmd);
appMetadataObjects[key] = amd;
});
}
return appMetadataObjects;
}
/**
* Deserialize an inMemory Cache
* @param jsonCache
*/
;
Deserializer.deserializeAllCache = function deserializeAllCache(jsonCache) {
return {
accounts: jsonCache.Account ? this.deserializeAccounts(jsonCache.Account) : {},
idTokens: jsonCache.IdToken ? this.deserializeIdTokens(jsonCache.IdToken) : {},
accessTokens: jsonCache.AccessToken ? this.deserializeAccessTokens(jsonCache.AccessToken) : {},
refreshTokens: jsonCache.RefreshToken ? this.deserializeRefreshTokens(jsonCache.RefreshToken) : {},
appMetadata: jsonCache.AppMetadata ? this.deserializeAppMetadata(jsonCache.AppMetadata) : {}
};
};
return Deserializer;
}();
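/*
 * Usage sketch (illustrative): turning a serialized cache file into in-memory
 * entities. The top-level JSON keys (Account, IdToken, AccessToken,
 * RefreshToken, AppMetadata) match what Serializer.serializeAllCache below
 * produces.
 *
 *   const json = Deserializer.deserializeJSONBlob(fileContents); // "" yields {}
 *   const inMemoryCache = Deserializer.deserializeAllCache(json);
 *   // inMemoryCache.accounts / .idTokens / .accessTokens / .refreshTokens / .appMetadata
 */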
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
var Serializer = /*#__PURE__*/function () {
function Serializer() {}
/**
* serialize the JSON blob
* @param data
*/
Serializer.serializeJSONBlob = function serializeJSONBlob(data) {
return JSON.stringify(data);
}
/**
* Serialize Accounts
* @param accCache
*/
;
Serializer.serializeAccounts = function serializeAccounts(accCache) {
var accounts = {};
Object.keys(accCache).map(function (key) {
var accountEntity = accCache[key];
accounts[key] = {
home_account_id: accountEntity.homeAccountId,
environment: accountEntity.environment,
realm: accountEntity.realm,
local_account_id: accountEntity.localAccountId,
username: accountEntity.username,
authority_type: accountEntity.authorityType,
name: accountEntity.name,
client_info: accountEntity.clientInfo,
last_modification_time: accountEntity.lastModificationTime,
last_modification_app: accountEntity.lastModificationApp
};
});
return accounts;
}
/**
* Serialize IdTokens
* @param idTCache
*/
;
Serializer.serializeIdTokens = function serializeIdTokens(idTCache) {
var idTokens = {};
Object.keys(idTCache).map(function (key) {
var idTEntity = idTCache[key];
idTokens[key] = {
home_account_id: idTEntity.homeAccountId,
environment: idTEntity.environment,
credential_type: idTEntity.credentialType,
client_id: idTEntity.clientId,
secret: idTEntity.secret,
realm: idTEntity.realm
};
});
return idTokens;
}
/**
* Serializes AccessTokens
* @param atCache
*/
;
Serializer.serializeAccessTokens = function serializeAccessTokens(atCache) {
var accessTokens = {};
Object.keys(atCache).map(function (key) {
var atEntity = atCache[key];
accessTokens[key] = {
home_account_id: atEntity.homeAccountId,
environment: atEntity.environment,
credential_type: atEntity.credentialType,
client_id: atEntity.clientId,
secret: atEntity.secret,
realm: atEntity.realm,
target: atEntity.target,
cached_at: atEntity.cachedAt,
expires_on: atEntity.expiresOn,
extended_expires_on: atEntity.extendedExpiresOn,
refresh_on: atEntity.refreshOn,
key_id: atEntity.keyId,
token_type: atEntity.tokenType
};
});
return accessTokens;
}
/**
* Serialize refreshTokens
* @param rtCache
*/
;
Serializer.serializeRefreshTokens = function serializeRefreshTokens(rtCache) {
var refreshTokens = {};
Object.keys(rtCache).map(function (key) {
var rtEntity = rtCache[key];
refreshTokens[key] = {
home_account_id: rtEntity.homeAccountId,
environment: rtEntity.environment,
credential_type: rtEntity.credentialType,
client_id: rtEntity.clientId,
secret: rtEntity.secret,
family_id: rtEntity.familyId,
target: rtEntity.target,
realm: rtEntity.realm
};
});
return refreshTokens;
}
/**
* Serialize amdtCache
* @param amdtCache
*/
;
Serializer.serializeAppMetadata = function serializeAppMetadata(amdtCache) {
var appMetadata = {};
Object.keys(amdtCache).map(function (key) {
var amdtEntity = amdtCache[key];
appMetadata[key] = {
client_id: amdtEntity.clientId,
environment: amdtEntity.environment,
family_id: amdtEntity.familyId
};
});
return appMetadata;
}
/**
* Serialize the cache
* @param jsonContent
*/
;
Serializer.serializeAllCache = function serializeAllCache(inMemCache) {
return {
Account: this.serializeAccounts(inMemCache.accounts),
IdToken: this.serializeIdTokens(inMemCache.idTokens),
AccessToken: this.serializeAccessTokens(inMemCache.accessTokens),
RefreshToken: this.serializeRefreshTokens(inMemCache.refreshTokens),
AppMetadata: this.serializeAppMetadata(inMemCache.appMetadata)
};
};
return Serializer;
}();
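/*
 * Usage sketch (illustrative): Serializer is the inverse of Deserializer, so
 * a cache survives a JSON round trip.
 *
 *   const blob = Serializer.serializeJSONBlob(Serializer.serializeAllCache(inMemoryCache));
 *   const restored = Deserializer.deserializeAllCache(JSON.parse(blob));
 */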
/**
 * This class implements Storage for node, reading the cache from a user-specified storage location or an extension library
*/
var Storage = /*#__PURE__*/function (_CacheManager) {
_inheritsLoose(Storage, _CacheManager);
function Storage(logger, clientId, cryptoImpl) {
var _this;
_this = _CacheManager.call(this, clientId, cryptoImpl) || this;
_this.cache = {};
_this.changeEmitters = [];
_this.logger = logger;
return _this;
}
var _proto = Storage.prototype;
_proto.registerChangeEmitter = function registerChangeEmitter(func) {
this.changeEmitters.push(func);
};
_proto.emitChange = function emitChange() {
this.changeEmitters.forEach(function (func) {
return func.call(null);
});
}
/**
* Converts cacheKVStore to InMemoryCache
* @param cache
*/
;
_proto.cacheToInMemoryCache = function cacheToInMemoryCache(cache) {
var inMemoryCache = {
accounts: {},
idTokens: {},
accessTokens: {},
refreshTokens: {},
appMetadata: {}
};
for (var key in cache) {
if (cache[key] instanceof msalCommon.AccountEntity) {
inMemoryCache.accounts[key] = cache[key];
} else if (cache[key] instanceof msalCommon.IdTokenEntity) {
inMemoryCache.idTokens[key] = cache[key];
} else if (cache[key] instanceof msalCommon.AccessTokenEntity) {
inMemoryCache.accessTokens[key] = cache[key];
} else if (cache[key] instanceof msalCommon.RefreshTokenEntity) {
inMemoryCache.refreshTokens[key] = cache[key];
} else if (cache[key] instanceof msalCommon.AppMetadataEntity) {
inMemoryCache.appMetadata[key] = cache[key];
} else {
continue;
}
}
return inMemoryCache;
}
/**
* converts inMemoryCache to CacheKVStore
* @param inMemoryCache
*/
;
_proto.inMemoryCacheToCache = function inMemoryCacheToCache(inMemoryCache) {
// convert in memory cache to a flat Key-Value map
var cache = this.getCache();
cache = _extends({}, inMemoryCache.accounts, inMemoryCache.idTokens, inMemoryCache.accessTokens, inMemoryCache.refreshTokens, inMemoryCache.appMetadata);
return cache;
}
/**
* gets the current in memory cache for the client
*/
;
_proto.getInMemoryCache = function getInMemoryCache() {
this.logger.verbose("Getting in-memory cache"); // convert the cache key value store to inMemoryCache
var inMemoryCache = this.cacheToInMemoryCache(this.getCache());
return inMemoryCache;
}
/**
* sets the current in memory cache for the client
* @param inMemoryCache
*/
;
_proto.setInMemoryCache = function setInMemoryCache(inMemoryCache) {
this.logger.verbose("Setting in-memory cache"); // convert and append the inMemoryCache to cacheKVStore
var cache = this.inMemoryCacheToCache(inMemoryCache);
this.setCache(cache);
this.emitChange();
}
/**
* get the current cache key-value store
*/
;
_proto.getCache = function getCache() {
this.logger.verbose("Getting cache key-value store");
return this.cache;
}
/**
* sets the current cache (key value store)
* @param cacheMap
*/
;
_proto.setCache = function setCache(cache) {
this.logger.verbose("Setting cache key value store");
this.cache = cache; // mark change in cache
this.emitChange();
}
/**
* Gets cache item with given key.
* @param key
*/
;
_proto.getItem = function getItem(key) {
this.logger.verbosePii("Item key: " + key); // read cache
var cache = this.getCache();
return cache[key];
}
/**
 * Sets cache item with given <key, value>
* @param key
* @param value
*/
;
_proto.setItem = function setItem(key, value) {
this.logger.verbosePii("Item key: " + key); // read cache
var cache = this.getCache();
cache[key] = value; // write to cache
this.setCache(cache);
}
/**
* fetch the account entity
* @param accountKey
*/
;
_proto.getAccount = function getAccount(accountKey) {
var account = this.getItem(accountKey);
if (msalCommon.AccountEntity.isAccountEntity(account)) {
return account;
}
return null;
}
/**
* set account entity
* @param account
*/
;
_proto.setAccount = function setAccount(account) {
var accountKey = account.generateAccountKey();
this.setItem(accountKey, account);
}
/**
* fetch the idToken credential
* @param idTokenKey
*/
;
_proto.getIdTokenCredential = function getIdTokenCredential(idTokenKey) {
var idToken = this.getItem(idTokenKey);
if (msalCommon.IdTokenEntity.isIdTokenEntity(idToken)) {
return idToken;
}
return null;
}
/**
* set idToken credential
* @param idToken
*/
;
_proto.setIdTokenCredential = function setIdTokenCredential(idToken) {
var idTokenKey = idToken.generateCredentialKey();
this.setItem(idTokenKey, idToken);
}
/**
* fetch the accessToken credential
* @param accessTokenKey
*/
;
_proto.getAccessTokenCredential = function getAccessTokenCredential(accessTokenKey) {
var accessToken = this.getItem(accessTokenKey);
if (msalCommon.AccessTokenEntity.isAccessTokenEntity(accessToken)) {
return accessToken;
}
return null;
}
/**
* set accessToken credential
* @param accessToken
*/
;
_proto.setAccessTokenCredential = function setAccessTokenCredential(accessToken) {
var accessTokenKey = accessToken.generateCredentialKey();
this.setItem(accessTokenKey, accessToken);
}
/**
* fetch the refreshToken credential
* @param refreshTokenKey
*/
;
_proto.getRefreshTokenCredential = function getRefreshTokenCredential(refreshTokenKey) {
var refreshToken = this.getItem(refreshTokenKey);
if (msalCommon.RefreshTokenEntity.isRefreshTokenEntity(refreshToken)) {
return refreshToken;
}
return null;
}
/**
* set refreshToken credential
* @param refreshToken
*/
;
_proto.setRefreshTokenCredential = function setRefreshTokenCredential(refreshToken) {
var refreshTokenKey = refreshToken.generateCredentialKey();
this.setItem(refreshTokenKey, refreshToken);
}
/**
* fetch appMetadata entity from the platform cache
* @param appMetadataKey
*/
;
_proto.getAppMetadata = function getAppMetadata(appMetadataKey) {
var appMetadata = this.getItem(appMetadataKey);
if (msalCommon.AppMetadataEntity.isAppMetadataEntity(appMetadataKey, appMetadata)) {
return appMetadata;
}
return null;
}
/**
* set appMetadata entity to the platform cache
* @param appMetadata
*/
;
_proto.setAppMetadata = function setAppMetadata(appMetadata) {
var appMetadataKey = appMetadata.generateAppMetadataKey();
this.setItem(appMetadataKey, appMetadata);
}
/**
* fetch server telemetry entity from the platform cache
* @param serverTelemetrykey
*/
;
_proto.getServerTelemetry = function getServerTelemetry(serverTelemetrykey) {
var serverTelemetryEntity = this.getItem(serverTelemetrykey);
if (serverTelemetryEntity && msalCommon.ServerTelemetryEntity.isServerTelemetryEntity(serverTelemetrykey, serverTelemetryEntity)) {
return serverTelemetryEntity;
}
return null;
}
/**
* set server telemetry entity to the platform cache
* @param serverTelemetryKey
* @param serverTelemetry
*/
;
_proto.setServerTelemetry = function setServerTelemetry(serverTelemetryKey, serverTelemetry) {
this.setItem(serverTelemetryKey, serverTelemetry);
}
/**
* fetch authority metadata entity from the platform cache
* @param key
*/
;
_proto.getAuthorityMetadata = function getAuthorityMetadata(key) {
var authorityMetadataEntity = this.getItem(key);
if (authorityMetadataEntity && msalCommon.AuthorityMetadataEntity.isAuthorityMetadataEntity(key, authorityMetadataEntity)) {
return authorityMetadataEntity;
}
return null;
}
/**
* Get all authority metadata keys
*/
;
_proto.getAuthorityMetadataKeys = function getAuthorityMetadataKeys() {
var _this2 = this;
return this.getKeys().filter(function (key) {
return _this2.isAuthorityMetadata(key);
});
}
/**
* set authority metadata entity to the platform cache
* @param key
* @param metadata
*/
;
_proto.setAuthorityMetadata = function setAuthorityMetadata(key, metadata) {
this.setItem(key, metadata);
}
/**
* fetch throttling entity from the platform cache
* @param throttlingCacheKey
*/
;
_proto.getThrottlingCache = function getThrottlingCache(throttlingCacheKey) {
var throttlingCache = this.getItem(throttlingCacheKey);
if (throttlingCache && msalCommon.ThrottlingEntity.isThrottlingEntity(throttlingCacheKey, throttlingCache)) {
return throttlingCache;
}
return null;
}
/**
* set throttling entity to the platform cache
* @param throttlingCacheKey
* @param throttlingCache
*/
;
_proto.setThrottlingCache = function setThrottlingCache(throttlingCacheKey, throttlingCache) {
this.setItem(throttlingCacheKey, throttlingCache);
}
/**
* Removes the cache item from memory with the given key.
* @param key
*/
;
_proto.removeItem = function removeItem(key) {
this.logger.verbosePii("Item key: " + key); // read inMemoryCache
var result = false;
var cache = this.getCache();
if (!!cache[key]) {
delete cache[key];
result = true;
} // write to the cache after removal
if (result) {
this.setCache(cache);
this.emitChange();
}
return result;
}
/**
* Checks whether key is in cache.
* @param key
*/
;
_proto.containsKey = function containsKey(key) {
return this.getKeys().includes(key);
}
/**
 * Gets all cache keys.
*/
;
_proto.getKeys = function getKeys() {
this.logger.verbose("Retrieving all cache keys"); // read cache
var cache = this.getCache();
return [].concat(Object.keys(cache));
}
/**
 * Clears all cache entries created by MSAL.
*/
;
_proto.clear = function clear() {
var _this3 = this;
this.logger.verbose("Clearing cache entries created by MSAL"); // read inMemoryCache
var cacheKeys = this.getKeys(); // delete each element
cacheKeys.forEach(function (key) {
_this3.removeItem(key);
});
this.emitChange();
}
/**
 * Initialize in-memory cache from an existing cache vault
* @param cache
*/
;
Storage.generateInMemoryCache = function generateInMemoryCache(cache) {
return Deserializer.deserializeAllCache(Deserializer.deserializeJSONBlob(cache));
}
/**
* retrieves the final JSON
* @param inMemoryCache
*/
;
Storage.generateJsonCache = function generateJsonCache(inMemoryCache) {
return Serializer.serializeAllCache(inMemoryCache);
};
return Storage;
}(msalCommon.CacheManager);
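/*
 * Usage sketch (illustrative; logger, clientId and cryptoProvider are
 * normally supplied by ClientApplication): Storage keeps a flat key-value
 * map and converts it to/from the typed in-memory cache shape.
 *
 *   const storage = new Storage(logger, clientId, cryptoProvider);
 *   storage.registerChangeEmitter(() => console.log("cache changed"));
 *   storage.setItem(someKey, someEntity);       // fires the change emitter
 *   const typed = storage.getInMemoryCache();   // { accounts, idTokens, ... }
 */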
var defaultSerializedCache = {
Account: {},
IdToken: {},
AccessToken: {},
RefreshToken: {},
AppMetadata: {}
};
/**
* In-memory token cache manager
*/
var TokenCache = /*#__PURE__*/function () {
function TokenCache(storage, logger, cachePlugin) {
this.cacheHasChanged = false;
this.storage = storage;
this.storage.registerChangeEmitter(this.handleChangeEvent.bind(this));
if (cachePlugin) {
this.persistence = cachePlugin;
}
this.logger = logger;
}
/**
* Set to true if cache state has changed since last time serialize or writeToPersistence was called
*/
var _proto = TokenCache.prototype;
_proto.hasChanged = function hasChanged() {
return this.cacheHasChanged;
}
/**
* Serializes in memory cache to JSON
*/
;
_proto.serialize = function serialize() {
this.logger.verbose("Serializing in-memory cache");
var finalState = Serializer.serializeAllCache(this.storage.getInMemoryCache()); // if cacheSnapshot not null or empty, merge
if (!msalCommon.StringUtils.isEmpty(this.cacheSnapshot)) {
this.logger.verbose("Reading cache snapshot from disk");
finalState = this.mergeState(JSON.parse(this.cacheSnapshot), finalState);
} else {
this.logger.verbose("No cache snapshot to merge");
}
this.cacheHasChanged = false;
return JSON.stringify(finalState);
}
/**
* Deserializes JSON to in-memory cache. JSON should be in MSAL cache schema format
* @param cache
*/
;
_proto.deserialize = function deserialize(cache) {
this.logger.verbose("Deserializing JSON to in-memory cache");
this.cacheSnapshot = cache;
if (!msalCommon.StringUtils.isEmpty(this.cacheSnapshot)) {
this.logger.verbose("Reading cache snapshot from disk");
var deserializedCache = Deserializer.deserializeAllCache(this.overlayDefaults(JSON.parse(this.cacheSnapshot)));
this.storage.setInMemoryCache(deserializedCache);
} else {
this.logger.verbose("No cache snapshot to deserialize");
}
};
_proto.getKVStore = function getKVStore() {
return this.storage.getCache();
}
/**
* API that retrieves all accounts currently in cache to the user
*/
;
_proto.getAllAccounts =
/*#__PURE__*/
function () {
var _getAllAccounts = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee() {
var cacheContext;
return runtime_1.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
this.logger.verbose("getAllAccounts called");
_context.prev = 1;
if (!this.persistence) {
_context.next = 6;
break;
}
cacheContext = new msalCommon.TokenCacheContext(this, false);
_context.next = 6;
return this.persistence.beforeCacheAccess(cacheContext);
case 6:
return _context.abrupt("return", this.storage.getAllAccounts());
case 7:
_context.prev = 7;
if (!(this.persistence && cacheContext)) {
_context.next = 11;
break;
}
_context.next = 11;
return this.persistence.afterCacheAccess(cacheContext);
case 11:
return _context.finish(7);
case 12:
case "end":
return _context.stop();
}
}
}, _callee, this, [[1,, 7, 12]]);
}));
function getAllAccounts() {
return _getAllAccounts.apply(this, arguments);
}
return getAllAccounts;
}()
/**
 * Returns the signed-in account matching homeAccountId
 * (the account object is created at the time of successful login),
 * or null when no matching account is found.
 * @returns {@link AccountInfo} - the account object stored in MSAL
*/
;
_proto.getAccountByHomeId =
/*#__PURE__*/
function () {
var _getAccountByHomeId = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee2(homeAccountId) {
var allAccounts;
return runtime_1.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
_context2.next = 2;
return this.getAllAccounts();
case 2:
allAccounts = _context2.sent;
if (!(!msalCommon.StringUtils.isEmpty(homeAccountId) && allAccounts && allAccounts.length)) {
_context2.next = 7;
break;
}
return _context2.abrupt("return", allAccounts.filter(function (accountObj) {
return accountObj.homeAccountId === homeAccountId;
})[0] || null);
case 7:
return _context2.abrupt("return", null);
case 8:
case "end":
return _context2.stop();
}
}
}, _callee2, this);
}));
function getAccountByHomeId(_x) {
return _getAccountByHomeId.apply(this, arguments);
}
return getAccountByHomeId;
}()
/**
 * Returns the signed-in account matching localAccountId
 * (the account object is created at the time of successful login),
 * or null when no matching account is found.
 * @returns {@link AccountInfo} - the account object stored in MSAL
*/
;
_proto.getAccountByLocalId =
/*#__PURE__*/
function () {
var _getAccountByLocalId = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee3(localAccountId) {
var allAccounts;
return runtime_1.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
_context3.next = 2;
return this.getAllAccounts();
case 2:
allAccounts = _context3.sent;
if (!(!msalCommon.StringUtils.isEmpty(localAccountId) && allAccounts && allAccounts.length)) {
_context3.next = 7;
break;
}
return _context3.abrupt("return", allAccounts.filter(function (accountObj) {
return accountObj.localAccountId === localAccountId;
})[0] || null);
case 7:
return _context3.abrupt("return", null);
case 8:
case "end":
return _context3.stop();
}
}
}, _callee3, this);
}));
function getAccountByLocalId(_x2) {
return _getAccountByLocalId.apply(this, arguments);
}
return getAccountByLocalId;
}()
/**
* API to remove a specific account and the relevant data from cache
* @param account
*/
;
_proto.removeAccount =
/*#__PURE__*/
function () {
var _removeAccount = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee4(account) {
var cacheContext;
return runtime_1.wrap(function _callee4$(_context4) {
while (1) {
switch (_context4.prev = _context4.next) {
case 0:
this.logger.verbose("removeAccount called");
_context4.prev = 1;
if (!this.persistence) {
_context4.next = 6;
break;
}
cacheContext = new msalCommon.TokenCacheContext(this, true);
_context4.next = 6;
return this.persistence.beforeCacheAccess(cacheContext);
case 6:
this.storage.removeAccount(msalCommon.AccountEntity.generateAccountCacheKey(account));
case 7:
_context4.prev = 7;
if (!(this.persistence && cacheContext)) {
_context4.next = 11;
break;
}
_context4.next = 11;
return this.persistence.afterCacheAccess(cacheContext);
case 11:
return _context4.finish(7);
case 12:
case "end":
return _context4.stop();
}
}
}, _callee4, this, [[1,, 7, 12]]);
}));
function removeAccount(_x3) {
return _removeAccount.apply(this, arguments);
}
return removeAccount;
}()
/**
* Called when the cache has changed state.
*/
;
_proto.handleChangeEvent = function handleChangeEvent() {
this.cacheHasChanged = true;
}
/**
* Merge in memory cache with the cache snapshot.
* @param oldState
* @param currentState
*/
;
_proto.mergeState = function mergeState(oldState, currentState) {
this.logger.verbose("Merging in-memory cache with cache snapshot");
var stateAfterRemoval = this.mergeRemovals(oldState, currentState);
return this.mergeUpdates(stateAfterRemoval, currentState);
}
/**
* Deep update of oldState based on newState values
* @param oldState
* @param newState
*/
;
_proto.mergeUpdates = function mergeUpdates(oldState, newState) {
var _this = this;
Object.keys(newState).forEach(function (newKey) {
var newValue = newState[newKey]; // if oldState does not contain value but newValue does, add it
if (!oldState.hasOwnProperty(newKey)) {
if (newValue !== null) {
oldState[newKey] = newValue;
}
} else {
// both oldState and newState contain the key, do deep update
var newValueNotNull = newValue !== null;
var newValueIsObject = typeof newValue === "object";
var newValueIsNotArray = !Array.isArray(newValue);
var oldStateNotUndefinedOrNull = typeof oldState[newKey] !== "undefined" && oldState[newKey] !== null;
if (newValueNotNull && newValueIsObject && newValueIsNotArray && oldStateNotUndefinedOrNull) {
_this.mergeUpdates(oldState[newKey], newValue);
} else {
oldState[newKey] = newValue;
}
}
});
return oldState;
}
/**
 * Removes entities in oldState that were removed from newState. Any unrecognized values at the
 * root of oldState are left untouched.
* @param oldState
* @param newState
*/
;
_proto.mergeRemovals = function mergeRemovals(oldState, newState) {
this.logger.verbose("Remove updated entries in cache");
var accounts = oldState.Account ? this.mergeRemovalsDict(oldState.Account, newState.Account) : oldState.Account;
var accessTokens = oldState.AccessToken ? this.mergeRemovalsDict(oldState.AccessToken, newState.AccessToken) : oldState.AccessToken;
var refreshTokens = oldState.RefreshToken ? this.mergeRemovalsDict(oldState.RefreshToken, newState.RefreshToken) : oldState.RefreshToken;
var idTokens = oldState.IdToken ? this.mergeRemovalsDict(oldState.IdToken, newState.IdToken) : oldState.IdToken;
var appMetadata = oldState.AppMetadata ? this.mergeRemovalsDict(oldState.AppMetadata, newState.AppMetadata) : oldState.AppMetadata;
return _extends({}, oldState, {
Account: accounts,
AccessToken: accessTokens,
RefreshToken: refreshTokens,
IdToken: idTokens,
AppMetadata: appMetadata
});
};
_proto.mergeRemovalsDict = function mergeRemovalsDict(oldState, newState) {
var finalState = _extends({}, oldState);
Object.keys(oldState).forEach(function (oldKey) {
if (!newState || !newState.hasOwnProperty(oldKey)) {
delete finalState[oldKey];
}
});
return finalState;
};
_proto.overlayDefaults = function overlayDefaults(passedInCache) {
this.logger.verbose("Overlaying input cache with the default cache");
return {
Account: _extends({}, defaultSerializedCache.Account, passedInCache.Account),
IdToken: _extends({}, defaultSerializedCache.IdToken, passedInCache.IdToken),
AccessToken: _extends({}, defaultSerializedCache.AccessToken, passedInCache.AccessToken),
RefreshToken: _extends({}, defaultSerializedCache.RefreshToken, passedInCache.RefreshToken),
AppMetadata: _extends({}, defaultSerializedCache.AppMetadata, passedInCache.AppMetadata)
};
};
return TokenCache;
}();
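/*
 * Usage sketch (illustrative; the fs/cachePath persistence is an assumption
 * about the host app, not part of this bundle): persisting the token cache
 * with serialize()/deserialize().
 *
 *   const tokenCache = app.getTokenCache();
 *   if (fs.existsSync(cachePath)) {
 *     tokenCache.deserialize(fs.readFileSync(cachePath, "utf-8"));
 *   }
 *   // ...acquire tokens...
 *   if (tokenCache.hasChanged()) {
 *     fs.writeFileSync(cachePath, tokenCache.serialize());
 *   }
 */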
var name = "@azure/msal-node";
var version = "1.0.0-beta.5";
var ClientApplication = /*#__PURE__*/function () {
/**
* Constructor for the ClientApplication
*/
function ClientApplication(configuration) {
this.config = buildAppConfiguration(configuration);
this.cryptoProvider = new CryptoProvider();
this.logger = new msalCommon.Logger(this.config.system.loggerOptions, name, version);
this.storage = new Storage(this.logger, this.config.auth.clientId, this.cryptoProvider);
this.tokenCache = new TokenCache(this.storage, this.logger, this.config.cache.cachePlugin);
}
/**
* Creates the URL of the authorization request, letting the user input credentials and consent to the
* application. The URL targets the /authorize endpoint of the authority configured in the
* application object.
*
* Once the user inputs their credentials and consents, the authority will send a response to the redirect URI
* sent in the request and should contain an authorization code, which can then be used to acquire tokens via
* `acquireTokenByCode(AuthorizationCodeRequest)`.
*/
var _proto = ClientApplication.prototype;
_proto.getAuthCodeUrl =
/*#__PURE__*/
function () {
var _getAuthCodeUrl = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee(request) {
var validRequest, authClientConfig, authorizationCodeClient;
return runtime_1.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
this.logger.info("getAuthCodeUrl called");
validRequest = _extends({}, request, this.initializeBaseRequest(request), {
responseMode: request.responseMode || msalCommon.ResponseMode.QUERY,
authenticationScheme: msalCommon.AuthenticationScheme.BEARER
});
_context.next = 4;
return this.buildOauthClientConfiguration(validRequest.authority);
case 4:
authClientConfig = _context.sent;
this.logger.verbose("Auth client config generated");
authorizationCodeClient = new msalCommon.AuthorizationCodeClient(authClientConfig);
return _context.abrupt("return", authorizationCodeClient.getAuthCodeUrl(validRequest));
case 8:
case "end":
return _context.stop();
}
}
}, _callee, this);
}));
function getAuthCodeUrl(_x) {
return _getAuthCodeUrl.apply(this, arguments);
}
return getAuthCodeUrl;
}()
/**
* Acquires a token by exchanging the Authorization Code received from the first step of OAuth2.0
* Authorization Code flow.
*
* `getAuthCodeUrl(AuthorizationCodeUrlRequest)` can be used to create the URL for the first step of OAuth2.0
* Authorization Code flow. Ensure that values for redirectUri and scopes in AuthorizationCodeUrlRequest and
* AuthorizationCodeRequest are the same.
*/
;
_proto.acquireTokenByCode =
/*#__PURE__*/
function () {
var _acquireTokenByCode = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee2(request) {
var validRequest, serverTelemetryManager, authClientConfig, authorizationCodeClient;
return runtime_1.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
this.logger.info("acquireTokenByCode called");
validRequest = _extends({}, request, this.initializeBaseRequest(request), {
authenticationScheme: msalCommon.AuthenticationScheme.BEARER
});
serverTelemetryManager = this.initializeServerTelemetryManager(ApiId.acquireTokenByCode, validRequest.correlationId);
_context2.prev = 3;
_context2.next = 6;
return this.buildOauthClientConfiguration(validRequest.authority, serverTelemetryManager);
case 6:
authClientConfig = _context2.sent;
this.logger.verbose("Auth client config generated");
authorizationCodeClient = new msalCommon.AuthorizationCodeClient(authClientConfig);
return _context2.abrupt("return", authorizationCodeClient.acquireToken(validRequest));
case 12:
_context2.prev = 12;
_context2.t0 = _context2["catch"](3);
serverTelemetryManager.cacheFailedRequest(_context2.t0);
throw _context2.t0;
case 16:
case "end":
return _context2.stop();
}
}
}, _callee2, this, [[3, 12]]);
}));
function acquireTokenByCode(_x2) {
return _acquireTokenByCode.apply(this, arguments);
}
return acquireTokenByCode;
}()
/**
* Acquires a token by exchanging the refresh token provided for a new set of tokens.
*
* This API is provided only for scenarios where you would like to migrate from ADAL to MSAL. Otherwise, it is
* recommended that you use `acquireTokenSilent()` for silent scenarios. When using `acquireTokenSilent()`, MSAL will
* handle the caching and refreshing of tokens automatically.
*/
;
_proto.acquireTokenByRefreshToken =
/*#__PURE__*/
function () {
var _acquireTokenByRefreshToken = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee3(request) {
var validRequest, serverTelemetryManager, refreshTokenClientConfig, refreshTokenClient;
return runtime_1.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
this.logger.info("acquireTokenByRefreshToken called");
validRequest = _extends({}, request, this.initializeBaseRequest(request), {
authenticationScheme: msalCommon.AuthenticationScheme.BEARER
});
serverTelemetryManager = this.initializeServerTelemetryManager(ApiId.acquireTokenByRefreshToken, validRequest.correlationId);
_context3.prev = 3;
_context3.next = 6;
return this.buildOauthClientConfiguration(validRequest.authority, serverTelemetryManager);
case 6:
refreshTokenClientConfig = _context3.sent;
this.logger.verbose("Auth client config generated");
refreshTokenClient = new msalCommon.RefreshTokenClient(refreshTokenClientConfig);
return _context3.abrupt("return", refreshTokenClient.acquireToken(validRequest));
case 12:
_context3.prev = 12;
_context3.t0 = _context3["catch"](3);
serverTelemetryManager.cacheFailedRequest(_context3.t0);
throw _context3.t0;
case 16:
case "end":
return _context3.stop();
}
}
}, _callee3, this, [[3, 12]]);
}));
function acquireTokenByRefreshToken(_x3) {
return _acquireTokenByRefreshToken.apply(this, arguments);
}
return acquireTokenByRefreshToken;
}()
/**
* Acquires a token silently when a user specifies the account the token is requested for.
*
* This API expects the user to provide an account object and looks into the cache to retrieve the token if present.
* There is also an optional "forceRefresh" boolean the user can send to bypass the cache for access_token and id_token.
* In case the refresh_token is expired or not found, an error is thrown
* and the guidance is for the user to call any interactive token acquisition API (eg: `acquireTokenByCode()`).
*/
;
_proto.acquireTokenSilent =
/*#__PURE__*/
function () {
var _acquireTokenSilent = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee4(request) {
var validRequest, serverTelemetryManager, silentFlowClientConfig, silentFlowClient;
return runtime_1.wrap(function _callee4$(_context4) {
while (1) {
switch (_context4.prev = _context4.next) {
case 0:
validRequest = _extends({}, request, this.initializeBaseRequest(request), {
forceRefresh: request.forceRefresh || false
});
serverTelemetryManager = this.initializeServerTelemetryManager(ApiId.acquireTokenSilent, validRequest.correlationId, validRequest.forceRefresh);
_context4.prev = 2;
_context4.next = 5;
return this.buildOauthClientConfiguration(validRequest.authority, serverTelemetryManager);
case 5:
silentFlowClientConfig = _context4.sent;
silentFlowClient = new msalCommon.SilentFlowClient(silentFlowClientConfig);
return _context4.abrupt("return", silentFlowClient.acquireToken(validRequest));
case 10:
_context4.prev = 10;
_context4.t0 = _context4["catch"](2);
serverTelemetryManager.cacheFailedRequest(_context4.t0);
throw _context4.t0;
case 14:
case "end":
return _context4.stop();
}
}
}, _callee4, this, [[2, 10]]);
}));
function acquireTokenSilent(_x4) {
return _acquireTokenSilent.apply(this, arguments);
}
return acquireTokenSilent;
}()
/**
* Gets the token cache for the application.
*/
;
_proto.getTokenCache = function getTokenCache() {
this.logger.info("getTokenCache called");
return this.tokenCache;
}
/**
* Returns the logger instance
*/
;
_proto.getLogger = function getLogger() {
return this.logger;
}
/**
* Replaces the default logger set in configurations with new Logger with new configurations
* @param logger Logger instance
*/
;
_proto.setLogger = function setLogger(logger) {
this.logger = logger;
};
_proto.buildOauthClientConfiguration = /*#__PURE__*/function () {
var _buildOauthClientConfiguration = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee5(authority, serverTelemetryManager) {
var discoveredAuthority;
return runtime_1.wrap(function _callee5$(_context5) {
while (1) {
switch (_context5.prev = _context5.next) {
case 0:
this.logger.verbose("buildOauthClientConfiguration called"); // using null assertion operator as we ensure that all config values have default values in buildConfiguration()
_context5.next = 3;
return this.createAuthority(authority);
case 3:
discoveredAuthority = _context5.sent;
return _context5.abrupt("return", {
authOptions: {
clientId: this.config.auth.clientId,
authority: discoveredAuthority,
clientCapabilities: this.config.auth.clientCapabilities
},
loggerOptions: {
loggerCallback: this.config.system.loggerOptions.loggerCallback,
piiLoggingEnabled: this.config.system.loggerOptions.piiLoggingEnabled
},
cryptoInterface: this.cryptoProvider,
networkInterface: this.config.system.networkClient,
storageInterface: this.storage,
serverTelemetryManager: serverTelemetryManager,
clientCredentials: {
clientSecret: this.clientSecret,
clientAssertion: this.clientAssertion ? this.getClientAssertion(discoveredAuthority) : undefined
},
libraryInfo: {
sku: Constants.MSAL_SKU,
version: version,
cpu: process.arch || "",
os: process.platform || ""
},
persistencePlugin: this.config.cache.cachePlugin,
serializableCache: this.tokenCache
});
case 5:
case "end":
return _context5.stop();
}
}
}, _callee5, this);
}));
function buildOauthClientConfiguration(_x5, _x6) {
return _buildOauthClientConfiguration.apply(this, arguments);
}
return buildOauthClientConfiguration;
}();
_proto.getClientAssertion = function getClientAssertion(authority) {
return {
assertion: this.clientAssertion.getJwt(this.cryptoProvider, this.config.auth.clientId, authority.tokenEndpoint),
assertionType: Constants.JWT_BEARER_ASSERTION_TYPE
};
}
/**
* Generates a request with the default scopes & generates a correlationId.
* @param authRequest
*/
;
_proto.initializeBaseRequest = function initializeBaseRequest(authRequest) {
this.logger.verbose("initializeRequestScopes called");
return _extends({}, authRequest, {
scopes: [].concat(authRequest && authRequest.scopes || [], [msalCommon.Constants.OPENID_SCOPE, msalCommon.Constants.PROFILE_SCOPE, msalCommon.Constants.OFFLINE_ACCESS_SCOPE]),
correlationId: authRequest && authRequest.correlationId || this.cryptoProvider.createNewGuid(),
authority: authRequest.authority || this.config.auth.authority
});
};
_proto.initializeServerTelemetryManager = function initializeServerTelemetryManager(apiId, correlationId, forceRefresh) {
var telemetryPayload = {
clientId: this.config.auth.clientId,
correlationId: correlationId,
apiId: apiId,
forceRefresh: forceRefresh || false
};
return new msalCommon.ServerTelemetryManager(telemetryPayload, this.storage);
}
/**
* Create authority instance. If authority not passed in request, default to authority set on the application
* object. If no authority set in application object, then default to common authority.
* @param authorityString
*/
;
_proto.createAuthority =
/*#__PURE__*/
function () {
var _createAuthority = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee6(authorityString) {
var authorityOptions;
return runtime_1.wrap(function _callee6$(_context6) {
while (1) {
switch (_context6.prev = _context6.next) {
case 0:
this.logger.verbose("createAuthority called");
authorityOptions = {
protocolMode: this.config.auth.protocolMode,
knownAuthorities: this.config.auth.knownAuthorities,
cloudDiscoveryMetadata: this.config.auth.cloudDiscoveryMetadata,
authorityMetadata: this.config.auth.authorityMetadata
};
_context6.next = 4;
return msalCommon.AuthorityFactory.createDiscoveredInstance(authorityString, this.config.system.networkClient, this.storage, authorityOptions);
case 4:
return _context6.abrupt("return", _context6.sent);
case 5:
case "end":
return _context6.stop();
}
}
}, _callee6, this);
}));
function createAuthority(_x7) {
return _createAuthority.apply(this, arguments);
}
return createAuthority;
}();
return ClientApplication;
}();
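/*
 * Usage sketch (illustrative; the redirect plumbing is up to the host app):
 * the two-step authorization code flow implemented by getAuthCodeUrl() and
 * acquireTokenByCode() above. redirectUri and scopes must match between the
 * two requests.
 *
 *   const urlRequest = { scopes: ["user.read"], redirectUri: "http://localhost:3000/redirect" };
 *   app.getAuthCodeUrl(urlRequest).then((url) => redirectUserTo(url));
 *   // later, in the redirect handler:
 *   app.acquireTokenByCode({ ...urlRequest, code: receivedAuthCode })
 *     .then((result) => console.log(result.account));
 */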
/**
* This class is to be used to acquire tokens for public client applications (desktop, mobile). Public client applications
 * are not trusted to safely store application secrets, and therefore can only request tokens in the name of a user.
*/
var PublicClientApplication = /*#__PURE__*/function (_ClientApplication) {
_inheritsLoose(PublicClientApplication, _ClientApplication);
/**
* Important attributes in the Configuration object for auth are:
* - clientID: the application ID of your application. You can obtain one by registering your application with our Application registration portal.
* - authority: the authority URL for your application.
*
* AAD authorities are of the form https://login.microsoftonline.com/{Enter_the_Tenant_Info_Here}.
* - If your application supports Accounts in one organizational directory, replace "Enter_the_Tenant_Info_Here" value with the Tenant Id or Tenant name (for example, contoso.microsoft.com).
* - If your application supports Accounts in any organizational directory, replace "Enter_the_Tenant_Info_Here" value with organizations.
* - If your application supports Accounts in any organizational directory and personal Microsoft accounts, replace "Enter_the_Tenant_Info_Here" value with common.
* - To restrict support to Personal Microsoft accounts only, replace "Enter_the_Tenant_Info_Here" value with consumers.
*
* Azure B2C authorities are of the form https://{instance}/{tenant}/{policy}. Each policy is considered
 * its own authority. You will have to set all of the knownAuthorities at the time of the client application
* construction.
*
* ADFS authorities are of the form https://{instance}/adfs.
*/
function PublicClientApplication(configuration) {
return _ClientApplication.call(this, configuration) || this;
}
/**
* Acquires a token from the authority using OAuth2.0 device code flow.
* This flow is designed for devices that do not have access to a browser or have input constraints.
* The authorization server issues a DeviceCode object with a verification code, an end-user code,
* and the end-user verification URI. The DeviceCode object is provided through a callback, and the end-user should be
* instructed to use another device to navigate to the verification URI to input credentials.
* Since the client cannot receive incoming requests, it polls the authorization server repeatedly
* until the end-user completes input of credentials.
*/
var _proto = PublicClientApplication.prototype;
_proto.acquireTokenByDeviceCode =
/*#__PURE__*/
function () {
var _acquireTokenByDeviceCode = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee(request) {
var validRequest, serverTelemetryManager, deviceCodeConfig, deviceCodeClient;
return runtime_1.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
this.logger.info("acquireTokenByDeviceCode called");
validRequest = _extends({}, request, this.initializeBaseRequest(request));
serverTelemetryManager = this.initializeServerTelemetryManager(ApiId.acquireTokenByDeviceCode, validRequest.correlationId);
_context.prev = 3;
_context.next = 6;
return this.buildOauthClientConfiguration(validRequest.authority, serverTelemetryManager);
case 6:
deviceCodeConfig = _context.sent;
this.logger.verbose("Auth client config generated");
deviceCodeClient = new msalCommon.DeviceCodeClient(deviceCodeConfig);
return _context.abrupt("return", deviceCodeClient.acquireToken(validRequest));
case 12:
_context.prev = 12;
_context.t0 = _context["catch"](3);
serverTelemetryManager.cacheFailedRequest(_context.t0);
throw _context.t0;
case 16:
case "end":
return _context.stop();
}
}
}, _callee, this, [[3, 12]]);
}));
function acquireTokenByDeviceCode(_x) {
return _acquireTokenByDeviceCode.apply(this, arguments);
}
return acquireTokenByDeviceCode;
}()
/**
 * Acquires tokens with the password grant by exchanging the user's username and password for credentials
*
* The latest OAuth 2.0 Security Best Current Practice disallows the password grant entirely.
* More details on this recommendation at https://tools.ietf.org/html/draft-ietf-oauth-security-topics-13#section-3.4
* Microsoft's documentation and recommendations are at:
* https://docs.microsoft.com/en-us/azure/active-directory/develop/msal-authentication-flows#usernamepassword
*
* @param request
*/
;
_proto.acquireTokenByUsernamePassword =
/*#__PURE__*/
function () {
var _acquireTokenByUsernamePassword = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee2(request) {
var validRequest, serverTelemetryManager, usernamePasswordClientConfig, usernamePasswordClient;
return runtime_1.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
this.logger.info("acquireTokenByUsernamePassword called");
validRequest = _extends({}, request, this.initializeBaseRequest(request));
serverTelemetryManager = this.initializeServerTelemetryManager(ApiId.acquireTokenByUsernamePassword, validRequest.correlationId);
_context2.prev = 3;
_context2.next = 6;
return this.buildOauthClientConfiguration(validRequest.authority, serverTelemetryManager);
case 6:
usernamePasswordClientConfig = _context2.sent;
this.logger.verbose("Auth client config generated");
usernamePasswordClient = new msalCommon.UsernamePasswordClient(usernamePasswordClientConfig);
return _context2.abrupt("return", usernamePasswordClient.acquireToken(validRequest));
case 12:
_context2.prev = 12;
_context2.t0 = _context2["catch"](3);
serverTelemetryManager.cacheFailedRequest(_context2.t0);
throw _context2.t0;
case 16:
case "end":
return _context2.stop();
}
}
}, _callee2, this, [[3, 12]]);
}));
function acquireTokenByUsernamePassword(_x2) {
return _acquireTokenByUsernamePassword.apply(this, arguments);
}
return acquireTokenByUsernamePassword;
}();
return PublicClientApplication;
}(ClientApplication);
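/*
 * Usage sketch (illustrative; the clientId is a placeholder): the device code
 * flow exposed by the class above. deviceCodeCallback receives the
 * user code / verification URI message to display to the end user.
 *
 *   const pca = new PublicClientApplication({ auth: { clientId: "<client-id>" } });
 *   pca.acquireTokenByDeviceCode({
 *     scopes: ["user.read"],
 *     deviceCodeCallback: (response) => console.log(response.message)
 *   }).then((result) => console.log(result.accessToken));
 */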
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
/**
* Client assertion of type jwt-bearer used in confidential client flows
*/
var ClientAssertion = /*#__PURE__*/function () {
function ClientAssertion() {}
ClientAssertion.fromAssertion = function fromAssertion(assertion) {
var clientAssertion = new ClientAssertion();
clientAssertion.jwt = assertion;
return clientAssertion;
};
ClientAssertion.fromCertificate = function fromCertificate(thumbprint, privateKey, publicCertificate) {
var clientAssertion = new ClientAssertion();
clientAssertion.privateKey = privateKey;
clientAssertion.thumbprint = thumbprint;
if (publicCertificate) {
clientAssertion.publicCertificate = this.parseCertificate(publicCertificate);
}
return clientAssertion;
};
var _proto = ClientAssertion.prototype;
_proto.getJwt = function getJwt(cryptoProvider, issuer, jwtAudience) {
// if assertion was created from certificate, check if jwt is expired and create new one.
if (this.privateKey && this.thumbprint) {
if (this.jwt && !this.isExpired() && issuer === this.issuer && jwtAudience === this.jwtAudience) {
return this.jwt;
}
return this.createJwt(cryptoProvider, issuer, jwtAudience);
}
/*
* if assertion was created by caller, then we just append it. It is up to the caller to
* ensure that it contains necessary claims and that it is not expired.
*/
if (this.jwt) {
return this.jwt;
}
throw msalCommon.ClientAuthError.createInvalidAssertionError();
} // JWT format and required claims specified: https://tools.ietf.org/html/rfc7523#section-3
;
_proto.createJwt = function createJwt(cryptoProvider, issuer, jwtAudience) {
var _header, _payload;
this.issuer = issuer;
this.jwtAudience = jwtAudience;
var issuedAt = msalCommon.TimeUtils.nowSeconds();
this.expirationTime = issuedAt + 600;
var header = (_header = {}, _header[JwtConstants.ALGORITHM] = JwtConstants.RSA_256, _header[JwtConstants.X5T] = EncodingUtils.base64EncodeUrl(this.thumbprint, "hex"), _header);
if (this.publicCertificate) {
var _Object$assign;
Object.assign(header, (_Object$assign = {}, _Object$assign[JwtConstants.X5C] = this.publicCertificate, _Object$assign));
}
var payload = (_payload = {}, _payload[JwtConstants.AUDIENCE] = this.jwtAudience, _payload[JwtConstants.EXPIRATION_TIME] = this.expirationTime, _payload[JwtConstants.ISSUER] = this.issuer, _payload[JwtConstants.SUBJECT] = this.issuer, _payload[JwtConstants.NOT_BEFORE] = issuedAt, _payload[JwtConstants.JWT_ID] = cryptoProvider.createNewGuid(), _payload);
this.jwt = jsonwebtoken.sign(payload, this.privateKey, {
header: header
});
return this.jwt;
};
_proto.isExpired = function isExpired() {
return this.expirationTime < msalCommon.TimeUtils.nowSeconds();
}
/**
* Extracts the raw certs from a given certificate string and returns them in an array.
* @param publicCertificate
*/
;
ClientAssertion.parseCertificate = function parseCertificate(publicCertificate) {
/**
     * This is a regex to identify the certs in a given certificate string.
     * We want to capture the contents between the BEGIN and END certificate markers, without the surrounding newlines.
     * The capture group "([\s\S]+?)" isolates the cert body we want.
     * "[\s\S]" matches any character including newlines, "+" means match 1 or more times, and "?" makes the match lazy (shortest match).
     * The "g" flag at the end of the regex means the string is searched globally, so every cert block is found.
*/
var regexToFindCerts = /\x2D\x2D\x2D\x2D\x2DBEGIN CERTIFICATE\x2D\x2D\x2D\x2D\x2D\n([\s\S]+?)\n\x2D\x2D\x2D\x2D\x2DEND CERTIFICATE\x2D\x2D\x2D\x2D\x2D/g;
var certs = [];
var matches;
while ((matches = regexToFindCerts.exec(publicCertificate)) !== null) {
      // matches[1] is the first parens capture group in the regex; the global flag strips every newline from the cert body.
      certs.push(matches[1].replace(/\n/g, ""));
}
return certs;
};
return ClientAssertion;
}();
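/*
 * Usage sketch (illustrative; thumbprint and key are placeholders): building
 * a certificate-based assertion. getJwt() reuses the signed JWT until it
 * expires or the issuer/audience changes, per the logic above.
 *
 *   const assertion = ClientAssertion.fromCertificate("<sha1-thumbprint>", privateKeyPem);
 *   const jwt = assertion.getJwt(cryptoProvider, clientId, tokenEndpoint);
 */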
var ConfidentialClientApplication = /*#__PURE__*/function (_ClientApplication) {
_inheritsLoose(ConfidentialClientApplication, _ClientApplication);
/**
* @constructor
* Constructor for the ConfidentialClientApplication
*
* Required attributes in the Configuration object are:
* - clientID: the application ID of your application. You can obtain one by registering your application with our application registration portal
* - authority: the authority URL for your application.
* - client credential: Must set either client secret, certificate, or assertion for confidential clients. You can obtain a client secret from the application registration portal.
*
 * In Azure AD, the authority is a URL of the form https://login.microsoftonline.com/{Enter_the_Tenant_Info_Here}.
* If your application supports Accounts in one organizational directory, replace "Enter_the_Tenant_Info_Here" value with the Tenant Id or Tenant name (for example, contoso.microsoft.com).
* If your application supports Accounts in any organizational directory, replace "Enter_the_Tenant_Info_Here" value with organizations.
* If your application supports Accounts in any organizational directory and personal Microsoft accounts, replace "Enter_the_Tenant_Info_Here" value with common.
* To restrict support to Personal Microsoft accounts only, replace "Enter_the_Tenant_Info_Here" value with consumers.
*
* In Azure B2C, authority is of the form https://{instance}/tfp/{tenant}/{policyName}/
* Full B2C functionality will be available in this library in future versions.
*
* @param {@link (Configuration:type)} configuration object for the MSAL ConfidentialClientApplication instance
*/
function ConfidentialClientApplication(configuration) {
var _this;
_this = _ClientApplication.call(this, configuration) || this;
_this.setClientCredential(_this.config);
return _this;
}
/**
* Acquires tokens from the authority for the application (not for an end user).
*/
var _proto = ConfidentialClientApplication.prototype;
_proto.acquireTokenByClientCredential =
/*#__PURE__*/
function () {
var _acquireTokenByClientCredential = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee(request) {
var validRequest, serverTelemetryManager, clientCredentialConfig, clientCredentialClient;
return runtime_1.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
this.logger.info("acquireTokenByClientCredential called");
validRequest = _extends({}, request, this.initializeBaseRequest(request));
serverTelemetryManager = this.initializeServerTelemetryManager(ApiId.acquireTokenByClientCredential, validRequest.correlationId, validRequest.skipCache);
_context.prev = 3;
_context.next = 6;
return this.buildOauthClientConfiguration(validRequest.authority, serverTelemetryManager);
case 6:
clientCredentialConfig = _context.sent;
this.logger.verbose("Auth client config generated");
clientCredentialClient = new msalCommon.ClientCredentialClient(clientCredentialConfig);
return _context.abrupt("return", clientCredentialClient.acquireToken(validRequest));
case 12:
_context.prev = 12;
_context.t0 = _context["catch"](3);
serverTelemetryManager.cacheFailedRequest(_context.t0);
throw _context.t0;
case 16:
case "end":
return _context.stop();
}
}
}, _callee, this, [[3, 12]]);
}));
function acquireTokenByClientCredential(_x) {
return _acquireTokenByClientCredential.apply(this, arguments);
}
return acquireTokenByClientCredential;
}()
/**
* Acquires tokens from the authority for the application.
*
* Used in scenarios where the current app is a middle-tier service which was called with a token
* representing an end user. The current app can use the token (oboAssertion) to request another
* token to access downstream web API, on behalf of that user.
*
* The current middle-tier app has no user interaction to obtain consent.
* See how to gain consent upfront for your middle-tier app from this article.
* https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-on-behalf-of-flow#gaining-consent-for-the-middle-tier-application
*/
;
_proto.acquireTokenOnBehalfOf =
/*#__PURE__*/
function () {
var _acquireTokenOnBehalfOf = /*#__PURE__*/_asyncToGenerator( /*#__PURE__*/runtime_1.mark(function _callee2(request) {
var validRequest, clientCredentialConfig, oboClient;
return runtime_1.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
this.logger.info("acquireTokenOnBehalfOf called");
validRequest = _extends({}, request, this.initializeBaseRequest(request));
_context2.next = 4;
return this.buildOauthClientConfiguration(validRequest.authority);
case 4:
clientCredentialConfig = _context2.sent;
this.logger.verbose("Auth client config generated");
oboClient = new msalCommon.OnBehalfOfClient(clientCredentialConfig);
return _context2.abrupt("return", oboClient.acquireToken(validRequest));
case 8:
case "end":
return _context2.stop();
}
}
}, _callee2, this);
}));
function acquireTokenOnBehalfOf(_x2) {
return _acquireTokenOnBehalfOf.apply(this, arguments);
}
return acquireTokenOnBehalfOf;
}();
_proto.setClientCredential = function setClientCredential(configuration) {
var clientSecretNotEmpty = !msalCommon.StringUtils.isEmpty(configuration.auth.clientSecret);
var clientAssertionNotEmpty = !msalCommon.StringUtils.isEmpty(configuration.auth.clientAssertion);
var certificate = configuration.auth.clientCertificate;
var certificateNotEmpty = !msalCommon.StringUtils.isEmpty(certificate.thumbprint) || !msalCommon.StringUtils.isEmpty(certificate.privateKey); // Check that at most one credential is set on the application
if (clientSecretNotEmpty && clientAssertionNotEmpty || clientAssertionNotEmpty && certificateNotEmpty || clientSecretNotEmpty && certificateNotEmpty) {
throw msalCommon.ClientAuthError.createInvalidCredentialError();
}
if (clientSecretNotEmpty) {
this.clientSecret = configuration.auth.clientSecret;
return;
}
if (clientAssertionNotEmpty) {
this.clientAssertion = ClientAssertion.fromAssertion(configuration.auth.clientAssertion);
return;
}
if (!certificateNotEmpty) {
throw msalCommon.ClientAuthError.createInvalidCredentialError();
} else {
var _configuration$auth$c;
this.clientAssertion = ClientAssertion.fromCertificate(certificate.thumbprint, certificate.privateKey, (_configuration$auth$c = configuration.auth.clientCertificate) == null ? void 0 : _configuration$auth$c.x5c);
}
};
return ConfidentialClientApplication;
}(ClientApplication);
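/*
 * Usage sketch (illustrative; ids and secret are placeholders): the client
 * credentials flow for a daemon app via the class above.
 *
 *   const cca = new ConfidentialClientApplication({
 *     auth: {
 *       clientId: "<client-id>",
 *       authority: "https://login.microsoftonline.com/<tenant-id>",
 *       clientSecret: "<client-secret>"
 *     }
 *   });
 *   cca.acquireTokenByClientCredential({ scopes: ["https://graph.microsoft.com/.default"] })
 *     .then((result) => console.log(result.accessToken));
 */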
Object.defineProperty(exports, 'AuthError', {
enumerable: true,
get: function () {
return msalCommon.AuthError;
}
});
Object.defineProperty(exports, 'AuthErrorMessage', {
enumerable: true,
get: function () {
return msalCommon.AuthErrorMessage;
}
});
Object.defineProperty(exports, 'ClientAuthError', {
enumerable: true,
get: function () {
return msalCommon.ClientAuthError;
}
});
Object.defineProperty(exports, 'ClientAuthErrorMessage', {
enumerable: true,
get: function () {
return msalCommon.ClientAuthErrorMessage;
}
});
Object.defineProperty(exports, 'ClientConfigurationError', {
enumerable: true,
get: function () {
return msalCommon.ClientConfigurationError;
}
});
Object.defineProperty(exports, 'ClientConfigurationErrorMessage', {
enumerable: true,
get: function () {
return msalCommon.ClientConfigurationErrorMessage;
}
});
Object.defineProperty(exports, 'InteractionRequiredAuthError', {
enumerable: true,
get: function () {
return msalCommon.InteractionRequiredAuthError;
}
});
Object.defineProperty(exports, 'LogLevel', {
enumerable: true,
get: function () {
return msalCommon.LogLevel;
}
});
Object.defineProperty(exports, 'Logger', {
enumerable: true,
get: function () {
return msalCommon.Logger;
}
});
Object.defineProperty(exports, 'PromptValue', {
enumerable: true,
get: function () {
return msalCommon.PromptValue;
}
});
Object.defineProperty(exports, 'ProtocolMode', {
enumerable: true,
get: function () {
return msalCommon.ProtocolMode;
}
});
Object.defineProperty(exports, 'ResponseMode', {
enumerable: true,
get: function () {
return msalCommon.ResponseMode;
}
});
Object.defineProperty(exports, 'ServerError', {
enumerable: true,
get: function () {
return msalCommon.ServerError;
}
});
Object.defineProperty(exports, 'TokenCacheContext', {
enumerable: true,
get: function () {
return msalCommon.TokenCacheContext;
}
});
exports.ConfidentialClientApplication = ConfidentialClientApplication;
exports.CryptoProvider = CryptoProvider;
exports.Deserializer = Deserializer;
exports.PublicClientApplication = PublicClientApplication;
exports.Serializer = Serializer;
exports.Storage = Storage;
exports.TokenCache = TokenCache;
exports.buildAppConfiguration = buildAppConfiguration;
//# sourceMappingURL=msal-node.cjs.development.js.map
|
import os
import unittest
import numpy as np
import adetector.train as train
from adetector.config import TEST_DATA_FOLDER, N_MFCC, N_TIMEBINS
from adetector.DataGenerator import DataGenerator_Sup
class TestUtils(unittest.TestCase):
def setUp(self):
self.n_ads = 2293 # number of ad files
self.n_music = 1013 # number of music files
self.n_podcasts = 300 # number of podcast files
self.pos_files = np.load(os.path.join(TEST_DATA_FOLDER,
'pos_file_paths.npy'))
self.music_files = np.load(os.path.join(TEST_DATA_FOLDER,
'music_file_paths.npy'))
self.podcast_files = np.load(os.path.join(TEST_DATA_FOLDER,
'podcast_file_paths.npy'))
self.train_files = np.load(os.path.join(TEST_DATA_FOLDER,
'train_file_paths.npy'))
self.test_files = np.load(os.path.join(TEST_DATA_FOLDER,
'test_file_paths.npy'))
def test_list_data_output(self):
a, m, p = train.list_data()
self.assertAlmostEqual(len(a), self.n_ads)
self.assertAlmostEqual(len(m), self.n_music)
self.assertAlmostEqual(len(p), self.n_podcasts)
def test_create_data_generators_length(self):
trng, tstg = train.create_data_generators(self.pos_files,
self.music_files)
self.assertAlmostEqual(len(trng), 180)
self.assertAlmostEqual(len(tstg), 20)
def test_create_data_generators_output_shape(self):
trng, tstg = train.create_data_generators(self.pos_files,
self.music_files)
X, Y = trng.__getitem__(0)
self.assertTupleEqual(X.shape[1:], (N_MFCC, N_TIMEBINS, 1))
self.assertAlmostEqual(Y.shape[1], 1)
self.assertAlmostEqual(X.shape[0], Y.shape[0])
X1, Y1 = tstg.__getitem__(0)
self.assertTupleEqual(X1.shape[1:], (N_MFCC, N_TIMEBINS, 1))
self.assertAlmostEqual(Y1.shape[1], 1)
self.assertAlmostEqual(Y1.shape[0], X1.shape[0])
def test_create_data_generators_length_podcast_case(self):
trng, tstg = train.create_data_generators(self.pos_files,
self.podcast_files,
neg_type=True,
data_minutes=100)
self.assertAlmostEqual(len(trng), 31)
self.assertAlmostEqual(len(tstg), 3)
def test_create_data_generators_assertion_error_not_enough_files(self):
self.assertRaises(AssertionError, lambda: train.create_data_generators(
self.pos_files,
self.podcast_files,
neg_type=True))
def test_create_data_generators_balanced_batch(self):
trng, _ = train.create_data_generators(self.pos_files,
self.music_files)
_, Y = trng.__getitem__(1)
mean_value = np.mean(Y)
self.assertTrue(mean_value > 0.25)
self.assertTrue(mean_value < 0.75)
def test_create_data_generators_batch_normalization(self):
trng, _ = train.create_data_generators(self.pos_files,
self.music_files)
X, _ = trng.__getitem__(2)
mu = np.mean(X, axis=0)
std = np.std(X, axis=0)
zeros_array = np.zeros((N_MFCC, N_TIMEBINS, 1))
ones_array = np.ones((N_MFCC, N_TIMEBINS, 1))
self.assertTrue(np.allclose(mu, zeros_array))
self.assertTrue(np.allclose(std, ones_array))
def test_train_CNN_model_checkpoint(self):
if os.path.exists('model1.hdf5'):
os.remove('model1.hdf5')
train_generator = DataGenerator_Sup(self.train_files, dataset='train',
CNN=True)
_ = train.train_CNN_model(train_generator, epochs=1)
self.assertTrue(os.path.exists('model1.hdf5'))
def test_train_CNN_model_history_output(self):
epochs = 3
train_generator = DataGenerator_Sup(self.train_files, dataset='train',
CNN=True)
history = train.train_CNN_model(train_generator, epochs=epochs)
self.assertAlmostEqual(len(history['loss']), epochs)
self.assertTrue(history['loss'][0] > history['loss'][-1])
os.remove('model1.hdf5')
def test_evaluate_model_output(self):
test_generator = DataGenerator_Sup(self.test_files, dataset='test',
CNN=True)
loss, acc = train.evaluate_model(os.path.join(TEST_DATA_FOLDER,
'model1.hdf5'),
test_generator)
self.assertTrue(loss < 1)
self.assertTrue(acc > 0.5 and acc < 1)
if __name__ == '__main__':
unittest.main()
|
import numpy as np
from tools import sound_freq_sweep
from psychopy.sound import Sound
p = dict(
# Display:
monitor = 'testMonitor',#'ESI_psychophys',
full_screen = False,
screen_number = 0, #1,
refresh_rate = 60, # Hz
# Sounds:
correct_sound = Sound(sound_freq_sweep(2000,2000,.1)),
incorrect_sound = Sound(sound_freq_sweep(8000, 200, .1)),
# General:
n_trials = 250,
break_trials = 50,
fixation_size = 0.25,
rgb = np.array([1.,1.,1.]),
cue_reliability = 0.7,
# Stimuli:
res = 128,
temporal_freq = 4, # 0 for no flicker
sf = 4, # cycles/deg
ecc = 6, # dva
center_size = 3,
surr_size = 8,
center_contrast = 0.5,
center_c_var = 0.05,
surr_contrast = 1,
div_color = -1,
# Staircase:
start_amp = 0.05,
step = 0.01,
# Timing:
cue_dur = 5.5,
cue_to_stim = 5.3,
stim_dur = 0.38,
stim_to_stim = 0.4,
iti = .2,
)
|
"""
Adapted from:
https://github.com/dl4sits/BreizhCrops
Original implementation of Transformer model:
https://github.com/dl4sits/BreizhCrops/blob/master/breizhcrops/models/TransformerModel.py
"""
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.nn.modules import LayerNorm, Linear, ReLU
from torch.nn.modules.transformer import TransformerEncoder, TransformerEncoderLayer
from ..base import BaseMulticlassClassifier
from .schemas import TransformerModelSchema
class TransformerModel(BaseMulticlassClassifier):
"""Transformer Model for Multi-Class Classification"""
schema = TransformerModelSchema
def __init__(self, config):
super().__init__(config)
encoder_layer = TransformerEncoderLayer(
self.config.d_model,
self.config.n_head,
self.config.d_inner,
self.config.dropout,
self.config.activation,
)
encoder_norm = LayerNorm(self.config.d_model)
self.model.inlinear = Linear(self.config.input_dim, self.config.d_model)
self.model.relu = ReLU()
self.model.transformerencoder = TransformerEncoder(
encoder_layer, self.config.n_layers, encoder_norm
)
self.model.flatten = Flatten()
self.model.outlinear = Linear(self.config.d_model, self.config.num_classes)
def forward(self, x):
x = self.model.inlinear(x)
x = self.model.relu(x)
x = x.transpose(0, 1) # N x T x D -> T x N x D
x = self.model.transformerencoder(x)
x = x.transpose(0, 1) # T x N x D -> N x T x D
x = x.max(1)[0]  # global max pooling over the time dimension
x = self.model.relu(x)
logits = self.model.outlinear(x)
logprobabilities = F.log_softmax(logits, dim=-1)
return logprobabilities
def load_optimizer(self):
"""Load the optimizer"""
return optim.Adam(
self.model.parameters(),
lr=self.config.learning_rate,
weight_decay=self.config.weight_decay,
)
class Flatten(nn.Module):
"""Flatten module"""
def forward(self, input):
return input.reshape(input.size(0), -1)
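# A minimal shape sketch (assumed sizes) of the path in forward() above: input is
# batch-first (N, T, D); this encoder expects (T, N, D), hence the two transposes;
# the max over dim 1 pools each sequence down to a single feature vector.
#
#   import torch
#   x = torch.randn(32, 45, 64)                     # N=32, T=45, d_model=64
#   layer = TransformerEncoderLayer(64, 4, 128)     # d_model, n_head, d_inner
#   encoder = TransformerEncoder(layer, 2, LayerNorm(64))
#   y = encoder(x.transpose(0, 1)).transpose(0, 1)  # (T, N, D) in, (N, T, D) out
#   pooled = y.max(1)[0]                            # shape (N, 64)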
|
/*Copyright (c) 2019 , saberyjs@gmail.com
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All advertising materials mentioning features or use of this software
must display the following acknowledgement:
This product includes software developed by the <organization>.
4. Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef STL_CLONE_CLIENT_H
#define STL_CLONE_CLIENT_H
#include "global_header.h"
#include "buffer.h"
#include "http.h"
#include "log.h"
#include "http_response.h"
#define REQUEST_METHOD_GET 1
#define REQUEST_METHOD_POST 2
#define STATUS_RECEIVING_HEADER 1
#define STATUS_HEADER_RECEIVED_FINISHED 2
#define STATUS_RECEIVING_BODY 3
#define STATUS_RECEIVED_FROM_CLIENT_FINISHED 4
#define HTTP_REQUEST_LINE_METHOD 1
#define HTTP_REQUEST_LINE_URL 2
#define HTTP_REQUEST_LINE_HTTP_VERSION 3
#define HTTP_REQUEST_LINE_FINISHED 4
struct HttpHeader {
char *name;
char *value;
};
struct HttpQueryParam {
char *name;
char *value;
};
struct Client {
struct HttpRequest *request;//belong to which http request
struct sockaddr_in *addr; //only support ipv4
char *ip; //client ip address
struct HashTable *headers; //client headers
float http_version; //http version
char *request_url; //full request url
int method; //request method
char *port; //request port
char *host; //request host
char *uri; //request url except domain and query string
int sock; //client socket file descriptor
unsigned int content_length;// request body length
short status;//current client parse status
struct ClientBuffer *buffer;//buffer that store client input
short request_line_parse_status;//request line parse status
short ssl;//ssl request
struct HashTable *query;//params in query string
char *hasTag;//hash tag
struct Log *log;//for debug
struct HttpResponse *response;
char *query_string;//full query string
char *protocol_version;//protocol version
char *tMethod;//method text
char *entry_file;//entry file
char *script_name;//script_name for cgi
char *path_info;//path info for cgi
char *content_type;//content type
};
struct Client *ClientCreate(int sock, struct sockaddr_in *clientAddr);
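/* A minimal usage sketch (assumptions: listen_fd is an already-bound, listening
 * IPv4 socket; error handling is elided; this header declares no destroy
 * counterpart, so cleanup is left to the caller):
 *
 *   struct sockaddr_in addr;
 *   socklen_t len = sizeof(addr);
 *   int fd = accept(listen_fd, (struct sockaddr *) &addr, &len);
 *   if (fd >= 0) {
 *       struct Client *client = ClientCreate(fd, &addr);
 *       // hand client off to the request parser (not part of this header)
 *   }
 */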
#endif //STL_CLONE_CLIENT_H
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const cdk = require("@aws-cdk/cdk");
const path = require("path");
const lambda = require("../lib");
const app = new cdk.App();
const stack = new cdk.Stack(app, 'aws-cdk-layer-version-1');
// Just for the example - granting to the current account is not necessary.
const awsAccountId = stack.accountId;
/// !show
const layer = new lambda.LayerVersion(stack, 'MyLayer', {
code: lambda.Code.directory(path.join(__dirname, 'layer-code')),
compatibleRuntimes: [lambda.Runtime.NodeJS810],
license: 'Apache-2.0',
description: 'A layer to test the L2 construct',
});
// To grant usage by other AWS accounts
layer.grantUsage('remote-account-grant', { accountId: awsAccountId });
// To grant usage to all accounts in some AWS Organization
// layer.grantUsage({ accountId: '*', organizationId });
new lambda.Function(stack, 'MyLayeredLambda', {
code: new lambda.InlineCode('foo'),
handler: 'index.handler',
runtime: lambda.Runtime.NodeJS810,
layers: [layer],
});
/// !hide
app.run();
|
module.exports.Token = require('./token.model');
module.exports.User = require('./user.model');
module.exports.Thai = require('./thai.model');
module.exports.Lop = require('./lop.model');
module.exports.Sinhvien = require('./sinhvien.model');
module.exports.ApparelSize = require('./apparelSize.model');
module.exports.Product = require('./product.model');
module.exports.ProductCategories = require('./productCategories.model');
|
from rest_framework import serializers
from rest_framework.utils.serializer_helpers import (
ReturnDict
)
from events import models
class PrimarySpeakerSerializer(serializers.Serializer):
thumbnail_url = serializers.CharField()
name = serializers.CharField()
github_profile_url = serializers.CharField()
twitter_profile_url = serializers.CharField()
facebook_profile_url = serializers.CharField()
class TalkDetailSerializer(serializers.ModelSerializer):
speakers = serializers.SerializerMethodField()
def get_speakers(self, obj):
return [
ReturnDict(PrimarySpeakerSerializer(
data={'thumbnail_url': i.user.get_thumbnail_url(),
'name': i.user.speaker_name,
'github_profile_url': i.user.github_profile_url,
'twitter_profile_url': i.user.twitter_profile_url,
'facebook_profile_url': i.user.facebook_profile_url}).get_initial(),
serializer=PrimarySpeakerSerializer) for i in obj.speakers]
class Meta:
model = models.TalkProposal
fields = [
"title",
"category",
"language",
"python_level",
"recording_policy",
"abstract",
"detailed_description",
"slide_link",
"slido_embed_link",
# "sponsored"
"speakers"
]
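# A minimal usage sketch (hypothetical object): `proposal` is assumed to be a
# models.TalkProposal instance whose `speakers` attribute is iterable, as
# get_speakers() above expects.
#
#   serializer = TalkDetailSerializer(proposal)
#   payload = serializer.data  # dict with "title", ..., "speakers"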
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CommentV1Schema = void 0;
const pip_services3_commons_node_1 = require("pip-services3-commons-node");
const pip_services3_commons_node_2 = require("pip-services3-commons-node");
const pip_services3_commons_node_3 = require("pip-services3-commons-node");
const ReferenceV1Schema_1 = require("./ReferenceV1Schema");
const ContentV1Schema_1 = require("./ContentV1Schema");
const MemeV1Schema_1 = require("./MemeV1Schema");
class CommentV1Schema extends pip_services3_commons_node_1.ObjectSchema {
constructor() {
super();
this.withOptionalProperty('id', pip_services3_commons_node_3.TypeCode.String);
this.withOptionalProperty('refs', new pip_services3_commons_node_2.ArraySchema(new ReferenceV1Schema_1.ReferenceV1Schema()));
this.withOptionalProperty('parent_ids', new pip_services3_commons_node_2.ArraySchema(pip_services3_commons_node_3.TypeCode.String));
this.withOptionalProperty('children_counter', pip_services3_commons_node_3.TypeCode.Long);
this.withRequiredProperty('comment_state', pip_services3_commons_node_3.TypeCode.String);
this.withRequiredProperty('deleted', pip_services3_commons_node_3.TypeCode.Boolean);
this.withOptionalProperty('creator_id', pip_services3_commons_node_3.TypeCode.String);
this.withOptionalProperty('creator_name', pip_services3_commons_node_3.TypeCode.String);
this.withOptionalProperty('create_time', pip_services3_commons_node_3.TypeCode.DateTime);
this.withOptionalProperty('content', new pip_services3_commons_node_2.ArraySchema(new ContentV1Schema_1.ContentV1Schema()));
this.withOptionalProperty('memes', new pip_services3_commons_node_2.ArraySchema(new MemeV1Schema_1.MemeV1Schema()));
}
}
exports.CommentV1Schema = CommentV1Schema;
//# sourceMappingURL=CommentV1Schema.js.map
|
from typing import FrozenSet
from collections.abc import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def make_enum(menv, v_name: str, enum_size: int):
bool_type = msat_get_bool_type(menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(menv, b_vars[idx][0]),
msat_make_not(menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(menv, pred, it[0])
x_pred = msat_make_and(menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
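# The helpers above derive the comparison and implication operators missing from
# the MathSAT API out of its primitives: a < b is not(b <= a), a >= b is b <= a,
# a > b is not(a <= b), and a -> b is (not a) | b.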
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
r2s, x_r2s = decl_consts(menv, "r2s", int_type)
s2r, x_s2r = decl_consts(menv, "s2r", int_type)
delta, x_delta = decl_consts(menv, delta_name, real_type)
sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
for comp in [sender, receiver]:
for s, x_s in comp.symb2next.items():
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
init = msat_make_and(menv, receiver.init, sender.init)
trans = msat_make_and(menv, receiver.trans, sender.trans)
# invar delta >= 0
init = msat_make_and(menv, init,
msat_make_geq(menv, delta, zero))
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> (r2s' = r2s & s2r' = s2r)
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_equal(menv, x_r2s, r2s),
msat_make_equal(menv, x_s2r, s2r))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, lhs, rhs))
# (G F !s.stutter) -> G (s.wait_ack -> F s.send)
lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
enc.make_F(sender.send)))
ltl = msat_make_impl(menv, lhs, rhs)
return TermMap(curr2next), init, trans, ltl
class Module:
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
c_name = "{}_{}".format(self.name, v_name)
return make_enum(self.menv, c_name, enum_size)
class Sender(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
loc, x_loc = self._symb("l", bool_type)
evt, x_evt = self._symb("evt", bool_type)
msg_id, x_msg_id = self._symb("msg_id", int_type)
timeout, x_timeout = self._symb("timeout", real_type)
c, x_c = self._symb("c", real_type)
self.move = evt
self.stutter = msat_make_not(menv, evt)
self.x_move = x_evt
self.x_stutter = msat_make_not(menv, x_evt)
self.send = loc
self.wait_ack = msat_make_not(menv, loc)
self.x_send = x_loc
self.x_wait_ack = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
timeout: x_timeout, c: x_c}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
base_timeout = one
# send & c = 0 & msg_id = 0
self.init = msat_make_and(menv,
msat_make_and(menv, self.send,
msat_make_equal(menv, c,
zero)),
msat_make_equal(menv, msg_id, zero))
# invar: wait_ack -> c <= timeout
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.wait_ack,
msat_make_leq(menv, c, timeout)))
self.trans = msat_make_impl(menv, self.x_wait_ack,
msat_make_leq(menv, x_c, x_timeout))
# delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
# c' = c + delta & out_c' = out_c
lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_msg_id, msg_id)),
msat_make_and(menv,
msat_make_equal(menv, x_timeout, timeout),
msat_make_equal(menv, x_c,
msat_make_plus(menv, c, delta))))
rhs = msat_make_and(menv, rhs,
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, self.move,
msat_make_equal(menv, delta, zero))
# (send & send') ->
# (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_send))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id, msg_id),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (send & wait_ack') ->
# (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_wait_ack))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id,
msat_make_plus(menv, msg_id, one)),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# wait_ack -> (c' = 0 & out_c' = out_c &
#              (wait_ack' <-> (in_c != msg_id & c >= timeout)))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs_iff = msat_make_and(menv,
msat_make_not(menv,
msat_make_equal(menv, in_c,
msg_id)),
msat_make_geq(menv, c, timeout))
rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
rhs = msat_make_and(menv,
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c,
out_c)),
rhs_iff)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & wait_ack') -> (timeout' > timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack,
self.x_wait_ack))
rhs = msat_make_gt(menv, x_timeout, timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs = msat_make_iff(menv, self.x_send,
msat_make_and(menv,
msat_make_equal(menv, in_c, msg_id),
msat_make_lt(menv, c, timeout)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & send') -> (timeout' = base_timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack, self.x_send))
rhs = msat_make_equal(menv, x_timeout, base_timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
loc, x_loc = self._symb("l", bool_type)
self.wait = loc
self.work = msat_make_not(menv, loc)
self.x_wait = x_loc
self.x_work = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc}
zero = msat_make_number(menv, "0")
# wait
self.init = self.wait
# delta > 0 -> loc' = loc & out_c' = out_c
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_impl(menv, lhs, rhs)
disc_t = msat_make_equal(menv, delta, zero)
# wait -> (wait' <-> in_c = out_c)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_iff(menv, self.x_wait,
msat_make_equal(menv, in_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & wait') -> (out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_wait))
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & work') -> out_c' = in_c
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_work))
rhs = msat_make_equal(menv, x_out_c, in_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# work -> out_c' = out_c
lhs = msat_make_and(menv, disc_t, self.work)
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
delta = mgr.Symbol(delta_name, types.REAL)
r2s = mgr.Symbol("r2s", types.INT)
s2r = mgr.Symbol("r2s", types.INT)
s_l = mgr.Symbol("s_l", types.BOOL)
s_evt = mgr.Symbol("s_evt", types.BOOL)
s_msg_id = mgr.Symbol("s_msg_id", types.INT)
s_timeout = mgr.Symbol("s_timeout", types.REAL)
s_c = mgr.Symbol("s_c", types.REAL)
r_l = mgr.Symbol("r_l", types.BOOL)
symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
r_l])
x_delta = symb_to_next(mgr, delta)
x_r2s = symb_to_next(mgr, r2s)
x_s2r = symb_to_next(mgr, s2r)
x_s_l = symb_to_next(mgr, s_l)
x_s_evt = symb_to_next(mgr, s_evt)
x_s_msg_id = symb_to_next(mgr, s_msg_id)
x_s_timeout = symb_to_next(mgr, s_timeout)
x_s_c = symb_to_next(mgr, s_c)
x_r_l = symb_to_next(mgr, r_l)
res = []
r0 = mgr.Real(0)
r1 = mgr.Real(1)
i0 = mgr.Int(0)
i1 = mgr.Int(1)
loc0 = Location(env, mgr.Equals(s_msg_id, i0))
loc0.set_progress(0, mgr.Equals(x_s_msg_id, i0))
hint = Hint("h_s_msg_id0", env, frozenset([s_msg_id]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, mgr.Plus(s_c, r1)))
hint = Hint("h_s_c1", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_l)
loc0.set_progress(0, x_s_l)
hint = Hint("h_s_l0", env, frozenset([s_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, r1))
hint = Hint("h_delta1", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, i1))
hint = Hint("h_s2r1", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(0, x_r_l)
hint = Hint("h_r_l0", env, frozenset([r_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, mgr.Plus(s2r, i1)))
hint = Hint("h_s2r2", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(r2s, i0))
loc0.set_progress(0, mgr.Equals(x_r2s, i1))
hint = Hint("h_r2s1", env, frozenset([r2s]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(1, mgr.Not(x_r_l))
loc1 = Location(env, mgr.Not(r_l))
loc1.set_progress(0, x_r_l)
hint = Hint("h_r_l1", env, frozenset([r_l]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_timeout, r0))
loc0.set_progress(0, mgr.Equals(x_s_timeout, r0))
hint = Hint("h_s_timeout0", env, frozenset([s_timeout]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(r2s, i0))
loc0.set_progress(0, mgr.Equals(x_r2s, i0))
hint = Hint("h_r2s0", env, frozenset([r2s]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s_msg_id, i0))
loc0.set_progress(0, mgr.Equals(x_s_msg_id, mgr.Plus(s_msg_id, i1)))
hint = Hint("h_s_msg_id1", env, frozenset([s_msg_id]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, r0))
hint = Hint("h_delta0", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, r0))
hint = Hint("h_s_c0", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
return frozenset(res)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-08 13:33
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='UserPermissions',
fields=[
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='permissions', serialize=False, to=settings.AUTH_USER_MODEL)),
('review_changesets', models.BooleanField(default=False, verbose_name='can review changesets')),
('direct_edit', models.BooleanField(default=False, verbose_name='can activate direct editing')),
('control_panel', models.BooleanField(default=False, verbose_name='can access control panel')),
('grant_permissions', models.BooleanField(default=False, verbose_name='can grant control permissions')),
('manage_announcements', models.BooleanField(default=False, verbose_name='manage announcements')),
],
options={
'verbose_name': 'User Permissions',
'verbose_name_plural': 'User Permissions',
'default_related_name': 'permissions',
},
),
]
|
const web3 = require('./config/web3setup');
const contract = require('./config/contract');
const seller_account = web3.eth.accounts.privateKeyToAccount('0xC45BEF72C39AF1E0E3DAE6D7A066D5608EAD56F19F01CDA2C94F4EAEBFC5B576');
web3.eth.accounts.wallet.add(seller_account);
console.log("Seller_account_address: "+seller_account.address);
const seller_broker_account = web3.eth.accounts.privateKeyToAccount('0xA64EC7AD3F9D75FD91C3FE03C4D22113BAFEC56422C969F4F17DF9691BE347FB');
web3.eth.accounts.wallet.add(seller_broker_account);
console.log("Seller_broker_account_address: "+seller_broker_account.address);
const buyer_broker_account = web3.eth.accounts.privateKeyToAccount('0xD59AAC141116CF3D5884EADCD49C39C2EAC9C37292EA48F2BDA14248B399BCF3');
web3.eth.accounts.wallet.add(buyer_broker_account);
console.log("Buyer_broker_account_address: "+buyer_broker_account.address);
const buyer_account = web3.eth.accounts.privateKeyToAccount('0x27BC36782D5E534DCD4A5A05115872C1A5FB4B158C3F2503B5F450A60F75F3B5');
web3.eth.accounts.wallet.add(buyer_account);
console.log("Buyer_account_address: "+buyer_account.address);
const milliseconds_start = (new Date).getTime();
let milliseconds_end;
contract.methods.pay_escrow_by_buyer_broker().send({
from:buyer_broker_account.address,
gas:1000000,
value:10
}).on('transactionHash', function(hash){
console.log("Txn hash: "+hash);
}).on('receipt', function (receipt) {
console.log(receipt);
milliseconds_end = (new Date).getTime();
console.log("Difference: "+ (milliseconds_end - milliseconds_start));
}).on('error', console.error);
|