text
stringlengths 3
1.05M
|
|---|
# -*- coding: utf-8 -*-
# Package metadata, conventionally read by setup/packaging tooling.
__author__ = """Francesco DeSensi"""
__email__ = 'desensif@gmail.com'
__version__ = '0.1.0'
|
// The user slice starts out empty; data/error keys appear on demand.
const initialState = {};

/**
 * Redux reducer for the current user's state slice.
 *
 * Handled actions:
 *  - USER_STATE/SAVE_USER_INFO: stores action.data under `data`.
 *  - USER_STATE/REMOVE_CURRENT_USER_DATA: resets the slice to an empty object.
 *  - USER_STATE/SET_AUTH_ERROR: stores action.error under `error`.
 * Any other action returns the state unchanged.
 */
const userReducer = (state = initialState, action) => {
  if (action.type === 'USER_STATE/SAVE_USER_INFO') {
    return { ...state, data: action.data };
  }
  if (action.type === 'USER_STATE/REMOVE_CURRENT_USER_DATA') {
    return {};
  }
  if (action.type === 'USER_STATE/SET_AUTH_ERROR') {
    return { ...state, error: action.error };
  }
  return state;
};

export default userReducer;
|
#!/usr/bin/env node
// CLI: strip a UTF-8 byte order mark (BOM) from a file or from stdin and
// write the cleaned bytes to stdout.
import process from 'node:process';
import fs from 'node:fs';
import meow from 'meow';
import stripBomStream from 'strip-bom-stream';

// meow parses argv and renders this help text for --help/--version.
const cli = meow(`
Usage
$ strip-bom <file> > <new-file>
$ cat <file> | strip-bom > <new-file>
Example
$ strip-bom unicorn.txt > unicorn-without-bom.txt
`, {
	importMeta: import.meta,
});

// Optional positional argument: the input file.
const input = cli.input[0];

// No file argument AND stdin is a terminal means there is no input at all.
if (!input && process.stdin.isTTY) {
	console.error('Expected a filename');
	process.exit(1);
}

if (input) {
	// File mode: stream the file through the BOM-stripping transform.
	fs.createReadStream(input).pipe(stripBomStream()).pipe(process.stdout);
} else {
	// Pipe mode: strip the BOM from stdin instead.
	process.stdin.pipe(stripBomStream()).pipe(process.stdout);
}
|
//// [thisInSuperCall2.ts]
class Base {
constructor(a: any) {}
}
class Foo extends Base {
public x: number;
constructor() {
super(this); // no error
}
}
class Foo2 extends Base {
public x: number = 0;
constructor() {
super(this); // error
}
}
//// [thisInSuperCall2.js]
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Base = (function () {
function Base(a) {
}
return Base;
}());
var Foo = (function (_super) {
__extends(Foo, _super);
function Foo() {
_super.call(this, this); // no error
}
return Foo;
}(Base));
var Foo2 = (function (_super) {
__extends(Foo2, _super);
function Foo2() {
_super.call(this, this); // error
this.x = 0;
}
return Foo2;
}(Base));
|
export { default } from 'ember-flexberry-account/routes/login';
|
'use strict';
// Page bootstrap: wire the S3 addon settings widget to this node.
var $ = require('jquery');
// NOTE(review): AddonHelper appears unused in this file — confirm before removing.
var AddonHelper = require('js/addonHelper');
var S3NodeConfig = require('./s3NodeConfig').S3NodeConfig;

// API endpoint for this node's S3 settings.
// NOTE(review): relies on window.contextVars being populated by the page template.
var url = window.contextVars.node.urls.api + 's3/settings/';

// Instantiate the settings view model bound to the #s3Scope element.
new S3NodeConfig('#s3Scope', url);
|
"""
ViewSets are essentially just a type of class based view, that doesn't provide
any method handlers, such as `get()`, `post()`, etc... but instead has actions,
such as `list()`, `retrieve()`, `create()`, etc...
Actions are only bound to methods at the point of instantiating the views.
user_list = UserViewSet.as_view({'get': 'list'})
user_detail = UserViewSet.as_view({'get': 'retrieve'})
Typically, rather than instantiate views from viewsets directly, you'll
register the viewset with a router and let the URL conf be determined
automatically.
router = DefaultRouter()
router.register(r'users', UserViewSet, 'user')
urlpatterns = router.urls
"""
from __future__ import unicode_literals
from functools import update_wrapper
from django.utils.decorators import classonlymethod
from rest_framework import views, generics, mixins
class ViewSetMixin(object):
    """
    This is the magic.

    Overrides `.as_view()` so that it takes an `actions` keyword that performs
    the binding of HTTP methods to actions on the Resource.

    For example, to create a concrete view binding the 'GET' and 'POST' methods
    to the 'list' and 'create' actions...

        view = MyViewSet.as_view({'get': 'list', 'post': 'create'})
    """

    @classonlymethod
    def as_view(cls, actions=None, **initkwargs):
        """
        Because of the way class based views create a closure around the
        instantiated view, we need to totally reimplement `.as_view`,
        and slightly modify the view function that is created and returned.

        :param actions: dict mapping lowercase HTTP method names to action
            method names, e.g. ``{'get': 'list'}``.
        :param initkwargs: attributes set on every view instance created
            for a request; validated below.
        """
        # The suffix initkwarg is reserved for identifing the viewset type
        # eg. 'List' or 'Instance'.
        cls.suffix = None

        # sanitize keyword arguments: HTTP method names must go through
        # `actions`, and unknown attributes are almost certainly typos
        for key in initkwargs:
            if key in cls.http_method_names:
                raise TypeError("You tried to pass in the %s method name as a "
                                "keyword argument to %s(). Don't do that."
                                % (key, cls.__name__))
            if not hasattr(cls, key):
                raise TypeError("%s() received an invalid keyword %r" % (
                    cls.__name__, key))

        def view(request, *args, **kwargs):
            # Fresh instance per request, mirroring Django's as_view().
            self = cls(**initkwargs)
            # We also store the mapping of request methods to actions,
            # so that we can later set the action attribute.
            # eg. `self.action = 'list'` on an incoming GET request.
            self.action_map = actions

            # Bind methods to actions
            # This is the bit that's different to a standard view
            for method, action in actions.items():
                handler = getattr(self, action)
                setattr(self, method, handler)

            # Patch this in as it's otherwise only present from 1.5 onwards
            if hasattr(self, 'get') and not hasattr(self, 'head'):
                self.head = self.get

            # And continue as usual
            return self.dispatch(request, *args, **kwargs)

        # take name and docstring from class
        update_wrapper(view, cls, updated=())

        # and possible attributes set by decorators
        # like csrf_exempt from dispatch
        update_wrapper(view, cls.dispatch, assigned=())

        # We need to set these on the view function, so that breadcrumb
        # generation can pick out these bits of information from a
        # resolved URL.
        view.cls = cls
        view.suffix = initkwargs.get('suffix', None)
        return view

    def initialize_request(self, request, *args, **kargs):
        """
        Set the `.action` attribute on the view,
        depending on the request method.
        """
        request = super(ViewSetMixin, self).initialize_request(request, *args, **kargs)
        # Map e.g. an incoming GET to the bound action name ('list', ...).
        self.action = self.action_map.get(request.method.lower())
        return request
class ViewSet(ViewSetMixin, views.APIView):
    """
    The base ViewSet class does not provide any actions by default.

    Subclasses define action methods (e.g. `list`, `create`) and bind them
    to HTTP methods via `.as_view({...})` or a router.
    """
    pass


class GenericViewSet(ViewSetMixin, generics.GenericAPIView):
    """
    The GenericViewSet class does not provide any actions by default,
    but does include the base set of generic view behavior, such as
    the `get_object` and `get_queryset` methods.
    """
    pass


class ReadOnlyModelViewSet(mixins.RetrieveModelMixin,
                           mixins.ListModelMixin,
                           GenericViewSet):
    """
    A viewset that provides default `list()` and `retrieve()` actions.
    """
    pass


class ModelViewSet(mixins.CreateModelMixin,
                   mixins.RetrieveModelMixin,
                   mixins.UpdateModelMixin,
                   mixins.DestroyModelMixin,
                   mixins.ListModelMixin,
                   GenericViewSet):
    """
    A viewset that provides default `create()`, `retrieve()`, `update()`,
    `partial_update()`, `destroy()` and `list()` actions.
    """
    pass
|
# coding: utf-8
"""
Intersight REST API
This is Intersight REST API
OpenAPI spec version: 1.0.9-262
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class SnmpUserRef(object):
    """
    SnmpUserRef - a model defined in Swagger (a reference to an SNMP user
    managed object, carrying its Moid and concrete object type).

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """
    swagger_types = {
        'moid': 'str',
        'object_type': 'str'
    }

    attribute_map = {
        'moid': 'Moid',
        'object_type': 'ObjectType'
    }

    def __init__(self, moid=None, object_type=None):
        """
        SnmpUserRef - a model defined in Swagger.

        :param moid: (str, optional) Moid of the referenced object.
        :param object_type: (str, optional) concrete type of the referenced object.
        """
        self._moid = None
        self._object_type = None
        # Only run the property setters for explicitly supplied values.
        if moid is not None:
            self.moid = moid
        if object_type is not None:
            self.object_type = object_type

    @property
    def moid(self):
        """
        Gets the moid of this SnmpUserRef.

        :return: The moid of this SnmpUserRef.
        :rtype: str
        """
        return self._moid

    @moid.setter
    def moid(self, moid):
        """
        Sets the moid of this SnmpUserRef.

        :param moid: The moid of this SnmpUserRef.
        :type: str
        """
        self._moid = moid

    @property
    def object_type(self):
        """
        Gets the object_type of this SnmpUserRef.

        :return: The object_type of this SnmpUserRef.
        :rtype: str
        """
        return self._object_type

    @object_type.setter
    def object_type(self, object_type):
        """
        Sets the object_type of this SnmpUserRef.

        :param object_type: The object_type of this SnmpUserRef.
        :type: str
        """
        self._object_type = object_type

    def to_dict(self):
        """
        Returns the model properties as a dict, recursively converting
        nested models (anything exposing `to_dict`) as well.
        """
        result = {}
        # dict.items() works on both Python 2 and 3; this drops the
        # six.iteritems dependency the generated code used.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    x.to_dict() if hasattr(x, "to_dict") else x
                    for x in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(
                    (k, v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()
                )
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model.
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`.
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal.
        """
        if not isinstance(other, SnmpUserRef):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal.
        """
        return not self == other
|
//==============================================================================
// bl_core.c
// multi model mesh demo based mesh core
//
// Created by Hugo Pristauz on 2022-Jan-02
// Copyright © 2022 Bluccino. All rights reserved.
//==============================================================================
// mcore derived from:
// Bluetooth: Mesh Generic OnOff, Generic Level, Lighting & Vendor Models
// Copyright (c) 2018 Vikrant More
// SPDX-License-Identifier: Apache-2.0
//==============================================================================
#include <drivers/gpio.h>
#include "app_gpio.h"
#include "ble_mesh.h"
#include "device_composition.h"
#include "no_transition_work_handler.h"
#include "state_binding.h"
#include "storage.h"
#include "transition.h"
//==============================================================================
// CORE level logging shorthands
//==============================================================================
#define LOG LOG_CORE
#define LOGO(lvl,col,o,val) LOGO_CORE(lvl,col"mcore:",o,val)
#define LOG0(lvl,col,o,val) LOGO_CORE(lvl,col,o,val)
#define ERR 1,BL_R
static BL_fct notify = NULL;
//==============================================================================
// let's go ...
//==============================================================================
#if defined(CONFIG_MCUMGR)
#include <mgmt/mcumgr/smp_bt.h>
#include "smp_svr.h"
#endif
static bool reset;
// Seed the CTL state with compile-time defaults. Runs once at startup,
// before persisted settings are loaded (see light_default_status_init()).
static void light_default_var_init(void)
{
	ctl->tt = 0x00;                       // default transition time
	ctl->onpowerup = STATE_DEFAULT;       // power-up behavior

	// lightness: full range, last/default/target at maximum
	ctl->light->range_min = LIGHTNESS_MIN;
	ctl->light->range_max = LIGHTNESS_MAX;
	ctl->light->last = LIGHTNESS_MAX;
	ctl->light->def = LIGHTNESS_MAX;
	ctl->light->target = ctl->light->def;

	// color temperature: full range, default/target at maximum
	ctl->temp->range_min = TEMP_MIN;
	ctl->temp->range_max = TEMP_MAX;
	ctl->temp->def = TEMP_MAX;
	ctl->temp->target = ctl->temp->def;

	// delta UV default/target
	ctl->duv->def = DELTA_UV_DEF;
	ctl->duv->target = ctl->duv->def;
}
/* This function should only get call after execution of settings_load() */
static void light_default_status_init(void)
{
	/* Retrieve Range of Lightness (packed as max<<16 | min) */
	if (ctl->light->range) {
		ctl->light->range_max = (uint16_t) (ctl->light->range >> 16);
		ctl->light->range_min = (uint16_t) ctl->light->range;
	}

	/* Retrieve Range of Temperature (same max<<16 | min packing) */
	if (ctl->temp->range) {
		ctl->temp->range_max = (uint16_t) (ctl->temp->range >> 16);
		ctl->temp->range_min = (uint16_t) ctl->temp->range;
	}

	/* clamp persisted values into the (possibly restored) ranges */
	ctl->light->last = constrain_lightness(ctl->light->last);
	ctl->light->def = constrain_lightness(ctl->light->def);
	ctl->light->target = constrain_lightness(ctl->light->target);

	ctl->temp->def = constrain_temperature(ctl->temp->def);
	ctl->temp->target = constrain_temperature(ctl->temp->target);

	ctl->temp->current = ctl->temp->def;
	ctl->duv->current = ctl->duv->def;

	/* apply the configured power-up behavior to the lightness state */
	switch (ctl->onpowerup) {
	case STATE_OFF:
		ctl->light->current = 0U;
		break;
	case STATE_DEFAULT:
		/* a default of 0 means "use the last known level" */
		if (ctl->light->def == 0U) {
			ctl->light->current = ctl->light->last;
		} else {
			ctl->light->current = ctl->light->def;
		}
		break;
	case STATE_RESTORE:
		/* resume where the last (possibly interrupted) transition aimed */
		ctl->light->current = ctl->light->target;
		ctl->temp->current = ctl->temp->target;
		ctl->duv->current = ctl->duv->target;
		break;
	}

	/* no transition pending at boot: targets track current values */
	ctl->light->target = ctl->light->current;
	ctl->temp->target = ctl->temp->current;
	ctl->duv->target = ctl->duv->current;
}
// Drive the vendor-model LED (led1) from the vendor on/off state.
// Compiled out on one-LED boards and once MIGRATION_STEP4 is active.
void update_vnd_led_gpio(void)
{
#ifndef ONE_LED_ONE_BUTTON_BOARD
#if !MIGRATION_STEP4
	gpio_pin_set(led_device[1], DT_GPIO_PIN(DT_ALIAS(led1), gpios),
		     vnd_user_data.current == STATE_ON);
#endif
#endif
}
// Mirror the current CTL state onto the status LEDs: led0 follows the
// on/off state, led2/led3 indicate whether lightness ("power") and
// temperature position ("color") are below 50 %.
void update_led_gpio(void)
{
	uint8_t power, color;
	int temp_span = ctl->temp->range_max - ctl->temp->range_min;

	/* lightness (0..65535) scaled to a 0..100 percentage */
	power = 100 * ((float) ctl->light->current / 65535);

	/* temperature position within [range_min, range_max] as a percentage;
	 * guard the degenerate range_max == range_min case, which previously
	 * divided by zero (inf/NaN converted to uint8_t is undefined) */
	if (temp_span != 0) {
		color = 100 * ((float) (ctl->temp->current - ctl->temp->range_min) /
			       temp_span);
	} else {
		color = 0;
	}

#if MIGRATION_STEP2
	LOG(3,BL_G "power-> %d, color-> %d", power, color);
#else
	printk("power-> %d, color-> %d\n", power, color);
#endif

#if !MIGRATION_STEP4
	/* led0 mirrors the (non-zero = on) light state */
	gpio_pin_set(led_device[0], DT_GPIO_PIN(DT_ALIAS(led0), gpios),
		     ctl->light->current);
#endif

#ifndef ONE_LED_ONE_BUTTON_BOARD
#if !MIGRATION_STEP4
	gpio_pin_set(led_device[2], DT_GPIO_PIN(DT_ALIAS(led2), gpios),
		     power < 50);
#endif
#if !MIGRATION_STEP4
	gpio_pin_set(led_device[3], DT_GPIO_PIN(DT_ALIAS(led3), gpios),
		     color < 50);
#endif
#endif
}
// Push the current light state to the LEDs and, when no transition is in
// progress, hand off to the "no transition" work handler.
void update_light_state(void)
{
	update_led_gpio();

	// NOTE(review): `reset` starts out false, so this branch is always
	// taken on the first call regardless of the transition counter —
	// confirm that is the intended bootstrap behavior.
	if (ctl->transition->counter == 0 || reset == false) {
		reset = true;
		k_work_submit(&no_transition_work);
	}
}
// Multi-reset unprovisioning: if the node is power-cycled repeatedly in
// short succession (persisted counter reaches 4), wipe the mesh
// provisioning data; otherwise just bump the counter. The counter is
// zeroed by reset_counter_timer_handler() once the node stays up.
static void short_time_multireset_bt_mesh_unprovisioning(void)
{
	if (reset_counter >= 4U)
	{
		reset_counter = 0U;
#if MIGRATION_STEP2
		LOG(1,BL_M "BT Mesh reset");
#else
		printk("BT Mesh reset\n");
#endif
		bt_mesh_reset();
	}
	else
	{
#if MIGRATION_STEP2
		LOG(3,BL_M "reset counter -> %d", reset_counter);
#else
		printk("Reset Counter -> %d\n", reset_counter);
#endif
		reset_counter++;
	}

	// persist the (possibly cleared) counter across power cycles
	save_on_flash(RESET_COUNTER);
}
// Timer callback: the node survived the multi-reset window, so clear the
// persisted reset counter.
static void reset_counter_timer_handler(struct k_timer *dummy)
{
	reset_counter = 0U;
	save_on_flash(RESET_COUNTER);
#if MIGRATION_STEP2
	LOG(3,BL_M "reset counter set to zero");
#else
	printk("Reset Counter set to Zero\n");
#endif
}

// one-shot timer, started from init() with a 7 s delay
K_TIMER_DEFINE(reset_counter_timer, reset_counter_timer_handler, NULL);
//==============================================================================
// init (fomer main())
//==============================================================================
#if MIGRATION_STEP1
// [SYS:INIT <cb>] handler: full system bring-up (GPIO, settings,
// Bluetooth, mesh). Formerly main(); the pre-migration build still
// compiles it as main() via the #else branch below.
static int init(BL_ob *o, int val)
{
	LOGO(5,BL_B,o,val);                // log trace
	notify = o->data;                  // store notify callback
#else
void main(void)
{
#endif
	int err;

	light_default_var_init();
	app_gpio_init();

#if defined(CONFIG_MCUMGR)
	smp_svr_init();
#endif

#if MIGRATION_STEP2
	LOG(2,BL_B "initializing...");
#else
	printk("Initializing...\n");
#endif

	ps_settings_init();

	/* Initialize the Bluetooth Subsystem */
	err = bt_enable(NULL);
	if (err)
	{
#if MIGRATION_STEP2
		LOG(ERR "Bluetooth init failed (err %d)", err);
#else
		printk("Bluetooth init failed (err %d)\n", err);
#endif
		return err;
	}

	bt_ready();

	// must run only after settings_load() (see light_default_status_init)
	light_default_status_init();
	update_light_state();

	// multi-reset detection; the counter is cleared by the timer below
	// if the node stays up for 7 s
	short_time_multireset_bt_mesh_unprovisioning();
	k_timer_start(&reset_counter_timer, K_MSEC(7000), K_NO_WAIT);

#if defined(CONFIG_MCUMGR)
	/* Initialize the Bluetooth mcumgr transport. */
	smp_bt_register();
	k_timer_start(&smp_svr_timer, K_NO_WAIT, K_MSEC(1000));
#endif

#if MIGRATION_STEP1
	return 0;                          // OK
#endif
}
//==============================================================================
// THE core interface
// - [MESH:PRV val] and [MESH:ATT val] are posted from ble_mesh.c to here
//==============================================================================

#if MIGRATION_STEP1
// Dispatch an incoming Bluccino message to the proper handler.
// Returns the handler's result, or -1 for unsupported messages.
int bl_core(BL_ob *o, int val)
{
	switch (BL_ID(o->cl,o->op))
	{
		case BL_ID(_SYS,INIT_):          // [SYS:INIT <cb>]
			return init(o,val);            // forward to init()

		case BL_ID(_SYS,TICK_):          // [SYS:TICK @0,cnt]
		case BL_ID(_SYS,TOCK_):          // [SYS:TOCK @0,cnt]
			return 0;                      // OK - nothing to tick/tock

		case BL_ID(_MESH,PRV_):          // [MESH:PRV val] (provision)
		case BL_ID(_MESH,ATT_):          // [MESH:ATT val] (attention)
		case BL_ID(_BUTTON,PRESS_):      // [BUTTON:PRESS @id] (button pressed)
			return bl_out(o,val,notify);   // output message to subscriber

		case BL_ID(_LED,SET_):           // [LED:SET @id,onoff]
		case BL_ID(_LED,TOGGLE_):        // [LED:TOGGLE @id]
			return gpio(o,val);            // delegate to GPIO submodule

		default:
			return -1;                     // bad input
	}
}
#endif // MIGRATION_STEP1
|
/*!
* OpenUI5
* (c) Copyright 2009-2021 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides class sap.ui.core.util.ExportType
sap.ui.define(['sap/ui/base/ManagedObject'],
function(ManagedObject) {
'use strict';
/**
 * Constructor for a new ExportType.
 *
 * @param {string} [sId] id for the new control, generated automatically if no id is given
 * @param {object} [mSettings] initial settings for the new control
 *
 * @class
 * Base export type. Subclasses can be used for {@link sap.ui.core.util.Export Export}.
 * @extends sap.ui.base.ManagedObject
 *
 * @author SAP SE
 * @version 1.95.0
 * @since 1.22.0
 *
 * @public
 * @deprecated Since version 1.73
 * @alias sap.ui.core.util.ExportType
 */
var ExportType = ManagedObject.extend('sap.ui.core.util.ExportType', {
	metadata: {
		library: "sap.ui.core",
		properties: {

			/**
			 * File extension.
			 */
			fileExtension: 'string',

			/**
			 * MIME type.
			 */
			mimeType: 'string',

			/**
			 * Charset.
			 */
			charset: 'string',

			/**
			 * Whether to prepend an unicode byte order mark when saving as a file
			 * (only applies for utf-8 charset). Deliberately left undefined by
			 * default so concrete export types can decide.
			 */
			byteOrderMark: { type: 'boolean', defaultValue: undefined }
		}
	}
});
/**
 * Lifecycle hook: initializes the back-reference to the owning Export.
 * @private
 */
ExportType.prototype.init = function() {
	// set while _generate() runs, cleared again afterwards
	this._oExport = null;
};
/**
 * Handles the generation process of the file.<br>
 * Stores the Export instance for the duration of generate() so the
 * getColumnCount/getRowCount/generator helpers can access it.
 *
 * @param {sap.ui.core.util.Export} oExport export instance
 * @return {string} content
 *
 * @protected
 */
ExportType.prototype._generate = function(oExport) {
	this._oExport = oExport;
	try {
		return this.generate();
	} finally {
		// always drop the reference — the previous implementation left a
		// stale _oExport behind whenever generate() threw
		this._oExport = null;
	}
};
/**
 * Generates the file content.<br>
 * Should be implemented by the individual types!
 *
 * @return {string} content
 *
 * @protected
 */
ExportType.prototype.generate = function() {
	// base implementation: no content
	return '';
};
/**
 * Returns the number of columns of the Export currently being generated,
 * or 0 when called outside a generation run.
 *
 * @return {int} count
 *
 * @protected
 */
ExportType.prototype.getColumnCount = function() {
	var oExport = this._oExport;
	return oExport ? oExport.getColumns().length : 0;
};
/**
 * Returns the number of rows of the Export currently being generated,
 * or 0 when there is no export or no "rows" binding.
 *
 * @return {int} count
 *
 * @protected
 */
ExportType.prototype.getRowCount = function() {
	var oExport = this._oExport;
	var oRowBinding = oExport && oExport.getBinding("rows");
	return oRowBinding ? oRowBinding.getLength() : 0;
};
/**
 * Creates a column "generator" (inspired by ES6 Generators).
 * Each next() call yields <code>{ index, name }</code> for the next
 * export column, then <code>{ value: undefined, done: true }</code>.
 *
 * @return {Generator} generator
 * @protected
 */
ExportType.prototype.columnGenerator = function() {
	var aColumns = this._oExport.getColumns();
	var iColumns = aColumns.length;
	var iNext = 0;
	return {
		next: function() {
			if (iNext >= iColumns) {
				return {
					value: undefined,
					done: true
				};
			}
			var iIndex = iNext++;
			return {
				value: {
					index: iIndex,
					name: aColumns[iIndex].getName()
				},
				done: false
			};
		}
	};
};
/**
 * Creates a cell "generator" (inspired by ES6 Generators).
 * Each next() call yields <code>{ index, content, customData }</code> for
 * the next cell of the current row template, where customData is the
 * cell's customData aggregation flattened into a key-value map.
 *
 * @return {Generator} generator
 * @protected
 */
ExportType.prototype.cellGenerator = function() {
	var oRowTemplate = this._oExport.getAggregation('_template');
	var aCells = oRowTemplate.getCells();
	var iCells = aCells.length;
	var iNext = 0;
	return {
		next: function() {
			if (iNext >= iCells) {
				return {
					value: undefined,
					done: true
				};
			}
			var iIndex = iNext++;
			var oCell = aCells[iIndex];
			// convert the customData object array to a key-value map
			var mCustomData = {};
			oCell.getCustomData().forEach(function(oCustomData) {
				mCustomData[oCustomData.getKey()] = oCustomData.getValue();
			});
			return {
				value: {
					index: iIndex,
					content: oCell.getContent(),
					customData: mCustomData
				},
				done: false
			};
		}
	};
};
/**
 * Creates a row "generator" (inspired by ES6 Generators).
 * Each next() call binds the row template to the next binding context and
 * yields <code>{ index, cells }</code>, where cells is a fresh cell
 * generator for that row.
 *
 * @return {Generator} generator
 * @protected
 */
ExportType.prototype.rowGenerator = function() {
	var oType = this;
	var oExport = this._oExport;
	var oBinding = oExport.getBinding("rows");
	var mBindingInfos = oExport.getBindingInfo("rows");
	var oRowTemplate = oExport.getAggregation('_template');
	var aContexts = oBinding.getContexts(0, oBinding.getLength());
	var iContexts = aContexts.length;
	var iNext = 0;
	return {
		next: function() {
			if (iNext >= iContexts) {
				return {
					value: undefined,
					done: true
				};
			}
			var iIndex = iNext++;
			// side effect: rebind the template so the subsequent
			// cellGenerator() call sees this row's data
			oRowTemplate.setBindingContext(aContexts[iIndex], mBindingInfos.model);
			return {
				value: {
					index: iIndex,
					cells: oType.cellGenerator()
				},
				done: false
			};
		}
	};
};
return ExportType;
});
|
var expect = require("chai").expect;
const WxrdBook = require("../app/wxrd-book");
describe("Wxrd Book", function() {
  const myWxrdBook = new WxrdBook();
  var testWxrd;
  // FIX: previously assigned in beforeEach without a declaration, leaking
  // an implicit global (and a ReferenceError under strict mode).
  var wxrdLst;

  beforeEach(function(){
    myWxrdBook.clearAllWxrds();
    wxrdLst = myWxrdBook.getWxrdsByAlias("Test Wxrd");
    testWxrd = myWxrdBook.getWxrdsByAlias("Test Wxrd")[0];
  });

  it("should have a UUID", function(){
    expect(testWxrd).to.have.property("uuid");
  });

  it("should have an ordered list of aliases", function(){
    expect(testWxrd).to.have.property("getAllAliases");
  });

  it("should have a metadata property for 'alias'", function(){
    //TODO fill in test (want to change it from current implementation, alias should just be another bit of metadata)
  });

  it("should allow changing the default alias to something new", function(){
    const wxrdWithDefaultAlias = myWxrdBook.getWxrdsByAlias("A Default Alias")[0];
    expect(wxrdWithDefaultAlias.getDefaultAliasValue()).to.equal("A Default Alias");
    const wxrdWithUpdatedAlias = myWxrdBook.updateAliasForUuid(wxrdWithDefaultAlias.uuid, "A New Alias");
    // updating the alias must not change the wxrd's identity
    const defaultUuid = wxrdWithDefaultAlias.uuid;
    const updatedUuid = wxrdWithUpdatedAlias.uuid;
    expect(defaultUuid).to.equal(updatedUuid);
    // both the old and new alias should resolve to the same wxrd
    const newRetrieval = myWxrdBook.getWxrdsByAlias("A New Alias")[0];
    const defaultRetrieval = myWxrdBook.getWxrdsByAlias("A Default Alias")[0];
    expect(newRetrieval).to.eql(defaultRetrieval);
  });

  it("should have optional metadata", function(){
    expect(testWxrd).to.have.property("metaData");
  });

  it("should allow setting and retrieval of metadata by key", function() {
    const wxrd = myWxrdBook.getWxrdsByAlias("test")[0];
    wxrd.setMetaDataByKey("test key", "test value");
    myWxrdBook.mergeByUuid(wxrd);
    const testVal = myWxrdBook.getWxrdByUuid(wxrd.uuid).getMetaDataByKey("test key");
    expect(testVal).to.equal("test value");
  });

  it("should get most recently updated value of for key, by timestamp", function(){
    //TODO Fill In Test
    //see metadata update comments for structure desired
    const testKey = "test key";
    const firstEntry = "test value (shouldn't show)";
    const secondEntry = "test value second (should show)";
    testWxrd.setMetaDataByKey(testKey, firstEntry);
    var currentEntryValue = testWxrd.getMetaDataByKey(testKey);
    expect(currentEntryValue).to.equal(firstEntry);
    testWxrd.setMetaDataByKey(testKey, secondEntry);
    currentEntryValue = testWxrd.getMetaDataByKey(testKey);
    expect(currentEntryValue).to.equal(secondEntry);
  });

  it("should allow update of metadata by key, storing updated at timestamp", function() {
    //TODO Fill In Test
    //metadata should look like
    // {
    //   metadata: {
    //     key: "key",
    //     values: [{
    //       value: "value at timestamp",
    //       updatedAt: "timestamp"
    //     },
    //     {
    //       value: "value at timestamp",
    //       updatedAt: "timestamp"
    //     }]
    //   }
    // }
  });
});
|
'use strict';

// Call this function when the page loads (the "ready" event)
$(document).ready(function() {
	initializePage();
})

/*
 * Function that is called when the document is ready.
 * Wires up all page-level event listeners.
 */
function initializePage() {
	// demo hook: clicking #testjs rewrites the jumbotron heading and
	// puts the button into a busy/active state
	$("#testjs").click(function(e) {
		$('.jumbotron h1').text("Javascript has taken control");
		$("#testjs").text("Please wait...");
		$("#testjs").addClass("active");
	});

	// each project thumbnail toggles its description (see projectClick)
	$("a.thumbnail").click(projectClick);

	// Add any additional listeners here
	// example: $("#div-id").click(functionToCall);
}
/**
 * Click handler for project thumbnails: highlights the thumbnail and
 * toggles the project's description — appended on first click, faded
 * out on subsequent clicks.
 * @param {Event} e jQuery click event
 */
function projectClick(e) {
	e.preventDefault();
	// highlight the clicked thumbnail
	$(this).css("background-color", "#7fff00");
	var containingProject = $(this).closest(".project");
	var description = $(containingProject).find(".project-description");
	// use strict comparison (length is always a number)
	if (description.length === 0) {
		// first click: no description yet, append a placeholder
		$(containingProject).append("<div class='project-description'><p>Description of the project.</p></div>");
	} else {
		// later clicks: fade the existing description out
		$(description).fadeOut();
	}
}
|
from django.db import models
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailadmin.edit_handlers import FieldPanel, \
InlinePanel, StreamFieldPanel
from wagtail.wagtailcore.fields import StreamField
from wagtail_embed_videos.edit_handlers import EmbedVideoChooserPanel
from wagtail.wagtailembeds.blocks import EmbedBlock
from wagtail.wagtailcore.blocks import StreamBlock, RawHTMLBlock
from modelcluster.fields import ParentalKey
# from utils.models import RelatedLink
from articles.models import ArticlePage
# custom streamblock for easy reddit & twitter embeds
class RedTwitBlock(StreamBlock):
    # child blocks selectable in the stream editor
    twitter = EmbedBlock(icon="site")   # URL-based embed
    reddit = RawHTMLBlock(icon="code")  # raw HTML snippet
# inline-able model to add site articles to the home page
class HomePagePost(Orderable):
    # NOTE(review): no on_delete given (pre-Django-2 style, defaults to
    # CASCADE) — confirm the intended behavior when upgrading Django.
    link_page = models.ForeignKey(
        'articles.ArticlePage',
        null=True,
        blank=True,
        related_name='+'
    )
    # reverse accessor used by HomePage.posts and the inline panel below
    page = ParentalKey('home.HomePage', related_name='home_posts')
# easy video embeds using wagtail-embed-videos package
class EmbedVideo(models.Model):
    # abstract base: an embedded video plus optional display text
    title = models.CharField(max_length=100, blank=True, null=True)
    description = models.CharField(max_length=100, blank=True, null=True)
    video = models.ForeignKey(
        'wagtail_embed_videos.EmbedVideo',
        verbose_name="Video",
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )

    # admin edit panels inherited by concrete subclasses
    panels = [FieldPanel('title'), FieldPanel('description'), EmbedVideoChooserPanel('video')]

    class Meta:
        abstract = True
# inline-able model to add videos to the featured sidebar on the home page
class HomePageVideo(EmbedVideo, Orderable):
    # reverse accessor used by the 'featured_video' inline panel
    page = ParentalKey('home.HomePage', related_name='featured_video')
class HomePage(Page):
    # when True the front page shows the most recent articles instead of
    # (in addition to) the manually curated inline posts
    auto_post = models.BooleanField(default=True)
    # stream of reddit/twitter embeds
    embed = StreamField(RedTwitBlock(), null=True, blank=True)
    # auto_sidebar not viable..for now..probably
    # auto_sidebar = BooleanField(default=True, label="Auto sidebar posts")

    @property
    def auto_posts(self):
        # number of posts to display on homepage
        count = 3
        # if auto_post is set on the homepage, front page posts will be 3 most recent articles from db
        if self.auto_post:
            auto_posts = ArticlePage.objects.live().order_by('-date')[:count]
            return auto_posts
        # else front page posts will be those selected in the admin (inline panel);
        # note this branch implicitly returns None
        else:
            pass

    @property
    def posts(self):
        # manually curated posts from the 'home_posts' inline
        posts = self.home_posts.all()
        return posts
        # num_posts = len(posts)
        # if fewer posts are selected than count, pad posts with some recent articles
        # if num_posts < count:
        #     count = count - num_posts
        #     more_posts = ArticlePage.objects.live().order_by('-date')[:count]
        #     # len(selected posts + recent posts) = count
        #     posts = posts + more_posts
        #     return posts
        # else:

    """
    @property
    def mini_posts(self):
        # number of sidebar miniposts to display
        count = 4
        # see above note about auto_sidebar
        # if auto_sidebar is set on the homepage, front side bar posts will be a random selection of site content + embeds
        if self.auto_sidebar:
            posts =
    """

    # passing additional content to template via override of get_context method
    def get_context(self, request):
        # Grab the original context dict
        context = super(HomePage, self).get_context(request)
        # Update the context w/ a blogs key:value
        context['posts'] = self.posts
        if self.auto_post:
            context['auto_posts'] = self.auto_posts
        return context

    class Meta:
        verbose_name = "Homepage"
# admin edit panels, attached after the class definition (Wagtail idiom)
HomePage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('auto_post'),
    StreamFieldPanel('embed'),
    InlinePanel('featured_video', label="Featured Videos"),
    InlinePanel('home_posts', label="Homepage Posts")
]
|
#This input file is supposed to perform computations in a purely periodic domain
#To look for phases and stuff, that sort of thing
import sys
import os
resultDir = os.environ.get('RESULTS')
if resultDir == None :
print "WARNING! $RESULTS not set! Attempt to write results will fail!\n"
# Expecting input avConc, rateConstFull, sysSize, numSteps, equilSteps, fileCode
from KMCLib import *
from KMCLib.Backend import Backend
import numpy
avConc = float(sys.argv[1])
rateConstFull = float(sys.argv[2])
sysSize = int(sys.argv[3])
numSteps = int(sys.argv[4])
equilSteps = int(sys.argv[5])
fileInfo = sys.argv[6]
resultsPlace = resultDir+"/"+fileInfo+"/"
if not os.path.exists(resultsPlace):
os.makedirs(resultsPlace)
with open(resultsPlace+'/settings', 'w') as f:
f.write('AverageConcentration = ' + str(avConc) +'\n')
f.write('FullRate = ' + str(rateConstFull) +'\n')
f.write('SysSize = ' + str(sysSize) +'\n')
f.write('NumSteps = '+str(numSteps) +'\n')
f.write('EquilSteps = '+str(equilSteps) +"\n")
"""I've put this in the file to make command line input easier"""
# Load the configuration and interactions.
# We're in 1d, so everything's a bit trivial
cell_vectors = [[1.0,0.0,0.0],
[0.0,1.0,0.0],
[0.0,0.0,1.0]]
# Only bothering with one set
basis_points = [[0.0, 0.0, 0.0]]
unit_cell = KMCUnitCell(cell_vectors=cell_vectors,
basis_points=basis_points)
# Define the lattice.
xRep = 1
yRep = 1
zRep = sysSize
numPoints = xRep*zRep*yRep
lattice = KMCLattice(unit_cell=unit_cell,
repetitions=(xRep,yRep,zRep),
periodic=(False, False, True))
# Generate the initial types; note that there's no top or bottom atoms in this simulation.
types = ["V"]*numPoints
#Stick
width = int(zRep*avConc)
start = sysSize/2 - width/2
end = start + width
for i in range(start, end):
types[i] = "O"
"""
# find a site which is not yet occupied by a "O" type.
pos = int(numpy.random.rand()*zRep)
while (types[pos] != "V"):
pos = int(numpy.random.rand()*zRep)
# Set the type.
types[pos] = "O"
"""
# Setup the configuration.
configuration = KMCConfiguration(lattice=lattice,
types=types,
possible_types=["O","V"])
# Rates.
rateConstEmpty = 1.0
#
##
###
"""I've put the processes in here to make it easier to adjust them via command line arguments."""
# Fill the list of processes.
processes = []
# Only on the first set of basis_points for O/V
basis_sites = [0]
# Bulk processes
# Up, empty.
#0
elements_before = ["O", "V"]
elements_after = ["V", "O"]
coordinates = [[0.0, 0.0, 0.0], [0.0, 0.0, 1.0]]
processes.append( KMCProcess(coordinates=coordinates,
elements_before=elements_before,
elements_after=elements_after,
basis_sites=basis_sites,
rate_constant=1.0))
# Will customise
# Down, empty.
#1
elements_before = ["O", "V"]
elements_after = ["V", "O"]
coordinates = [[0.0, 0.0, 0.0], [0.0, 0.0, -1.0]]
processes.append( KMCProcess(coordinates=coordinates,
elements_before=elements_before,
elements_after=elements_after,
basis_sites=basis_sites,
rate_constant=1.0))
# Will customise
# Create the interactions object.
interactions = KMCInteractions(processes, implicit_wildcards=True)
# Define the custom rates calculator, using the lol model as a template
class modelRates(KMCRateCalculatorPlugin):
    """ Class for defining the custom rates function for the KMCLib paper.

    Hops whose local environment contains two occupied ("O") sites use the
    'full' rate constant; every other hop uses the 'empty' rate constant.
    """

    def rate(self, geometry, elements_before, elements_after, rate_constant, process_number, global_coordinate):
        occupied = sum(1 for element in elements_before if element == "O")
        return rateConstFull if occupied == 2 else rateConstEmpty

    def cutoff(self):
        """ Overloaded base class API function """
        return 1.0
# Attach the custom rate calculator so it overrides the per-process constants.
interactions.setRateCalculator(rate_calculator=modelRates)
"""End of processes"""
###
##
#

# Create the model.
model = KMCLatticeModel(configuration, interactions)

# Define the parameters; not entirely sure if these are sensible or not...
# Initial relaxation run: dump only once, at the end.
control_parameters = KMCControlParameters(number_of_steps=numSteps, dump_interval=numSteps)
# Run the simulation - save trajectory to resultsPlace, which should by now exist
model.run(control_parameters, trajectory_filename=(resultsPlace+"Inittraj1.tr"))

# Define the parameters; not entirely sure if these are sensible or not...
# Production run: dump every step.
control_parameters = KMCControlParameters(number_of_steps=equilSteps, dump_interval=1)
# Run the simulation - save trajectory to resultsPlace, which should by now exist
model.run(control_parameters, trajectory_filename=(resultsPlace+"traj.tr"))

print("Process would appear to have succesfully terminated! How very suspicious...")
|
import React from 'react';
const TableauxTheory = function(props) {
return (
<div>
<h1>Eu sou a teoria do Tableaux</h1>
</div>
)
}
export default TableauxTheory;
|
"""
send_report_to_es
Send a puppet report to ElasticSearch.
Configuration is read from the file specified in the environment variable
`PUPPET_ES_CONFIG` (defaults to /etc/puppet_es.conf) and uses ConfigParser
syntax. A sample configuration file is included as etc/puppet_es.conf.example.
Usage:
send_report_to_es [-h|--help] <filename>
Options:
-h/--help Show this help text and exit
Parameters:
filename The JSON file for the report to load and send to ElasticSearch
"""
from __future__ import print_function
import ConfigParser
from contextlib import contextmanager
import fcntl
import fnmatch
import json
import logging
import logging.handlers
import os
import socket
import sys
import textwrap
import dateutil.parser
from elasticsearch import Elasticsearch
import elasticsearch.helpers
# Module-level logger; handlers are attached in main()/prep_logging().
logger = logging.getLogger(__name__)
# Pre-built syslog handler; prep_logging() removes it when syslog is disabled.
syslog_handler = logging.handlers.SysLogHandler(address='/dev/log')
# Default record format. NOTE(review): main() calls .format() on this string,
# but it contains no '{}' placeholder -- confirm that is intended.
default_log_format = '%(asctime)s %(pathname)s[%(process)d] %(message)s'


class ReportParseError(Exception):
    """Raised when a puppet report cannot be converted into ES documents."""
    pass


class ExternalDependencyError(Exception):
    """Raised when an outside dependency (file, config, ES) fails."""
    pass


class NonIdempotentElasticSearchError(Exception):
    """Raised when a bulk index partially failed; a retry may duplicate documents."""
    pass


class InvalidReport(ValueError):
    """Raised for reports whose format version is unsupported."""
    pass


class DuplicateRunError(Exception):
    """Raised when another instance of this script already holds the lock file."""
    pass
def prep_logging(conf, log_format):
    """Configure the module logger from the 'logging' config section.

    conf is the (possibly empty) dict for the 'logging' section; log_format
    is the record format string applied to every handler.  Syslog is on by
    default; file and stderr handlers are added only when configured.

    Raises ExternalDependencyError if any handler setup fails.
    """
    try:
        log_formatter = logging.Formatter(log_format)
        logger.setLevel(getattr(logging, conf.get('level', 'WARNING')))
        use_syslog = conf.get('syslog', True)
        logfile = conf.get('file')
        stderr = conf.get('stderr', False)
        # The syslog handler was attached in main(); drop it when disabled,
        # otherwise give it the configured formatter.
        if not use_syslog:
            logger.removeHandler(syslog_handler)
        else:
            syslog_handler.setFormatter(log_formatter)
        if logfile:
            file_handler = logging.FileHandler(logfile)
            file_handler.setFormatter(log_formatter)
            logger.addHandler(file_handler)
        if stderr:
            stderr_formatter = logging.StreamHandler()
            stderr_formatter.setFormatter(log_formatter)
            logger.addHandler(stderr_formatter)
        # Announce the effective configuration through the logger itself.
        logger.info('Using log format {}'.format(log_format))
        if use_syslog:
            logger.info('Logging to syslog')
        if logfile:
            logger.info('Logging to file {}'.format(logfile))
        if stderr:
            logger.info('Logging to stderr')
    except Exception as e:
        msg = 'Something went wrong while configuring the logger: {}'.format(e)
        logger.exception(msg)
        raise ExternalDependencyError(msg)
def help():
    """Print the module usage text and terminate with exit status 0."""
    print(__doc__)
    raise SystemExit(0)
@contextmanager
def get_lock():
    """Context manager holding an exclusive pid-file lock at /tmp/puppet_es.pid.

    If a lock file already exists and names a live process, raise
    DuplicateRunError; a stale lock file (dead pid or unparsable contents)
    is removed.  The lock file is created with O_EXCL, flock'ed, and always
    cleaned up on exit.

    Raises:
        DuplicateRunError: another instance is currently running.
        OSError: the lock could not be acquired.
    """
    lockfile = '/tmp/puppet_es.pid'
    if os.path.isfile(lockfile):
        with open(lockfile) as f:
            raw_pid = f.read()
        # BUG FIX: the pid was previously passed to os.kill() as a string
        # (TypeError), and the liveness test was inverted -- os.kill(pid, 0)
        # returns None when the process exists and raises OSError when it
        # does not.
        try:
            pid = int(raw_pid)
        except ValueError:
            pid = None  # unparsable contents -> treat the lock file as stale
        alive = False
        if pid is not None:
            try:
                os.kill(pid, 0)  # signal 0 only probes for process existence
                alive = True
            except OSError:
                alive = False
        if alive:
            msg = 'An existing job is running with pid {}'.format(pid)
            logging.exception(msg)
            raise DuplicateRunError(msg)
        else:
            logging.warning('Cleaning up stale pid file.')
            os.remove(lockfile)
    try:
        fd = os.open(lockfile, os.O_EXCL|os.O_CREAT|os.O_RDWR)
        try:
            fcntl.flock(fd, fcntl.LOCK_EX|fcntl.LOCK_NB)
            # encode() keeps this correct on both Python 2 and 3.
            os.write(fd, str(os.getpid()).encode('ascii'))
            yield
        finally:
            fcntl.flock(fd, fcntl.LOCK_UN)
            os.close(fd)
            os.remove(lockfile)
    except OSError as e:
        msg = 'Could not get lock, perhaps another process is running? {}'.format(str(e))
        logging.exception(msg)
        raise
def parse_json(filename):
    """Load and return the JSON document stored in *filename*.

    Any failure -- unreadable file, malformed JSON, or anything else -- is
    logged and re-raised as ExternalDependencyError.
    """
    try:
        with open(filename) as handle:
            return json.load(handle)
    except IOError as err:
        message = 'Could not open {0} for reading: {1}'.format(filename, err)
        logger.exception(message)
        raise ExternalDependencyError(message)
    except ValueError as err:
        message = 'Could not parse JSON in {0}: {1}'.format(filename, err)
        logger.exception(message)
        raise ExternalDependencyError(message)
    except Exception as err:
        message = 'Something went wrong while parsing the JSON report: {}'.format(err)
        logger.exception(message)
        raise ExternalDependencyError(message)
def get_conf():
    """Read the config file named by $PUPPET_ES_CONFIG (default /etc/puppet_es.conf).

    Returns a nested dict of {section: {option: value}}.  The
    'elasticsearch' host and port options are required; everything else is
    optional.  Any problem is logged and re-raised as
    ExternalDependencyError.
    """
    try:
        conf_file = os.environ.get('PUPPET_ES_CONFIG', '/etc/puppet_es.conf')
        conf = ConfigParser.RawConfigParser()
        conf.read(conf_file)
        result = dict()
        for section in conf.sections():
            result[section] = dict()
        with required_setting('elasticsearch', 'host'):
            result['elasticsearch']['host'] = conf.get('elasticsearch', 'host')
        # BUG FIX: this context manager previously named 'host' again, so a
        # missing port was reported as a missing host; it guards 'port'.
        with required_setting('elasticsearch', 'port'):
            try:
                result['elasticsearch']['port'] = conf.getint('elasticsearch', 'port')
            except ValueError as e:
                msg = 'Option "port" in section "elasticsearch" in config file should be an integer.'
                logger.exception(msg)
                raise
        try:
            result['elasticsearch']['index'] = conf.get('elasticsearch', 'index')
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        try:
            result['logging']['level'] = conf.get('logging', 'level')
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        try:
            result['logging']['syslog'] = conf.getboolean('logging', 'syslog')
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        except ValueError as e:
            msg = 'Option "syslog" in section "logging" in config file should be a boolean'
            logger.exception(msg)
            raise
        try:
            result['logging']['stderr'] = conf.getboolean('logging', 'stderr')
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        except ValueError as e:
            msg = 'Option "stderr" in section "logging" in config file should be a boolean'
            logger.exception(msg)
            raise
        try:
            result['logging']['file'] = conf.get('logging', 'file')
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        try:
            result['base']['on_error'] = conf.get('base', 'on_error')
            # 'archive' disposal needs a destination directory.
            if result['base']['on_error'] == 'archive':
                try:
                    result['base']['archive_dir'] = conf.get('base', 'archive_dir')
                except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
                    msg = 'Option "archive_dir" in section "base" is required if "on_error" is set to "archive"'
                    logger.exception(msg)
                    raise ValueError(msg)
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        try:
            result['base']['on_success'] = conf.get('base', 'on_success')
            if result['base']['on_success'] == 'archive':
                try:
                    result['base']['archive_dir'] = conf.get('base', 'archive_dir')
                except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
                    msg = 'Option "archive_dir" in section "base" is required if "on_success" is set to "archive"'
                    logger.exception(msg)
                    raise ValueError(msg)
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            # This is not a required parameter.
            pass
        return result
    except Exception as e:
        msg = 'Something went wrong while loading the config file: {}'.format(e)
        logger.exception(msg)
        raise ExternalDependencyError(msg)
@contextmanager
def required_setting(section, option):
    """Context manager that annotates missing-config errors.

    Wrap a ConfigParser lookup with it; NoSectionError / NoOptionError are
    logged with section/option context and re-raised unchanged.
    """
    try:
        yield
    except ConfigParser.NoSectionError as err:
        logger.exception('Section "{0}" in config file is required: {1}'.format(section, err))
        raise
    except ConfigParser.NoOptionError as err:
        logger.exception('Option "{0}" in section "{1}" in config file is required: {2}'.format(option, section, err))
        raise
def prep_report(source):
    """Flatten a parsed puppet report into a single top-level ES document.

    Copies identifying fields, stamps this host's fqdn as 'master', and
    promotes selected metrics to top-level keys.  Raises ReportParseError
    on any failure.  NOTE: uses dict.iteritems(), so this module targets
    Python 2.
    """
    try:
        result = dict()
        # We want the values for these keys on the top-level object.
        for key in ['transaction_uuid', 'host', 'time', 'configuration_version', 'status', 'environment']:
            result[key] = source[key]
        # We add the fqdn of the node we're running on as "master"
        result['master'] = socket.getfqdn()
        # Below, we want to pull out certain metrics and make them top-level
        # fields because ElasticSearch likes that better. Note that there are
        # no metrics in a failed compile.
        if source.get('metrics'):
            # We want the counts for all the resource statuses.
            for k, v in {v[0]: v[2] for v in source['metrics']['resources']['values']}.iteritems():
                result['{}_resources'.format(k)] = v
            # We want the counts for all the event statuses.
            for k, v in {v[0]: v[2] for v in source['metrics']['events']['values']}.iteritems():
                result['{}_events'.format(k)] = v
            # We only want the global timing metrics, not the per-resource-type ones.
            times = {v[0]: v[2] for v in source['metrics']['time']['values']}
            for key in ['config_retrieval', 'total']:
                result['{}_time'.format(key)] = times[key]
            # There's only a single changes count value.
            result['total_changes'] = source['metrics']['changes']['values'][0][2]
        return result
    except Exception as e:
        msg = 'Something went wrong while preparing the report object for submission: {}'.format(e)
        logger.exception(msg)
        raise ReportParseError(msg)
def prep_resources(report):
    """Build one ES document per resource_status entry in the report.

    Each document carries correlation fields copied from the top-level
    report plus selected per-resource fields.  Returns a list (empty when
    the report has no resource_statuses).  Raises ReportParseError on any
    failure.
    """
    try:
        results = []
        if report.get('resource_statuses'):
            for name, resource in report['resource_statuses'].iteritems():
                # Some of the fields should have a different key name from the report.
                result = {
                    'name': name,
                    'master': socket.getfqdn(),
                    'resource_title': resource['title'],
                    'file_line': resource['line'],
                }
                # We want to set some values from the global report for correlation.
                for key in ['transaction_uuid', 'configuration_version', 'environment', 'host']:
                    result[key] = report[key]
                # We only care about some of the fields on the resource.
                for key in ['resource_type', 'file', 'failed', 'changed', 'time', 'out_of_sync', 'skipped',
                            'change_count', 'out_of_sync_count']:
                    result[key] = resource[key]
                results.append(result)
        return results
    except Exception as e:
        msg = 'Something went wrong while preparing the resource_status objects for submission: {}'.format(e)
        logger.exception(msg)
        raise ReportParseError(msg)
def prep_events(report):
    """Build one ES document per event attached to any resource_status.

    Each document carries correlation fields from the top-level report, the
    owning resource name, and all event fields of report format v4.  Raises
    ReportParseError on any failure.
    """
    try:
        results = []
        if report.get('resource_statuses'):
            for name, resource in report['resource_statuses'].iteritems():
                for event in resource['events']:
                    result = dict()
                    # We want to set some values from the global report for correlation.
                    for key in ['transaction_uuid', 'configuration_version', 'environment', 'host']:
                        result[key] = report[key]
                    result['master'] = socket.getfqdn()
                    # We need to be able to identify which resource the event was for.
                    result['resource_name'] = name
                    # These are actually all the fields in report version 4.
                    for key in ['audited', 'property', 'previous_value', 'desired_value', 'historical_value', 'message',
                                'name', 'time', 'status']:
                        result[key] = event[key]
                    results.append(result)
        return results
    except Exception as e:
        msg = 'Something went wrong while preparing the event objects for submission: {}'.format(e)
        logger.exception(msg)
        raise ReportParseError(msg)
def generate_actions(report, resources, events, index='puppet-{isoyear}.{isoweek}'):
    """Stamp each prepared document with its ES '_index' and '_type' and
    return them all as one flat list of bulk actions.

    The index-name template may reference certname, fqdn and the calendar
    fields derived from the report timestamp.  NOTE: the input dicts are
    mutated in place.
    """
    d = dateutil.parser.parse(report['time'])
    (isoyear, isoweek, isoday) = d.isocalendar()
    day = d.day
    month = d.month
    year = d.year
    # All fields available to the index-name template.
    index_vars = {
        'certname': report['host'],
        'fqdn': socket.getfqdn(),
        'isoday': isoday,
        'isoweek': isoweek,
        'isoyear': isoyear,
        'day': day,
        'month': month,
        'year': year,
    }
    actions = []
    report.update({'_index': index.format(**index_vars), '_type': 'report'})
    actions.append(report)
    for resource in resources:
        resource.update({'_index': index.format(**index_vars), '_type': 'resource_status'})
        actions.append(resource)
    for event in events:
        event.update({'_index': index.format(**index_vars), '_type': 'event'})
        actions.append(event)
    return actions
def es_submit(reports, config):
    """Bulk-index all prepared documents into ElasticSearch.

    reports maps filename -> {'report', 'resources', 'events'} as produced
    by prep_full().  Raises NonIdempotentElasticSearchError when some
    documents indexed while others failed (a blind retry could duplicate
    data), and ExternalDependencyError for transport-level failures.
    """
    try:
        actions = []
        for filename in reports:
            report = reports[filename]
            actions += generate_actions(report=report['report'], resources=report['resources'], events=report['events'])
        es = Elasticsearch([{'host': config['elasticsearch']['host'], 'port': config['elasticsearch']['port']}])
        # raise_on_* disabled so partial failures are collected and reported below.
        oks, fails = elasticsearch.helpers.bulk(client=es,
                                                actions=actions,
                                                raise_on_error=False,
                                                raise_on_exception=False)
        logger.info('Submitted {0} documents to {1}'.format(oks, config['elasticsearch']['host']))
        for filename in reports:
            report = reports[filename]
            logger.info('Submitted report for host {0} with transaction uuid {1}'.format(
                report['report']['host'],
                report['report']['transaction_uuid']))
        # Log every per-document failure returned by the bulk helper.
        for err in fails:
            err = err[u'create']
            logger.exception(textwrap.dedent("""
                Failed to submit data to {0}:
                Received status code {1}
                Error: {2}
                Exception: {3}
                Data: {4}
                """.format(config['elasticsearch']['host'],
                           err.get('status'),
                           err.get('error'),
                           err.get('exception'),
                           err.get('data'))))
        if fails:
            msg1 = '{0} document(s) failed to index on {1}'.format(len(fails), config['elasticsearch']['host'])
            es_error = elasticsearch.helpers.BulkIndexError(msg1, fails)
            msg2 = 'Errors occurred while indexing: {}'.format(es_error)
            logger.exception(msg1)
            logger.exception(msg2)
            raise NonIdempotentElasticSearchError(msg2)
    except elasticsearch.ElasticsearchException as e:
        msg = 'Something went wrong while connecting to ElasticSearch: {}'.format(e)
        logger.exception(msg)
        raise ExternalDependencyError(msg)
def prep_full(report):
    """Validate the report format (only v4 is supported) and split it into
    the three document groups submitted to ElasticSearch.

    Raises InvalidReport for any other report_format.
    """
    version = report['report_format']
    if version != 4:
        msg = 'Cannot handle report version {}'.format(version)
        logger.exception(msg)
        raise InvalidReport(msg)
    return dict(report=prep_report(report),
                resources=prep_resources(report),
                events=prep_events(report))
def handle_report_file(action, filename, archive_dir=None):
    """Dispose of a processed report file.

    action 'delete' removes the file, 'archive' moves it into archive_dir
    (ValueError if archive_dir is unset); any other action leaves the file
    in place.
    """
    if action == 'delete':
        logger.info('Deleting file {}'.format(filename))
        os.remove(filename)
        return
    if action == 'archive':
        if not archive_dir:
            raise ValueError('Cannot archive without archive_dir set')
        logger.info('Moving file {0} to {1}'.format(filename, archive_dir))
        os.rename(filename, '{0}/{1}'.format(archive_dir, os.path.basename(filename)))
def main():
    """Entry point: take the lock, configure, parse report(s), submit, clean up."""
    with get_lock():
        # Bootstrap syslog logging before the config is read so early failures
        # are visible.  NOTE(review): default_log_format contains no '{}'
        # placeholder, so .format() returns it unchanged -- confirm intended.
        no_file_formatter = logging.Formatter(default_log_format.format('no file loaded yet'))
        syslog_handler.setFormatter(no_file_formatter)
        logger.addHandler(syslog_handler)
        if len(sys.argv) < 2 or sys.argv[1] == '-h' or sys.argv[1] == '--help':
            help()
            exit(0)
        try:
            conf = get_conf()
            if conf.get('logging') and conf['logging'].get('log_format'):
                log_format = conf['logging']['log_format']
            else:
                log_format = default_log_format
            prep_logging(conf.get('logging', dict()), log_format)
        except ExternalDependencyError as e:
            logging.exception('Caught ExternalDependencyError: {}'.format(e))
            raise
        except Exception as e:
            logging.exception('Caught Exception')
            logger.exception(str(e))
            raise
        reports = dict()
        directory = sys.argv[1]
        # Single-file mode: the argument names one report file.
        if os.path.isfile(directory):
            try:
                raw = parse_json(directory)
                reports[directory] = prep_full(raw)
            except ReportParseError as e:
                logging.exception('Caught ReportParseError: {}'.format(e))
                # Dispose of the unparsable report per the on_error policy.
                if conf and 'base' in conf:
                    behavior = conf['base'].get('on_error', 'ignore')
                    handle_report_file(behavior, directory, conf['base'].get('archive_dir', None))
                else:
                    handle_report_file('ignore', directory)
            except Exception as e:
                logging.exception('Caught Exception')
                logger.exception(str(e))
        else:
            # Directory mode: walk the tree and collect every *.json report.
            for root, dirs, files in os.walk(directory, onerror=lambda exc: logger.exception(str(exc))):
                for basename in fnmatch.filter(files, '*.json'):
                    filename = '{0}/{1}'.format(root, basename)
                    try:
                        raw = parse_json(filename)
                        reports[filename] = prep_full(raw)
                    except ReportParseError as e:
                        logging.exception('Caught ReportParseError: {}'.format(e))
                        if conf and 'base' in conf:
                            behavior = conf['base'].get('on_error', 'ignore')
                            handle_report_file(behavior, filename, conf['base'].get('archive_dir', None))
                        else:
                            handle_report_file('ignore', filename)
                    except Exception as e:
                        logging.exception('Caught Exception')
                        logger.exception(str(e))
        try:
            es_submit(reports=reports, config=conf)
        except ExternalDependencyError as e:
            logging.exception('Caught ExternalDependencyError: {}'.format(e))
            raise
        except NonIdempotentElasticSearchError as e:
            logging.exception('Caught NonIdempotentElasticSearchError: {}'.format(e))
            # Partial indexing failure: apply the on_error policy to all files.
            for filename in reports:
                if conf and 'base' in conf:
                    behavior = conf['base'].get('on_error', 'ignore')
                    handle_report_file(behavior, filename, conf['base'].get('archive_dir', None))
                else:
                    handle_report_file('ignore', filename)
            raise
        except Exception as e:
            logging.exception('Caught Exception')
            logger.exception(str(e))
            raise
        else:
            logging.info('Successfully completed job')
            # Full success: apply the on_success policy to all files.
            for filename in reports:
                if conf and 'base' in conf:
                    behavior = conf['base'].get('on_success', 'ignore')
                    handle_report_file(behavior, filename, conf['base'].get('archive_dir', None))
                else:
                    handle_report_file('ignore', filename)
|
from .exceptions import ViddlerAPIException
class ViddlerAPI(object):
    """Facade bundling the Viddler sub-APIs behind one authenticated session.

    Authenticates once with the given credentials and exposes users, videos,
    api and encoding sub-clients that share the resulting session id.
    """
    def __init__(self, apikey, username, password):
        # Deferred imports -- presumably to avoid circular imports between
        # the API modules; verify before restructuring.
        from .users import UsersAPI
        from .videos import VideosAPI
        from .api import ApiAPI
        from .encoding import EncodingAPI
        self.users = UsersAPI(apikey)
        # NOTE(review): assumes UsersAPI.auth performs the login and returns
        # a session id -- confirm against the users module.
        self.sessionid = self.users.auth(username, password)
        self.videos = VideosAPI(apikey, self.sessionid)
        self.api = ApiAPI(apikey, self.sessionid)
        self.encoding = EncodingAPI(apikey, self.sessionid)

    # Re-exported so callers can catch viddler errors via this class.
    ViddlerAPIException = ViddlerAPIException
|
from totalimpact.providers import provider
from totalimpact.providers.provider import Provider, ProviderContentMalformedError
from unicode_helpers import remove_nonprinting_characters
import os, re
import logging
# Module-level logger shared by this provider.
logger = logging.getLogger('ti.providers.arxiv')
def clean_arxiv_id(arxiv_id):
    """Normalise an arXiv identifier: strip non-printing characters,
    lowercase it, and remove any 'arxiv:' prefix or abstract-page URL."""
    cleaned = remove_nonprinting_characters(arxiv_id).lower()
    for noise in ("arxiv:", "http://arxiv.org/abs/"):
        cleaned = cleaned.replace(noise, "")
    return cleaned
class Arxiv(Provider):
    """total-impact provider for arXiv e-prints (aliases, members, biblio)."""

    example_id = ("arxiv", "1305.3328")

    url = "http://arxiv.org"
    descr = "arXiv is an e-print service in the fields of physics, mathematics, computer science, quantitative biology, quantitative finance and statistics."
    # %s slots are filled with the (cleaned) arXiv id.
    biblio_url_template = "http://export.arxiv.org/api/query?id_list=%s"
    aliases_url_template = "http://arxiv.org/abs/%s"

    static_meta_dict = {}

    def __init__(self):
        super(Arxiv, self).__init__()

    def is_relevant_alias(self, alias):
        # An alias is a (namespace, nid) tuple; only 'arxiv' ones concern us.
        (namespace, nid) = alias
        if (namespace == "arxiv"):
            return True
        else:
            return False

    # overriding default because overriding aliases method
    @property
    def provides_aliases(self):
        return True

    # overriding default because overriding member items method
    @property
    def provides_members(self):
        return True

    # overriding because don't need to look up
    def member_items(self,
            query_dict,
            provider_url_template=None,
            cache_enabled=True):
        """Turn a newline-separated list of arXiv ids into alias tuples."""
        if not self.provides_members:
            raise NotImplementedError()

        self.logger.debug(u"%s getting member_items for %s" % (self.provider_name, query_dict))

        # Blank lines are skipped; each remaining id is cleaned and tagged.
        arxiv_ids = query_dict.split("\n")
        aliases_tuples = [("arxiv", clean_arxiv_id(arxiv_id)) for arxiv_id in arxiv_ids if arxiv_id]

        return(aliases_tuples)

    # overriding
    def aliases(self,
            aliases,
            provider_url_template=None,
            cache_enabled=True):
        """Derive a ('url', abstract-page) alias; empty list if already present."""

        arxiv_id = self.get_best_id(aliases)

        if not provider_url_template:
            provider_url_template = self.aliases_url_template
        new_alias = [("url", self._get_templated_url(provider_url_template, arxiv_id, "aliases"))]

        if new_alias in aliases:
            new_alias = []  #override because isn't new

        return new_alias

    def _extract_biblio(self, page, id=None):
        """Extract title/date/authors/etc. from an arXiv Atom API response page."""
        dict_of_keylists = {
            'title' : ['entry', 'title'],
            'date' : ['entry', 'published'],
        }
        biblio_dict = provider._extract_from_xml(page, dict_of_keylists)

        # Author display string: surnames only, comma separated.
        dom_authors = provider._find_all_in_xml(page, "name")
        try:
            authors = [author.firstChild.data for author in dom_authors]
            biblio_dict["authors"] = ", ".join([author.split(" ")[-1] for author in authors])
        except (AttributeError, TypeError):
            pass

        # Year is the leading 4 characters of the ISO published date.
        try:
            biblio_dict["year"] = biblio_dict["date"][0:4]
        except KeyError:
            pass

        biblio_dict["repository"] = "arXiv"
        biblio_dict["free_fulltext_url"] = self._get_templated_url(self.aliases_url_template, id, "aliases")

        return biblio_dict
|
# Interleave two strings character by character, then append the unmatched
# tail of the longer one.
s1 = "abc"
s2 = "uvwxyz"
size = min(len(s1), len(s2))
s3 = "".join(a + b for a, b in zip(s1, s2))
# At most one of these tails is non-empty, so this appends only the remainder.
s3 += s1[size:] + s2[size:]
print(s3)
|
import sys
import time
# Startup information templates ({} slots are filled by the caller --
# presumably mode/filename; confirm at call sites).
STARTING = "Running {} on {}"
INMEMORY = "in-memory raster"
SEQUENTIAL = "sequential raster blocks"
CONCURRENT = "concurrent raster blocks"
# Completion status templates.
COMPLETION = "Finished in {}"
WRITEOUT = "Wrote output to {}"
# Warnings about raster layouts unsuited to blockwise processing.
STRIPED = "Blocks are lines with shape {}. Rewrite the data blocks for sequential and parallel processing."
NOTILING = "Raster with shape {} is not tiled. Rewrite the data with tiling for sequential and parallel processing."
# Errors.
NONINTERSECTING = "Input rasters are non-intersecting"
NONALIGNED = "Raster cells are not aligned between inputs"
def printtime(t0: float, t1: float) -> str:
    """Return the elapsed time between t0 and t1 in h:m:s formatted string

    Parameters:
        t0: initial time
        t1: final time

    Returns:
        elapsed time formatted as H:MM:SS (fractional seconds truncated)
    """
    elapsed = t1 - t0
    hours = int(elapsed // 3600)
    minutes = int((elapsed % 3600) // 60)
    seconds = int(elapsed % 60)
    return '%d:%02d:%02d' % (hours, minutes, seconds)
def progress(
        value: int,
        endvalue: int,
        bar_length: int = 20,
        msg: str = None
) -> None:
    """Display and update an in-place text progress bar on stdout.

    Parameters:
        value: current value representing the progress
        endvalue: expected value at completion
        bar_length: number of characters used to render the bar
        msg: message to display after the percentage
    """
    fraction = float(value) / endvalue
    filled = int(round(fraction * bar_length))
    bar = '#' * filled + ' ' * (bar_length - filled)
    sys.stdout.write("\rPercent: [{0}] {1}% {2}".format(
        bar, int(round(fraction * 100)), msg
    ))
    # Emit the final newline one step before completion, matching callers
    # that count from 0 to endvalue - 1.
    if value == endvalue - 1:
        sys.stdout.write('\n')
    sys.stdout.flush()
|
// Copyright (C) 2013-2017 Internet Systems Consortium, Inc. ("ISC")
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef PROTOCOL_UTIL_H
#define PROTOCOL_UTIL_H
#include <dhcp/pkt4.h>
#include <util/buffer.h>
#include <stdint.h>
namespace isc {
namespace dhcp {
/// @brief Exception thrown when error occurred during parsing packet's headers.
///
/// This exception is thrown when parsing link, Internet or Transport layer
/// header has failed.
class InvalidPacketHeader : public Exception {
public:
    InvalidPacketHeader(const char* file, size_t line, const char* what) :
        isc::Exception(file, line, what) { };
};

/// Size of the Ethernet frame header (in bytes).
static const size_t ETHERNET_HEADER_LEN = 14;

/// Offset (in bytes, from the start of the frame) of the 2-byte word in the
/// Ethernet packet which holds the type of the protocol it encapsulates.
static const size_t ETHERNET_PACKET_TYPE_OFFSET = 12;

/// This value is held in the Ethertype field of Ethernet frame
/// and indicates that an IP packet is encapsulated with this
/// frame. In the standard headers, there is an ETHERTYPE_IP,
/// constant which serves the same purpose. However, it is more
/// convenient to have our constant because we avoid
/// inclusion of additional headers, which have different names
/// and locations on different OSes.
static const uint16_t ETHERNET_TYPE_IP = 0x0800;

/// Minimal IPv4 header length (in bytes).
static const size_t MIN_IP_HEADER_LEN = 20;

/// Offset (in bytes, from the start of the IP header) where the flags field starts.
static const size_t IP_FLAGS_OFFSET = 6;

/// Offset of the byte in IP header which holds the type
/// of the protocol it encapsulates.
static const size_t IP_PROTO_TYPE_OFFSET = 9;

/// Offset of source address in the IPv4 header.
static const size_t IP_SRC_ADDR_OFFSET = 12;

/// Offset of destination address in the IPv4 header.
static const size_t IP_DEST_ADDR_OFFSET = 16;

/// UDP header length (in bytes).
static const size_t UDP_HEADER_LEN = 8;

/// Offset within UDP header where destination port is held.
static const size_t UDP_DEST_PORT = 2;

/// @brief Decode the Ethernet header.
///
/// This function reads Ethernet frame header from the provided
/// buffer at the current read position. The source HW address
/// is read from the header and assigned as client address in
/// the pkt object. The buffer read pointer is set to the end
/// of the Ethernet frame header if read was successful.
///
/// @warning This function does not check that the provided 'pkt'
/// pointer is valid. Caller must make sure that pointer is
/// allocated.
///
/// @param buf input buffer holding header to be parsed.
/// @param [out] pkt packet object receiving HW source address read from header.
///
/// @throw InvalidPacketHeader if packet header is truncated
/// @throw BadValue if pkt object is NULL.
void decodeEthernetHeader(util::InputBuffer& buf, Pkt4Ptr& pkt);

/// @brief Decode IP and UDP header.
///
/// This function reads IP and UDP headers from the provided buffer
/// at the current read position. The source and destination IP
/// addresses and ports and read from these headers and stored in
/// the appropriate members of the pkt object.
///
/// @warning This function does not check that the provided 'pkt'
/// pointer is valid. Caller must make sure that pointer is
/// allocated.
///
/// @param buf input buffer holding headers to be parsed.
/// @param [out] pkt packet object where IP addresses and ports
/// are stored.
///
/// @throw InvalidPacketHeader if packet header is truncated
/// @throw BadValue if pkt object is NULL.
void decodeIpUdpHeader(util::InputBuffer& buf, Pkt4Ptr& pkt);

/// @brief Writes ethernet frame header into a buffer.
///
/// @warning This function does not check that the provided 'pkt'
/// pointer is valid. Caller must make sure that pointer is
/// allocated.
///
/// @param pkt packet object holding source and destination HW address.
/// @param [out] out_buf buffer where a header is written.
void writeEthernetHeader(const Pkt4Ptr& pkt,
                         util::OutputBuffer& out_buf);

/// @brief Writes both IP and UDP header into output buffer
///
/// This utility function assembles IP and UDP packet headers for the
/// provided DHCPv4 message. The source and destination addreses and
/// ports stored in the pkt object are copied as source and destination
/// addresses and ports into IP/UDP headers.
///
/// @warning This function does not check that the provided 'pkt'
/// pointer is valid. Caller must make sure that pointer is
/// allocated.
///
/// @param pkt DHCPv4 packet to be sent in IP packet
/// @param [out] out_buf buffer where an IP header is written
void writeIpUdpHeader(const Pkt4Ptr& pkt, util::OutputBuffer& out_buf);

/// @brief Calculates checksum for provided buffer
///
/// This function returns the sum of 16-bit values from the provided
/// buffer. If the third parameter is specified, it indicates the
/// initial checksum value. This parameter can be a result of
/// calcChecksum function's invocation on different data buffer.
/// The IP or UDP checksum value is a complement of the result returned
/// by this function. However, this function does not compute complement
/// of the summed values. It must be calculated outside of this function
/// before writing the value to the packet buffer.
///
/// The IP header checksum calculation algorithm has been defined in
/// <a href="https://tools.ietf.org/html/rfc791#page-14">RFC 791</a>
///
/// @param buf buffer for which the checksum is calculated.
/// @param buf_size size of the buffer for which checksum is calculated.
/// @param sum initial checksum value, other values will be added to it.
///
/// @return calculated checksum.
uint16_t calcChecksum(const uint8_t* buf, const uint32_t buf_size,
                      uint32_t sum = 0);

} // namespace dhcp
} // namespace isc

#endif // PROTOCOL_UTIL_H
|
#!/usr/bin/env python3
import logging
import os, re, shutil, sys, tempfile
from argparse import (ArgumentParser, ArgumentDefaultsHelpFormatter, RawDescriptionHelpFormatter)
from mob_suite.version import __version__
import mob_suite.mob_init
from collections import OrderedDict
from operator import itemgetter
from mob_suite.blast import BlastRunner
from mob_suite.wrappers import mash
from Bio import SeqIO
from mob_suite.utils import fix_fasta_header, \
calcFastaStats, \
verify_init, \
check_dependencies, \
read_sequence_info, \
writeReport, \
sort_biomarkers, \
ETE3_db_status_check, \
calc_md5, \
GC, \
read_fasta_dict, \
identify_biomarkers, \
parseMash, \
determine_mpf_type, \
hostrange, \
dict_from_alt_key_list, \
read_file_to_dict, \
blast_mge, \
writeMGEresults
from mob_suite.constants import ETE3DBTAXAFILE, \
MOB_TYPER_REPORT_HEADER, \
MOB_CLUSTER_INFO_HEADER, \
default_database_dir, \
ETE3_LOCK_FILE, \
LIT_PLASMID_TAXONOMY_HEADER
def init_console_logger(lvl=2):
    """Initialise root logging at a verbosity index (0=ERROR .. 3=DEBUG)
    and return this module's logger."""
    levels = (logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG)
    chosen_level = levels[lvl]
    log_format = '%(asctime)s %(name)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
    root = logging.getLogger()
    root.setLevel(chosen_level)  # set root logger level
    logging.basicConfig(format=log_format, level=chosen_level)
    return logging.getLogger(__name__)
def parse_args():
    "Parse the input arguments, use '-h' for help"

    # Combine defaults-in-help with raw description formatting.
    class CustomFormatter(ArgumentDefaultsHelpFormatter, RawDescriptionHelpFormatter):
        pass

    parser = ArgumentParser(
        description="MOB-Typer: Plasmid typing and mobility prediction: {}".format(
            __version__), formatter_class=CustomFormatter)
    parser.add_argument('-i', '--infile', type=str, required=True, help='Input assembly fasta file to process')
    parser.add_argument('-o', '--out_file', type=str, required=True, help='Output file to write results')
    parser.add_argument('-g', '--mge_report_file', type=str, required=False, help='Output file for MGE results')
    parser.add_argument('-a', '--analysis_dir', type=str, required=False,
                        help='Working directory for storing temporary results')
    parser.add_argument('-n', '--num_threads', type=int, required=False, help='Number of threads to be used', default=1)
    parser.add_argument('-s', '--sample_id', type=str, required=False, help='Sample Prefix for reports')
    parser.add_argument('-f', '--force', required=False, help='Overwrite existing directory',
                        action='store_true')
    parser.add_argument('-x', '--multi', required=False, help='Treat each sequence as an independant plasmid',
                        action='store_true')
    # Blast filtering thresholds (evalue / identity / coverage) per feature type.
    parser.add_argument('--min_rep_evalue', type=float, required=False,
                        help='Minimum evalue threshold for replicon blastn',
                        default=0.00001)
    parser.add_argument('--min_mob_evalue', type=float, required=False,
                        help='Minimum evalue threshold for relaxase tblastn',
                        default=0.00001)
    parser.add_argument('--min_con_evalue', type=float, required=False, help='Minimum evalue threshold for contig blastn',
                        default=0.00001)
    # NOTE(review): type=str with an int default; main() converts with
    # int(args.min_length) -- confirm whether type=int was intended.
    parser.add_argument('--min_length', type=str, required=False, help='Minimum length of blast hits',
                        default=500)
    parser.add_argument('--min_rep_ident', type=int, required=False, help='Minimum sequence identity for replicons',
                        default=80)
    parser.add_argument('--min_mob_ident', type=int, required=False, help='Minimum sequence identity for relaxases',
                        default=80)
    parser.add_argument('--min_con_ident', type=int, required=False, help='Minimum sequence identity for contigs',
                        default=80)
    parser.add_argument('--min_rpp_ident', type=int, required=False,
                        help='Minimum sequence identity for MGE', default=80)
    parser.add_argument('--min_rep_cov', type=int, required=False,
                        help='Minimum percentage coverage of replicon query by input assembly',
                        default=80)
    parser.add_argument('--min_mob_cov', type=int, required=False,
                        help='Minimum percentage coverage of relaxase query by input assembly',
                        default=80)
    parser.add_argument('--min_con_cov', type=int, required=False,
                        help='Minimum percentage coverage of assembly contig by the plasmid reference database to be considered',
                        default=70)
    parser.add_argument('--min_rpp_cov', type=int, required=False,
                        help='Minimum percentage coverage of MGE',
                        default=80)
    parser.add_argument('--min_rpp_evalue', type=float, required=False,
                        help='Minimum evalue threshold for repetitve elements blastn',
                        default=0.00001)
    parser.add_argument('--min_overlap', type=int, required=False,
                        help='Minimum overlap of fragments',
                        default=10)
    parser.add_argument('-k', '--keep_tmp', required=False, help='Do not delete temporary file directory',
                        action='store_true')
    parser.add_argument('--debug', required=False, help='Show debug information', action='store_true')
    # Reference database locations; all default to files under default_database_dir.
    parser.add_argument('--plasmid_mash_db', type=str, required=False,
                        help='Companion Mash database of reference database',
                        default=os.path.join(default_database_dir,
                                             'ncbi_plasmid_full_seqs.fas.msh'))
    parser.add_argument('-m', '--plasmid_meta', type=str, required=False,
                        help='MOB-cluster plasmid cluster formatted file matched to the reference plasmid db',
                        default=os.path.join(default_database_dir,
                                             'clusters.txt'))
    parser.add_argument('--plasmid_db_type', type=str, required=False, help='Blast database type of reference database',
                        default='blastn')
    parser.add_argument('--plasmid_replicons', type=str, required=False, help='Fasta of plasmid replicons',
                        default=os.path.join(default_database_dir,
                                             'rep.dna.fas'))
    parser.add_argument('--repetitive_mask', type=str, required=False, help='Fasta of known repetitive elements',
                        default=os.path.join(default_database_dir,
                                             'repetitive.dna.fas'))
    parser.add_argument('--plasmid_mob', type=str, required=False, help='Fasta of plasmid relaxases',
                        default=os.path.join(default_database_dir,
                                             'mob.proteins.faa'))
    parser.add_argument('--plasmid_mpf', type=str, required=False, help='Fasta of known plasmid mate-pair proteins',
                        default=os.path.join(default_database_dir,
                                             'mpf.proteins.faa'))
    parser.add_argument('--plasmid_orit', type=str, required=False, help='Fasta of known plasmid oriT dna sequences',
                        default=os.path.join(default_database_dir,
                                             'orit.fas'))
    parser.add_argument('-d', '--database_directory',
                        default=default_database_dir,
                        required=False,
                        help='Directory you want to use for your databases. If the databases are not already '
                             'downloaded, they will be downloaded automatically. Defaults to {}'.format(
                            default_database_dir))
    parser.add_argument('--primary_cluster_dist', type=float, required=False,
                        help='Mash distance for assigning primary cluster id 0 - 1', default=0.06)
    parser.add_argument('-V', '--version', action='version', version="%(prog)s " + __version__)
    return parser.parse_args()
def initMOBTyperReportTemplate(header):
    """Build an empty MOB-typer report row: every report column preset to '-'."""
    return {column: '-' for column in header}
def main():
    """Entry point for MOB-typer: type a plasmid FASTA against the reference
    databases (replicons, relaxases, MPF, oriT, mash neighbours) and write a
    typing report plus an optional MGE report.
    """
    args = parse_args()

    if args.debug:
        logger = init_console_logger(3)
    else:
        logger = init_console_logger(2)

    logger.info('Running Mob-typer version {}'.format(__version__))
    logger.info('Processing fasta file {}'.format(args.infile))

    if not os.path.isfile(args.infile):
        logger.info('Error, fasta file does not exist {}'.format(args.infile))
        sys.exit()

    # Use a throw-away temp directory unless the user supplied a fixed one.
    if not args.analysis_dir:
        tmp_dir = tempfile.TemporaryDirectory(dir=tempfile.gettempdir()).name
    else:
        tmp_dir = args.analysis_dir

    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir, 0o755)

    # NOTE(review): this only logs; execution continues even when the check
    # fails (int(args.num_threads) below would then raise).
    if not isinstance(args.num_threads, int):
        logger.info('Error number of threads must be an integer, you specified "{}"'.format(args.num_threads))

    database_dir = os.path.abspath(args.database_directory)

    # Default the sample id to the input file name minus its FASTA extension.
    if args.sample_id is None:
        sample_id = re.sub(r"\.(fasta|fa|fas){1,1}", "", os.path.basename(args.infile))
    else:
        sample_id = args.sample_id

    # Script arguments
    input_fasta = args.infile
    report_file = args.out_file
    mge_report_file = args.mge_report_file
    num_threads = int(args.num_threads)
    keep_tmp = args.keep_tmp

    if args.multi:
        multi = True
    else:
        multi = False

    if not (args.primary_cluster_dist >= 0 and args.primary_cluster_dist <= 1):
        logging.error('Error distance thresholds must be between 0 - 1: {}'.format(args.primary_cluster_dist))
        sys.exit()
    else:
        primary_distance = float(args.primary_cluster_dist)

    min_length = int(args.min_length)

    # With the default database dir, honour the per-file CLI overrides;
    # otherwise derive every database path from the custom directory.
    if database_dir == default_database_dir:
        mob_ref = args.plasmid_mob
        mash_db = args.plasmid_mash_db
        replicon_ref = args.plasmid_replicons
        plasmid_meta = args.plasmid_meta
        mpf_ref = args.plasmid_mpf
        plasmid_orit = args.plasmid_orit
        repetitive_mask_file = args.repetitive_mask
        verify_init(logger, database_dir)
    else:
        mob_ref = os.path.join(database_dir, 'mob.proteins.faa')
        mash_db = os.path.join(database_dir, 'ncbi_plasmid_full_seqs.fas.msh')
        replicon_ref = os.path.join(database_dir, 'rep.dna.fas')
        plasmid_meta = os.path.join(database_dir, 'clusters.txt')
        mpf_ref = os.path.join(database_dir, 'mpf.proteins.faa')
        plasmid_orit = os.path.join(database_dir, 'orit.fas')
        repetitive_mask_file = os.path.join(database_dir, 'repetitive.dna.fas')

    ETE3DBTAXAFILE = os.path.abspath(database_dir + "/taxa.sqlite")
    LIT_PLASMID_TAXONOMY_FILE = os.path.join(database_dir, "host_range_literature_plasmidDB.txt")
    NCBI_PLASMID_TAXONOMY_FILE = plasmid_meta

    # Intermediate files written into the temp/analysis directory.
    fixed_fasta = os.path.join(tmp_dir, 'fixed.input.fasta')
    replicon_blast_results = os.path.join(tmp_dir, 'replicon_blast_results.txt')
    mob_blast_results = os.path.join(tmp_dir, 'mobtyper_blast_results.txt')
    mpf_blast_results = os.path.join(tmp_dir, 'mpf_blast_results.txt')
    orit_blast_results = os.path.join(tmp_dir, 'orit_blast_results.txt')
    repetitive_blast_results = os.path.join(tmp_dir, 'repetitive_blast_results.txt')

    # Remove stale blast outputs from a previous run in the same directory.
    if os.path.isfile(mob_blast_results):
        os.remove(mob_blast_results)
    if os.path.isfile(mpf_blast_results):
        os.remove(mpf_blast_results)
    if os.path.isfile(orit_blast_results):
        os.remove(orit_blast_results)
    if os.path.isfile(replicon_blast_results):
        os.remove(replicon_blast_results)

    # Input numeric params
    # NOTE(review): oriT reuses the replicon thresholds and MPF reuses the
    # relaxase thresholds -- confirm this aliasing is intentional.
    min_rep_ident = float(args.min_rep_ident)
    min_mob_ident = float(args.min_mob_ident)
    min_ori_ident = float(args.min_rep_ident)
    min_mpf_ident = float(args.min_mob_ident)
    min_rpp_ident = float(args.min_rpp_ident)

    idents = {'min_rep_ident': min_rep_ident, 'min_mob_ident': min_mob_ident, 'min_ori_ident': min_ori_ident}

    # Validate identity thresholds (percent identity).
    for param in idents:
        value = float(idents[param])
        if value < 60:
            logger.error("Error: {} is too low, please specify an integer between 70 - 100".format(param))
            sys.exit(-1)
        if value > 100:
            logger.error("Error: {} is too high, please specify an integer between 70 - 100".format(param))
            sys.exit(-1)

    min_rep_cov = float(args.min_rep_cov)
    min_mob_cov = float(args.min_mob_cov)
    min_ori_cov = float(args.min_rep_cov)
    min_mpf_cov = float(args.min_mob_cov)
    min_rpp_cov = float(args.min_rpp_cov)

    covs = {'min_rep_cov': min_rep_cov, 'min_mob_cov': min_mob_cov, 'min_con_cov': min_ori_cov,
            'min_rpp_cov': min_ori_cov}

    # Validate coverage thresholds (percent coverage).
    for param in covs:
        value = float(covs[param])
        if value < 50:
            logger.error("Error: {} is too low, please specify an integer between 50 - 100".format(param))
            sys.exit(-1)
        if value > 100:
            logger.error("Error: {} is too high, please specify an integer between 50 - 100".format(param))
            sys.exit(-1)

    min_rep_evalue = float(args.min_rep_evalue)
    min_mob_evalue = float(args.min_mob_evalue)
    min_ori_evalue = float(args.min_rep_evalue)
    min_mpf_evalue = float(args.min_mob_evalue)
    min_rpp_evalue = float(args.min_rpp_evalue)

    evalues = {'min_rep_evalue': min_rep_evalue, 'min_mob_evalue': min_mob_evalue, 'min_con_evalue': min_ori_evalue}

    # Validate e-value thresholds.
    for param in evalues:
        value = float(evalues[param])
        if value > 1:
            logger.error("Error: {} is too high, please specify an float evalue between 0 to 1".format(param))
            sys.exit(-1)

    check_dependencies(logger)

    # Trigger database initialization when any required file is missing.
    needed_dbs = [replicon_ref, mob_ref, mash_db, mpf_ref]

    for db in needed_dbs:
        if (not os.path.isfile(db)):
            logger.info('Warning! Needed database missing "{}"'.format(db))
            mob_suite.mob_init.main()

    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir, 0o755)

    # Get cluster information
    reference_sequence_meta = read_sequence_info(plasmid_meta, MOB_CLUSTER_INFO_HEADER)

    # initialize master record tracking
    id_mapping = fix_fasta_header(input_fasta, fixed_fasta)

    # Per-contig report skeleton, seeded with md5 / GC / length.
    contig_info = {}
    with open(fixed_fasta, "r") as handle:
        for record in SeqIO.parse(handle, "fasta"):
            id = str(record.id)
            contig_info[id] = {}
            for feature in MOB_TYPER_REPORT_HEADER:
                contig_info[id][feature] = ''
            seq = str(record.seq)
            contig_info[id]['md5'] = calc_md5(seq)
            contig_info[id]['gc'] = GC(seq)
            contig_info[id]['size'] = len(seq)
            contig_info[id]['contig_id'] = id
            contig_info[id]['sample_id'] = sample_id

    handle.close()

    # Makeblastdb
    blast_runner = BlastRunner(fixed_fasta, tmp_dir)
    build_success = blast_runner.makeblastdb(fixed_fasta, 'nucl', logging=logging)
    if build_success == False:
        logging.error("Could not build blast database, check error messages..cannot continue")
        sys.exit()

    # run individual marker blasts
    contig_info = identify_biomarkers(contig_info, fixed_fasta, tmp_dir, 25, logging, \
                                      replicon_ref, min_rep_ident, min_rep_cov, min_rep_evalue, replicon_blast_results, \
                                      mob_ref, min_mob_ident, min_mob_cov, min_mob_evalue, mob_blast_results, \
                                      mpf_ref, min_mpf_ident, min_mpf_cov, min_mpf_evalue, mpf_blast_results, \
                                      None, None, None, None, \
                                      plasmid_orit, orit_blast_results, repetitive_blast_results, \
                                      num_threads=num_threads)

    m = mash()
    mobtyper_results = []
    mash_input_fasta = fixed_fasta + '.msh'

    # Host-range lookup tables keyed by sample_id.
    ncbi = dict_from_alt_key_list(
        read_file_to_dict(NCBI_PLASMID_TAXONOMY_FILE, MOB_CLUSTER_INFO_HEADER, separater="\t"),
        "sample_id")
    lit = dict_from_alt_key_list(
        read_file_to_dict(LIT_PLASMID_TAXONOMY_FILE, LIT_PLASMID_TAXONOMY_HEADER, separater="\t"), "sample_id")

    if multi:
        # Each contig is its own plasmid: sketch per sequence and assign the
        # nearest known reference to every contig individually.
        m.mashsketch(input_fasta=fixed_fasta, output_path=mash_input_fasta, sketch_ind=True, num_threads=num_threads)
        mash_results = parseMash(
            m.run_mash(reference_db=mash_db, input_fasta=mash_input_fasta, table=False, num_threads=num_threads))

        for seq_id in mash_results:
            record = {}
            for field in MOB_TYPER_REPORT_HEADER:
                if field in contig_info[seq_id]:
                    record[field] = contig_info[seq_id][field]
                else:
                    record[field] = ''
            record['sample_id'] = seq_id
            record['num_contigs'] = 1
            # Closest reference (smallest mash distance) that has metadata.
            distances = OrderedDict(sorted(mash_results[seq_id].items(), key=itemgetter(1), reverse=False))
            for mash_neighbor_id in distances:
                dist = distances[mash_neighbor_id]
                if mash_neighbor_id not in reference_sequence_meta:
                    continue
                else:
                    record['mash_nearest_neighbor'] = mash_neighbor_id
                    record['mash_neighbor_distance'] = dist
                    record['primary_cluster_id'] = reference_sequence_meta[mash_neighbor_id]['primary_cluster_id']
                    record['secondary_cluster_id'] = reference_sequence_meta[mash_neighbor_id]['secondary_cluster_id']
                    record['mash_neighbor_identification'] = reference_sequence_meta[mash_neighbor_id]['organism']
                    break
            mobtyper_results.append(record)
    else:
        # All contigs belong to one plasmid: single combined sketch/record.
        m.mashsketch(input_fasta=fixed_fasta, output_path=mash_input_fasta, sketch_ind=False, num_threads=num_threads)
        mash_results = parseMash(
            m.run_mash(reference_db=mash_db, input_fasta=mash_input_fasta, table=False, num_threads=num_threads))
        record = {}
        for field in MOB_TYPER_REPORT_HEADER:
            record[field] = ''
        record['sample_id'] = sample_id
        fastaSeqStats = calcFastaStats(fixed_fasta)
        record['md5'] = fastaSeqStats['md5']
        record['total_length'] = fastaSeqStats['size']
        record['num_contigs'] = fastaSeqStats['num_seq']
        record['gc'] = fastaSeqStats['gc_content']
        record['mash_nearest_neighbor'] = '-'
        record['mash_neighbor_distance'] = 1
        record['primary_cluster_id'] = '-'
        record['secondary_cluster_id'] = '-'
        record['mash_neighbor_identification'] = '-'

        for seq_id in mash_results:
            distances = OrderedDict(sorted(mash_results[seq_id].items(), key=itemgetter(1), reverse=False))
            mash_neighbor_id = next(iter(distances))
            dist = distances[mash_neighbor_id]
            if mash_neighbor_id not in reference_sequence_meta:
                continue
            record['mash_nearest_neighbor'] = mash_neighbor_id
            record['mash_neighbor_distance'] = dist
            record['primary_cluster_id'] = reference_sequence_meta[mash_neighbor_id]['primary_cluster_id']
            record['secondary_cluster_id'] = reference_sequence_meta[mash_neighbor_id]['secondary_cluster_id']
            record['mash_neighbor_identification'] = reference_sequence_meta[mash_neighbor_id]['organism']

        # Aggregate biomarker hits from every contig into the single record.
        record['rep_type(s)'] = []
        record['rep_type_accession(s)'] = []
        record['relaxase_type(s)'] = []
        record['relaxase_type_accession(s)'] = []
        record['mpf_type'] = []
        record['mpf_type_accession(s)'] = []
        record['orit_type(s)'] = []
        record['orit_accession(s)'] = []

        for seq_id in contig_info:
            record['rep_type(s)'].append(contig_info[seq_id]['rep_type(s)'])
            record['rep_type_accession(s)'].append(contig_info[seq_id]['rep_type_accession(s)'])
            record['relaxase_type(s)'].append(contig_info[seq_id]['relaxase_type(s)'])
            record['relaxase_type_accession(s)'].append(contig_info[seq_id]['relaxase_type_accession(s)'])
            record['mpf_type'].append(contig_info[seq_id]['mpf_type'])
            record['mpf_type_accession(s)'].append(contig_info[seq_id]['mpf_type_accession(s)'])
            record['orit_type(s)'].append(contig_info[seq_id]['orit_type(s)'])
            record['orit_accession(s)'].append(contig_info[seq_id]['orit_accession(s)'])

        # Flatten comma-delimited strings/lists and drop empty entries.
        for field in record:
            tmp = []
            if record[field] == None:
                continue
            if isinstance(record[field], list):
                length = len(record[field])
                for i in range(0, length):
                    tmp += record[field][i].split(',')
            elif isinstance(record[field], str) and len(record[field]) > 0:
                tmp += record[field].split(',')
            if len(tmp) > 0:
                record[field] = []
                for d in tmp:
                    if len(d) > 0:
                        record[field].append(d)
        mobtyper_results.append(record)

    # Post-process every record: restore original ids, de-duplicate
    # biomarkers, assign host range and predicted mobility.
    for i in range(0, len(mobtyper_results)):
        record = mobtyper_results[i]
        sample_id = record['sample_id']
        if isinstance(record['sample_id'], list):
            sample_id = record['sample_id'][0]
        if sample_id in id_mapping:
            original_id = id_mapping[sample_id]
            record['sample_id'] = original_id

        bio_markers = sort_biomarkers({0: {'types': record['rep_type(s)'], 'acs': record['rep_type_accession(s)']},
                                       1: {'types': record['relaxase_type(s)'],
                                           'acs': record['relaxase_type_accession(s)']},
                                       2: {'types': record['mpf_type'], 'acs': record['mpf_type_accession(s)']},
                                       3: {'types': record['orit_type(s)'], 'acs': record['orit_accession(s)']}, })

        record['rep_type(s)'] = bio_markers[0]['types']
        record['rep_type_accession(s)'] = bio_markers[0]['acs']
        record['relaxase_type(s)'] = bio_markers[1]['types']
        record['relaxase_type_accession(s)'] = bio_markers[1]['acs']
        record['mpf_type'] = bio_markers[2]['types']
        record['mpf_type_accession(s)'] = bio_markers[2]['acs']
        record['orit_type(s)'] = bio_markers[3]['types']
        record['orit_accession(s)'] = bio_markers[3]['acs']

        # Only trust cluster assignment when the mash neighbour is close enough.
        if (isinstance(record['mash_neighbor_distance'], float) or
                isinstance(record['mash_neighbor_distance'], int)) and \
                record['mash_neighbor_distance'] <= primary_distance:
            mob_cluster_id = record['primary_cluster_id']
        else:
            mob_cluster_id = None

        # Patches cases where results are concatenated into strings when contigs are merged into a single result
        if isinstance(record['rep_type(s)'], list):
            record['rep_type(s)'] = ",".join(record['rep_type(s)'])
        if isinstance(record['relaxase_type_accession(s)'], list):
            record['relaxase_type_accession(s)'] = ",".join(record['relaxase_type_accession(s)'])

        host_range = hostrange(record['rep_type(s)'].split(','), record['relaxase_type_accession(s)'].split(','),
                               mob_cluster_id, ncbi, lit)

        for field in host_range:
            record[field] = host_range[field]

        if isinstance(record['mpf_type'], list):
            record['mpf_type'] = determine_mpf_type(record['mpf_type'])
        elif isinstance(record['mpf_type'], str):
            record['mpf_type'] = determine_mpf_type(record['mpf_type'].split(','))

        # Serialize any remaining list fields for the flat report.
        for field in record:
            if isinstance(record[field], list):
                record[field] = ",".join(record[field])

        # Mobility call: relaxase + MPF => conjugative; relaxase or oriT
        # alone => mobilizable; otherwise non-mobilizable.
        record['predicted_mobility'] = 'non-mobilizable'
        if len(record['relaxase_type(s)']) > 0 and len(record['mpf_type']):
            record['predicted_mobility'] = 'conjugative'
        elif len(record['relaxase_type(s)']) > 0 or len(record['orit_type(s)']) > 0:
            record['predicted_mobility'] = 'mobilizable'
        mobtyper_results[i] = record

    writeReport(mobtyper_results, MOB_TYPER_REPORT_HEADER, report_file)

    # Perform MGE detection
    if mge_report_file is not None:
        mge_results = blast_mge(fixed_fasta, repetitive_mask_file, tmp_dir, min_length,
                                logging, min_rpp_ident, min_rpp_cov, min_rpp_evalue, num_threads)
        contig_memberships = {'chromosome': {}, 'plasmid': {}}
        for i in range(0, len(mobtyper_results)):
            primary_cluster_id = mobtyper_results[i]['primary_cluster_id']
            if not primary_cluster_id in contig_memberships['plasmid']:
                contig_memberships['plasmid'][primary_cluster_id] = {}
            contig_id = mobtyper_results[i]['sample_id']
            mobtyper_results[i]['molecule_type'] = 'plasmid'
            mobtyper_results[i]['contig_id'] = contig_id
            mobtyper_results[i]['size'] = mobtyper_results[i]['total_length']
            contig_memberships['plasmid'][primary_cluster_id][contig_id] = mobtyper_results[i]

        writeMGEresults(contig_memberships, mge_results, mge_report_file)
        logger.info("MOB-typer MGE results written to {}".format(mge_report_file))

    if not keep_tmp:
        shutil.rmtree(tmp_dir)

    logger.info("MOB-typer completed and results written to {}".format(report_file))
# call main function when executed as a script (not when imported)
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename ``Record.nominatim`` to ``Record.location_text`` (field rename only,
    no data transformation)."""

    dependencies = [
        ('ashlar', '0016_auto_20151016_1951'),
    ]

    operations = [
        migrations.RenameField(
            model_name='record',
            old_name='nominatim',
            new_name='location_text',
        ),
    ]
|
import logging
from copy import deepcopy
from ctrail.introspect import *
def print_agent_vrfs(x, filters=None, indent_level=0, indent='    ', verb=0):
    """Print one vrouter-agent VRF entry; returns the number of lines printed
    (0 when the entry is rejected by *filters*)."""
    if filters is not None and not filter_generic(x, filters):
        return 0
    keys = (['name', 'RD', 'vn'], [], [])
    return print_generic(x, print_keys=keys,
                         indent_level=indent_level, indent=indent)
def print_agent_intfs(x, filters=None, indent_level=0, indent='    ', verb=0):
    """Print one vrouter-agent interface entry; returns the number of lines
    printed (0 when the entry is rejected by *filters*)."""
    if filters is not None and not filter_generic(x, filters):
        return 0
    keys = (
        ['name', 'index', 'vrf_name', 'vn_name', 'vm_name', 'vm_uuid'],
        ['admin_state', 'active', 'ipv4_active', 'ip6_active', 'l2_active'],
        ['type', 'label', 'l2_label', 'mac_addr', 'ip_addr', 'ip6_addr'],
    )
    return print_generic(x, print_keys=keys,
                         indent_level=indent_level, indent=indent)
def print_agent_nhs(x, filters=None, indent_level=0, indent='    ', verb=0):
    """Print one vrouter-agent next-hop entry; returns the number of lines
    printed (0 when the entry is rejected by *filters*)."""
    if filters is not None and not filter_generic(x, filters):
        return 0
    keys = (
        ['nh_index', 'type', 'ref_count', 'itf', 'vrf'],
        ['mac', 'sip', 'valid', 'policy'],
        [],
    )
    return print_generic(x, print_keys=keys,
                         indent_level=indent_level, indent=indent)
def print_agent_acl(x, filters=None, indent_level=0, indent='    ', verb=0):
    """Print one agent ACL header line plus all of its ACEs; returns the
    total number of entries printed (0 when rejected by *filters*)."""
    if filters is not None and not filter_generic(x, filters):
        return 0
    header_keys = ['name', 'uuid', 'dynamic_acl']
    line = indent_level * indent
    for key in header_keys:
        line += "{}: {} ".format(key, x.get(key, 'n/a'))
    print(line)
    aces = x['entries']['AclEntrySandeshData']
    for ace in aces:
        print_agent_ace(ace, indent_level=indent_level + 1, indent=indent)
    return 1 + len(aces)
def print_agent_ace(x, filters=None, indent_level=0, indent='    ', verb=0):
    # One summary line of scalar keys, then one detail line for port/proto
    # ranges and one for actions. Always prints 3 lines (return value).
    print_keys = (
        ['ace_id', 'rule_type', 'src_type', 'src', 'dst_type', 'dst', 'uuid'],
        ['proto_l', 'src_port_l', 'dst_port_l'],
        ['action_l']
    )
    if (filters is not None) and (not filter_generic(x, filters)):
        return 0
    print("{}".format(indent_level * indent), end='')
    for k in print_keys[0]:
        print("{}: {} ".format(k, x.get(k, 'n/a')), end='')
    print()
    # Each detail field is either a list of SandeshRange dicts or a list of
    # ActionStr dicts; pretty-print whichever form is present.
    # NOTE(review): hide_key_names=('action') is a plain string, not a
    # one-element tuple -- confirm ord_dict_prettify accepts that.
    for key_line in print_keys[1:]:
        print("{}".format((indent_level + 1) * indent), end='')
        for k in key_line:
            key_text = 'n/a'
            if ('SandeshRange' in x[k]) and isinstance(x[k]['SandeshRange'], list):
                key_list = x[k]['SandeshRange']
                key_text = ', '.join([ord_dict_prettify(le, hide_key_names=('action')) for le in key_list])
            elif ('ActionStr' in x[k]) and isinstance(x[k]['ActionStr'], list):
                key_list = x[k]['ActionStr']
                key_text = ', '.join([ord_dict_prettify(le, hide_key_names=('action')) for le in key_list])
            print("{}: {} ".format(k, key_text), end='')
        print()
    return 3
def print_agent_route(x, filters=None, indent_level=0, indent='    ', verb=0):
    """Print one agent route header (IPv4/IPv6 prefix or L2 MAC) and each of
    its paths; returns the number of entries printed (0 if filtered out)."""
    if filters is not None and not filter_generic(x, filters):
        return 0
    prefix = indent_level * indent
    if 'src_ip' in x:
        print("{}{}/{} src_vrf: {} paths:".format(prefix, x['src_ip'],
                                                  x['src_plen'], x['src_vrf']))
    elif 'mac' in x:
        print("{}mac: {} src_vrf: {} paths:".format(prefix, x['mac'],
                                                    x['src_vrf']))
    else:
        print("{}???? src_vrf: {} paths:".format(prefix, x['src_vrf']))
    paths = x['path_list']['PathSandeshData']
    for path in paths:
        print_agent_path(path, indent_level=indent_level + 1, indent=indent)
    return 1 + len(paths)
def print_agent_path(x, filters=None, indent_level=0, indent='    ', verb=0):
    """Print one route path: next-hop index plus summary keys on the first
    line, then dest-VN list and communities on two detail lines. Returns 3
    (lines printed), or 0 when rejected by *filters*."""
    summary_keys = ['label', 'gw_ip', 'peer', 'active_tunnel_type', 'vrf', 'info']
    if filters is not None and not filter_generic(x, filters):
        return 0
    line = "{}nh_index: {} ".format(indent_level * indent,
                                    x['nh']['NhSandeshData']['nh_index'])
    for key in summary_keys:
        line += "{}: {} ".format(key, x.get(key, 'n/a'))
    print(line)

    def _join_elements(container):
        # Sandesh lists arrive as {'element': [...]}; missing means n/a.
        if 'element' in container:
            return ', '.join(str(item) for item in container['element'])
        return 'n/a'

    detail_prefix = (indent_level + 1) * indent
    print("{}dest_vn_list: {}".format(detail_prefix, _join_elements(x['dest_vn_list'])))
    print("{}communities: {}".format(detail_prefix, _join_elements(x['communities'])))
    return 3
def get_state(address, port, vrf_ids=(), acls=False, verb=0):
    """Query a contrail vrouter-agent introspect endpoint and print its state.

    By default dumps VRFs, interfaces, next-hops and access-lists.  When
    vrf_ids is non-empty, per-VRF IPv4/IPv6/L2 unicast route tables are
    dumped instead; when acls is set (and vrf_ids is empty) only the
    access-lists are dumped.  verb > 2 additionally enables raw dumps.
    """
    dump = False

    # Request name -> introspect URL, printer callback, and the key path used
    # to dig the record list out of the sandesh response.
    introspect_requests = {
        'vrouter agent vrfs': {
            'url': 'Snh_VrfListReq', 'print_func': print_agent_vrfs,
            'snh_keys': ['__VrfListResp_list', 'VrfListResp', 'vrf_list', 'VrfSandeshData'],
            'filters': None
        },
        'vrouter agent interfaces': {
            'url': 'Snh_ItfReq', 'print_func': print_agent_intfs,
            'snh_keys': ['__ItfResp_list', 'ItfResp', 'itf_list', 'ItfSandeshData'],
            'filters': None
        },
        'vrouter agent next-hops': {
            'url': 'Snh_NhListReq', 'print_func': print_agent_nhs,
            'snh_keys': ['__NhListResp_list', 'NhListResp', 'nh_list', 'NhSandeshData'],
            'filters': None
        },
        'access-lists' : {
            'url': 'Snh_AclReq', 'print_func': print_agent_acl,
            'snh_keys': ['__AclResp_list', 'AclResp', 'acl_list', 'AclSandeshData'],
            'filters': None
        }
    }

    # Templates: 'url_template' is formatted with a concrete vrf index below.
    per_vrf_requests = {
        'vrouter agent IPv4 routes': {
            'url_template': 'Snh_Inet4UcRouteReq?vrf_index={}',
            'url': 'Snh_Inet4UcRouteReq', 'print_func': print_agent_route,
            'snh_keys': ['__Inet4UcRouteResp_list', 'Inet4UcRouteResp', 'route_list', 'RouteUcSandeshData'],
            'filters': None
        },
        'vrouter agent IPv6 routes': {
            'url_template': 'Snh_Inet6UcRouteReq?vrf_index={}',
            'url': 'Snh_Inet6UcRouteReq', 'print_func': print_agent_route,
            'snh_keys': ['__Inet6UcRouteResp_list', 'Inet6UcRouteResp', 'route_list', 'RouteUcSandeshData'],
            'filters': None
        },
        'vrouter agent L2 routes': {
            'url_template': 'Snh_Layer2RouteReq?vrf_index={}',
            'url': 'Snh_Layer2RouteReq', 'print_func': print_agent_route,
            'snh_keys': ['__Layer2RouteResp_list', 'Layer2RouteResp', 'route_list', 'RouteL2SandeshData'],
            'filters': None
        },
    }

    current_requests = introspect_requests
    if len(vrf_ids):
        # Expand each per-VRF template into one concrete request per requested
        # VRF id, then drop the generic template entry.
        generic_keys = list(per_vrf_requests.keys())
        for k in generic_keys:
            for vrf_id in vrf_ids:
                req_name = "{} (vrf id {})".format(k, vrf_id)
                req_url = per_vrf_requests[k]['url_template'].format(vrf_id)
                per_vrf_requests[req_name] = deepcopy(per_vrf_requests[k])
                per_vrf_requests[req_name]['url'] = req_url
            per_vrf_requests.pop(k)
        current_requests = per_vrf_requests
    elif acls:
        current_requests = {'access-lists': introspect_requests['access-lists']}

    if verb > 2:
        dump = True
    sandesh_generic_requests(address, port, current_requests, dump, verb)
|
# @File(label="Directory of the images sequence", style="directory") images_sequence_dir
# @String(label="Image File Extension", required=false, value=".tif") image_extension
# @String(label="Stack Name", required=false, value=".") stack_name
# @String(label="Output Filename", required=false, value=".tif") output_name

"""ImageJ Jython script: open every image of a sequence, merge the open
windows into a single stack, and save the stack as a TIFF in the input
directory.  The four `# @...` lines above are ImageJ script parameters.
"""

import os

from ij import IJ
from pprint import pprint

from net.imagej.axis import Axes
from net.imglib2.view import Views

# Collect image files (recursively) matching the extension, in sorted order.
images_sequence_dir = str(images_sequence_dir)
print(images_sequence_dir)

fnames = []
for root, _, filenames in os.walk(images_sequence_dir):
    for fname in filenames:
        if fname.endswith(image_extension):
            fnames.append(os.path.join(root, fname))
fnames = sorted(fnames)
pprint(fnames)

if len(fnames) < 1:
    raise Exception("No image files found in %s" % images_sequence_dir)

# Open and display each image; "Images to Stack" consumes all open windows.
for fname in fnames:
    image = IJ.openImage(fname)
    image.show()

IJ.run("Images to Stack", "name=" + str(stack_name) + " title=[] use")
IJ.saveAs("Tiff", os.path.join(images_sequence_dir, str(output_name)))
|
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const bcrypt = require('bcrypt-nodejs');

// User model: unique lowercase username plus a bcrypt-hashed password.
const userSchema = new Schema({
  username: {type: String, required: true, unique: true, lowercase: true},
  password: {type: String, required: true}
});

// On Save Hook, encrypt password.
// Only re-hash when the password field actually changed; otherwise saving
// any unrelated field would hash the existing hash and lock the user out.
userSchema.pre('save', function (next) {
  const user = this;
  if (!user.isModified('password')) {
    return next();
  }
  bcrypt.genSalt(10, function (err, salt) {
    if (err) {
      return next(err);
    }
    bcrypt.hash(user.password, salt, null, function (err, hash) {
      if (err) {
        return next(err);
      }
      user.password = hash;
      next();
    });
  });
});

// Async comparison of a candidate password against the stored hash.
userSchema.methods.comparePassword = function (candidatePassword, callback) {
  bcrypt.compare(candidatePassword, this.password, function (err, isMatch) {
    if (err) {
      return callback(err);
    }
    callback(null, isMatch);
  });
};

// generating a hash (synchronous helper)
userSchema.methods.generateHash = function (password) {
  return bcrypt.hashSync(password, bcrypt.genSaltSync(10), null);
};

// checking if password is valid (synchronous helper)
userSchema.methods.validPassword = function (password) {
  return bcrypt.compareSync(password, this.password);
};

const ModelClass = mongoose.model('user', userSchema);

module.exports = ModelClass;
|
// Karma configuration
module.exports = function(config) {
  config.set({

    // TypeScript compilation + bundling for the test run
    // see https://www.npmjs.com/package/karma-typescript
    karmaTypescriptConfig: {
      compilerOptions: {
        lib: ['dom', 'es6'],
      },
      bundlerOptions: {
        resolve: {
          // map bare jquery imports onto the vendored copies under src/jq
          alias: {
            'jquery': './src/jq/jquery.js',
            'jquery-ui': './src/jq/jquery-ui.js',
          }
        }
      },
      exclude: ["demo"] // ignore dummy demo .ts files
    },

    // base path that will be used to resolve all patterns (eg. files, exclude)
    basePath: '',

    // frameworks to use
    // available frameworks: https://npmjs.org/browse/keyword/karma-adapter
    frameworks: ['jasmine', 'karma-typescript'],

    // list of files / patterns to load in the browser
    files: [
      'src/**/*.ts', // TODO: have to list files else the import in each won't be found!
      'spec/*-spec.ts',
      // 'spec/e2e/*-spec.js' issues with ReferenceError: `browser` & `element` is not defined
    ],

    // BUT list of files to exclude
    exclude: [
      // used for webpack h5/jq/static .js
      'src/gridstack-h5.ts',
      'src/gridstack-jq.ts',
      'src/gridstack-static.ts',
      'src/jq/*', // use h5 version for unit testing
      // 'src/h5/*', // use jq version for unit testing
    ],

    // preprocess matching files before serving them to the browser
    // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
    preprocessors: {
      '**/*.ts': ['karma-typescript']
    },

    // test results reporter to use
    // possible values: 'dots', 'progress'
    // available reporters: https://npmjs.org/browse/keyword/karma-reporter
    reporters: ['progress', 'karma-typescript'],

    coverageReporter: {
      type: 'lcov', // lcov or lcovonly are required for generating lcov.info files
      dir: 'coverage/'
    },

    // web server port
    port: 9876,

    // enable / disable colors in the output (reporters and logs)
    colors: true,

    // level of logging
    // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN
    // config.LOG_INFO || config.LOG_DEBUG
    logLevel: config.LOG_INFO,

    // enable / disable watching file and executing tests whenever any file changes
    autoWatch: true,

    // start these browsers
    // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
    browsers: ['ChromeHeadlessCustom'],
    customLaunchers: {
      ChromeHeadlessCustom: {
        base: 'ChromeHeadless',
        // fixed window size keeps layout-dependent grid tests deterministic
        flags: ['--window-size=800,600']
      }
    },

    // Continuous Integration mode
    // if true, Karma captures browsers, runs the tests and exits
    singleRun: true,

    // Concurrency level
    // how many browser should be started simultaneous
    concurrency: Infinity,

    // run specs in declaration order (both runner- and jasmine-level)
    random: false,
    client: {
      jasmine: {
        random: false
      }
    }
  });
};
|
from typing import Dict, List, Optional
from caldera.consensus.block_record import BlockRecord
from caldera.types.blockchain_format.sized_bytes import bytes32
from caldera.types.blockchain_format.sub_epoch_summary import SubEpochSummary
from caldera.types.blockchain_format.vdf import VDFInfo
from caldera.types.header_block import HeaderBlock
from caldera.types.weight_proof import SubEpochChallengeSegment
from caldera.util.ints import uint32
class BlockchainInterface:
    """Read/write interface over block records, sub-epoch summaries and
    challenge segments.

    Nearly every method is an unimplemented stub (``pass`` -> returns None)
    intended to be overridden by a concrete blockchain or block cache; only
    ``try_block_record`` carries a default implementation.
    """

    def get_peak_height(self) -> Optional[uint32]:
        pass

    def block_record(self, header_hash: bytes32) -> BlockRecord:
        pass

    def height_to_block_record(self, height: uint32) -> BlockRecord:
        pass

    def get_ses_heights(self) -> List[uint32]:
        pass

    def get_ses(self, height: uint32) -> SubEpochSummary:
        pass

    def height_to_hash(self, height: uint32) -> Optional[bytes32]:
        pass

    def contains_block(self, header_hash: bytes32) -> bool:
        pass

    def remove_block_record(self, header_hash: bytes32):
        pass

    def add_block_record(self, block_record: BlockRecord):
        pass

    def contains_height(self, height: uint32) -> bool:
        pass

    async def warmup(self, fork_point: uint32):
        pass

    async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]:
        pass

    async def get_block_records_in_range(self, start: int, stop: int) -> Dict[bytes32, BlockRecord]:
        pass

    async def get_header_blocks_in_range(
        self, start: int, stop: int, tx_filter: bool = True
    ) -> Dict[bytes32, HeaderBlock]:
        pass

    async def get_header_block_by_height(
        self, height: int, header_hash: bytes32, tx_filter: bool = True
    ) -> Optional[HeaderBlock]:
        pass

    async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]:
        pass

    def try_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]:
        # Non-raising variant of block_record: None when the hash is unknown.
        if self.contains_block(header_hash):
            return self.block_record(header_hash)
        return None

    async def persist_sub_epoch_challenge_segments(
        self, sub_epoch_summary_height: uint32, segments: List[SubEpochChallengeSegment]
    ):
        pass

    async def get_sub_epoch_challenge_segments(
        self,
        sub_epoch_summary_height: uint32,
    ) -> Optional[List[SubEpochChallengeSegment]]:
        pass

    def seen_compact_proofs(self, vdf_info: VDFInfo, height: uint32) -> bool:
        pass
|
import pandas as pd
import numpy as np
def markers_by_hierarhy(inf_aver, var_names, hierarhy_df, quantile=[0.05, 0.1, 0.2], mode='exclusive'):
    """Find which genes are expressed at which level of cell type hierarchy.

    Assigns expression counts for each gene to higher levels of hierarchy using estimates of average expression for the lowest level and subtracts that expression from the lowest level. For example, low level annotation can be `Inh_SST neurones`, high level `Inh neurones`, very high level `neurones`, top level `all cell types`. The function can deal with any number of layers but the order needs to be carefully considered (from broad to specific).

    .. math::
        g_{g} = min_{f} g_{f,g}
    .. math::
        g_{fn,g} = (min_{f∈fn} g_{f,g}) - g_{g}
    .. math::
        ...
    .. math::
        g_{f3,g} = (min_{f∈f3} g_{f,g}) - ... - g_{fn,g} - g_{g}
    .. math::
        g_{f2,g} = (min_{f∈f2} g_{f,g}) - g_{f3,g} - ... - g_{fn,g} - g_{g}
    .. math::
        g_{f1, g} = g_{f,g} - g_{f2,g} - g_{f3,g} - ... - g_{fn,g} - g_{g}

    Here, :math:`g_{f,g}` represents average expression of each gene in each level 1 cluster.
    :math:`g_{f1,g}` represents average expression of each gene unique to each level 1 cluster.
    :math:`g_{f2,g}` represents average expression of each gene unique to each level 2 cluster.
    :math:`g_{f3,g}` represents average expression of each gene unique to each level 3 cluster.
    :math:`g_{fn,g}` represents average expression of each gene unique to each level n cluster (can be deep).
    :math:`g_{g}` represents average expression of each gene unique to the top level (all cells).

    :param inf_aver: np.ndarray with :math:`g_{g,f}` or with :math:`g_{g,f,s}` where `s` represents posterior samples
    :param var_names: list, array or index with variable names
    :param hierarhy_df: pd.DataFrame that provides mapping between clusters at different levels.
        Index corresponds to level 1 :math:`f1`, first column to the top level, second column to the n-th level :math:`fn`,
        last column corresponds to the second level :math:`f2`.
        It is crucial that the order of cell types :math:`f` in `hierarhy_df` matches the order of cell types in axis 1 of `inf_aver`.
    :param quantile: list of posterior distribution quantiles to be computed
    :param mode: 'exclusive' or 'tree' mode. In 'exclusive' mode, the number of counts specific to each layer is computed (e.g. counts at layer 2 are excluded from layer 1). In 'tree' mode, children nodes inherit the expression of their parents (e.g. layer 1 contains the original counts :math:`g_{f,g}`, layer 2 contains counts from all parent layers :math:`g_{f2,g} + g_{f3,g} + ... + g_{fn,g} + g_{g}`).
    :return: When input is :math:`g_{g,f}` the output is pd.DataFrame with values for :math:`f1, f2, f3, ..., fn, all` in columns. When input is :math:`g_{g,f,s}` where `s` represents posterior samples the output is a dictionary with posterior samples for :math:`g_{g,f1-fn+all,s}` and similar dataframes for 'mean' and quantiles of the posterior distribution (e.g. 'q0.05').
    """
    # NOTE(review): these two frames are never used below.
    region_aver = pd.DataFrame(index=var_names)
    celltype_aver = pd.DataFrame(index=var_names)

    results = {}
    names = {}

    if len(inf_aver.shape) == 2:  # using summarised posterior samples
        results[f'level_1'] = pd.DataFrame(inf_aver, index=var_names,
                                           columns=list(hierarhy_df.index))
        # Walk hierarchy columns from broad (last processed level) to specific.
        for k in np.arange(hierarhy_df.shape[1]) + 2:
            k_names = list(hierarhy_df.iloc[:,k-2].unique())
            k_level = hierarhy_df.shape[1] + 3 - k
            results[f'level_{k_level}'] = pd.DataFrame(index=var_names,
                                                       columns=k_names)
            # iterate over clusters at each level (e.g. f2, f3 ...)
            for c in k_names:
                ind = hierarhy_df.iloc[:,k-2] == c
                c_names = hierarhy_df.index[ind]
                # shared component = min across member clusters; subtract it
                # from level 1 so lower levels keep only the exclusive part
                ind_min = results[f'level_1'][c_names].min(1)
                results[f'level_{k_level}'][c] = ind_min
                results[f'level_1'][c_names] = (results[f'level_1'][c_names].T - ind_min).T

        if mode == 'tree':
            # when mode is tree, add counts from parent levels
            for plev in np.arange(len(results) - 1):
                p_level = len(results) - plev
                p_names = list(hierarhy_df.iloc[:,plev].unique())
                # iterate over clusters at each level (e.g. f2, f3 ...)
                for p in p_names:
                    ind = hierarhy_df.iloc[:,plev] == p
                    if (plev) == (len(results) - 2):
                        ch_names = hierarhy_df.index[ind]
                    else:
                        ch_names = hierarhy_df.loc[ind,:].iloc[:,plev+1]
                    results[f'level_{p_level-1}'][ch_names] \
                        = (results[f'level_{p_level-1}'][ch_names].T + results[f'level_{p_level}'][p].values).T

        # concatenate to produce a general summary
        sep_inf_aver = pd.concat(list(results.values()), axis=1)
        return sep_inf_aver

    elif len(inf_aver.shape) == 3:  # using all posterior samples
        n_genes = inf_aver.shape[0]
        n_samples = inf_aver.shape[2]

        results[f'level_1'] = inf_aver.copy()
        names[f'level_1'] = list(hierarhy_df.index)

        for k in np.arange(hierarhy_df.shape[1]) + 2:
            k_names = list(hierarhy_df.iloc[:,k-2].unique())
            k_level = hierarhy_df.shape[1] + 3 - k
            results[f'level_{k_level}'] = np.zeros((n_genes, len(k_names), n_samples))
            names[f'level_{k_level}'] = k_names
            # iterate over clusters at each level (e.g. f2, f3 ...)
            for c in k_names:
                ind = hierarhy_df.iloc[:,k-2] == c
                k_ind = np.isin(k_names, c)
                # same min/subtract scheme as the 2D branch, per sample
                ind_min = results[f'level_1'][:, ind, :].min(axis=1).reshape((n_genes, 1, n_samples))
                results[f'level_{k_level}'][:, k_ind, :] = ind_min
                results[f'level_1'][:, ind, :] = results[f'level_1'][:, ind, :] - ind_min

        if mode == 'tree':
            # when mode is tree, add counts from parent levels
            for plev in np.arange(len(results) - 1):
                p_level = len(results) - plev
                p_names = list(hierarhy_df.iloc[:,plev].unique())
                # iterate over clusters at each level (e.g. f2, f3 ...)
                for p in p_names:
                    ind = hierarhy_df.iloc[:,plev] == p
                    if (plev) == (len(results) - 2):
                        ch_names = hierarhy_df.index[ind]
                    else:
                        ch_names = hierarhy_df.loc[ind,:].iloc[:,plev+1]
                    ind = np.isin(names[f'level_{p_level-1}'], ch_names)
                    p_ind = np.isin(p_names, p)
                    results[f'level_{p_level-1}'][:, ind, :] \
                        = results[f'level_{p_level-1}'][:, ind, :] \
                        + results[f'level_{p_level}'][:, p_ind, :].reshape((n_genes, 1, n_samples))

        # stack all levels along the cluster axis and summarise over samples
        sep_inf_aver = np.concatenate(list(results.values()), axis=1)
        from itertools import chain
        sep_inf_aver_names = list(chain(*names.values()))

        out = {'samples': sep_inf_aver,
               'mean': pd.DataFrame(np.squeeze(np.mean(sep_inf_aver, axis=2)),
                                    index=var_names,
                                    columns=sep_inf_aver_names)
               }
        for q in quantile:
            out[f'q{q}'] = pd.DataFrame(np.squeeze(np.quantile(sep_inf_aver, q=q, axis=2)),
                                        index=var_names,
                                        columns=sep_inf_aver_names)

        # TODO remove redundant layers
        return out
|
#!/usr/bin/env python
'''
Generate valid and invalid base58 address and private key test vectors.
Usage:
gen_base58_test_vectors.py valid 50 > ../../src/test/data/base58_keys_valid.json
gen_base58_test_vectors.py invalid 50 > ../../src/test/data/base58_keys_invalid.json
'''
# 2012 Wladimir J. van der Laan
# Released under MIT License
import os
from itertools import islice
from base58 import b58encode, b58decode, b58encode_chk, b58decode_chk, b58chars
import random
from binascii import b2a_hex
# key types
# Base58Check version bytes for this chain's address and private-key encodings
# (mainnet and testnet variants).
PUBKEY_ADDRESS = 25
SCRIPT_ADDRESS = 5
PUBKEY_ADDRESS_TEST = 111
SCRIPT_ADDRESS_TEST = 196
PRIVKEY = 128
PRIVKEY_TEST = 239
# Keys of the metadata dict emitted alongside each valid test vector.
metadata_keys = ['isPrivkey', 'isTestnet', 'addrType', 'isCompressed']
# templates for valid sequences
templates = [
  # prefix, payload_size, suffix, metadata
  # None = N/A
  ((PUBKEY_ADDRESS,), 20, (), (False, False, 'pubkey', None)),
  ((SCRIPT_ADDRESS,), 20, (), (False, False, 'script', None)),
  ((PUBKEY_ADDRESS_TEST,), 20, (), (False, True, 'pubkey', None)),
  ((SCRIPT_ADDRESS_TEST,), 20, (), (False, True, 'script', None)),
  ((PRIVKEY,), 32, (), (True, False, None, False)),
  ((PRIVKEY,), 32, (1,), (True, False, None, True)),
  ((PRIVKEY_TEST,), 32, (), (True, True, None, False)),
  ((PRIVKEY_TEST,), 32, (1,), (True, True, None, True))
]
def is_valid(v):
    '''Return True if Base58Check string v decodes and matches a known template.

    A vector is valid when its decoded payload carries one of the expected
    version-byte prefixes/suffixes and the payload length declared in
    the corresponding templates entry.
    '''
    result = b58decode_chk(v)
    if result is None:  # bad checksum or non-base58 characters
        return False
    # Removed the unused local 'valid = False' from the original.
    for template in templates:
        prefix = str(bytearray(template[0]))
        suffix = str(bytearray(template[2]))
        if result.startswith(prefix) and result.endswith(suffix):
            if (len(result) - len(prefix) - len(suffix)) == template[1]:
                return True
    return False
def gen_valid_vectors():
    '''Yield an endless stream of valid (base58, payload_hex, metadata) vectors.'''
    while True:
        for tmpl in templates:
            head = str(bytearray(tmpl[0]))
            tail = str(bytearray(tmpl[2]))
            body = os.urandom(tmpl[1])
            encoded = b58encode_chk(head + body + tail)
            assert is_valid(encoded)
            # Drop metadata entries marked N/A (None) in the template.
            meta = {key: val for (key, val) in zip(metadata_keys, tmpl[3]) if val is not None}
            yield (encoded, b2a_hex(body), meta)
def gen_invalid_vector(template, corrupt_prefix, randomize_payload_size, corrupt_suffix):
    '''Build one candidate vector from a template, optionally corrupting the
    prefix, the payload size, or the suffix.'''
    prefix = os.urandom(1) if corrupt_prefix else str(bytearray(template[0]))
    if randomize_payload_size:
        # Exponentially distributed payload length, but at least 50 bytes.
        size = max(int(random.expovariate(0.5)), 50)
    else:
        size = template[1]
    payload = os.urandom(size)
    suffix = os.urandom(len(template[2])) if corrupt_suffix else str(bytearray(template[2]))
    return b58encode_chk(prefix + payload + suffix)
def randbool(p = 0.5):
    '''Bernoulli trial: return True with probability p.'''
    return p > random.random()
def gen_invalid_vectors():
    '''Generate invalid test vectors.

    Yields 1-tuples of base58 strings that must NOT validate: a few manual
    edge-cases first, then randomly corrupted variants of the valid templates
    (bad prefix, bad payload length, bad suffix, or a character-level
    mutation that breaks the checksum).
    '''
    # start with some manual edge-cases
    yield "",
    yield "x",
    while True:
        # kinds of invalid vectors:
        # invalid prefix
        # invalid payload length
        # invalid (randomized) suffix (add random data)
        # corrupt checksum
        for template in templates:
            val = gen_invalid_vector(template, randbool(0.2), randbool(0.2), randbool(0.2))
            if random.randint(0,10)<1: # line corruption
                if randbool(): # add random character to end
                    val += random.choice(b58chars)
                else: # replace random character in the middle
                    # Fix: randint's upper bound is inclusive, so the original
                    # bound len(val) could pick n == len(val) and append a
                    # character instead of replacing one. (b58encode_chk output
                    # is never empty -- the checksum alone is 4 bytes.)
                    n = random.randint(0, len(val) - 1)
                    val = val[0:n] + random.choice(b58chars) + val[n+1:]
            if not is_valid(val):
                yield val,
if __name__ == '__main__':
    import sys, json
    # CLI: argv[1] selects the generator ('valid'/'invalid'), argv[2] the count.
    iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
    try:
        uiter = iters[sys.argv[1]]
    # NOTE(review): only IndexError is handled -- an unrecognized mode string
    # raises KeyError and aborts; confirm that is the intended behavior.
    except IndexError:
        uiter = gen_valid_vectors
    try:
        count = int(sys.argv[2])
    except IndexError:
        # No count given: emit an empty list.
        count = 0
    # Materialize 'count' vectors and pretty-print them as JSON on stdout.
    data = list(islice(uiter(), count))
    json.dump(data, sys.stdout, sort_keys=True, indent=4)
    sys.stdout.write('\n')
|
/* eslint-disable react/prop-types */
/**
Copyright 2016 Autodesk,Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import React from 'react';
import { boolean, text } from '@kadira/storybook-addon-knobs';
import Select from '../../src/2016-12-01/select';
import { WithSource } from '../../.storybook/addons/source-addon';
module.exports = function someSearchResults() {
const props = {
filter: text('Filter', 'd'),
open: boolean('Open', true),
options: [
{ value: 'one', label: 'One', key: 1 },
{ value: 'two', label: 'Two', key: 2 }
],
searchable: boolean('Searchable', true)
};
const react = `
import React from 'react';
import ReactDOM from 'react-dom';
import {Select} from '@orion-ui/react-components/lib/2016-12-01';
class App extends React.Component {
render() {
const options = ${JSON.stringify(props.options, null, 2)};
return (
<Select options={options} open={${props.open}} searchable={${props.searchable}} filter="${props.filter}" />
)
}
}
ReactDOM.render(React.createElement(App), document.body);`;
const angular = `
// app controller
import 'angular';
import '@orion-ui/angular/lib/2016-12-01';
angular
.module('app', ['orion'])
.controller('AppController', function () {
var app = this;
app.sizes = ${JSON.stringify(props.options, null, 2)};
app.searchable = ${props.searchable};
app.open = ${props.searchable};
app.filter = ${props.searchable};
});
// app.html
<!doctype html>
<html lang="en" ng-app="app">
<body ng-controller="AppController as app">
<orion-select options="app.options" open="app.open" searchable="app.searchable" filter="app.filter" />
</body>
</html>`;
return (
<WithSource react={react} angular={angular}>
<Select {...props} />
</WithSource>
);
};
|
$(function(){
    // Outlet master table: server-side processed DataTable fed by a POST
    // endpoint; `base_url` is a page-level global.
    const outletTable = $('#table-outlet').dataTable({
        "processing": true,
        "serverSide": true,
        "order": [],
        "ajax": {
            "url": base_url + "master/get_outlet_json",
            "type": "POST"
        },
        "columns": [
            {"data" : "id_outlet"},
            {"data": "nama_outlet"},
            {"data": "alamat"},
            {"data": "telepon"},
            {"data": "email"},
            {
                // Action column: per-row edit link and delete button.
                "data": "id_outlet",
                "render" : function(data, type, row) {
                    return `<a title="ubah" class="btn btn-warning" href="${base_url}master/ubah_outlet/${data}"><i class="fa fa-edit"></i></a>
                    <a title="hapus" class="btn btn-danger hapus_outlet" data-href="${base_url}master/hapus_outlet/${data}"><i class="fa fa-trash"></i></a>`
                }
            }
        ],
    })
    // Delegated handler: delete buttons are injected dynamically by render(),
    // so bind on document rather than on the buttons themselves.
    $(document).on('click', '.hapus_outlet', function(){
        hapus($(this).data('href'))
    })
})
|
from django.contrib.contenttypes.models import ContentType
from django.db import models
def build_polymorphic_ctypes_map(cls):
    """Map ContentType pk -> underscored model name, e.g. {1: 'unified_job', ...},
    for every model in the 'main' app that subclasses *cls*."""
    ctype_map = {}
    for ctype in ContentType.objects.filter(app_label='main'):
        model = ctype.model_class()
        # model_class() can return None (stale content type); skip those rows.
        if model is not None and issubclass(model, cls):
            ctype_map[ctype.id] = model._camel_to_underscore(model.__name__)
    return ctype_map
def SET_NULL(collector, field, sub_objs, using):
    # Drop-in replacement for models.SET_NULL that converts the (polymorphic)
    # queryset to its non-polymorphic form before delegating.
    return models.SET_NULL(collector, field, sub_objs.non_polymorphic(), using)
|
"""
===============
Demo Gridspec04
===============
"""
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
def make_ticklabels_invisible(fig):
    """Label every axes of *fig* with its 1-based index and hide tick labels."""
    for idx, axes in enumerate(fig.axes, start=1):
        axes.text(0.5, 0.5, "ax%d" % idx, va="center", ha="center")
        axes.tick_params(labelbottom=False, labelleft=False)
# gridspec inside gridspec
f = plt.figure()
gs0 = gridspec.GridSpec(1, 2)
# Left half: 3x3 inner grid -> tall top axes, wide bottom-left, small bottom-right.
gs00 = gridspec.GridSpecFromSubplotSpec(3, 3, subplot_spec=gs0[0])
ax1 = plt.Subplot(f, gs00[:-1, :])
f.add_subplot(ax1)
ax2 = plt.Subplot(f, gs00[-1, :-1])
f.add_subplot(ax2)
ax3 = plt.Subplot(f, gs00[-1, -1])
f.add_subplot(ax3)
# Right half: 3x3 inner grid -> tall left column, right column split 2/1.
gs01 = gridspec.GridSpecFromSubplotSpec(3, 3, subplot_spec=gs0[1])
ax4 = plt.Subplot(f, gs01[:, :-1])
f.add_subplot(ax4)
ax5 = plt.Subplot(f, gs01[:-1, -1])
f.add_subplot(ax5)
ax6 = plt.Subplot(f, gs01[-1, -1])
f.add_subplot(ax6)
plt.suptitle("GridSpec Inside GridSpec")
make_ticklabels_invisible(f)
plt.show()
|
'use strict';

// Compiled (Babel-style) CommonJS module: exposes genPoints as a named export.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.genPoints = genPoints;
// Copy an array (or materialise any other iterable) into a fresh Array.
function _toConsumableArray(arr) {
  if (Array.isArray(arr)) {
    var copy = Array(arr.length);
    for (var idx = 0; idx < arr.length; idx++) {
      copy[idx] = arr[idx];
    }
    return copy;
  }
  return Array.from(arr);
}

/**
 * Map a series of numbers (or {value} objects) onto {x, y, value} chart
 * coordinates inside the given boundary. For 'bar' charts the x step is
 * maxX / count; otherwise points are spread evenly across [minX, maxX].
 * The first/last y values get a tiny +-0.00001 nudge, matching the
 * original output exactly.
 */
function genPoints(points, boundary, type) {
  var minX = boundary.minX;
  var minY = boundary.minY;
  var maxX = boundary.maxX;
  var maxY = boundary.maxY;

  var values = points.map(function (point) {
    return typeof point === 'number' ? point : point.value;
  });
  var count = values.length;
  // Same fold semantics as Math.max.apply / Math.min.apply (empty -> +-Infinity).
  var maxValue = values.reduce(function (a, b) { return Math.max(a, b); }, -Infinity) + 1;
  var minValue = values.reduce(function (a, b) { return Math.min(a, b); }, Infinity);
  if (minValue) minValue -= 1;

  var stepX = (maxX - minX) / (count - 1);
  if (type === 'bar') stepX = maxX / count;
  var stepY = (maxY - minY) / (maxValue - minValue);

  return values.map(function (value, index) {
    var lastNudge = (index === count - 1 ? 1 : 0) * 0.00001;
    var firstNudge = (index === 0 ? 1 : 0) * 0.00001;
    return {
      x: minX + index * stepX,
      y: maxY - (value - minValue) * stepY + lastNudge - firstNudge,
      value: value
    };
  });
}
//# sourceMappingURL=core.js.map
|
// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2019 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef BGL_RPC_REQUEST_H
#define BGL_RPC_REQUEST_H
#include <string>
#include <vector>

#include <univalue.h>
/** Build a JSON-RPC request object from method name, params and id */
UniValue JSONRPCRequestObj(const std::string& strMethod, const UniValue& params, const UniValue& id);
/** Build a JSON-RPC reply object carrying either a result or an error */
UniValue JSONRPCReplyObj(const UniValue& result, const UniValue& error, const UniValue& id);
/** Serialize a JSON-RPC reply (result or error) to a string */
std::string JSONRPCReply(const UniValue& result, const UniValue& error, const UniValue& id);
/** Build a JSON-RPC error object from an error code and message */
UniValue JSONRPCError(int code, const std::string& message);
/** Generate a new RPC authentication cookie and write it to disk */
bool GenerateAuthCookie(std::string *cookie_out);
/** Read the RPC authentication cookie from disk */
bool GetAuthCookie(std::string *cookie_out);
/** Delete RPC authentication cookie from disk */
void DeleteAuthCookie();
/** Parse JSON-RPC batch reply into a vector */
std::vector<UniValue> JSONRPCProcessBatchReply(const UniValue &in, size_t num);
/** In-memory representation of a single JSON-RPC request. */
class JSONRPCRequest
{
public:
    UniValue id;           //!< "id" field of the request
    std::string strMethod; //!< "method" field of the request
    UniValue params;       //!< "params" field of the request
    bool fHelp;            //!< whether help output was requested instead of execution
    std::string URI;       //!< request URI (set by the transport layer, not parsed from JSON)
    std::string authUser;  //!< authenticated RPC user name
    std::string peerAddr;  //!< network address of the requesting peer
    //! Default-constructed request: null id/params, fHelp == false.
    JSONRPCRequest() : id(NullUniValue), params(NullUniValue), fHelp(false) {}
    //! Populate the fields above from a parsed JSON request value.
    void parse(const UniValue& valRequest);
};
#endif // BGL_RPC_REQUEST_H
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Abstraction for asynchronous job execution """
class Executor(object):
    """
    Abstract interface for submitting jobs to be executed asynchronously.

    Concrete implementations schedule the callable on a worker and hand
    back a Future object that tracks its completion.
    """

    # Default number of seconds after which a hung job is considered timed out.
    DEFAULT_TIMEOUT = 120

    def submit(self, func, *args, **kwargs):
        """
        Schedule ``func(*args, **kwargs)`` for asynchronous execution.

        Parameters
        ----------
        func : callable
            function to be run by a worker
        args : list or tuple, optional
            positional arguments forwarded to ``func``
        kwargs : dict, optional
            keyword arguments forwarded to ``func``

        Returns
        -------
        future : Future
            handle that can be used to collect the task's result

        Raises
        ------
        NotImplementedError
            always, in this abstract base class
        """
        raise NotImplementedError()
class Future(object):
    """
    Abstract handle for the eventual result of a submitted task.

    Implementations subclass this to track work running in another thread
    or worker: completion can be polled with ``done()`` or the result
    retrieved with the blocking ``get()``.
    """

    def done(self):
        """
        Return True if the job was successfully cancelled or finished running.

        Raises
        ------
        NotImplementedError
            always, in this abstract base class
        """
        raise NotImplementedError()

    def get(self, timeout=None):
        """
        Block until the result is available, then return it.

        Parameters
        ----------
        timeout : int or float, optional
            maximum number of seconds to wait; when omitted, block until
            the result is available

        Returns
        -------
        result : Any
            value returned by the submitted function

        Raises
        ------
        TimeoutError
            if the wait exceeds ``timeout``
        """
        raise NotImplementedError()
class FutureError(RuntimeError):
    """Common base class for all errors raised by Future objects."""
# pylint:disable=redefined-builtin
class TimeoutError(FutureError):
    """Raised when waiting for a task's result exceeds the timeout."""
class ExecutionError(FutureError):
    """Raised when a task's execution crashes or otherwise fails."""
|
import tornado.ioloop
import tornado.web
from routers import NestedRouter, GenericRouter
from handlers.main import MainHandler

# Router mounted at /api/ without trailing slashes; the commented lines show
# the equivalent GenericRouter / raw-regex registrations kept for reference.
# r = GenericRouter("/api/", trailing_slash=False)
r = NestedRouter("/api/", trailing_slash=False)
# r.register(r"/clusters/(?P<cluster_id>[^/.]+)/pods", MainHandler)
r.register(("clusters", "pods"), MainHandler)
# r.register(r"/me", MainHandler)
r.register(r"me", MainHandler)
# Debug aid: print the generated URL rules at import time.
for i in r.rules:
    print(i)

def make_app():
    # Build the Tornado application from the router's generated rules.
    return tornado.web.Application(r.rules)

if __name__ == "__main__":
    # Serve on port 8888 until the process is interrupted.
    app = make_app()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
|
var app = app || {};

(function() {
    'use strict';

    // Backbone model describing a single portfolio project entry.
    app.Project = Backbone.Model.extend({
        // Every display field defaults to an empty string.
        defaults: {
            title: '',
            chinesetitle: '',
            image: '',
            description: '',
            keywords: '',
            demo: '',
            source: '',
            code: '',
            type: ''
        },

        // Debug helper: log the model's attributes as a JSON string.
        dump: function() {
            console.log(JSON.stringify(this.toJSON()));
        }
    });
}());
|
var mysql = require("mysql");
const inquirer = require("inquirer");
// console.table: patches console with a .table() pretty-printer (used in start()).
const cTable = require('console.table');

// Connection to the local bamazon database; the CLI loop starts once connected.
var conn = mysql.createConnection({
    host: "localhost",
    port: 3306,
    user: "root",
    database: "bamazon"
});

conn.connect(function (err) {
    if (err) throw err;
    start();
});
// Entry point of the purchase loop: show the catalogue, then prompt for an
// item id ('q' quits and closes the connection).
function start() {
    conn.query('SELECT * FROM `products`', function (err, res) {
        if (err) { throw err; };
        if (res && res.length){
            console.table(res);
        };
        inquirer.prompt([
            {
                name: "choice",
                message: "Hello! Which item would you like to purchase? (type q to quit)",
                // Accept 'q' or any positive integer.
                validate: function (value) {
                    if (value === 'q' || isNaN(value) === false && parseInt(value) > 0) {
                        return true;
                    }
                    return false;
                }
            }]).then(function (answer) {
            if (answer.choice === 'q'){
                conn.end();
            }
            else{
                howMany(answer.choice);
            }
        })
    });
};
// Ask how many units of the chosen item to buy ('q' quits), then hand off
// to checkInventory for stock verification.
function howMany(choice) {
    var selectedId = choice;
    inquirer.prompt([
        {
            name: "howMany",
            message: "How many units of the product would you like to buy? (type q to quit)",
            // Accept 'q' or any positive integer.
            validate: function (value) {
                var isQuit = value === 'q';
                var isPositiveNumber = isNaN(value) === false && parseInt(value) > 0;
                return isQuit || isPositiveNumber;
            }
        }]).then(function (answer) {
        if (answer.howMany === 'q') {
            conn.end();
        } else {
            checkInventory(selectedId, answer.howMany);
        }
    })
}
/**
 * Look up item `id`; if at least `quantity` units are in stock, decrement the
 * stock, add to product_sales and print the order total, otherwise apologise.
 * Always returns to the start() prompt loop afterwards.
 */
function checkInventory(id, quantity){
    var number = parseInt(quantity);
    conn.query('SELECT stock_quantity, price, product_sales FROM `products` WHERE ?', { item_id: id }, function (err, res) {
        // Fix: check err and an empty result set BEFORE dereferencing res[0];
        // the original read res[0].price first and crashed on unknown item ids.
        if (err) throw err;
        if (res && res.length) {
            var price = Number(res[0].price);
            var sales = parseInt(res[0].product_sales) + number;
            if (res[0].stock_quantity >= number) {
                conn.query(
                    "UPDATE products SET ?,? WHERE ?",
                    [
                        {
                            stock_quantity: (res[0].stock_quantity - number)
                        },
                        {
                            product_sales: sales
                        },
                        {
                            item_id: id
                        },
                    ],
                    function (err) {
                        if (err) throw err;
                        console.log('\x1b[33m%s\x1b[0m', '\n Thank you for your purchase. Your total is: $' + (price * number).toFixed(2) + '\n');
                        start();
                    }
                );
            }
            else {
                console.log('\x1b[31m%s\x1b[0m', '\n Our apologies: There is an insufficient quantity to complete this purchase! Please try again!\n');
                start();
            }
        }
        else {
            // Unknown item id: report it instead of crashing on res[0].
            console.log('\x1b[31m%s\x1b[0m', '\n Our apologies: We could not find that item! Please try again!\n');
            start();
        }
    });
};
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.bitfinex import bitfinex
import hashlib
import math
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import NotSupported
class bitfinex2(bitfinex):
    def describe(self):
        """Return the static exchange description (endpoints, fees, timeframes,
        capabilities) for the Bitfinex v2 API, layered over the v1 class's
        description via deep_extend."""
        return self.deep_extend(super(bitfinex2, self).describe(), {
            'id': 'bitfinex2',
            'name': 'Bitfinex',
            'countries': ['VG'],
            'version': 'v2',
            'certified': False,
            # new metainfo interface
            'has': {
                'CORS': True,
                'createLimitOrder': False,
                'createMarketOrder': False,
                'createOrder': False,
                'deposit': False,
                'editOrder': False,
                'fetchDepositAddress': False,
                'fetchClosedOrders': False,
                'fetchFundingFees': False,
                'fetchMyTrades': True,
                'fetchOHLCV': True,
                'fetchOpenOrders': False,
                'fetchOrder': True,
                'fetchTickers': True,
                'fetchTradingFee': False,
                'fetchTradingFees': False,
                'withdraw': True,
            },
            # unified timeframe -> API timeframe code
            'timeframes': {
                '1m': '1m',
                '5m': '5m',
                '15m': '15m',
                '30m': '30m',
                '1h': '1h',
                '3h': '3h',
                '6h': '6h',
                '12h': '12h',
                '1d': '1D',
                '1w': '7D',
                '2w': '14D',
                '1M': '1M',
            },
            'rateLimit': 1500,
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/27766244-e328a50c-5ed2-11e7-947b-041416579bb3.jpg',
                'api': {
                    'v1': 'https://api.bitfinex.com',
                    'public': 'https://api-pub.bitfinex.com',
                    'private': 'https://api.bitfinex.com',
                },
                'www': 'https://www.bitfinex.com',
                'doc': [
                    'https://docs.bitfinex.com/v2/docs/',
                    'https://github.com/bitfinexcom/bitfinex-api-node',
                ],
                'fees': 'https://www.bitfinex.com/fees',
            },
            'api': {
                'v1': {
                    'get': [
                        'symbols',
                        'symbols_details',
                    ],
                },
                'public': {
                    'get': [
                        'conf/pub:map:currency:label',
                        'platform/status',
                        'tickers',
                        'ticker/{symbol}',
                        'trades/{symbol}/hist',
                        'book/{symbol}/{precision}',
                        'book/{symbol}/P0',
                        'book/{symbol}/P1',
                        'book/{symbol}/P2',
                        'book/{symbol}/P3',
                        'book/{symbol}/R0',
                        'stats1/{key}:{size}:{symbol}:{side}/{section}',
                        'stats1/{key}:{size}:{symbol}/{section}',
                        'stats1/{key}:{size}:{symbol}:long/last',
                        'stats1/{key}:{size}:{symbol}:long/hist',
                        'stats1/{key}:{size}:{symbol}:short/last',
                        'stats1/{key}:{size}:{symbol}:short/hist',
                        'candles/trade:{timeframe}:{symbol}/{section}',
                        'candles/trade:{timeframe}:{symbol}/last',
                        'candles/trade:{timeframe}:{symbol}/hist',
                    ],
                    'post': [
                        'calc/trade/avg',
                        'calc/fx',
                    ],
                },
                'private': {
                    'post': [
                        'auth/r/wallets',
                        'auth/r/orders/{symbol}',
                        'auth/r/orders/{symbol}/new',
                        'auth/r/orders/{symbol}/hist',
                        'auth/r/order/{symbol}:{id}/trades',
                        'auth/w/order/submit',
                        'auth/r/trades/hist',
                        'auth/r/trades/{symbol}/hist',
                        'auth/r/positions',
                        'auth/r/positions/hist',
                        'auth/r/positions/audit',
                        'auth/r/funding/offers/{symbol}',
                        'auth/r/funding/offers/{symbol}/hist',
                        'auth/r/funding/loans/{symbol}',
                        'auth/r/funding/loans/{symbol}/hist',
                        'auth/r/funding/credits/{symbol}',
                        'auth/r/funding/credits/{symbol}/hist',
                        'auth/r/funding/trades/{symbol}/hist',
                        'auth/r/info/margin/{key}',
                        'auth/r/info/funding/{key}',
                        'auth/r/ledgers/hist',
                        'auth/r/movements/hist',
                        'auth/r/movements/{currency}/hist',
                        'auth/r/stats/perf:{timeframe}/hist',
                        'auth/r/alerts',
                        'auth/w/alert/set',
                        'auth/w/alert/{type}:{symbol}:{price}/del',
                        'auth/calc/order/avail',
                        'auth/r/ledgers/{symbol}/hist',
                        'auth/r/settings',
                        'auth/w/settings/set',
                        'auth/w/settings/del',
                        'auth/r/info/user',
                    ],
                },
            },
            'fees': {
                'trading': {
                    'maker': 0.1 / 100,
                    'taker': 0.2 / 100,
                },
                'funding': {
                    # flat per-currency withdrawal fees
                    'withdraw': {
                        'BTC': 0.0004,
                        'BCH': 0.0001,
                        'ETH': 0.00135,
                        'EOS': 0.0,
                        'LTC': 0.001,
                        'OMG': 0.15097,
                        'IOT': 0.0,
                        'NEO': 0.0,
                        'ETC': 0.01,
                        'XRP': 0.02,
                        'ETP': 0.01,
                        'ZEC': 0.001,
                        'BTG': 0.0,
                        'DASH': 0.01,
                        'XMR': 0.0001,
                        'QTM': 0.01,
                        'EDO': 0.23687,
                        'DAT': 9.8858,
                        'AVT': 1.1251,
                        'SAN': 0.35977,
                        'USDT': 5.0,
                        'SPK': 16.971,
                        'BAT': 1.1209,
                        'GNT': 2.8789,
                        'SNT': 9.0848,
                        'QASH': 1.726,
                        'YYW': 7.9464,
                    },
                },
            },
            # websocket connection templates and event routing
            'wsconf': {
                'conx-tpls': {
                    'default': {
                        'type': 'ws',
                        'baseurl': 'wss://api.bitfinex.com/ws/2',
                        'wait4readyEvent': 'statusok',
                    },
                },
                'methodmap': {
                    '_websocketTimeoutRemoveNonce': '_websocketTimeoutRemoveNonce',
                },
                'events': {
                    'ob': {
                        'conx-tpl': 'default',
                        'conx-param': {
                            'url': '{baseurl}',
                            'id': '{id}',
                        },
                    },
                    'trade': {
                        'conx-tpl': 'default',
                        'conx-param': {
                            'url': '{baseurl}',
                            'id': '{id}',
                        },
                    },
                },
            },
            'options': {
                'precision': 'R0', # P0, P1, P2, P3, P4, R0
                # raw ORDER_TYPE -> unified order type (None = unsupported here)
                'orderTypes': {
                    'MARKET': None,
                    'EXCHANGE MARKET': 'market',
                    'LIMIT': None,
                    'EXCHANGE LIMIT': 'limit',
                    'STOP': None,
                    'EXCHANGE STOP': 'stopOrLoss',
                    'TRAILING STOP': None,
                    'EXCHANGE TRAILING STOP': None,
                    'FOK': None,
                    'EXCHANGE FOK': 'limit FOK',
                    'STOP LIMIT': None,
                    'EXCHANGE STOP LIMIT': 'limit stop',
                    'IOC': None,
                    'EXCHANGE IOC': 'limit ioc',
                },
                # codes treated as fiat by is_fiat()
                'fiat': {
                    'USD': 'USD',
                    'EUR': 'EUR',
                    'JPY': 'JPY',
                    'GBP': 'GBP',
                },
            },
        })
def is_fiat(self, code):
return(code in self.options['fiat'])
def get_currency_id(self, code):
return 'f' + code
    def fetch_markets(self, params={}):
        """Fetch market metadata from the v1 symbols_details endpoint and
        translate it into unified market structures with v2-style ids."""
        response = self.v1GetSymbolsDetails(params)
        result = []
        for i in range(0, len(response)):
            market = response[i]
            id = self.safe_string(market, 'pair')
            id = id.upper()
            baseId = None
            quoteId = None
            # Long currency codes are ':'-separated; classic pairs are 3+3 chars.
            if id.find(':') >= 0:
                parts = id.split(':')
                baseId = parts[0]
                quoteId = parts[1]
            else:
                baseId = id[0:3]
                quoteId = id[3:6]
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            symbol = base + '/' + quote
            # v2 trading-pair ids carry a 't' prefix, currency ids an 'f' prefix.
            id = 't' + id
            baseId = self.get_currency_id(baseId)
            quoteId = self.get_currency_id(quoteId)
            # NOTE(review): 'amount' precision reuses 'price_precision' -- confirm intended.
            precision = {
                'price': self.safe_integer(market, 'price_precision'),
                'amount': self.safe_integer(market, 'price_precision'),
            }
            limits = {
                'amount': {
                    'min': self.safe_float(market, 'minimum_order_size'),
                    'max': self.safe_float(market, 'maximum_order_size'),
                },
                'price': {
                    'min': math.pow(10, -precision['price']),
                    'max': math.pow(10, precision['price']),
                },
            }
            # minimum cost derived from the amount and price minimums
            limits['cost'] = {
                'min': limits['amount']['min'] * limits['price']['min'],
                'max': None,
            }
            result.append({
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'baseId': baseId,
                'quoteId': quoteId,
                'active': True,
                'precision': precision,
                'limits': limits,
                'info': market,
                'swap': False,
                'spot': False,
                'futures': False,
            })
        return result
    def fetch_balance(self, params={}):
        """Fetch wallet balances for the requested wallet type (default 'exchange')."""
        # self api call does not return the 'used' amount - use the v1 version instead(which also returns zero balances)
        self.load_markets()
        response = self.privatePostAuthRWallets(params)
        balanceType = self.safe_string(params, 'type', 'exchange')
        result = {'info': response}
        for b in range(0, len(response)):
            balance = response[b]
            # raw row: [wallet_type, currency, total, <unused>, available] -- index 3 is skipped
            accountType = balance[0]
            currency = balance[1]
            total = balance[2]
            available = balance[4]
            if accountType == balanceType:
                if currency[0] == 't':
                    # strip the market-style 't' prefix from the currency code
                    currency = currency[1:]
                code = self.safe_currency_code(currency)
                account = self.account()
                # do not fill in zeroes and missing values in the parser
                # rewrite and unify the following to use the unified parseBalance
                account['total'] = total
                if not available:
                    # NOTE(review): available == 0 counts everything as used, while
                    # other falsy values (e.g. None) count everything as free --
                    # confirm this asymmetry is intended.
                    if available == 0:
                        account['free'] = 0
                        account['used'] = total
                    else:
                        account['free'] = total
                else:
                    account['free'] = available
                    account['used'] = account['total'] - account['free']
                result[code] = account
        return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
precision = self.safe_value(self.options, 'precision', 'R0')
request = {
'symbol': self.market_id(symbol),
'precision': precision,
}
if limit is not None:
request['len'] = limit # 25 or 100
fullRequest = self.extend(request, params)
orderbook = self.publicGetBookSymbolPrecision(fullRequest)
timestamp = self.milliseconds()
result = {
'bids': [],
'asks': [],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'nonce': None,
}
priceIndex = 1 if (fullRequest['precision'] == 'R0') else 0
for i in range(0, len(orderbook)):
order = orderbook[i]
price = order[priceIndex]
amount = abs(order[2])
side = 'bids' if (order[2] > 0) else 'asks'
result[side].append([price, amount])
result['bids'] = self.sort_by(result['bids'], 0, True)
result['asks'] = self.sort_by(result['asks'], 0)
return result
    def parse_ticker(self, ticker, market=None):
        """Translate a raw v2 ticker array into a unified ticker dict.

        Fields are addressed relative to the END of the array; presumably
        because differently-sized raw tickers share their trailing layout --
        TODO confirm against the Bitfinex v2 ticker documentation.
        """
        # local receive time; the raw ticker carries no timestamp field here
        timestamp = self.milliseconds()
        symbol = None
        if market is not None:
            symbol = market['symbol']
        length = len(ticker)
        last = ticker[length - 4]
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': ticker[length - 2],
            'low': ticker[length - 1],
            'bid': ticker[length - 10],
            'bidVolume': None,
            'ask': ticker[length - 8],
            'askVolume': None,
            'vwap': None,
            'open': None,
            'close': last,
            'last': last,
            'previousClose': None,
            'change': ticker[length - 6],
            'percentage': ticker[length - 5] * 100,
            'average': None,
            'baseVolume': ticker[length - 3],
            'quoteVolume': None,
            'info': ticker,
        }
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
ids = self.market_ids(symbols)
request['symbols'] = ','.join(ids)
else:
request['symbols'] = 'ALL'
tickers = self.publicGetTickers(self.extend(request, params))
result = {}
for i in range(0, len(tickers)):
ticker = tickers[i]
id = ticker[0]
if id in self.markets_by_id:
market = self.markets_by_id[id]
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
return result
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
ticker = self.publicGetTickerSymbol(self.extend(request, params))
return self.parse_ticker(ticker, market)
    def parse_trade(self, trade, market=None):
        """Translate a raw v2 trade array (public or private layout) into a
        unified trade dict; private rows are recognized by their extra fields."""
        #
        # fetchTrades(public)
        #
        #     [
        #         ID,
        #         MTS, # timestamp
        #         AMOUNT,
        #         PRICE
        #     ]
        #
        # fetchMyTrades(private)
        #
        #     [
        #         ID,
        #         PAIR,
        #         MTS_CREATE,
        #         ORDER_ID,
        #         EXEC_AMOUNT,
        #         EXEC_PRICE,
        #         ORDER_TYPE,
        #         ORDER_PRICE,
        #         MAKER,
        #         FEE,
        #         FEE_CURRENCY,
        #         ...
        #     ]
        #
        # rows longer than 5 elements follow the private layout above
        tradeLength = len(trade)
        isPrivate = (tradeLength > 5)
        id = str(trade[0])
        amountIndex = 4 if isPrivate else 2
        amount = trade[amountIndex]
        cost = None
        priceIndex = 5 if isPrivate else 3
        price = trade[priceIndex]
        side = None
        orderId = None
        takerOrMaker = None
        type = None
        fee = None
        symbol = None
        timestampIndex = 2 if isPrivate else 1
        timestamp = trade[timestampIndex]
        if isPrivate:
            marketId = trade[1]
            if marketId is not None:
                if marketId in self.markets_by_id:
                    market = self.markets_by_id[marketId]
                    symbol = market['symbol']
                else:
                    # unknown market id: fall back to the raw id as the symbol
                    symbol = marketId
            orderId = str(trade[3])
            takerOrMaker = 'maker' if (trade[8] == 1) else 'taker'
            feeCost = trade[9]
            feeCurrency = self.safe_currency_code(trade[10])
            if feeCost is not None:
                fee = {
                    'cost': abs(feeCost),
                    'currency': feeCurrency,
                }
            orderType = trade[6]
            type = self.safe_string(self.options['orderTypes'], orderType)
        if symbol is None:
            if market is not None:
                symbol = market['symbol']
        if amount is not None:
            # a negative amount encodes a sell
            side = 'sell' if (amount < 0) else 'buy'
            amount = abs(amount)
            if cost is None:
                if price is not None:
                    cost = amount * price
        return {
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'order': orderId,
            'side': side,
            'type': type,
            'takerOrMaker': takerOrMaker,
            'price': price,
            'amount': amount,
            'cost': cost,
            'fee': fee,
            'info': trade,
        }
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
sort = '-1'
request = {
'symbol': market['id'],
}
if since is not None:
request['start'] = since
sort = '1'
if limit is not None:
request['limit'] = limit # default 120, max 5000
request['sort'] = sort
response = self.publicGetTradesSymbolHist(self.extend(request, params))
#
# [
# [
# ID,
# MTS, # timestamp
# AMOUNT,
# PRICE
# ]
# ]
#
trades = self.sort_by(response, 1)
return self.parse_trades(trades, market, None, limit)
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=100, params={}):
self.load_markets()
market = self.market(symbol)
if limit is None:
limit = 100 # default 100, max 5000
if since is None:
since = self.milliseconds() - self.parse_timeframe(timeframe) * limit * 1000
request = {
'symbol': market['id'],
'timeframe': self.timeframes[timeframe],
'sort': 1,
'start': since,
'limit': limit,
}
response = self.publicGetCandlesTradeTimeframeSymbolHist(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
raise NotSupported(self.id + ' createOrder not implemented yet')
def cancel_order(self, id, symbol=None, params={}):
raise NotSupported(self.id + ' cancelOrder not implemented yet')
def fetch_order(self, id, symbol=None, params={}):
raise NotSupported(self.id + ' fetchOrder not implemented yet')
def fetch_deposit_address(self, currency, params={}):
raise NotSupported(self.id + ' fetchDepositAddress() not implemented yet.')
def withdraw(self, code, amount, address, tag=None, params={}):
raise NotSupported(self.id + ' withdraw not implemented yet')
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
request = {
'end': self.milliseconds(),
}
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # default 25, max 1000
method = 'privatePostAuthRTradesHist'
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
method = 'privatePostAuthRTradesSymbolHist'
response = getattr(self, method)(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
    def nonce(self):
        # Millisecond timestamp doubles as a monotonically increasing nonce.
        return self.milliseconds()
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the URL, body and auth headers for a request.

        Private calls are authenticated with HMAC-SHA384 over
        '/api/' + request + nonce + json_body, sent via 'bfx-*' headers.
        """
        request = '/' + self.implode_params(path, params)
        # params consumed by the path template are removed from the query
        query = self.omit(params, self.extract_params(path))
        if api == 'v1':
            request = api + request
        else:
            request = self.version + request
        url = self.urls['api'][api] + '/' + request
        if api == 'public':
            if query:
                url += '?' + self.urlencode(query)
        if api == 'private':
            self.check_required_credentials()
            nonce = str(self.nonce())
            body = self.json(query)
            auth = '/api/' + request + nonce + body
            signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha384)
            headers = {
                'bfx-nonce': nonce,
                'bfx-apikey': self.apiKey,
                'bfx-signature': signature,
                'Content-Type': 'application/json',
            }
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
    def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Perform the HTTP request and translate error payloads into exceptions."""
        response = self.fetch2(path, api, method, params, headers, body)
        if response:
            # any 'message' key marks an error payload
            if 'message' in response:
                if response['message'].find('not enough exchange balance') >= 0:
                    raise InsufficientFunds(self.id + ' ' + self.json(response))
                raise ExchangeError(self.id + ' ' + self.json(response))
            return response
        elif response == '':
            raise ExchangeError(self.id + ' returned empty response')
        # falsy but not '' (e.g. None, [], {}): pass through unchanged
        return response
def _websocket_on_message(self, contextId, data):
msg = json.loads(data)
# console.log(msg)
event = self.safe_string(msg, 'event')
if event is not None:
if event == 'subscribed':
channel = self.safe_string(msg, 'channel')
if channel == 'book':
self._websocket_handle_subscription(contextId, 'ob', msg)
elif channel == 'trades':
self._websocket_handle_subscription(contextId, 'trade', msg)
elif event == 'unsubscribed':
self._websocket_handle_unsubscription(contextId, msg)
elif event == 'error':
self._websocket_handle_error(contextId, msg)
elif event == 'info':
self._websocket_handle_info_version(contextId, msg)
else:
# channel data
chanId = msg[0]
data = msg[1]
if data == 'hb':
# print('heartbeat')
return
chanKey = '_' + str(chanId)
channels = self._contextGet(contextId, 'channels')
if not (chanKey in channels):
self.emit('err', new ExchangeError(self.id + ' msg received from unregistered channels:' + chanId), contextId)
return
symbol = channels[chanKey]['symbol']
event = channels[chanKey]['event']
if event == 'ob':
self._websocket_handle_order_book(contextId, symbol, msg)
elif event == 'trade':
self._websocket_handle_trade(contextId, symbol, msg)
def _websocket_handle_info_version(self, contextId, data):
    """On the server 'info' handshake, enable timestamps and report readiness.

    Flag 32768 asks bitfinex to append a timestamp to every data frame.
    """
    if self.safe_integer(data, 'version') is None:
        return
    self.websocketSendJson({
        'event': 'conf',
        'flags': 32768,
    })
    self.emit('statusok', True)
def _websocket_handle_error(self, contextId, msg):
    """Turn a server 'error' event into an ExchangeError and fail any
    pending subscription nonces for the affected channel/symbol.
    """
    channel = self.safe_string(msg, 'channel')
    errorMsg = self.safe_string(msg, 'msg')
    errorCode = self.safe_string(msg, 'code')
    # str() guards: safe_string returns None when a field is absent, and
    # concatenating None would raise TypeError and mask the real error
    ex = ExchangeError(self.id + ' ' + str(errorCode) + ':' + str(errorMsg))
    if channel == 'book':
        id = self.safe_string(msg, 'symbol')
        symbol = self._websocketFindSymbol(id)
        self._websocket_process_pending_nonces(contextId, 'sub-nonces', 'ob', symbol, False, ex)
    elif channel == 'trades':
        id = self.safe_string(msg, 'symbol')
        symbol = self._websocketFindSymbol(id)
        self._websocket_process_pending_nonces(contextId, 'sub-nonces', 'trade', symbol, False, ex)
    self.emit('err', ex, contextId)
def _websocket_handle_trade(self, contextId, symbol, msg):
    """Parse a trades-channel frame and emit one 'trade' event per trade.

    Three frame shapes are handled: 'te' (fast trade-executed update),
    'tu' (delayed trade-update carrying the tradeId, deliberately ignored
    to avoid emitting duplicates), and the initial snapshot (a list).
    """
    market = self.market(symbol)
    trades = None
    # From http://blog.bitfinex.com/api/websocket-api-update:
    # "We are splitting the public trade messages into two: a “te” message which mimics the current behavior, and a “tu” message which will be delayed by 1-2 seconds and include the tradeId. If the tradeId is important to you, use the “tu” message. If speed is important to you, listen to the “te” message. Or of course use both if you’d like."
    if msg[1] == 'te':
        # te update
        trades = [msg[2]]
    elif msg[1] == 'tu':
        # tu update, ignore
        return
    else:
        # snapshot
        trades = msg[1]
    trades = self.parse_trades(trades, market)
    for i in range(0, len(trades)):
        self.emit('trade', symbol, trades[i])
def _websocket_handle_order_book(self, contextId, symbol, msg):
    """Apply a raw-book frame (full snapshot or single delta) and emit 'ob'.

    A snapshot frame carries a list of records as its payload; a delta frame
    carries one record.  The (previously duplicated) record-application logic
    is factored into a local helper.
    """
    data = msg[1]
    firstElement = data[0]
    timestamp = None
    dt = None
    length = len(msg)
    if length > 2:
        # timestamp is appended by the server when conf flag 32768 is active
        timestamp = msg[2]
        dt = self.iso8601(timestamp)
    symbolData = self._contextGetSymbolData(contextId, 'ob', symbol)

    def apply_record(record):
        # bitfinex raw book record: [price, count, amount];
        # the sign of amount encodes the side, count == 0 removes the level
        price = record[0]
        c = record[1]
        amount = record[2]
        if amount > 0:
            side = 'bids'
            isBid = True
        else:
            side = 'asks'
            isBid = False
            amount = -amount
        if c == 0:
            # remove the price level
            self.updateBidAsk([price, 0], symbolData['ob'][side], isBid)
        else:
            # insert/update the price level
            self.updateBidAsk([price, amount], symbolData['ob'][side], isBid)

    if isinstance(firstElement, list):
        # snapshot: reset the book, then apply every record
        symbolData['ob'] = {
            'bids': [],
            'asks': [],
            'timestamp': timestamp,
            'datetime': dt,
            'nonce': None,
        }
        for i in range(0, len(data)):
            apply_record(data[i])
    else:
        # incremental update: the payload is a single record
        apply_record(data)
    symbolData['ob']['timestamp'] = timestamp
    symbolData['ob']['datetime'] = dt
    self.emit('ob', symbol, self._cloneOrderBook(symbolData['ob'], symbolData['limit']))
    self._contextSetSymbolData(contextId, 'ob', symbol, symbolData)
def _websocket_process_pending_nonces(self, contextId, nonceKey, event, symbol, success, ex):
    """Resolve every pending nonce stored under ``nonceKey`` for the given
    event/symbol: cancel its timeout, emit the outcome, then clear the set.
    """
    symbolData = self._contextGetSymbolData(contextId, event, symbol)
    if nonceKey not in symbolData:
        return
    for nonce, timeoutHandle in list(symbolData[nonceKey].items()):
        self._cancelTimeout(timeoutHandle)
        self.emit(nonce, success, ex)
    symbolData[nonceKey] = {}
    self._contextSetSymbolData(contextId, event, symbol, symbolData)
def _websocket_handle_subscription(self, contextId, event, msg):
    """Register the channel id announced in a 'subscribed' event and resolve
    the pending subscription nonces for that event/symbol.
    """
    marketId = self.safe_string(msg, 'symbol')
    symbol = self._websocketFindSymbol(marketId)
    channel = self.safe_integer(msg, 'chanId')
    chanKey = '_' + str(channel)
    channels = self._contextGet(contextId, 'channels')
    if channels is None:
        channels = {}
    channels[chanKey] = {
        'response': msg,
        'symbol': symbol,
        'event': event,
    }
    self._contextSet(contextId, 'channels', channels)
    symbolData = self._contextGetSymbolData(contextId, event, symbol)
    symbolData['channelId'] = channel
    self._contextSetSymbolData(contextId, event, symbol, symbolData)
    if event in ('ob', 'trade'):
        self._websocket_process_pending_nonces(contextId, 'sub-nonces', event, symbol, True, None)
def _websocket_handle_unsubscription(self, contextId, msg):
    """Handle an 'unsubscribed' event: drop the channel registration and
    resolve pending unsubscription nonces.
    """
    status = self.safe_string(msg, 'status')
    if status == 'OK':
        chanId = self.safe_integer(msg, 'chanId')
        chanKey = '_' + str(chanId)
        channels = self._contextGet(contextId, 'channels')
        if not (chanKey in channels):
            # BUGFIX: original used the JavaScript 'new' keyword (a Python
            # SyntaxError) and concatenated the integer chanId into the message
            self.emit('err', ExchangeError(self.id + ' msg received from unregistered channels:' + str(chanId)), contextId)
            return
        symbol = channels[chanKey]['symbol']
        event = channels[chanKey]['event']
        # BUGFIX: ccxt's omit() returns a pruned copy without mutating its
        # argument, so the result must be stored back -- the original
        # discarded it and the channel registration was never removed
        channels = self.omit(channels, chanKey)
        self._contextSet(contextId, 'channels', channels)
        self._websocket_process_pending_nonces(contextId, 'unsub-nonces', event, symbol, True, None)
def _websocket_subscribe(self, contextId, event, symbol, nonce, params={}):
    """Subscribe to the 'ob' (order book) or 'trade' channel for a symbol.

    The nonce is parked under 'sub-nonces' with a timeout handle so it can be
    resolved by the subscription response or expired by the timeout.

    :raises NotSupported: for any event other than 'ob' or 'trade'
    """
    if event != 'ob' and event != 'trade':
        raise NotSupported('subscribe ' + event + '(' + symbol + ') not supported for exchange ' + self.id)
    # save nonce for subscription response
    symbolData = self._contextGetSymbolData(contextId, event, symbol)
    if not ('sub-nonces' in symbolData):
        symbolData['sub-nonces'] = {}
    symbolData['limit'] = self.safe_integer(params, 'limit', None)
    nonceStr = str(nonce)
    # BUGFIX: the key handed to the timeout callback must match the storage
    # key 'sub-nonces' -- the original passed 'sub-nonce', so
    # _websocket_timeout_remove_nonce never found the nonce to remove
    handle = self._setTimeout(contextId, self.timeout, self._websocketMethodMap('_websocketTimeoutRemoveNonce'), [contextId, nonceStr, event, symbol, 'sub-nonces'])
    symbolData['sub-nonces'][nonceStr] = handle
    self._contextSetSymbolData(contextId, event, symbol, symbolData)
    # send request
    id = self.market_id(symbol)
    if event == 'ob':
        self.websocketSendJson({
            'event': 'subscribe',
            'channel': 'book',
            'symbol': id,
            'prec': 'P0',
            'freq': 'F0',
            'len': '100',
        })
    elif event == 'trade':
        self.websocketSendJson({
            'event': 'subscribe',
            'channel': 'trades',
            'symbol': id,
        })
def _websocket_unsubscribe(self, contextId, event, symbol, nonce, params={}):
    """Unsubscribe from the 'ob' or 'trade' channel for a symbol, parking the
    nonce under 'unsub-nonces' until the server confirms or the timeout fires.

    :raises NotSupported: for any event other than 'ob' or 'trade'
    """
    if event != 'ob' and event != 'trade':
        raise NotSupported('unsubscribe ' + event + '(' + symbol + ') not supported for exchange ' + self.id)
    symbolData = self._contextGetSymbolData(contextId, event, symbol)
    request = {
        'event': 'unsubscribe',
        'chanId': symbolData['channelId'],
    }
    symbolData.setdefault('unsub-nonces', {})
    nonceStr = str(nonce)
    timeoutHandle = self._setTimeout(contextId, self.timeout, self._websocketMethodMap('_websocketTimeoutRemoveNonce'), [contextId, nonceStr, event, symbol, 'unsub-nonces'])
    symbolData['unsub-nonces'][nonceStr] = timeoutHandle
    self._contextSetSymbolData(contextId, event, symbol, symbolData)
    self.websocketSendJson(request)
def _websocket_timeout_remove_nonce(self, contextId, timerNonce, event, symbol, key):
    """Timeout callback: drop an expired nonce from the pending set under
    ``key`` ('sub-nonces' or 'unsub-nonces') for the given event/symbol.
    """
    symbolData = self._contextGetSymbolData(contextId, event, symbol)
    if key in symbolData:
        nonces = symbolData[key]
        if timerNonce in nonces:
            # BUGFIX: ccxt's omit() is non-mutating and returns a pruned copy;
            # the original discarded the result so the nonce was never removed
            symbolData[key] = self.omit(symbolData[key], timerNonce)
            self._contextSetSymbolData(contextId, event, symbol, symbolData)
def _get_current_websocket_orderbook(self, contextId, symbol, limit):
    """Return a depth-limited copy of the tracked order book for ``symbol``,
    or None when no book has been received yet.
    """
    data = self._contextGetSymbolData(contextId, 'ob', symbol)
    book = data['ob'] if 'ob' in data else None
    if book is None:
        return None
    return self._cloneOrderBook(book, limit)
|
/*
A CkCallback is a simple way for a library to return data
to a wide variety of user code, without the library having
to handle all 17 possible cases.
This object is implemented as a union, so the entire object
can be sent as bytes. Another option would be to use a virtual
"send" method.
Initial version by Orion Sky Lawlor, olawlor@acm.org, 2/8/2002
*/
#ifndef _CKCALLBACK_H_
#define _CKCALLBACK_H_
#include "cksection.h"
#include "conv-ccs.h" /*for CcsDelayedReply struct*/
#include "charm.h"
#include "ckarrayindex.h"
typedef void (*CkCallbackFn)(void *param,void *message);
typedef void (*Ck1CallbackFn)(void *message);
class CProxyElement_ArrayBase; /*forward declaration*/
class CProxySection_ArrayBase;/*forward declaration*/
class CProxyElement_Group; /*forward declaration*/
class CProxy_NodeGroup;
class Chare;
class Group;
class NodeGroup;
class ArrayElement;
#define CkSelfCallback(ep) CkCallback(this, ep)
class CkCallback {
public:
    // Delivery mechanism tag: selects which member of the data union below
    // is the active one.
    enum callbackType {
        invalid=0, //Invalid callback
        ignore, //Do nothing
        ckExit, //Call ckExit
        resumeThread, //Resume a waiting thread (d.thread)
        callCFn, //Call a C function pointer with a parameter (d.cfn)
        call1Fn, //Call a C function pointer on any processor (d.c1fn)
        sendChare, //Send to a chare (d.chare)
        sendGroup, //Send to a group (d.group)
        sendNodeGroup, //Send to a nodegroup (d.group)
        sendArray, //Send to an array (d.array)
        isendChare, //Inlined send to a chare (d.chare)
        isendGroup, //Inlined send to a group (d.group)
        isendNodeGroup, //Inlined send to a nodegroup (d.group)
        isendArray, //Inlined send to an array (d.array)
        bcastGroup, //Broadcast to a group (d.group)
        bcastNodeGroup, //Broadcast to a nodegroup (d.group)
        bcastArray, //Broadcast to an array (d.array)
        bcastSection,//Broadcast to a section(d.section)
        replyCCS // Reply to a CCS message (d.ccsReply)
    };
private:
    // Per-mechanism payload.  Kept as a union of POD-ish structs so the whole
    // callback can be shipped across processors as raw bytes (see file
    // header comment).
    union callbackData {
        struct s_thread { //resumeThread
            int onPE; //Thread is waiting on this PE
            int cb; //The suspending callback (0 if already done)
            CthThread th; //Thread to resume (NULL if none waiting)
            void *ret; //Place to put the returned message
        } thread;
        struct s_cfn { //callCFn
            int onPE; //Call on this PE
            CkCallbackFn fn; //Function to call
            void *param; //User parameter
        } cfn;
        struct s_c1fn { //call1Fn
            Ck1CallbackFn fn; //Function to call on whatever processor
        } c1fn;
        struct s_chare { //sendChare
            int ep; //Entry point to call
            CkChareID id; //Chare to call it on
            CMK_REFNUM_TYPE refnum; // Reference number to set on the message
            bool hasRefnum;
        } chare;
        struct s_group { //(sendGroup, bcastGroup)
            int ep; //Entry point to call
            CkGroupID id; //Group to call it on
            int onPE; //Processor to send to (if any)
            CMK_REFNUM_TYPE refnum; // Reference number to set on the message
            bool hasRefnum;
        } group;
        struct s_array { //(sendArray, bcastArray)
            int ep; //Entry point to call
            CkGroupID id; //Array ID to call it on
            CkArrayIndexBase idx; //Index to send to (if any)
            CMK_REFNUM_TYPE refnum; // Reference number to set on the message
            bool hasRefnum;
        } array;
        struct s_section{
            CkSectionInfoStruct sinfo;
            CkArrayIndex *_elems; // owned; freed by ~CkCallback
            int _nElems;
            int *pelist; // owned; freed by ~CkCallback
            int npes;
            int ep;
            CMK_REFNUM_TYPE refnum; // Reference number to set on the message
            bool hasRefnum;
        } section;
        struct s_ccsReply {
            CcsDelayedReply reply;
        } ccsReply;
        //callbackData(){memset(this,0,sizeof(callbackData));}
        //callbackData()=default;
        //constructor()=default;
    };
public:
    callbackType type;
    callbackData d;

    // Structural equality: compares only the fields meaningful for the
    // active callback type.  bcastSection compares as equal by type alone.
    bool operator==(CkCallback & other){
        if(type != other.type)
            return false;
        switch (type) {
        case resumeThread:
            return (d.thread.onPE == other.d.thread.onPE &&
                d.thread.cb == other.d.thread.cb);
        case isendChare:
        case sendChare:
            return (d.chare.ep == other.d.chare.ep &&
                d.chare.id.onPE == other.d.chare.id.onPE &&
                d.chare.hasRefnum == other.d.chare.hasRefnum &&
                d.chare.refnum == other.d.chare.refnum);
        case isendGroup:
        case sendGroup:
        case isendNodeGroup:
        case sendNodeGroup:
            return (d.group.ep == other.d.group.ep &&
                d.group.id == other.d.group.id &&
                d.group.onPE == other.d.group.onPE &&
                d.group.hasRefnum == other.d.group.hasRefnum &&
                d.group.refnum == other.d.group.refnum);
        case bcastNodeGroup:
        case bcastGroup:
            return (d.group.ep == other.d.group.ep &&
                d.group.id == other.d.group.id &&
                d.group.hasRefnum == other.d.group.hasRefnum &&
                d.group.refnum == other.d.group.refnum);
        case isendArray:
        case sendArray:
            return (d.array.ep == other.d.array.ep &&
                d.array.id == other.d.array.id &&
                d.array.idx == other.d.array.idx &&
                d.array.hasRefnum == other.d.array.hasRefnum &&
                d.array.refnum == other.d.array.refnum);
        case bcastArray:
            return (d.array.ep == other.d.array.ep &&
                d.array.id == other.d.array.id &&
                d.array.hasRefnum == other.d.array.hasRefnum &&
                d.array.refnum == other.d.array.refnum);
        case replyCCS:
            return true;
        case call1Fn:
            return (d.c1fn.fn == other.d.c1fn.fn);
        case callCFn:
            return (d.cfn.fn == other.d.cfn.fn &&
                d.cfn.onPE == other.d.cfn.onPE &&
                d.cfn.param == other.d.cfn.param);
        case ignore:
        case ckExit:
        case invalid:
        case bcastSection:
            return true;
        default:
            CkAbort("Inconsistent CkCallback type");
            return false;
        }
    }

    // Internal helpers for resumeThread callbacks (defined in the .C file).
    void impl_thread_init(void);
    void *impl_thread_delay(void) const;

    CkCallback(void) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=invalid;
    }
    //This is how you create ignore, ckExit, and resumeThreads:
    CkCallback(callbackType t) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        if (t==resumeThread) impl_thread_init();
        type=t;
    }
    // Call a C function on any processor
    CkCallback(Ck1CallbackFn fn) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=call1Fn;
        d.c1fn.fn=fn;
    }
    // Call a C function on the current PE
    CkCallback(CkCallbackFn fn,void *param) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=callCFn;
        d.cfn.onPE=CkMyPe(); d.cfn.fn=fn; d.cfn.param=param;
    }
    // Call a chare entry method
    CkCallback(int ep,const CkChareID &id,bool doInline=false) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=doInline?isendChare:sendChare;
        d.chare.ep=ep; d.chare.id=id;
        d.chare.hasRefnum = false;
        d.chare.refnum = 0;
    }
    // Bcast to nodegroup
    CkCallback(int ep,const CProxy_NodeGroup &ngp);
    // Bcast to a group or nodegroup
    CkCallback(int ep,const CkGroupID &id, int isNodeGroup=0) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=isNodeGroup?bcastNodeGroup:bcastGroup;
        d.group.ep=ep; d.group.id=id;
        d.group.hasRefnum = false;
        d.group.refnum = 0;
    }
    // Send to nodegroup element
    CkCallback(int ep,int onPE,const CProxy_NodeGroup &ngp,bool doInline=false);
    // Send to group/nodegroup element
    CkCallback(int ep,int onPE,const CkGroupID &id,bool doInline=false, int isNodeGroup=0) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=doInline?(isNodeGroup?isendNodeGroup:isendGroup):(isNodeGroup?sendNodeGroup:sendGroup);
        d.group.ep=ep; d.group.id=id; d.group.onPE=onPE;
        d.group.hasRefnum = false;
        d.group.refnum = 0;
    }
    // Send to specified group element
    CkCallback(int ep,const CProxyElement_Group &grpElt,bool doInline=false);
    // Bcast to array
    CkCallback(int ep,const CkArrayID &id) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=bcastArray;
        d.array.ep=ep; d.array.id=id;
        d.array.hasRefnum = false;
        d.array.refnum = 0;
    }
    // Send to array element
    CkCallback(int ep,const CkArrayIndex &idx,const CkArrayID &id,bool doInline=false) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=doInline?isendArray:sendArray;
        d.array.ep=ep; d.array.id=id; d.array.idx = idx;
        d.array.hasRefnum = false;
        d.array.refnum = 0;
    }
    // Send to array element via element proxy
    // (NOTE(review): original comment said "Bcast to array", but an element
    // proxy targets a single element)
    CkCallback(int ep,const CProxyElement_ArrayBase &arrElt,bool doInline=false);
    //Bcast to section
    // BUGFIX: parameter name was mojibake ("§Elt", an HTML-decoded "&sect");
    // reconstructed as "&sectElt" so the declaration compiles
    CkCallback(int ep,CProxySection_ArrayBase &sectElt,bool doInline=false);
    CkCallback(int ep, CkSectionID &sid);
    // Send to chare
    CkCallback(Chare *p, int ep, bool doInline=false);
    // Send to group element on current PE
    CkCallback(Group *p, int ep, bool doInline=false);
    // Send to nodegroup element on current node
    CkCallback(NodeGroup *p, int ep, bool doInline=false);
    // Send to specified array element
    CkCallback(ArrayElement *p, int ep,bool doInline=false);
    // Reply to a CCS request
    CkCallback(const CcsDelayedReply &reply) {
#if CMK_REPLAYSYSTEM
        memset(this, 0, sizeof(CkCallback));
#endif
        type=replyCCS;
        d.ccsReply.reply=reply;
    }
    ~CkCallback() {
        thread_destroy();
        // section callbacks own heap arrays; release them here
        if (bcastSection == type) {
            if (d.section._elems != NULL) delete [] d.section._elems;
            if (d.section.pelist != NULL) delete [] d.section.pelist;
        }
    }
    int isInvalid(void) const {return type==invalid;}
/// Does this callback point at something that may not be at the same
/// address after a checkpoint/restart cycle?
    bool containsPointer() const;
/**
 * Interface used by threaded callbacks:
 * Libraries should call these from their "start" entry points.
 * Use "return cb.thread_delay()" to suspend the thread before
 * the return.
 * It's a no-op for everything but threads.
 */
    void *thread_delay(void) const {
        if (type==resumeThread) return impl_thread_delay();
        return NULL;
    }
    void thread_destroy() const;
/**
 * Send this message back to the caller.
 *
 * Libraries should call this from their "done" entry points.
 * It takes the given message and handles it appropriately.
 * After the send(), this callback is finished and cannot be reused.
 */
    void send(void *msg=NULL) const;
/**
 * Send this data, formatted as a CkDataMsg, back to the caller.
 */
    void send(int length,const void *data) const;
    void pup(PUP::er &p);
    // Attach a reference number to the eventual message; only valid for
    // entry-method-directed callback types.
    void setRefnum(CMK_REFNUM_TYPE refnum) {
        switch(type) {
        case sendChare:
        case isendChare:
            d.chare.hasRefnum = true;
            d.chare.refnum = refnum;
            break;
        case sendGroup:
        case sendNodeGroup:
        case isendGroup:
        case isendNodeGroup:
        case bcastGroup:
        case bcastNodeGroup:
            d.group.hasRefnum = true;
            d.group.refnum = refnum;
            break;
        case sendArray:
        case isendArray:
        case bcastArray:
            d.array.hasRefnum = true;
            d.array.refnum = refnum;
            break;
        case bcastSection:
            d.section.hasRefnum = true;
            d.section.refnum = refnum;
            break;
        default:
            CkAbort("Tried to set a refnum on a callback not directed at an entry method");
        }
    }
};
//PUPbytes(CkCallback) //FIXME: write a real pup routine
/**
* Convenience class: a thread-suspending callback.
* Makes sure the thread actually gets delayed, even if the
* library can't or won't call "thread_delay".
* The return value is lost, so your library needs to call
* thread_delay itself if you want a return value.
* Modification Filippo: Passing in an pointer argument, the return
* value will be stored in that pointer
*/
class CkCallbackResumeThread : public CkCallback {
protected:
    // Optional slot where the delivered message pointer is stored
    // (NULL when the no-argument constructor was used).
    void ** result;
public:
    CkCallbackResumeThread(void)
        :CkCallback(resumeThread) { result = NULL; }
    // 'ptr' receives the delivered message pointer (see class comment above).
    CkCallbackResumeThread(void * &ptr)
        :CkCallback(resumeThread) { result = &ptr; }
    // Defined out of line; per the class comment it ensures the thread
    // actually gets delayed -- NOTE(review): confirm against the .C file.
    ~CkCallbackResumeThread(void);
};
void _registerCkCallback(void); //used by init
void CkCallbackInit();
#endif
|
/*
* ESPRESSIF MIT License
*
* Copyright (c) 2018 <ESPRESSIF SYSTEMS (SHANGHAI) PTE LTD>
*
* Permission is hereby granted for use on all ESPRESSIF SYSTEMS products, in which case,
* it is free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished
* to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
#ifndef _BAIDU_ACCESS_TOKEN
#define _BAIDU_ACCESS_TOKEN
#ifdef __cplusplus
extern "C" {
#endif
/**
* @brief Get baidu access token
*
* @param[in] access_key The access key
* @param[in] access_secret The access secret
*
* @return Access token response from baidu, need to freed after used
*/
char *baidu_get_access_token(const char *access_key, const char *access_secret);
#ifdef __cplusplus
}
#endif
#endif
|
/**
* @fileoverview Flag all the variables that were declared but never used
* @author Raghav Dua <duaraghav8@gmail.com>
*/
'use strict';
module.exports = {

    meta: {
        docs: {
            recommended: true,
            type: 'error',
            description: 'Flag all the variables that were declared but never used'
        },
        schema: []
    },

    create: function (context) {
        // Maps a variable name to its declaration node; entries are pruned
        // as soon as a use of the name is seen.
        var pendingDeclarations = {};

        // Track declarations coming from VariableDeclarator nodes.
        function trackVariableDeclarator (emitted) {
            if (emitted.exit) { return; }
            pendingDeclarations [emitted.node.id.name] = emitted.node;
        }

        // Track declarations coming from DeclarativeExpressions, except
        // struct members (those are field definitions, not variables).
        function trackDeclarativeExpression (emitted) {
            if (emitted.exit) { return; }
            if (emitted.node.parent.type === 'StructDeclaration') { return; }
            pendingDeclarations [emitted.node.name] = emitted.node;
        }

        // On the first non-declaration use of a name, stop tracking it.
        function pruneUsedIdentifier (emitted) {
            if (emitted.exit) { return; }
            var node = emitted.node;
            var sourceCode = context.getSourceCode ();
            if (
                pendingDeclarations [node.name] &&
                sourceCode.getParent (node).type !== 'VariableDeclarator'
            ) {
                delete pendingDeclarations [node.name];
            }
        }

        // Whatever is still tracked when the Program node exits was declared
        // but never used -- report each one.
        function reportUnused (emitted) {
            if (!emitted.exit) { return; }
            Object.keys (pendingDeclarations).forEach (function (name) {
                context.report ({
                    node: pendingDeclarations [name],
                    message: 'Variable \'' + name + '\' is declared but never used.'
                });
            });
        }

        return {
            Identifier: pruneUsedIdentifier,
            Program: reportUnused,
            DeclarativeExpression: trackDeclarativeExpression,
            VariableDeclarator: trackVariableDeclarator
        };
    }

};
|
#!/usr/bin/python
# Copyright (c) 2010-2013, Regents of the University of California.
# All rights reserved.
#
# Released under the BSD 3-Clause license as published at the link below.
# https://openwsn.atlassian.net/wiki/display/OW/License
import logging
import random
import threading
from math import radians, cos, sin, asin, sqrt, log10
from openvisualizer.eventbus.eventbusclient import EventBusClient
class Propagation(EventBusClient):
    """The propagation model of the engine.

    Maintains a symmetric PDR (packet delivery ratio) map between motes and
    relays wireless TX start/end events received over the event bus.
    """

    SIGNAL_WIRELESSTXSTART = 'wirelessTxStart'
    SIGNAL_WIRELESSTXEND = 'wirelessTxEnd'

    # link-budget constants used by the Pister-hack model
    FREQUENCY_GHz = 2.4
    TX_POWER_dBm = 0.0
    PISTER_HACK_LOSS = 40.0   # extra random loss, uniform in [0, 40] dB
    SENSITIVITY_dBm = -101.0  # below this received power, PDR == 0
    GREY_AREA_dB = 15.0       # linear PDR ramp width above sensitivity

    def __init__(self, sim_topology):
        # store params
        from openvisualizer.simengine import simengine
        self.engine = simengine.SimEngine()
        self.sim_topology = sim_topology
        # local variables
        # BUGFIX: must be reentrant -- create_connection() calls
        # delete_connection() while already holding the lock (zero-PDR path),
        # which deadlocks with a plain non-reentrant threading.Lock
        self.data_lock = threading.RLock()
        self.connections = {}
        self.pending_tx_end = []
        # logging
        self.log = logging.getLogger('Propagation')
        self.log.setLevel(logging.DEBUG)
        self.log.addHandler(logging.NullHandler())
        # initialize parent class
        super(Propagation, self).__init__(
            name='Propagation',
            registrations=[
                {
                    'sender': self.WILDCARD,
                    'signal': self.SIGNAL_WIRELESSTXSTART,
                    'callback': self._indicate_tx_start,
                },
                {
                    'sender': self.WILDCARD,
                    'signal': self.SIGNAL_WIRELESSTXEND,
                    'callback': self._indicate_tx_end,
                },
            ],
        )

    # ======================== public ==========================================

    def create_connection(self, from_mote, to_mote):
        """Compute the PDR between two motes per the configured topology and
        create (or, when PDR is 0, delete) the symmetric connection entry.

        :raises NotImplementedError: for an unknown sim_topology value
        """
        with self.data_lock:
            if not self.sim_topology:
                # ===== Pister-hack model
                # retrieve position
                mh_from = self.engine.get_mote_handler_by_id(from_mote)
                (lat_from, lon_from) = mh_from.get_location()
                mh_to = self.engine.get_mote_handler_by_id(to_mote)
                (lat_to, lon_to) = mh_to.get_location()
                # compute distance (haversine formula, Earth radius ~6367 km)
                lon_from, lat_from, lon_to, lat_to = map(radians, [lon_from, lat_from, lon_to, lat_to])
                d_lon = lon_to - lon_from
                d_lat = lat_to - lat_from
                a = sin(d_lat / 2) ** 2 + cos(lat_from) * cos(lat_to) * sin(d_lon / 2) ** 2
                c = 2 * asin(sqrt(a))
                d_km = 6367 * c
                # compute reception power (first Friis, then apply Pister-hack)
                p_rx = self.TX_POWER_dBm - (20 * log10(d_km) + 20 * log10(self.FREQUENCY_GHz) + 92.45)
                p_rx -= self.PISTER_HACK_LOSS * random.random()
                # turn into PDR
                if p_rx < self.SENSITIVITY_dBm:
                    pdr = 0.0
                elif p_rx > self.SENSITIVITY_dBm + self.GREY_AREA_dB:
                    pdr = 1.0
                else:
                    pdr = (p_rx - self.SENSITIVITY_dBm) / self.GREY_AREA_dB
            elif self.sim_topology == 'linear':
                # linear network: each mote hears only its predecessor
                if from_mote == to_mote + 1:
                    pdr = 1.0
                else:
                    pdr = 0.0
            elif self.sim_topology == 'fully-meshed':
                pdr = 1.0
            else:
                raise NotImplementedError('unsupported sim_topology={0}'.format(self.sim_topology))
            # ==== create, update or delete connection (kept symmetric)
            if pdr:
                if from_mote not in self.connections:
                    self.connections[from_mote] = {}
                self.connections[from_mote][to_mote] = pdr
                if to_mote not in self.connections:
                    self.connections[to_mote] = {}
                self.connections[to_mote][from_mote] = pdr
            else:
                # re-acquires self.data_lock; safe because it is reentrant
                self.delete_connection(to_mote, from_mote)

    def retrieve_connections(self):
        """Return the connection list, one entry per undirected mote pair."""
        retrieved_connections = []
        return_val = []
        with self.data_lock:
            for from_mote in self.connections:
                for to_mote in self.connections[from_mote]:
                    # skip the reverse direction of pairs already reported
                    if (to_mote, from_mote) not in retrieved_connections:
                        return_val += [
                            {
                                'fromMote': from_mote,
                                'toMote': to_mote,
                                'pdr': self.connections[from_mote][to_mote],
                            },
                        ]
                        retrieved_connections += [(from_mote, to_mote)]
        return return_val

    def update_connection(self, from_mote, to_mote, pdr):
        """Set the PDR of an existing connection, in both directions."""
        with self.data_lock:
            self.connections[from_mote][to_mote] = pdr
            self.connections[to_mote][from_mote] = pdr

    def delete_connection(self, from_mote, to_mote):
        """Remove a connection in both directions; silently ignore a pair
        that does not exist.
        """
        with self.data_lock:
            try:
                del self.connections[from_mote][to_mote]
                if not self.connections[from_mote]:
                    del self.connections[from_mote]
                del self.connections[to_mote][from_mote]
                if not self.connections[to_mote]:
                    del self.connections[to_mote]
            except KeyError:
                pass  # did not exist

    # ======================== indication from eventBus ========================

    def _indicate_tx_start(self, sender, signal, data):
        # NOTE(review): self.connections is read here without holding
        # data_lock, as in the original code -- confirm this is intentional
        (from_mote, packet, channel) = data
        if from_mote in self.connections:
            for (to_mote, pdr) in self.connections[from_mote].items():
                # Bernoulli draw against the link's PDR
                if random.random() <= pdr:
                    # indicate start of transmission
                    mh = self.engine.get_mote_handler_by_id(to_mote)
                    mh.bsp_radio.indicate_tx_start(from_mote, packet, channel)
                    # remember to signal end of transmission
                    self.pending_tx_end += [(from_mote, to_mote)]

    def _indicate_tx_end(self, sender, signal, data):
        from_mote = data
        if from_mote in self.connections:
            for (to_mote, pdr) in self.connections[from_mote].items():
                try:
                    self.pending_tx_end.remove((from_mote, to_mote))
                except ValueError:
                    # no TX start was delivered to this neighbor
                    pass
                else:
                    mh = self.engine.get_mote_handler_by_id(to_mote)
                    mh.bsp_radio.indicate_tx_end(from_mote)

    # ======================== private =========================================

    # ======================== helpers =========================================
# ======================== private =========================================
# ======================== helpers =========================================
|
# ckwg +29
# Copyright 2019 by Kitware, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither name of Kitware, Inc. nor the names of any contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
from vital.algo import ImageObjectDetector
from vital.types import DetectedObjectSet, DetectedObject, BoundingBox
class SimpleImageObjectDetector(ImageObjectDetector):
    """
    Implementation of ImageObjectDetector that creates a bounding box on the
    coordinates specified by the user using configuration
    Examples:
        With default value of center(200, 200) and bbox dimension (100, 200)
        >>> from vital.modules import modules
        >>> modules.load_known_modules()
        >>> from vital.algo import ImageObjectDetector
        >>> image_detector = ImageObjectDetector.create("SimpleImageObjectDetector")
        >>> from vital.types import Image, ImageContainer
        >>> image = ImageContainer(Image())
        >>> detection = image_detector.detect(image)
        >>> print(str(detection[0].bounding_box()))
        <BoundingBox(150.0, 100.0, 250.0, 300.0)>
        With configuration that changes the center value
        >>> modules.load_known_modules()
        >>> from vital.algo import ImageObjectDetector
        >>> image_detector = ImageObjectDetector.create("SimpleImageObjectDetector")
        >>> from vital.types import Image, ImageContainer
        >>> image = ImageContainer(Image())
        >>> from sprokit.pipeline import config
        >>> tc = config.empty_config()
        >>> tc.set_value("center_x", "200")
        >>> tc.set_value("center_y", "100")
        >>> image_detector.check_configuration(tc)
        False
        >>> image_detector.set_configuration(tc)
        >>> detection = image_detector.detect(image)
        >>> print(detection[0].bounding_box())
        <BoundingBox(150.0, 0.0, 250.0, 200.0)>
    Using pipeline_runner from build/install directory
    $ pipeline_runner -p examples/pipelines/example_pydetector_on_image.pipe
    """

    def __init__(self):
        ImageObjectDetector.__init__(self)
        # box center, size, and per-frame drift, all configurable
        (self.m_center_x, self.m_center_y) = (200.0, 200.0)
        (self.m_height, self.m_width) = (200.0, 100.0)
        self.m_dx = 0
        self.m_dy = 0
        # number of frames detected so far; scales the (dx, dy) drift
        self.frame_ct = 0

    def get_configuration(self):
        """Return the base-class configuration extended with this
        detector's parameters (all serialized as strings)."""
        cfg = super(ImageObjectDetector, self).get_configuration()
        values = {
            "center_x": self.m_center_x,
            "center_y": self.m_center_y,
            "height": self.m_height,
            "width": self.m_width,
            "dx": self.m_dx,
            "dy": self.m_dy,
        }
        for key, value in values.items():
            cfg.set_value(key, str(value))
        return cfg

    def set_configuration(self, cfg_in):
        """Merge ``cfg_in`` over the current configuration and load the
        resulting values; dx/dy are truncated to integers."""
        cfg = self.get_configuration()
        cfg.merge_config(cfg_in)
        for name in ("center_x", "center_y", "height", "width"):
            setattr(self, "m_" + name, float(cfg.get_value(name)))
        for name in ("dx", "dy"):
            setattr(self, "m_" + name, int(float(cfg.get_value(name))))

    def check_configuration(self, cfg):
        """Return True only if every value present in ``cfg`` matches the
        currently-loaded value (see the docstring example: a differing
        config yields False)."""
        expected = (
            ("center_x", self.m_center_x),
            ("center_y", self.m_center_y),
            ("height", self.m_height),
            ("width", self.m_width),
            ("dx", self.m_dx),
            ("dy", self.m_dy),
        )
        for key, current in expected:
            if cfg.has_value(key) and float(cfg.get_value(key)) != current:
                return False
        return True

    def detect(self, image_data):
        """Return a single detection whose box is centered at the configured
        center plus the accumulated per-frame drift; ``image_data`` content
        is not inspected."""
        offset_x = self.frame_ct * self.m_dx
        offset_y = self.frame_ct * self.m_dy
        half_w = self.m_width / 2.0
        half_h = self.m_height / 2.0
        box = BoundingBox(self.m_center_x + offset_x - half_w,
                          self.m_center_y + offset_y - half_h,
                          self.m_center_x + offset_x + half_w,
                          self.m_center_y + offset_y + half_h)
        self.frame_ct += 1
        return DetectedObjectSet([DetectedObject(box)])
def __vital_algorithm_register__():
    """Register SimpleImageObjectDetector with the vital algorithm factory
    (idempotent: does nothing if the implementation is already known)."""
    from vital.algo import algorithm_factory

    implementation_name = "SimpleImageObjectDetector"
    already_registered = algorithm_factory.has_algorithm_impl_name(
        SimpleImageObjectDetector.static_type_name(),
        implementation_name)
    if already_registered:
        return
    algorithm_factory.add_algorithm(implementation_name,
                                    "test image object detector arrow",
                                    SimpleImageObjectDetector)
    algorithm_factory.mark_algorithm_as_loaded(implementation_name)
|
import { useRef } from 'react'
import Head from 'next/head';
import { faBalanceScale } from '@fortawesome/pro-light-svg-icons';
import DocPage from '../../components/DocPage'
import DocSection from '../../components/DocSection'
import Code from '../../components/Code'
// NOTE(review): 'tree' is computed but never used below — candidate for removal.
var low = require('lowlight')
var tree = low.highlight('js', '"use strict";').value
// import stn from '../../images/SinkTargetNames.png'
export default function License() {
const toc =
[
{ id: "logary-licencing", title: "Logary licencing", ref: useRef(null) } ,
{ id: "terms-of-service", title: "Terms of Service (ToS)", ref: useRef(null) } ,
{ id: "commercial-license", title: "Commercial License", ref: useRef(null) }
]
return (
<DocPage name="all-target" title="License" faIcon={faBalanceScale} colour="orange" readingMinutes={1} toc={toc}>
<Head>
<title key="title">License</title>
</Head>
<DocSection {...toc[0]}>
<h2>Logary licencing</h2>
<p>This section defines the licenses for each subcomponent of Logary.</p>
<h3>Logary Library (Logary-lib)</h3>
<p>Present at src/Logary[,Tests,PerfTests] and src/adapters/*. Each being a .Net library that you link your own code/software to.</p>
<p>Non-profits and non-profit projects are free to use without payment, subject to the Terms of Service defined below.</p>
<p>For-profit or commercial entities must purchase licenses to use Logary for their production environments, subject to the Terms of Service defined below.</p>
<h3>Logary Dash</h3>
<p>Present at src/services/Dash and src/targets/Logary.Targets.SSE; a web app and an EventSource/SSE based target.</p>
<p>Non-profits and non-profit projects are free to use without payment, subject to the Terms of Service defined below.</p>
<p>For-profit or commercial entities must purchase licenses to use Dash for their production environments, subject to the Terms of Service defined below.</p>
<h3>Targets</h3>
<p>Present at src/targets/*, being .Net libraries that you link your own software to, or add as plugins to Rutta.</p>
<p>Targets incorporated into the Logary code-base cannot be used without using the Logary library, but their code is free to modify, extend and sell under the Apache 2.0 license, unless specified otherwise on a per target basis (see below).</p>
<p>The following targets' licenses must always be purchased, independent of the for-profit status of the purchasing entity:</p>
<ul>
<li>Mixpanel (under <span className="_code"> src/targets/Logary.Targets.Mixpanel </span>) — commercial user analytics, segmentation</li>
<li>Opsgenie (under <span className="_code"> src/targets/Logary.Targets.Opsgenie) </span> — commercial alerting and monitoring</li>
</ul>
<p>All other targets (and their code-bases) are licensed as Apache 2.0 (Appendix A contains links to Apache 2.0).</p>
<h3>Rutta</h3>
<p>Present at src/services/*, being an executable program and docker container.</p>
<p>Rutta is dual-licensed as GPL v3, or as a commercial license. Purchasing a commercial license for Rutta lets you use and extend its source code for your own commercial purposes and not open source that change.</p>
<p>Present at src/services/*, being an executable program and docker container.</p>
<p>GPL v3 means that when you want to extend Rutta for your own purposes, you must send a pull request with that change in order to make your improvement available for others to use. This is so that free software can continue to be free. If you don't want to send a pull request with your change, you must purchase the commercial license.</p>
<h3>Library Façade</h3>
<p></p>
</DocSection>
<DocSection {...toc[1]}>
<h2>Terms of Service (ToS)</h2>
<p>This section defines the licenses for each subcomponent of Logary.</p>
</DocSection>
<DocSection {...toc[2]}>
<h2>Commercial License</h2>
<p>This section defines the licenses for each subcomponent of Logary.</p>
</DocSection>
</DocPage>
)
}
|
import json
from abc import abstractmethod
from typing import (
Any,
Dict,
Union,
)
from tri_declarative import (
dispatch,
EMPTY,
Namespace,
Refinable,
)
from iommi._web_compat import (
get_template_from_string,
HttpResponse,
HttpResponseBase,
mark_safe,
Template,
)
from iommi.base import (
items,
MISSING,
NOT_BOUND_MESSAGE,
)
from iommi.debug import (
endpoint__debug_tree,
iommi_debug_on,
)
from iommi.endpoint import (
DISPATCH_PATH_SEPARATOR,
Endpoint,
InvalidEndpointPathException,
perform_ajax_dispatch,
perform_post_dispatch,
)
from iommi.member import (
bind_members,
collect_members,
)
from iommi.style import (
get_iommi_style_name,
get_style,
)
from iommi.traversable import (
EvaluatedRefinable,
Traversable,
)
from ._web_compat import (
QueryDict,
settings,
template_types,
)
from .reinvokable import reinvokable
from .sort_after import sort_after
class Part(Traversable):
    """
    `Part` is the base class for parts of a page that can be rendered as html, and can respond to ajax and post.
    See the `howto <https://docs.iommi.rocks/en/latest/howto.html#parts-pages>`_ for example usages.
    """
    include: bool = Refinable()  # This is evaluated, but first and in a special way
    after: Union[int, str] = EvaluatedRefinable()
    extra: Dict[str, Any] = Refinable()
    extra_evaluated: Dict[str, Any] = Refinable()  # not EvaluatedRefinable because this is an evaluated container so is special
    endpoints: Namespace = Refinable()
    # Only the assets used by this part
    assets: Namespace = Refinable()
    @reinvokable
    @dispatch(
        extra=EMPTY,
        include=True,
    )
    def __init__(self, *, endpoints: Dict[str, Any] = None, assets: Dict[str, Any] = None, include, **kwargs):
        # Local import — presumably avoids a circular import with iommi.asset; confirm.
        from iommi.asset import Asset
        super(Part, self).__init__(include=include, **kwargs)
        collect_members(self, name='endpoints', items=endpoints, cls=Endpoint)
        collect_members(self, name='assets', items=assets, cls=Asset)
        if iommi_debug_on():
            import inspect
            # Remember where this part was created so debug tooling can link to it.
            self._instantiated_at_frame = inspect.currentframe().f_back
    @dispatch(
        render=EMPTY,
    )
    @abstractmethod
    def __html__(self, *, render=None):
        # Subclasses must render themselves; this base should never be called.
        assert False, 'Not implemented'  # pragma: no cover, no mutate
    def __str__(self):
        # Rendering only makes sense on a bound part.
        assert self._is_bound, NOT_BOUND_MESSAGE
        return self.__html__()
    def bind(self, *, parent=None, request=None):
        """Bind this part (via Traversable), then bind its endpoints and assets,
        and propagate the bound assets to the root's collected-assets set.
        Returns the bound copy, or None when the part is excluded."""
        result = super(Part, self).bind(parent=parent, request=request)
        if result is None:
            return None
        # Guard: only the bound 'result' may be used from here on, not 'self'.
        del self
        bind_members(result, name='endpoints')
        bind_members(result, name='assets', lazy=False)
        result.iommi_root()._iommi_collected_assets.update(result.assets)
        return result
    @dispatch
    def render_to_response(self, **kwargs):
        """Render this part as an HttpResponse; if the request carries a
        dispatch command (ajax/post endpoint path), route to that endpoint
        instead and wrap its result appropriately."""
        request = self.get_request()
        req_data = request_data(request)
        def dispatch_response_handler(r):
            # Endpoint results may be raw responses, renderable parts, or JSON-able data.
            if isinstance(r, HttpResponseBase):
                return r
            elif isinstance(r, Part):
                if not r._is_bound:
                    r = r.bind(request=request)
                return HttpResponse(render_root(part=r, **kwargs))
            else:
                return HttpResponse(json.dumps(r), content_type='application/json')
        if request.method == 'GET':
            dispatch_prefix = DISPATCH_PATH_SEPARATOR
            dispatcher = perform_ajax_dispatch
            dispatch_error = 'Invalid endpoint path'
        elif request.method == 'POST':
            dispatch_prefix = '-'
            dispatcher = perform_post_dispatch
            dispatch_error = 'Invalid post path'
        else:  # pragma: no cover
            assert False  # This has already been checked in request_data()
        dispatch_commands = {key: value for key, value in items(req_data) if key.startswith(dispatch_prefix)}
        assert len(dispatch_commands) in (0, 1), 'You can only have one or no dispatch commands'
        if dispatch_commands:
            dispatch_target, value = next(iter(dispatch_commands.items()))
            try:
                result = dispatcher(root=self, path=dispatch_target, value=value)
            except InvalidEndpointPathException:
                # In DEBUG surface the bad path loudly; in production degrade to a JSON error.
                if settings.DEBUG:
                    raise
                result = dict(error=dispatch_error)
            if result is not None:
                return dispatch_response_handler(result)
        else:
            if request.method == 'POST':
                assert False, 'This request was a POST, but there was no dispatch command present.'
        return HttpResponse(render_root(part=self, **kwargs))
    def iommi_collected_assets(self):
        # All assets gathered while binding the tree, in their declared order.
        return sort_after(self.iommi_root()._iommi_collected_assets)
def get_title(part):
    """Best-effort title for a part: a Header's first child, otherwise the
    part's own ``title`` attribute, otherwise the first title found by
    recursing into its ``parts``; None when nothing is found."""
    from iommi import Header
    if isinstance(part, Header):
        # The first child of a header is its visible text.
        for text in part.children.values():
            return text
    title = getattr(part, 'title', None)
    if title is not None:
        return title
    sub_parts = getattr(part, 'parts', None)
    if sub_parts is not None:
        for sub_part in sub_parts.values():
            found = get_title(sub_part)
            if found is not None:
                return found
    return None
@dispatch(
    render=EMPTY,
    context=EMPTY,
)
def render_root(*, part, context, **render):
    """Render a bound part into the style's full base template: resolve the
    style's base template and content block, render the part, gather assets,
    and splice everything into a dynamically built extends-template."""
    assert part._is_bound
    root_style_name = get_iommi_style_name(part)
    root_style = get_style(root_style_name)
    template_name = root_style.base_template
    content_block_name = root_style.content_block
    # Render early so that all the binds are forced before we look at all_assets,
    # since they are populated as a side-effect
    content = part.__html__(**render)
    assets = part.iommi_collected_assets()
    assert template_name, f"{root_style_name} doesn't have a base_template defined"
    assert content_block_name, f"{root_style_name} doesn't have a content_block defined"
    title = get_title(part)
    from iommi.debug import iommi_debug_panel
    from iommi import Page
    from iommi.fragment import Container
    # Caller-provided context wins over the computed defaults (it is unpacked last).
    context = dict(
        container=Container(_name='Container').bind(parent=part),
        content=content,
        title=title if title not in (None, MISSING) else '',
        iommi_debug_panel=iommi_debug_panel(part) if iommi_debug_on() else '',
        assets=assets,
        **(part.context if isinstance(part, Page) else {}),
        **context,
    )
    # Build a one-off template that extends the style's base template and
    # injects the rendered content into its content block.
    template_string = '{% extends "' + template_name + '" %} {% block ' + \
                      content_block_name + \
                      ' %}{{ iommi_debug_panel }}{{ content }}{% endblock %}'
    return get_template_from_string(template_string).render(context=context, request=part.get_request())
PartType = Union[Part, str, Template]
def request_data(request):
    """Return the request's GET or POST data, or an empty QueryDict when
    there is no request; asserts on any other HTTP method."""
    if request is None:
        return QueryDict()
    method = request.method
    if method == 'POST':
        return request.POST
    if method == 'GET':
        return request.GET
    assert False, f'unsupported request method {method}'
def as_html(*, request=None, part: PartType, context):
    """Render ``part`` to an html string: strings pass through, Django
    templates are rendered with a RequestContext, parts render themselves,
    and anything else is stringified."""
    if isinstance(part, str):
        return part
    if isinstance(part, template_types):
        from django.template import RequestContext
        # Guard against double-wrapping an already-built RequestContext.
        assert not isinstance(context, RequestContext)
        return mark_safe(part.render(context=RequestContext(request, context)))
    if hasattr(part, '__html__'):
        return part.__html__()
    return str(part)
|
const winston = require('winston');
const info = (message) => {
winston.info(message);
};
const error = (message) => {
winston.error(message);
};
module.exports = {
info,
error
};
|
"""
Running Distributed Pytorch Training using KF PytorchOperator
-------------------------------------------------------------------
This example is adapted from the default example available on Kubeflow's pytorch site.
`here <https://github.com/kubeflow/pytorch-operator/blob/b7fef224fef1ef0117f6e74961b557270fcf4b04/examples/mnist/mnist.py>`_
It has been modified to show how to integrate it with Flyte and can probably be simplified and cleaned up.
"""
import os
import typing
from dataclasses import dataclass
import matplotlib.pyplot as plt
import torch
import torch.nn.functional as F
from dataclasses_json import dataclass_json
from flytekit import Resources, task, workflow
from flytekit.types.directory import TensorboardLogs
from flytekit.types.file import PNGImageFile, PythonPickledFile
from flytekitplugins.kfpytorch import PyTorch
from tensorboardX import SummaryWriter
from torch import distributed as dist
from torch import nn, optim
from torchvision import datasets, transforms
WORLD_SIZE = int(os.environ.get("WORLD_SIZE", 1))
# %%
# Actual model
# -------------
class Net(nn.Module):
    """LeNet-style CNN for 28x28 single-channel inputs: two conv+maxpool
    stages followed by two fully connected layers, producing log-probabilities
    over 10 classes."""
    def __init__(self):
        super(Net, self).__init__()
        # Attribute names (conv1/conv2/fc1/fc2) determine state_dict keys,
        # so they must stay stable for checkpoint compatibility.
        self.conv1 = nn.Conv2d(1, 20, 5, 1)
        self.conv2 = nn.Conv2d(20, 50, 5, 1)
        self.fc1 = nn.Linear(4 * 4 * 50, 500)
        self.fc2 = nn.Linear(500, 10)
    def forward(self, x):
        out = F.max_pool2d(F.relu(self.conv1(x)), 2, 2)
        out = F.max_pool2d(F.relu(self.conv2(out)), 2, 2)
        out = out.view(-1, 4 * 4 * 50)  # flatten for the fully connected head
        out = F.relu(self.fc1(out))
        return F.log_softmax(self.fc2(out), dim=1)
# %%
# Trainer
# -------
def train(model, device, train_loader, optimizer, epoch, writer, log_interval):
    """Run one training epoch over ``train_loader``, printing progress every
    ``log_interval`` batches and logging the loss to TensorBoard."""
    model.train()
    n_batches = len(train_loader)
    n_samples = len(train_loader.dataset)
    for batch_idx, (data, target) in enumerate(train_loader):
        data = data.to(device)
        target = target.to(device)
        optimizer.zero_grad()
        loss = F.nll_loss(model(data), target)
        loss.backward()
        optimizer.step()
        if batch_idx % log_interval == 0:
            seen = batch_idx * len(data)
            print(
                "Train Epoch: {} [{}/{} ({:.0f}%)]\tloss={:.4f}".format(
                    epoch, seen, n_samples, 100.0 * batch_idx / n_batches, loss.item()
                )
            )
            # Global step: batches completed across all epochs so far.
            writer.add_scalar("loss", loss.item(), epoch * n_batches + batch_idx)
# %%
# Test the model
# ----------------
def test(model, device, test_loader, writer, epoch):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(
output, target, reduction="sum"
).item() # sum up batch loss
pred = output.max(1, keepdim=True)[
1
] # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
print("\naccuracy={:.4f}\n".format(float(correct) / len(test_loader.dataset)))
accuracy = float(correct) / len(test_loader.dataset)
writer.add_scalar("accuracy", accuracy, epoch)
return accuracy
def epoch_step(
    model, device, train_loader, test_loader, optimizer, epoch, writer, log_interval
):
    """Train for one epoch, then evaluate; returns the test accuracy."""
    train(model, device, train_loader, optimizer, epoch, writer, log_interval)
    accuracy = test(model, device, test_loader, writer, epoch)
    return accuracy
def should_distribute():
    """True when torch.distributed is usable and more than one worker is configured."""
    if not dist.is_available():
        return False
    return WORLD_SIZE > 1
def is_distributed():
    """True when a torch.distributed process group has been initialized."""
    if not dist.is_available():
        return False
    return dist.is_initialized()
# %%
# Training Hyperparameters
# -------------------------
#
@dataclass_json
@dataclass
class Hyperparameters(object):
    """
    Args:
        backend: torch.distributed backend to use (default: gloo)
        batch_size: input batch size for training (default: 64)
        test_batch_size: input batch size for testing (default: 1000)
        epochs: number of epochs to train (default: 10)
        learning_rate: learning rate (default: 0.01)
        sgd_momentum: SGD momentum (default: 0.5)
        seed: random seed (default: 1)
        log_interval: how many batches to wait before logging training status
    """
    backend: str = dist.Backend.GLOO
    sgd_momentum: float = 0.5
    seed: int = 1
    log_interval: int = 10
    batch_size: int = 64
    test_batch_size: int = 1000
    epochs: int = 10
    learning_rate: float = 0.01
# %%
# Actual Training algorithm
# ---------------------------
# The output model using `torch.save` saves the `state_dict` as described
# `in pytorch docs <https://pytorch.org/tutorials/beginner/saving_loading_models.html#saving-and-loading-models>`_.
# A common convention is to have the ``.pt`` extension for the file
#
# Notice we are also generating an output variable called logs, these logs can be used to visualize the training in
# Tensorboard and are the output of the `SummaryWriter` interface
# Refer to section :ref:`pytorch_tensorboard` to visualize the outputs of this example.
# NamedTuple so each output gets a stable, named binding in the Flyte workflow.
TrainingOutputs = typing.NamedTuple(
    "TrainingOutputs",
    epoch_accuracies=typing.List[float],
    model_state=PythonPickledFile,
    logs=TensorboardLogs,
)
@task(
    task_config=PyTorch(
        num_workers=2,
        per_replica_requests=Resources(cpu="500m", mem="4Gi", gpu="1"),
        per_replica_limits=Resources(mem="8Gi", gpu="1"),
    ),
    retries=2,
    cache=True,
    cache_version="1.0",
)
def mnist_pytorch_job(hp: Hyperparameters) -> TrainingOutputs:
    """Train the MNIST CNN (optionally distributed via torch.distributed when
    WORLD_SIZE > 1), returning per-epoch accuracies, the saved state_dict
    file, and the TensorBoard log directory."""
    log_dir = "logs"
    writer = SummaryWriter(log_dir)
    torch.manual_seed(hp.seed)
    use_cuda = torch.cuda.is_available()
    print(f"Use cuda {use_cuda}")
    device = torch.device("cuda" if use_cuda else "cpu")
    print("Using device: {}, world size: {}".format(device, WORLD_SIZE))
    if should_distribute():
        print("Using distributed PyTorch with {} backend".format(hp.backend))
        dist.init_process_group(backend=hp.backend)
    # LOAD Data
    # Downloads MNIST into ../data on first run.
    kwargs = {"num_workers": 1, "pin_memory": True} if use_cuda else {}
    train_loader = torch.utils.data.DataLoader(
        datasets.MNIST(
            "../data",
            train=True,
            download=True,
            transform=transforms.Compose(
                [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
            ),
        ),
        batch_size=hp.batch_size,
        shuffle=True,
        **kwargs,
    )
    test_loader = torch.utils.data.DataLoader(
        datasets.MNIST(
            "../data",
            train=False,
            transform=transforms.Compose(
                [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
            ),
        ),
        batch_size=hp.test_batch_size,
        shuffle=False,
        **kwargs,
    )
    # Train the model
    model = Net().to(device)
    if is_distributed():
        # NOTE(review): DistributedDataParallelCPU was removed in recent
        # PyTorch releases (plain DDP supports CPU) — confirm the pinned
        # torch version still provides it.
        Distributor = (
            nn.parallel.DistributedDataParallel
            if use_cuda
            else nn.parallel.DistributedDataParallelCPU
        )
        model = Distributor(model)
    optimizer = optim.SGD(
        model.parameters(), lr=hp.learning_rate, momentum=hp.sgd_momentum
    )
    accuracies = [
        epoch_step(
            model,
            device,
            train_loader,
            test_loader,
            optimizer,
            epoch,
            writer,
            hp.log_interval,
        )
        for epoch in range(1, hp.epochs + 1)
    ]
    # Save the model
    model_file = "mnist_cnn.pt"
    torch.save(model.state_dict(), model_file)
    return TrainingOutputs(
        epoch_accuracies=accuracies,
        model_state=PythonPickledFile(model_file),
        logs=TensorboardLogs(log_dir),
    )
# %%
# Let us plot the accuracy
# -------------------------
# We will output the accuracy plot as a PNG image
@task
def plot_accuracy(epoch_accuracies: typing.List[float]) -> PNGImageFile:
    """Plot the per-epoch accuracy curve and return it as a PNG file."""
    accuracy_plot = "accuracy.png"
    plt.plot(epoch_accuracies)
    plt.title("Accuracy")
    plt.ylabel("accuracy")
    plt.xlabel("epoch")
    plt.savefig(accuracy_plot)
    return PNGImageFile(accuracy_plot)
# %%
# Create a pipeline
# ------------------
# now the training and the plotting can be put together into a pipeline, in which case the training is performed
# first, followed by the plotting of the accuracy. Data is passed between them, and the workflow itself outputs the
# image and the serialized model
@workflow
def pytorch_training_wf(
    hp: Hyperparameters,
) -> (PythonPickledFile, PNGImageFile, TensorboardLogs):
    """Train the MNIST model, then plot its per-epoch accuracy curve."""
    accuracies, model, logs = mnist_pytorch_job(hp=hp)
    plot = plot_accuracy(epoch_accuracies=accuracies)
    return model, plot, logs
# %%
# Run the model locally
# ----------------------
# It is possible to run the model locally with almost no modifications (as long as the code takes care of the resolving
# if distributed or not)
if __name__ == "__main__":
    # Small local smoke run: 2 epochs keeps execution time short.
    model, plot, logs = pytorch_training_wf(
        hp=Hyperparameters(epochs=2, batch_size=128)
    )
    print(f"Model: {model}, plot PNG: {plot}, Tensorboard Log Dir: {logs}")
# %%
#
# .. _pytorch_tensorboard:
#
# Rendering the output logs in tensorboard
# -----------------------------------------
# When running locally, the output of execution looks like
#
# .. code-block::
#
# Model: /tmp/flyte/20210110_214129/mock_remote/8421ae4d041f76488e245edf3f4360d5/my_model.h5, plot PNG: /tmp/flyte/20210110_214129/mock_remote/cf6a2cd9d3ded89ed814278a8fb3678c/accuracy.png, Tensorboard Log Dir: /tmp/flyte/20210110_214129/mock_remote/a4b04e58e21f26f08f81df24094d6446/
#
# You can use the ``Tensorboard Log Dir: /tmp/flyte/20210110_214129/mock_remote/a4b04e58e21f26f08f81df24094d6446/`` as
# an input to tensorboard to visualize the training as follows
#
# .. prompt:: bash
#
# tensorboard --logdir /tmp/flyte/20210110_214129/mock_remote/a4b04e58e21f26f08f81df24094d6446/
#
#
# If running remotely (executing on Flyte hosted environment), the workflow execution outputs can be retrieved.
# Refer to .. TODO.
# You can retrieve the outputs - which will be a path to a blob store like S3, GCS, minio, etc. Tensorboad can be
# pointed to on your local laptop to visualize the results.
|
#!/usr/bin/env python
#
# Copyright 2014 tigmi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class KubernetesError(Exception):
    '''Base class for Kubernetes errors.

    Constructed as ``KubernetesError(message)`` or
    ``KubernetesError(message, code)``.
    '''
    @property
    def message(self):
        '''The first constructor argument: a human-readable description.'''
        return self.args[0]
    @property
    def code(self):
        '''The second constructor argument when provided, else -1.'''
        return self.args[1] if len(self.args) >= 2 else -1
class ReqNotSupportedError(Exception):
    '''Raised for requests this client does not support.'''
    pass
|
/**
* @author Don McCurdy / https://www.donmccurdy.com
*/
import {
BufferAttribute,
BufferGeometry,
FileLoader,
Loader
} from "../../../build/three.module.js";
/**
 * Loader for geometry compressed with Google's Draco library.
 * Decoding happens off the main thread in a pool of web workers.
 */
var DRACOLoader = function ( manager ) {
	Loader.call( this, manager );
	// Location of the decoder (JS/WASM) files; set via setDecoderPath().
	this.decoderPath = '';
	this.decoderConfig = {};
	this.decoderBinary = null;
	this.decoderPending = null;
	// Worker pool: at most workerLimit workers, reused across decode tasks.
	this.workerLimit = 4;
	this.workerPool = [];
	this.workerNextTaskID = 1;
	this.workerSourceURL = '';
	// Default mapping of geometry attributes to Draco semantic names...
	this.defaultAttributeIDs = {
		position: 'POSITION',
		normal: 'NORMAL',
		color: 'COLOR',
		uv: 'TEX_COORD'
	};
	// ...and to the typed-array type used for each decoded attribute.
	this.defaultAttributeTypes = {
		position: 'Float32Array',
		normal: 'Float32Array',
		color: 'Float32Array',
		uv: 'Float32Array'
	};
};
DRACOLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
	constructor: DRACOLoader,
	// Sets the directory from which decoder JS/WASM files are fetched.
	setDecoderPath: function ( path ) {
		this.decoderPath = path;
		return this;
	},
	// Decoder options; e.g. { type: 'js' } forces the JS (non-WASM) decoder.
	setDecoderConfig: function ( config ) {
		this.decoderConfig = config;
		return this;
	},
	// Maximum number of web workers kept in the pool.
	setWorkerLimit: function ( workerLimit ) {
		this.workerLimit = workerLimit;
		return this;
	},
	/** @deprecated */
	setVerbosity: function () {
		console.warn( 'THREE.DRACOLoader: The .setVerbosity() method has been removed.' );
	},
	/** @deprecated */
	setDrawMode: function () {
		console.warn( 'THREE.DRACOLoader: The .setDrawMode() method has been removed.' );
	},
	/** @deprecated */
	setSkipDequantization: function () {
		console.warn( 'THREE.DRACOLoader: The .setSkipDequantization() method has been removed.' );
	},
	// Fetches a .drc file and decodes it with the default attribute mapping.
	load: function ( url, onLoad, onProgress, onError ) {
		var loader = new FileLoader( this.manager );
		loader.setPath( this.path );
		loader.setResponseType( 'arraybuffer' );
		loader.setRequestHeader( this.requestHeader );
		if ( this.crossOrigin === 'use-credentials' ) {
			loader.setWithCredentials( true );
		}
		loader.load( url, ( buffer ) => {
			var taskConfig = {
				attributeIDs: this.defaultAttributeIDs,
				attributeTypes: this.defaultAttributeTypes,
				useUniqueIDs: false
			};
			this.decodeGeometry( buffer, taskConfig )
				.then( onLoad )
				.catch( onError );
		}, onProgress, onError );
	},
	/** @deprecated Kept for backward-compatibility with previous DRACOLoader versions. */
	decodeDracoFile: function ( buffer, callback, attributeIDs, attributeTypes ) {
		var taskConfig = {
			attributeIDs: attributeIDs || this.defaultAttributeIDs,
			attributeTypes: attributeTypes || this.defaultAttributeTypes,
			useUniqueIDs: !! attributeIDs
		};
		this.decodeGeometry( buffer, taskConfig ).then( callback );
	},
	// Decodes an ArrayBuffer on a pooled worker; returns a Promise<BufferGeometry>.
	decodeGeometry: function ( buffer, taskConfig ) {
		// TODO: For backward-compatibility, support 'attributeTypes' objects containing
		// references (rather than names) to typed array constructors. These must be
		// serialized before sending them to the worker.
		for ( var attribute in taskConfig.attributeTypes ) {
			var type = taskConfig.attributeTypes[ attribute ];
			if ( type.BYTES_PER_ELEMENT !== undefined ) {
				taskConfig.attributeTypes[ attribute ] = type.name;
			}
		}
		//
		var taskKey = JSON.stringify( taskConfig );
		// Check for an existing task using this buffer. A transferred buffer cannot be transferred
		// again from this thread.
		if ( DRACOLoader.taskCache.has( buffer ) ) {
			var cachedTask = DRACOLoader.taskCache.get( buffer );
			if ( cachedTask.key === taskKey ) {
				return cachedTask.promise;
			} else if ( buffer.byteLength === 0 ) {
				// Technically, it would be possible to wait for the previous task to complete,
				// transfer the buffer back, and decode again with the second configuration. That
				// is complex, and I don't know of any reason to decode a Draco buffer twice in
				// different ways, so this is left unimplemented.
				throw new Error(
					'THREE.DRACOLoader: Unable to re-decode a buffer with different ' +
					'settings. Buffer has already been transferred.'
				);
			}
		}
		//
		var worker;
		var taskID = this.workerNextTaskID ++;
		var taskCost = buffer.byteLength;
		// Obtain a worker and assign a task, and construct a geometry instance
		// when the task completes.
		var geometryPending = this._getWorker( taskID, taskCost )
			.then( ( _worker ) => {
				worker = _worker;
				return new Promise( ( resolve, reject ) => {
					worker._callbacks[ taskID ] = { resolve, reject };
					// The buffer is transferred (not copied) to the worker.
					worker.postMessage( { type: 'decode', id: taskID, taskConfig, buffer }, [ buffer ] );
					// this.debug();
				} );
			} )
			.then( ( message ) => this._createGeometry( message.geometry ) );
		// Remove task from the task list.
		// Note: replaced '.finally()' with '.catch().then()' block - iOS 11 support (#19416)
		geometryPending
			.catch( () => true )
			.then( () => {
				if ( worker && taskID ) {
					this._releaseTask( worker, taskID );
					// this.debug();
				}
			} );
		// Cache the task result.
		DRACOLoader.taskCache.set( buffer, {
			key: taskKey,
			promise: geometryPending
		} );
		return geometryPending;
	},
	// Builds a BufferGeometry from the plain attribute/index data sent back by the worker.
	_createGeometry: function ( geometryData ) {
		var geometry = new BufferGeometry();
		if ( geometryData.index ) {
			geometry.setIndex( new BufferAttribute( geometryData.index.array, 1 ) );
		}
		for ( var i = 0; i < geometryData.attributes.length; i ++ ) {
			var attribute = geometryData.attributes[ i ];
			var name = attribute.name;
			var array = attribute.array;
			var itemSize = attribute.itemSize;
			geometry.setAttribute( name, new BufferAttribute( array, itemSize ) );
		}
		return geometry;
	},
	// Fetches a decoder support file (JS source or WASM binary) from decoderPath.
	_loadLibrary: function ( url, responseType ) {
		var loader = new FileLoader( this.manager );
		loader.setPath( this.decoderPath );
		loader.setResponseType( responseType );
		return new Promise( ( resolve, reject ) => {
			loader.load( url, resolve, undefined, reject );
		} );
	},
	// Optionally call ahead of time to start fetching the decoder early.
	preload: function () {
		this._initDecoder();
		return this;
	},
	// Loads the decoder (JS or WASM) once and builds the worker source blob URL.
	_initDecoder: function () {
		if ( this.decoderPending ) return this.decoderPending;
		var useJS = typeof WebAssembly !== 'object' || this.decoderConfig.type === 'js';
		var librariesPending = [];
		if ( useJS ) {
			librariesPending.push( this._loadLibrary( 'draco_decoder.js', 'text' ) );
		} else {
			librariesPending.push( this._loadLibrary( 'draco_wasm_wrapper.js', 'text' ) );
			librariesPending.push( this._loadLibrary( 'draco_decoder.wasm', 'arraybuffer' ) );
		}
		this.decoderPending = Promise.all( librariesPending )
			.then( ( libraries ) => {
				var jsContent = libraries[ 0 ];
				if ( ! useJS ) {
					this.decoderConfig.wasmBinary = libraries[ 1 ];
				}
				var fn = DRACOLoader.DRACOWorker.toString();
				// Concatenate the decoder source with the body of DRACOWorker
				// into a single script served from an object URL.
				var body = [
					'/* draco decoder */',
					jsContent,
					'',
					'/* worker */',
					fn.substring( fn.indexOf( '{' ) + 1, fn.lastIndexOf( '}' ) )
				].join( '\n' );
				this.workerSourceURL = URL.createObjectURL( new Blob( [ body ] ) );
			} );
		return this.decoderPending;
	},
	// Returns a worker for the task: spawns a new one while under the limit,
	// otherwise reuses the least-loaded worker in the pool.
	_getWorker: function ( taskID, taskCost ) {
		return this._initDecoder().then( () => {
			if ( this.workerPool.length < this.workerLimit ) {
				var worker = new Worker( this.workerSourceURL );
				worker._callbacks = {};
				worker._taskCosts = {};
				worker._taskLoad = 0;
				worker.postMessage( { type: 'init', decoderConfig: this.decoderConfig } );
				worker.onmessage = function ( e ) {
					var message = e.data;
					switch ( message.type ) {
						case 'decode':
							worker._callbacks[ message.id ].resolve( message );
							break;
						case 'error':
							worker._callbacks[ message.id ].reject( message );
							break;
						default:
							console.error( 'THREE.DRACOLoader: Unexpected message, "' + message.type + '"' );
					}
				};
				this.workerPool.push( worker );
			} else {
				// Sort descending by load so the last entry is the least loaded.
				this.workerPool.sort( function ( a, b ) {
					return a._taskLoad > b._taskLoad ? - 1 : 1;
				} );
			}
			var worker = this.workerPool[ this.workerPool.length - 1 ];
			worker._taskCosts[ taskID ] = taskCost;
			worker._taskLoad += taskCost;
			return worker;
		} );
	},
	// Removes a finished task's bookkeeping from its worker.
	_releaseTask: function ( worker, taskID ) {
		worker._taskLoad -= worker._taskCosts[ taskID ];
		delete worker._callbacks[ taskID ];
		delete worker._taskCosts[ taskID ];
	},
	debug: function () {
		console.log( 'Task load: ', this.workerPool.map( ( worker ) => worker._taskLoad ) );
	},
	// Terminates all pooled workers; the loader may be reused afterwards.
	dispose: function () {
		for ( var i = 0; i < this.workerPool.length; ++ i ) {
			this.workerPool[ i ].terminate();
		}
		this.workerPool.length = 0;
		return this;
	}
} );
/* WEB WORKER */
// Body of the web worker. Never called directly: its source is extracted via
// toString(), concatenated with the Draco decoder, and run inside a Worker.
DRACOLoader.DRACOWorker = function () {
	var decoderConfig;
	var decoderPending;
	onmessage = function ( e ) {
		var message = e.data;
		switch ( message.type ) {
			case 'init':
				decoderConfig = message.decoderConfig;
				decoderPending = new Promise( function ( resolve/*, reject*/ ) {
					decoderConfig.onModuleLoaded = function ( draco ) {
						// Module is Promise-like. Wrap before resolving to avoid loop.
						resolve( { draco: draco } );
					};
					DracoDecoderModule( decoderConfig );
				} );
				break;
			case 'decode':
				var buffer = message.buffer;
				var taskConfig = message.taskConfig;
				decoderPending.then( ( module ) => {
					var draco = module.draco;
					var decoder = new draco.Decoder();
					var decoderBuffer = new draco.DecoderBuffer();
					decoderBuffer.Init( new Int8Array( buffer ), buffer.byteLength );
					try {
						var geometry = decodeGeometry( draco, decoder, decoderBuffer, taskConfig );
						// Transfer (don't copy) the decoded arrays back to the main thread.
						var buffers = geometry.attributes.map( ( attr ) => attr.array.buffer );
						if ( geometry.index ) buffers.push( geometry.index.array.buffer );
						self.postMessage( { type: 'decode', id: message.id, geometry }, buffers );
					} catch ( error ) {
						console.error( error );
						self.postMessage( { type: 'error', id: message.id, error: error.message } );
					} finally {
						// Emscripten-allocated objects must be freed explicitly.
						draco.destroy( decoderBuffer );
						draco.destroy( decoder );
					}
				} );
				break;
		}
	};
	// Decodes the buffer into { index, attributes } of plain typed arrays.
	function decodeGeometry( draco, decoder, decoderBuffer, taskConfig ) {
		var attributeIDs = taskConfig.attributeIDs;
		var attributeTypes = taskConfig.attributeTypes;
		var dracoGeometry;
		var decodingStatus;
		var geometryType = decoder.GetEncodedGeometryType( decoderBuffer );
		if ( geometryType === draco.TRIANGULAR_MESH ) {
			dracoGeometry = new draco.Mesh();
			decodingStatus = decoder.DecodeBufferToMesh( decoderBuffer, dracoGeometry );
		} else if ( geometryType === draco.POINT_CLOUD ) {
			dracoGeometry = new draco.PointCloud();
			decodingStatus = decoder.DecodeBufferToPointCloud( decoderBuffer, dracoGeometry );
		} else {
			throw new Error( 'THREE.DRACOLoader: Unexpected geometry type.' );
		}
		if ( ! decodingStatus.ok() || dracoGeometry.ptr === 0 ) {
			throw new Error( 'THREE.DRACOLoader: Decoding failed: ' + decodingStatus.error_msg() );
		}
		var geometry = { index: null, attributes: [] };
		// Gather all vertex attributes.
		for ( var attributeName in attributeIDs ) {
			var attributeType = self[ attributeTypes[ attributeName ] ];
			var attribute;
			var attributeID;
			// A Draco file may be created with default vertex attributes, whose attribute IDs
			// are mapped 1:1 from their semantic name (POSITION, NORMAL, ...). Alternatively,
			// a Draco file may contain a custom set of attributes, identified by known unique
			// IDs. glTF files always do the latter, and `.drc` files typically do the former.
			if ( taskConfig.useUniqueIDs ) {
				attributeID = attributeIDs[ attributeName ];
				attribute = decoder.GetAttributeByUniqueId( dracoGeometry, attributeID );
			} else {
				attributeID = decoder.GetAttributeId( dracoGeometry, draco[ attributeIDs[ attributeName ] ] );
				if ( attributeID === - 1 ) continue;
				attribute = decoder.GetAttribute( dracoGeometry, attributeID );
			}
			geometry.attributes.push( decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) );
		}
		// Add index.
		if ( geometryType === draco.TRIANGULAR_MESH ) {
			// Generate mesh faces.
			var numFaces = dracoGeometry.num_faces();
			var numIndices = numFaces * 3;
			var index = new Uint32Array( numIndices );
			var indexArray = new draco.DracoInt32Array();
			for ( var i = 0; i < numFaces; ++ i ) {
				decoder.GetFaceFromMesh( dracoGeometry, i, indexArray );
				for ( var j = 0; j < 3; ++ j ) {
					index[ i * 3 + j ] = indexArray.GetValue( j );
				}
			}
			geometry.index = { array: index, itemSize: 1 };
			draco.destroy( indexArray );
		}
		draco.destroy( dracoGeometry );
		return geometry;
	}
	// Copies one Draco attribute into a plain typed array of the requested type.
	function decodeAttribute( draco, decoder, dracoGeometry, attributeName, attributeType, attribute ) {
		var numComponents = attribute.num_components();
		var numPoints = dracoGeometry.num_points();
		var numValues = numPoints * numComponents;
		var dracoArray;
		var array;
		switch ( attributeType ) {
			case Float32Array:
				dracoArray = new draco.DracoFloat32Array();
				decoder.GetAttributeFloatForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Float32Array( numValues );
				break;
			case Int8Array:
				dracoArray = new draco.DracoInt8Array();
				decoder.GetAttributeInt8ForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Int8Array( numValues );
				break;
			case Int16Array:
				dracoArray = new draco.DracoInt16Array();
				decoder.GetAttributeInt16ForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Int16Array( numValues );
				break;
			case Int32Array:
				dracoArray = new draco.DracoInt32Array();
				decoder.GetAttributeInt32ForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Int32Array( numValues );
				break;
			case Uint8Array:
				dracoArray = new draco.DracoUInt8Array();
				decoder.GetAttributeUInt8ForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Uint8Array( numValues );
				break;
			case Uint16Array:
				dracoArray = new draco.DracoUInt16Array();
				decoder.GetAttributeUInt16ForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Uint16Array( numValues );
				break;
			case Uint32Array:
				dracoArray = new draco.DracoUInt32Array();
				decoder.GetAttributeUInt32ForAllPoints( dracoGeometry, attribute, dracoArray );
				array = new Uint32Array( numValues );
				break;
			default:
				throw new Error( 'THREE.DRACOLoader: Unexpected attribute type.' );
		}
		for ( var i = 0; i < numValues; i ++ ) {
			array[ i ] = dracoArray.GetValue( i );
		}
		draco.destroy( dracoArray );
		return {
			name: attributeName,
			array: array,
			itemSize: numComponents
		};
	}
};
// WeakMap used as a shared decode-task cache; it is populated and read
// elsewhere in this file (not visible in this chunk).
DRACOLoader.taskCache = new WeakMap();
/** Deprecated static methods */
// The decoder configuration moved from static (class-level) state to
// per-instance state; each stub below warns and does nothing.
/** @deprecated */
DRACOLoader.setDecoderPath = function () {
console.warn( 'THREE.DRACOLoader: The .setDecoderPath() method has been removed. Use instance methods.' );
};
/** @deprecated */
DRACOLoader.setDecoderConfig = function () {
console.warn( 'THREE.DRACOLoader: The .setDecoderConfig() method has been removed. Use instance methods.' );
};
/** @deprecated */
DRACOLoader.releaseDecoderModule = function () {
console.warn( 'THREE.DRACOLoader: The .releaseDecoderModule() method has been removed. Use instance methods.' );
};
/** @deprecated */
DRACOLoader.getDecoderModule = function () {
console.warn( 'THREE.DRACOLoader: The .getDecoderModule() method has been removed. Use instance methods.' );
};
export { DRACOLoader };
|
from lib.utils import *
from intervaltree import IntervalTree
def load(virus):
    """Return (escape_fname, region_fname) for the given virus key.

    Raises:
        ValueError: if `virus` is not one of the supported keys.
    """
    file_map = {
        'h1': (('target/flu/semantics/cache/'
                'analyze_semantics_flu_h1_bilstm_512.txt'),
               'data/influenza/h1_regions.txt'),
        'h3': (('target/flu/semantics/cache/'
                'analyze_semantics_flu_h3_bilstm_512.txt'),
               'data/influenza/h3_regions.txt'),
        'hiv': (('target/hiv/semantics/cache/'
                 'analyze_semantics_hiv_bilstm_512.txt'),
                'data/hiv/bg505_regions.txt'),
        'sarscov2': (('target/cov/semantics/cache/'
                      'analyze_semantics_cov_bilstm_512.txt'),
                     'data/cov/sarscov2_regions.txt'),
    }
    if virus not in file_map:
        raise ValueError('Virus {} not supported'.format(virus))
    return file_map[virus]
def regional_escape(virus, beta=1., n_permutations=100000):
    """Score protein regions for enrichment/depletion of escape potential.

    For each named region in the virus's region file, compares the mean
    "acquisition" score (rank of semantic change plus `beta` times rank of
    grammaticality) of mutations at positions inside the region against a
    null distribution of `n_permutations` random position sets of the same
    size, prints one-sided P-values in both directions, and saves a bar
    plot to figures/regional_escape_<virus>.svg.

    Args:
        virus: dataset key understood by load() ('h1', 'h3', 'hiv',
            'sarscov2').
        beta: weight of the grammaticality (prob) rank relative to the
            semantic-change rank.
        n_permutations: number of random draws per region for the
            permutation test.
    """
    escape_fname, region_fname = load(virus)

    # Parse protein regions, keep track of intervals,
    # sizes and scores.
    regions = IntervalTree()
    name2size, name2escape = {}, {}
    with open(region_fname) as f:
        f.readline()  # Skip the first line (assumed header).
        for line in f:
            [ start, end, name ] = line.rstrip().split()
            # Convert 1-based inclusive coordinates to 0-based.
            start, end = int(start) - 1, int(end) - 1
            regions[start:(end + 1)] = name
            if name not in name2escape:
                name2escape[name] = []
                name2size[name] = 0
            # A named region may span multiple intervals; accumulate size.
            name2size[name] += end - start + 1

    # Load semantic data into memory.
    data = []
    with open(escape_fname) as f:
        columns = f.readline().rstrip().split()
        for line in f:
            data.append(line.rstrip().split('\t'))
    df_all = pd.DataFrame(data, columns=columns)
    df_all['pos'] = pd.to_numeric(df_all['pos'])
    df_all['prob'] = pd.to_numeric(df_all['prob'])
    df_all['change'] = pd.to_numeric(df_all['change'])
    # Rank-transform both signals so they are on a comparable scale before
    # combining them into a single acquisition priority.
    df_all['acquisition'] = ss.rankdata(df_all.change) + \
        (beta * ss.rankdata(df_all.prob))

    # Reformat data for easy plotting and P-value computation.
    plot_data = []
    pos2scores = {}
    for i in range(len(df_all)):
        pos = df_all['pos'][i]
        acquisition = df_all['acquisition'][i]
        names = regions[pos]
        for name in names:
            name2escape[name.data].append(acquisition)
            # NOTE(review): this append is dead code — plot_data is
            # re-initialized below before it is ever read.
            plot_data.append([ name.data, acquisition ])
        if pos not in pos2scores:
            pos2scores[pos] = []
        pos2scores[pos].append(acquisition)

    # Compute permutation-based P-value for each region.
    seq_start = min(df_all['pos'])
    seq_end = max(df_all['pos'])
    all_pos = list(range(seq_start, seq_end + 1))
    plot_data = []
    for name in name2escape:
        real_score = np.mean(name2escape[name])
        size = name2size[name]
        null_distribution = []
        for perm in range(n_permutations):
            # One null sample: mean score over a random position set of
            # the same total size as the region.
            rand_positions = np.random.choice(all_pos, size=size,
                                              replace=False)
            null_score = np.concatenate([
                np.array(pos2scores[pos]) for pos in rand_positions
            ]).mean()
            null_distribution.append(null_score)
        null_distribution = np.array(null_distribution)

        tprint('Enriched for escapes:')
        # One-sided P-value; clamped to 1/n_permutations when no null
        # sample reaches the observed score.
        p_val = (sum(null_distribution >= real_score)) / \
            (n_permutations)
        if p_val == 0:
            p_val = 1. / n_permutations
            tprint('{}, P < {}'.format(name, p_val))
        else:
            tprint('{}, P = {}'.format(name, p_val))
        plot_data.append([ name, -np.log10(p_val), 'enriched' ])

        tprint('Depleted for escapes:')
        p_val = (sum(null_distribution <= real_score)) / \
            (n_permutations)
        if p_val == 0:
            p_val = 1. / n_permutations
            tprint('{}, P < {}'.format(name, p_val))
        else:
            tprint('{}, P = {}'.format(name, p_val))
        plot_data.append([ name, -np.log10(p_val), 'depleted' ])
        tprint('')

    # Plot each region in bar plot.
    plot_data = pd.DataFrame(plot_data,
                             columns=[ 'region', 'score', 'direction' ])
    plt.figure()
    sns.barplot(data=plot_data, x='region', y='score', hue='direction',
                order=sorted(set(plot_data['region'])))
    # Multiple-testing threshold: 0.05 divided by the number of regions
    # (Bonferroni-style, despite the variable name 'fdr').
    fdr = 0.05 / len(sorted(set(plot_data['region'])))
    plt.axhline(y=-np.log10(fdr), color='gray', linestyle='--')
    plt.xticks(rotation=60)
    plt.savefig('figures/regional_escape_{}.svg'.format(virus))
if __name__ == '__main__':
    # CLI entry point: the first argument selects the virus dataset.
    regional_escape(sys.argv[1])
|
module.exports = {
'E2E': process.env.E2E,
'projectOverrides': JSON.stringify({
bulletTrain: process.env.BULLET_TRAIN,
ga: process.env.GA,
crispChat: process.env.CRISP_CHAT,
mixpanel: process.env.MIXPANEL,
sentry: process.env.SENTRY,
api: process.env.API_URL,
assetURL: process.env.ASSET_URL,
}),
};
|
import unittest
from scrapydd.webhook import *
from scrapydd.models import WebhookJob
from six import StringIO
import tornado
import tornado.web
from tornado.testing import AsyncTestCase, AsyncHTTPTestCase
import json
class WebhookRequestHandler(tornado.web.RequestHandler):
    """Receives webhook POSTs and records the decoded row batch on the
    owning test case for later assertions."""

    def initialize(self, test):
        # Keep a handle to the test case so post() can record batches.
        self.test = test

    def post(self):
        # Body arguments arrive column-wise (one key, many values); pivot
        # them back into row dicts.
        batch = []
        for key, values in self.request.body_arguments.items():
            if not batch:
                batch = [{} for _ in range(len(values))]
            if len(batch) != len(values):
                raise Exception('rows are not aligned')
            for index, value in enumerate(values):
                batch[index][key] = value
        self.test.batches.append(batch)
@unittest.skip
class WebhookJobExecutorTest(AsyncHTTPTestCase):
    """Integration tests for WebhookJobExecutor posting item batches to a
    local tornado handler. Skipped at class level."""

    def setUp(self):
        super(WebhookJobExecutorTest, self).setUp()
        # Batches recorded by WebhookRequestHandler during each test.
        self.batches = []
        os.environ['ASYNC_TEST_TIMEOUT'] = '30'

    def get_app(self):
        return tornado.web.Application([
            ('/update$', WebhookRequestHandler, {'test': self}),
        ])

    @tornado.testing.gen_test
    def test_execute(self):
        """A single item is posted as one batch containing one row."""
        data = {'a': 1}
        item_file = StringIO(json.dumps(data))
        job = WebhookJob()
        job.id = 1
        job.items_file = item_file
        job.job_id = 1
        http_port = self.get_http_port()
        job.payload_url = 'http://localhost:%s/update' % http_port
        target = WebhookJobExecutor(job, item_file, 1000)
        actual = yield target.start()
        self.assertEqual(job, actual)
        # only one post
        self.assertEqual(len(self.batches), 1)
        # the first post has one row
        self.assertEqual(len(self.batches[0]), 1)
        # the data of first post (form-encoded values arrive as strings)
        self.assertEqual(self.batches[0][0]['a'], '1')

    @unittest.skip
    @tornado.testing.gen_test
    def test_execute_over_memory_limit(self):
        """Executor aborts without posting when the memory limit is hit."""
        item_file = StringIO('{"a":1}')
        job = WebhookJob()
        job.id = 1
        job.items_file = item_file
        job.job_id = 1
        http_port = self.get_http_port()
        job.payload_url = 'http://localhost:%s/update' % http_port
        # Limit of 1 byte guarantees the executor exceeds it immediately.
        target = WebhookJobExecutor(job, item_file, 1)
        try:
            job = yield target.start()
            self.fail('WebhookMemoryLimitError not catched')
        except WebhookJobOverMemoryLimitError as e:
            self.assertEqual(e.executor.job, job)
        # no data actually posted
        self.assertEqual(len(self.batches), 0)

    @unittest.skip
    @tornado.testing.gen_test
    def test_execute_jl_decoding_error(self):
        """Malformed json-lines input raises WebhookJobJlDecodeError."""
        item_file = StringIO('{"a":11,12,3,123,}')
        job = WebhookJob()
        job.id = 1
        job.items_file = item_file
        job.job_id = 1
        http_port = self.get_http_port()
        job.payload_url = 'http://localhost:%s/update' % http_port
        target = WebhookJobExecutor(job, item_file, 10000)
        try:
            job = yield target.start()
            self.fail('WebhookJobJlDecodeError not catched')
        except WebhookJobJlDecodeError as e:
            self.assertEqual(job, e.executor.job)
            self.assertEqual(e.message, 'Error when decoding jl file')
        self.assertEqual(len(self.batches), 0)

    @unittest.skip
    @tornado.testing.gen_test
    def test_execute_webhook_address_not_reachable(self):
        """An unreachable payload URL surfaces an error; nothing is posted."""
        item_file = StringIO('{"a":1}')
        job = WebhookJob()
        job.id = 1
        job.items_file = item_file
        job.job_id = 1
        job.payload_url = 'http://notreachable/update'
        target = WebhookJobExecutor(job, item_file, 10000)
        try:
            job = yield target.start()
            self.fail('WebhookJobJlDecodeError not catched')
        except Exception as e:
            # self.assertEqual(job, e.executor.job)
            # NOTE(review): e.message does not exist on Python 3 exceptions;
            # this skipped test would need str(e) instead.
            logging.debug(e.message)
            self.assertIsNotNone(e.message)
            self.assertIsNotNone(str(e))
        self.assertEqual(len(self.batches), 0)

    @tornado.testing.gen_test
    def test_execute_batch_1(self):
        """With max_batch_size=1, three items are posted as three batches."""
        data = [{'a': 1},
                {'a': 2},
                {'a': 3}]
        item_file = StringIO()
        batch_size = 1
        for row in data:
            item_file.write(json.dumps(row) + os.linesep)
        item_file.seek(0)
        job = WebhookJob()
        job.id = 1
        job.items_file = item_file
        job.job_id = 1
        http_port = self.get_http_port()
        job.payload_url = 'http://localhost:%s/update' % http_port
        target = WebhookJobExecutor(job, item_file, 10000, max_batch_size=batch_size)
        yield target.start()
        self.assertEqual(len(self.batches), 3)
        self.assertEqual(len(self.batches[0]), 1)
        self.assertEqual(self.batches[0][0]['a'], '1')
        self.assertEqual(len(self.batches[1]), 1)
        self.assertEqual(self.batches[1][0]['a'], '2')
        # Bug fix: the original re-checked batches[1] here (copy-paste)
        # instead of verifying the size of the third batch.
        self.assertEqual(len(self.batches[2]), 1)
        self.assertEqual(self.batches[2][0]['a'], '3')
class TestJobFinishTriggerWebhook:
    """Placeholder test class — no tests implemented yet."""
    pass
|
/*=========================================================================
Program: Visualization Toolkit
Module: vtkCocoaRenderWindow.h
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
/**
* @class vtkCocoaRenderWindow
* @brief Cocoa OpenGL rendering window
*
*
* vtkCocoaRenderWindow is a concrete implementation of the abstract
* class vtkOpenGLRenderWindow. It is only available on Mac OS X.
* To use this class, build VTK with VTK_USE_COCOA turned ON (this is
* the default).
* This class can be used by 32 and 64 bit processes, and either in
* garbage collected or reference counted modes. ARC is not yet supported.
* vtkCocoaRenderWindow uses Objective-C++, and the OpenGL and
* Cocoa APIs. This class's default behaviour is to create an NSWindow and
* a vtkCocoaGLView which are used together to draw all VTK content.
* If you already have an NSWindow and vtkCocoaGLView and you want this
* class to use them you must call both SetRootWindow() and SetWindowId(),
* respectively, early on (before WindowInitialize() is executed).
*
* @sa
* vtkOpenGLRenderWindow vtkCocoaGLView
*
* @warning
* This header must be in C++ only because it is included by .cxx files.
* That means no Objective-C may be used. That's why some instance variables
* are void* instead of what they really should be.
*/
#ifndef vtkCocoaRenderWindow_h
#define vtkCocoaRenderWindow_h
#include "vtkRenderingOpenGL2Module.h" // For export macro
#include <stack> // for ivar
#include "vtkOpenGLRenderWindow.h"
class VTKRENDERINGOPENGL2_EXPORT vtkCocoaRenderWindow : public vtkOpenGLRenderWindow
{
public:
  static vtkCocoaRenderWindow *New();
  vtkTypeMacro(vtkCocoaRenderWindow,vtkOpenGLRenderWindow);
  void PrintSelf(ostream& os, vtkIndent indent) override;

  /**
   * Begin the rendering process.
   */
  void Start() override;

  /**
   * Finish the rendering process.
   */
  void Frame() override;

  /**
   * Specify various window parameters.
   */
  virtual void WindowConfigure();

  /**
   * Initialize the window for rendering.
   * virtual void WindowInitialize();
   */

  /**
   * Initialize the rendering window.
   */
  void Initialize() override;

  /**
   * Change the window to fill the entire screen. This is only partially
   * implemented for the vtkCocoaRenderWindow. It can only be called
   * before the window has been created, and it might not work on all
   * versions of OS X.
   */
  void SetFullScreen(vtkTypeBool) override;

  /**
   * Remap the window. This is not implemented for the vtkCocoaRenderWindow.
   */
  void WindowRemap() override;

  /**
   * Set the preferred window size to full screen. This is not implemented
   * for the vtkCocoaRenderWindow.
   */
  virtual void PrefFullScreen();

  //@{
  /**
   * Set the size of the window in pixels.
   */
  void SetSize(int a[2]) override;
  void SetSize(int,int) override;
  //@}

  /**
   * Get the current size of the window in pixels.
   */
  int *GetSize() VTK_SIZEHINT(2) override;

  //@{
  /**
   * Set the position of the window.
   */
  void SetPosition(int a[2]) override;
  void SetPosition(int,int) override;
  //@}

  /**
   * Get the current size of the screen in pixels.
   */
  int *GetScreenSize() VTK_SIZEHINT(2) override;

  /**
   * Get the position in screen coordinates of the window.
   */
  int *GetPosition() VTK_SIZEHINT(2) override;

  /**
   * Set the name of the window. This appears at the top of the window
   * normally.
   */
  void SetWindowName(const char *) override;

  void SetNextWindowInfo(char *) override
  {
    vtkWarningMacro("SetNextWindowInfo not implemented (WindowRemap not implemented).");
  }
  void* GetGenericDrawable() override
  {
    vtkWarningMacro("Method not implemented.");
    return nullptr;
  }
  void SetDisplayId(void*) override
  {
    vtkWarningMacro("Method not implemented.");
  }
  void *GetGenericDisplayId() override
  {
    vtkWarningMacro("Method not implemented.");
    return nullptr;
  }

  /**
   * Set this RenderWindow's window id to a pre-existing window.
   * The parameter is an ASCII string of a decimal number representing
   * a pointer to the window.
   */
  void SetWindowInfo(char*) override;

  /**
   * See the documentation for SetParentId(). This method allows the ParentId
   * to be set as an ASCII string of a decimal number that is the memory
   * address of the parent NSView.
   */
  void SetParentInfo(char*) override;

  void SetNextWindowId(void*) override
  {
    vtkWarningMacro("SetNextWindowId not implemented (WindowRemap not implemented).");
  }

  /**
   * Initialize the render window from the information associated
   * with the currently activated OpenGL context.
   */
  bool InitializeFromCurrentContext() override;

  /**
   * Does this platform support render window data sharing.
   */
  // Note: stray semicolon after the inline body removed (ill-formed
  // pre-C++11, warned about by -Wextra-semi).
  bool GetPlatformSupportsRenderWindowSharing() override { return true; }

  /**
   * Prescribe that the window be created in a stereo-capable mode. This
   * method must be called before the window is realized. This method
   * overrides the superclass method since this class can actually check
   * whether the window has been realized yet.
   */
  void SetStereoCapableWindow(vtkTypeBool capable) override;

  /**
   * Make this windows OpenGL context the current context.
   */
  void MakeCurrent() override;

  /**
   * Tells if this window is the current OpenGL context for the calling thread.
   */
  bool IsCurrent() override;

  /**
   * Test if the window has a valid drawable. This is
   * currently only an issue on Mac OS X Cocoa where rendering
   * to an invalid drawable results in all OpenGL calls to fail
   * with "invalid framebuffer operation".
   */
  bool IsDrawable() override;

  /**
   * Update this window's OpenGL context, e.g. when the window is resized.
   */
  void UpdateContext();

  /**
   * Get report of capabilities for the render window
   */
  const char *ReportCapabilities() override;

  /**
   * Is this render window using hardware acceleration? 0-false, 1-true
   */
  int IsDirect() override;

  /**
   * If called, allow MakeCurrent() to skip cache-check when called.
   * MakeCurrent() reverts to original behavior of cache-checking
   * on the next render.
   */
  void SetForceMakeCurrent() override;

  /**
   * Check to see if an event is pending for this window.
   * This is a useful check to abort a long render.
   */
  int GetEventPending() override;

  //@{
  /**
   * Initialize OpenGL for this window.
   */
  virtual void SetupPalette(void *hDC);
  virtual void SetupPixelFormat(void *hDC, void *dwFlags, int debug,
                                int bpp=16, int zbpp=16);
  //@}

  /**
   * Clean up device contexts, rendering contexts, etc.
   */
  void Finalize() override;

  //@{
  /**
   * Hide or Show the mouse cursor, it is nice to be able to hide the
   * default cursor if you want VTK to display a 3D cursor instead.
   * Set cursor position in window (note that (0,0) is the lower left
   * corner).
   */
  void HideCursor() override;
  void ShowCursor() override;
  void SetCursorPosition(int x, int y) override;
  //@}

  /**
   * Change the shape of the cursor.
   */
  void SetCurrentCursor(int) override;

  /**
   * Get the ViewCreated flag. It is 1 if this object created an instance
   * of NSView, 0 otherwise.
   */
  virtual int GetViewCreated();

  /**
   * Get the WindowCreated flag. It is 1 if this object created an instance
   * of NSWindow, 0 otherwise.
   */
  virtual int GetWindowCreated();

  //@{
  /**
   * Accessors for the OpenGL context (Really an NSOpenGLContext*).
   */
  void SetContextId(void *);
  void *GetContextId();
  void *GetGenericContext() override {return this->GetContextId();}
  //@}

  /**
   * Sets the NSWindow* associated with this vtkRenderWindow.
   * This class' default behaviour, that is, if you never call
   * SetWindowId()/SetRootWindow() is to create an NSWindow and a
   * vtkCocoaGLView (NSView subclass) which are used together to draw
   * all vtk stuff into. If you already have an NSWindow and NSView and
   * you want this class to use them you must call both SetRootWindow()
   * and SetWindowId(), respectively, early on (before WindowInitialize()
   * is executed). In the case of Java, you should call only SetWindowId().
   */
  virtual void SetRootWindow(void *);

  /**
   * Returns the NSWindow* associated with this vtkRenderWindow.
   */
  virtual void *GetRootWindow();

  /**
   * Sets the NSView* associated with this vtkRenderWindow.
   * This class' default behaviour, that is, if you never call this
   * SetWindowId()/SetRootWindow() is to create an NSWindow and a
   * vtkCocoaGLView (NSView subclass) which are used together to draw all
   * vtk stuff into. If you already have an NSWindow and NSView and you
   * want this class to use them you must call both SetRootWindow()
   * and SetWindowId(), respectively, early on (before WindowInitialize()
   * is executed). In the case of Java, you should call only SetWindowId().
   */
  void SetWindowId(void *) override;

  /**
   * Returns the NSView* associated with this vtkRenderWindow.
   */
  virtual void *GetWindowId();
  void *GetGenericWindowId() override {return this->GetWindowId();}

  /**
   * Set the NSView* for the vtkRenderWindow to be parented within. The
   * Position and Size of the RenderWindow will set the rectangle of the
   * NSView that the vtkRenderWindow will create within this parent.
   * If you set the WindowId, then this ParentId will be ignored.
   */
  void SetParentId(void *nsview) override;

  /**
   * Get the parent NSView* for this vtkRenderWindow. This method will
   * return "NULL" if the parent was not set with SetParentId() or
   * SetParentInfo().
   */
  virtual void *GetParentId();
  void *GetGenericParentId() override { return this->GetParentId(); }

  /**
   * Set to true if you want to force NSViews created by this object to
   * have their wantsBestResolutionOpenGLSurface property set to YES.
   * Otherwise, the bundle's Info.plist will be checked for the
   * "NSHighResolutionCapable" key, if it is present and YES,
   * wantsBestResolutionOpenGLSurface will be set to YES. In all other cases,
   * setWantsBestResolutionOpenGLSurface: is not invoked at all. Notably,
   * setWantsBestResolutionOpenGLSurface: is never invoked on NSViews not created
   * by VTK itself.
   */
  void SetWantsBestResolution(bool wantsBest);
  bool GetWantsBestResolution();

  //@{
  /**
   * Accessors for the pixel format object (Really an NSOpenGLPixelFormat*).
   */
  void SetPixelFormat(void *pixelFormat);
  void *GetPixelFormat();
  //@}

  //@{
  /**
   * Ability to push and pop this window's context
   * as the current context. The idea being to
   * if needed make this window's context current
   * and when done releasing resources restore
   * the prior context
   */
  void PushContext() override;
  void PopContext() override;
  //@}

protected:
  vtkCocoaRenderWindow();
  ~vtkCocoaRenderWindow() override;

  std::stack<void *> ContextStack;

  void CreateGLContext();

  void CreateAWindow() override;
  void DestroyWindow() override;
  void DestroyOffScreenWindow();

  int OffScreenInitialized;
  int OnScreenInitialized;

  //@{
  /**
   * Accessors for the cocoa manager (Really an NSMutableDictionary*).
   * It manages all Cocoa objects in this C++ class.
   */
  void SetCocoaManager(void *manager);
  void *GetCocoaManager();
  //@}

  void SetCocoaServer(void *server); // Really a vtkCocoaServer*
  void *GetCocoaServer();

private:
  vtkCocoaRenderWindow(const vtkCocoaRenderWindow&) = delete;
  void operator=(const vtkCocoaRenderWindow&) = delete;

private:
  // Important: this class cannot contain Objective-C instance
  // variables for 2 reasons:
  // 1) C++ files include this header
  // 2) because of garbage collection (the GC scanner does not scan objects create by C++'s new)
  // Instead, use the CocoaManager dictionary to keep a collection
  // of what would otherwise be Objective-C instance variables.
  void *CocoaManager; // Really an NSMutableDictionary*

  int WindowCreated;
  int ViewCreated;
  int CursorHidden;

  int ForceMakeCurrent;

  bool WantsBestResolution;
};
#endif
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Update (create if not existed) YAML 'lastmod' in posts
according to their last git log date.
Dependencies:
- git
- ruamel.yaml
© 2018-2019 Cotes Chung
Licensed under MIT
"""
import sys
import glob
import os
import subprocess
import shutil
from utils.frontmatter_getter import get_yaml
from ruamel.yaml import YAML
POSTS_PATH = "_posts"


def update_lastmod(verbose, commit):
    """Update (or insert) the YAML `seo.date_modified` of every post under
    POSTS_PATH whose last git commit date differs from the recorded value.

    Args:
        verbose: print a log line for every updated post.
        commit: if True, `git add` and `git commit` the updated posts.
    """
    count = 0
    yaml = YAML()

    for post in glob.glob(os.path.join(POSTS_PATH, "*.md")):
        # Count commits touching this post; exactly one commit means the
        # post was never modified after creation, so skip it.
        # Bug fix: check_output returns bytes on Python 3, so the original
        # comparison with the str "1" could never match — decode first.
        ps = subprocess.Popen(("git", "log", "--pretty=%ad", post),
                              stdout=subprocess.PIPE)
        git_log_count = subprocess.check_output(
            ('wc', '-l'), stdin=ps.stdout).decode('utf-8')
        ps.wait()

        if git_log_count.strip() == "1":
            continue

        git_lastmod = subprocess.check_output([
            "git", "log", "-1", "--pretty=%ad", "--date=iso",
            post]).decode('utf-8').strip()

        if not git_lastmod:
            continue

        frontmatter, line_num = get_yaml(post)
        meta = yaml.load(frontmatter)

        if 'seo' in meta and 'date_modified' in meta['seo']:
            if meta['seo']['date_modified'] == git_lastmod:
                continue
            else:
                meta['seo']['date_modified'] = git_lastmod
        else:
            meta.insert(line_num, 'seo',
                        dict(date_modified=git_lastmod))

        # Rewrite the post: fresh front matter, then the original body
        # (skipping the old front-matter lines).
        output = 'new.md'
        if os.path.isfile(output):
            os.remove(output)

        with open(output, 'w') as new, open(post, 'r') as old:
            new.write("---\n")
            yaml.dump(meta, new)
            new.write("---\n")
            line_num += 2  # account for the two '---' delimiters

            for line in old.readlines():
                if line_num > 0:
                    line_num -= 1
                    continue
                new.write(line)

        shutil.move(output, post)
        count += 1
        if verbose:
            # Bug fix: was a Python 2 print statement (SyntaxError on
            # Python 3) while the rest of the file uses print().
            print("[INFO] update 'lastmod' for:" + post)

    print("[INFO] Success to update lastmod for {} post(s).".format(count))

    if commit and count > 0:
        subprocess.call(["git", "add", POSTS_PATH])
        subprocess.call(["git", "commit", "-m",
                         "[Automation] Updated lastmod for post(s)."])
def help():
    """Print command-line usage.

    Note: intentionally shadows the builtin ``help``; main() calls this name.
    Bug fix: the usage text omitted the supported -c/--commit flag.
    """
    print("Usage: "
          "python update_posts_lastmod.py [ -v | --verbose ] [ -c | --commit ]\n\n"
          "Optional arguments:\n"
          "-v, --verbose  Print verbose logs\n"
          "-c, --commit   Commit the updated post(s) to git\n")
def main():
    """Parse CLI flags (-v/--verbose, -c/--commit) and run the updater.

    Unknown arguments print usage and abort without touching any post.
    """
    verbose = False
    commit = False

    # Skip argv[0] (the script path) via slicing instead of comparing
    # each argument against it.
    for arg in sys.argv[1:]:
        if arg in ('-v', '--verbose'):
            verbose = True
        elif arg in ('-c', '--commit'):
            commit = True
        else:
            print("Oops! Unknown argument: '{}'\n".format(arg))
            help()
            return

    update_lastmod(verbose, commit)


if __name__ == '__main__':
    # Bug fix: the script previously called main() unconditionally, so it
    # ran as a side effect of being imported.
    main()
|
$(function () {
"use strict";
// chart 1
var options = {
series: [{
name: 'Sessions',
data: [414, 555, 257, 901, 613, 727, 414, 555, 257]
}],
chart: {
type: 'line',
height: 60,
toolbar: {
show: false
},
zoom: {
enabled: false
},
dropShadow: {
enabled: false,
top: 3,
left: 14,
blur: 4,
opacity: 0.12,
color: '#fff',
},
sparkline: {
enabled: true
}
},
markers: {
size: 0,
colors: ["#fff"],
strokeColors: "#fff",
strokeWidth: 2,
hover: {
size: 7,
}
},
plotOptions: {
bar: {
horizontal: false,
columnWidth: '45%',
endingShape: 'rounded'
},
},
dataLabels: {
enabled: false
},
stroke: {
show: true,
width: 2.5,
curve: 'smooth'
},
colors: ["#fff"],
xaxis: {
categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
},
fill: {
opacity: 1
},
tooltip: {
theme: 'dark',
fixed: {
enabled: false
},
x: {
show: false
},
y: {
title: {
formatter: function (seriesName) {
return ''
}
}
},
marker: {
show: false
}
}
};
var chart = new ApexCharts(document.querySelector("#chart1"), options);
chart.render();
// chart 2
var options = {
series: [{
name: 'Total Users',
data: [414, 555, 257, 901, 613, 727, 414, 555, 257]
}],
chart: {
type: 'bar',
height: 60,
toolbar: {
show: false
},
zoom: {
enabled: false
},
dropShadow: {
enabled: false,
top: 3,
left: 14,
blur: 4,
opacity: 0.12,
color: '#fff',
},
sparkline: {
enabled: true
}
},
markers: {
size: 0,
colors: ["#fff"],
strokeColors: "#fff",
strokeWidth: 2,
hover: {
size: 7,
}
},
plotOptions: {
bar: {
horizontal: false,
columnWidth: '40%',
endingShape: 'rounded'
},
},
dataLabels: {
enabled: false
},
stroke: {
show: true,
width: 2.5,
curve: 'smooth'
},
colors: ["#fff"],
xaxis: {
categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
},
fill: {
opacity: 1
},
tooltip: {
theme: 'dark',
fixed: {
enabled: false
},
x: {
show: false
},
y: {
title: {
formatter: function (seriesName) {
return ''
}
}
},
marker: {
show: false
}
}
};
var chart = new ApexCharts(document.querySelector("#chart2"), options);
chart.render();
// chart 3
var options = {
series: [{
name: 'Page Views',
data: [414, 555, 257, 901, 613, 727, 414, 555, 257]
}],
chart: {
type: 'area',
height: 60,
toolbar: {
show: false
},
zoom: {
enabled: false
},
dropShadow: {
enabled: false,
top: 3,
left: 14,
blur: 4,
opacity: 0.12,
color: '#fff',
},
sparkline: {
enabled: true
}
},
markers: {
size: 0,
colors: ["#fff"],
strokeColors: "#fff",
strokeWidth: 2,
hover: {
size: 7,
}
},
plotOptions: {
bar: {
horizontal: false,
columnWidth: '45%',
endingShape: 'rounded'
},
},
dataLabels: {
enabled: false
},
stroke: {
show: true,
width: 2.5,
curve: 'smooth'
},
fill: {
type: 'gradient',
gradient: {
shade: 'light',
type: "vertical",
shadeIntensity: 0.5,
gradientToColors: ["#fff"],
inverseColors: true,
opacityFrom: 0.2,
opacityTo: 0.5,
stops: [0, 50, 100],
colorStops: []
}
},
colors: ["#fff"],
xaxis: {
categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
},
tooltip: {
theme: 'dark',
fixed: {
enabled: false
},
x: {
show: false
},
y: {
title: {
formatter: function (seriesName) {
return ''
}
}
},
marker: {
show: false
}
}
};
var chart = new ApexCharts(document.querySelector("#chart3"), options);
chart.render();
// chart 4
var options = {
series: [{
name: 'Bounce Rate',
data: [414, 555, 257, 901, 613, 727, 414, 555, 257]
}],
chart: {
type: 'bar',
height: 60,
toolbar: {
show: false
},
zoom: {
enabled: false
},
dropShadow: {
enabled: false,
top: 3,
left: 14,
blur: 4,
opacity: 0.12,
color: '#fff',
},
sparkline: {
enabled: true
}
},
markers: {
size: 0,
colors: ["#fff"],
strokeColors: "#fff",
strokeWidth: 2,
hover: {
size: 7,
}
},
plotOptions: {
bar: {
horizontal: false,
columnWidth: '40%',
endingShape: 'rounded'
},
},
dataLabels: {
enabled: false
},
stroke: {
show: true,
width: 2.4,
curve: 'smooth'
},
colors: ["#fff"],
xaxis: {
categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
},
fill: {
opacity: 1
},
tooltip: {
theme: 'dark',
fixed: {
enabled: false
},
x: {
show: false
},
y: {
title: {
formatter: function (seriesName) {
return ''
}
}
},
marker: {
show: false
}
}
};
var chart = new ApexCharts(document.querySelector("#chart4"), options);
chart.render();
// chart 5
var options = {
series: [{
name: 'Avg. Session Duration',
data: [414, 555, 257, 901, 613, 727, 414, 555, 257]
}],
chart: {
type: 'line',
height: 60,
toolbar: {
show: false
},
zoom: {
enabled: false
},
dropShadow: {
enabled: false,
top: 3,
left: 14,
blur: 4,
opacity: 0.12,
color: '#fff',
},
sparkline: {
enabled: true
}
},
markers: {
size: 0,
colors: ["#fff"],
strokeColors: "#fff",
strokeWidth: 2,
hover: {
size: 7,
}
},
plotOptions: {
bar: {
horizontal: false,
columnWidth: '45%',
endingShape: 'rounded'
},
},
dataLabels: {
enabled: false
},
stroke: {
show: true,
width: 2.5,
curve: 'smooth'
},
colors: ["#fff"],
xaxis: {
categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
},
fill: {
opacity: 1
},
tooltip: {
theme: 'dark',
fixed: {
enabled: false
},
x: {
show: false
},
y: {
title: {
formatter: function (seriesName) {
return ''
}
}
},
marker: {
show: false
}
}
};
var chart = new ApexCharts(document.querySelector("#chart5"), options);
chart.render();
// chart 6
var options = {
series: [{
name: 'Sales',
data: [4, 8, 6, 9, 6, 7, 4, 5, 2.5, 3]
}],
chart: {
type: 'area',
foreColor: "rgba(255, 255, 255, 0.65)",
height: 250,
toolbar: {
show: false
},
zoom: {
enabled: false
},
dropShadow: {
enabled: false,
top: 3,
left: 14,
blur: 4,
opacity: 0.12,
color: '#224d89',
},
sparkline: {
enabled: false
}
},
markers: {
size: 0,
colors: ["#224d89"],
strokeColors: "#fff",
strokeWidth: 2,
hover: {
size: 7,
}
},
plotOptions: {
bar: {
horizontal: false,
columnWidth: '45%',
endingShape: 'rounded'
},
},
dataLabels: {
enabled: false
},
stroke: {
show: true,
width: 3,
curve: 'smooth'
},
fill: {
type: 'gradient',
gradient: {
shade: 'light',
type: 'vertical',
shadeIntensity: 0.5,
gradientToColors: ['#fff'],
inverseColors: false,
opacityFrom: 0.8,
opacityTo: 0.5,
stops: [0, 100]
}
},
colors: ["#fff"],
grid: {
borderColor: 'rgba(255, 255, 255, 0.12)',
show: true,
},
yaxis: {
labels: {
formatter: function (value) {
return value + "K";
}
},
},
xaxis: {
categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct'],
},
tooltip: {
theme: 'dark',
y: {
formatter: function (val) {
return "" + val + "K"
}
}
}
};
var chart = new ApexCharts(document.querySelector("#chart6"), options);
chart.render();
// chart 7 — grouped column chart: new vs. old visitors (#chart7)
var chart7Options = {
    series: [
        { name: 'New Visitors', data: [66, 76, 85, 101, 65, 87, 105, 91, 86] },
        { name: 'Old Visitors', data: [55, 44, 55, 57, 56, 61, 58, 63, 60] }
    ],
    chart: {
        foreColor: "rgba(255, 255, 255, 0.65)",
        type: 'bar',
        height: 260,
        stacked: false,
        toolbar: { show: false }
    },
    plotOptions: {
        bar: { horizontal: false, columnWidth: '45%', endingShape: 'rounded' }
    },
    legend: { show: false, position: 'top', horizontalAlign: 'left', offsetX: -20 },
    dataLabels: { enabled: false },
    stroke: { show: true, width: 3, colors: ['transparent'] },
    fill: {
        type: "gradient",
        gradient: {
            shade: "light",
            type: "horizontal",
            shadeIntensity: 0.5,
            gradientToColors: ["#fff", "rgba(255, 255, 255, 0.50)"],
            inverseColors: false,
            opacityFrom: 1,
            opacityTo: 1,
            stops: [0, 100]
        }
    },
    colors: ["#fff", "rgba(255, 255, 255, 0.50)"],
    yaxis: {
        // Render axis values as thousands, e.g. "55K".
        labels: { formatter: function (value) { return value + "K"; } }
    },
    xaxis: {
        categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep']
    },
    grid: { borderColor: 'rgba(255, 255, 255, 0.12)', show: true },
    tooltip: {
        theme: 'dark',
        y: { formatter: function (val) { return "" + val + "K" } }
    }
};
var chart7 = new ApexCharts(document.querySelector("#chart7"), chart7Options);
chart7.render();
// chart 8 — compact sparkline column chart for sessions (#chart8)
var chart8Options = {
    series: [{ name: 'Sessions', data: [414, 555, 257, 901, 613, 727, 414, 555, 257] }],
    chart: {
        type: 'bar',
        height: 60,
        toolbar: { show: false },
        zoom: { enabled: false },
        dropShadow: {
            enabled: false,
            top: 3,
            left: 14,
            blur: 4,
            opacity: 0.12,
            color: '#224d89',
        },
        // Sparkline mode hides axes, grid and legend.
        sparkline: { enabled: true }
    },
    markers: {
        size: 0,
        colors: ["#224d89"],
        strokeColors: "#fff",
        strokeWidth: 2,
        hover: { size: 7, }
    },
    plotOptions: {
        bar: { horizontal: false, columnWidth: '45%', endingShape: 'rounded' }
    },
    dataLabels: { enabled: false },
    stroke: { show: true, width: 3 },
    colors: ["#fff"],
    xaxis: {
        categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
    },
    fill: { opacity: 1 },
    tooltip: {
        theme: 'dark',
        fixed: { enabled: false },
        x: { show: false },
        // Empty title formatter hides the series label in the tooltip.
        y: { title: { formatter: function () { return '' } } },
        marker: { show: false }
    }
};
var chart8 = new ApexCharts(document.querySelector("#chart8"), chart8Options);
chart8.render();
// chart 9 — sparkline area chart for sessions (#chart9)
var chart9Options = {
    series: [{ name: 'Sessions', data: [414, 555, 257, 901, 613, 727, 414, 555, 257] }],
    chart: {
        type: 'area',
        height: 60,
        toolbar: { show: false },
        zoom: { enabled: false },
        dropShadow: {
            enabled: false,
            top: 3,
            left: 14,
            blur: 4,
            opacity: 0.12,
            color: '#fff',
        },
        // Sparkline mode hides axes, grid and legend.
        sparkline: { enabled: true }
    },
    markers: {
        size: 0,
        colors: ["#fff"],
        strokeColors: "#fff",
        strokeWidth: 2,
        hover: { size: 7, }
    },
    plotOptions: {
        bar: { horizontal: false, columnWidth: '45%', endingShape: 'rounded' }
    },
    dataLabels: { enabled: false },
    stroke: { show: true, width: 3 },
    fill: {
        type: "gradient",
        gradient: {
            shade: "light",
            type: "horizontal",
            shadeIntensity: 0.5,
            gradientToColors: ["#fff"],
            inverseColors: false,
            opacityFrom: 0.5,
            opacityTo: 0.2,
            stops: [0, 100]
        }
    },
    colors: ["#fff"],
    xaxis: {
        categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
    },
    tooltip: {
        theme: 'dark',
        fixed: { enabled: false },
        x: { show: false },
        // Empty title formatter hides the series label in the tooltip.
        y: { title: { formatter: function () { return '' } } },
        marker: { show: false }
    }
};
var chart9 = new ApexCharts(document.querySelector("#chart9"), chart9Options);
chart9.render();
// chart 10 — sparkline area chart for sessions (#chart10); same config as chart 9
var chart10Options = {
    series: [{ name: 'Sessions', data: [414, 555, 257, 901, 613, 727, 414, 555, 257] }],
    chart: {
        type: 'area',
        height: 60,
        toolbar: { show: false },
        zoom: { enabled: false },
        dropShadow: {
            enabled: false,
            top: 3,
            left: 14,
            blur: 4,
            opacity: 0.12,
            color: '#fff',
        },
        // Sparkline mode hides axes, grid and legend.
        sparkline: { enabled: true }
    },
    markers: {
        size: 0,
        colors: ["#fff"],
        strokeColors: "#fff",
        strokeWidth: 2,
        hover: { size: 7, }
    },
    plotOptions: {
        bar: { horizontal: false, columnWidth: '45%', endingShape: 'rounded' }
    },
    dataLabels: { enabled: false },
    stroke: { show: true, width: 3 },
    fill: {
        type: "gradient",
        gradient: {
            shade: "light",
            type: "horizontal",
            shadeIntensity: 0.5,
            gradientToColors: ["#fff"],
            inverseColors: false,
            opacityFrom: 0.5,
            opacityTo: 0.2,
            stops: [0, 100]
        }
    },
    colors: ["#fff"],
    xaxis: {
        categories: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
    },
    tooltip: {
        theme: 'dark',
        fixed: { enabled: false },
        x: { show: false },
        // Empty title formatter hides the series label in the tooltip.
        y: { title: { formatter: function () { return '' } } },
        marker: { show: false }
    }
};
var chart10 = new ApexCharts(document.querySelector("#chart10"), chart10Options);
chart10.render();
// chart 11 — radial gauge showing "Dynamics Today" at 84% (#chart11)
var chart11Options = {
    chart: { height: 330, type: 'radialBar', toolbar: { show: false } },
    plotOptions: {
        radialBar: {
            // Sweep from -130° to 130° so the gauge has an open bottom.
            startAngle: -130,
            endAngle: 130,
            hollow: {
                margin: 0,
                size: '78%',
                image: undefined,
                imageOffsetX: 0,
                imageOffsetY: 0,
                position: 'front',
                dropShadow: {
                    enabled: false,
                    top: 3,
                    left: 0,
                    blur: 4,
                    color: 'rgba(0, 169, 255, 0.25)',
                    opacity: 0.65
                }
            },
            track: {
                background: 'rgba(255, 255, 255, 0.12)',
                margin: 0, // margin is in pixels
                dropShadow: {
                    enabled: false,
                    top: -3,
                    left: 0,
                    blur: 4,
                    color: 'rgba(0, 169, 255, 0.12)',
                    opacity: 0.65
                }
            },
            dataLabels: {
                showOn: 'always',
                name: { offsetY: -25, show: true, color: '#fff', fontSize: '16px' },
                value: {
                    // Display the series value as a percentage.
                    formatter: function (val) { return val + "%"; },
                    color: '#fff',
                    fontSize: '45px',
                    show: true,
                    offsetY: 10,
                }
            }
        }
    },
    fill: {
        type: 'gradient',
        gradient: {
            shade: 'dark',
            type: 'horizontal',
            shadeIntensity: 0.5,
            gradientToColors: ['#fff'],
            inverseColors: false,
            opacityFrom: 1,
            opacityTo: 1,
            stops: [0, 100]
        }
    },
    colors: ["#fff"],
    series: [84],
    stroke: { lineCap: 'round' },
    labels: ['Dynamics Today'],
}
var chart11 = new ApexCharts(document.querySelector("#chart11"), chart11Options);
chart11.render();
// chart 12 — Highcharts pie: users by gender (#chart12)
Highcharts.chart('chart12', {
    chart: {
        width: '190',
        height: '190',
        plotBackgroundColor: null,
        plotBorderWidth: null,
        plotShadow: false,
        type: 'pie',
        styledMode: true
    },
    credits: { enabled: false },
    exporting: { buttons: { contextButton: { enabled: false, } } },
    title: { text: '' },
    tooltip: { pointFormat: '{series.name}: <b>{point.percentage:.1f}%</b>' },
    accessibility: { point: { valueSuffix: '%' } },
    plotOptions: {
        pie: {
            allowPointSelect: true,
            cursor: 'pointer',
            dataLabels: { enabled: false },
            showInLegend: false
        }
    },
    series: [{
        name: 'Users',
        colorByPoint: true,
        data: [
            // The "Male" slice starts pulled out and selected.
            { name: 'Male', y: 61.41, sliced: true, selected: true },
            { name: 'Female', y: 11.84 }
        ]
    }]
});
// chart 13 — Highcharts column chart: traffic sources (#chart13)
Highcharts.chart('chart13', {
    chart: { height: 360, type: 'column', styledMode: true },
    credits: { enabled: false },
    title: { text: 'Traffic Sources Status. January, 2021' },
    accessibility: { announceNewData: { enabled: true } },
    xAxis: { type: 'category' },
    yAxis: { title: { text: 'Traffic Sources Status' } },
    legend: { enabled: false },
    plotOptions: {
        series: {
            borderWidth: 0,
            dataLabels: { enabled: true, format: '{point.y:.1f}%' }
        }
    },
    tooltip: {
        headerFormat: '<span style="font-size:11px">{series.name}</span><br>',
        pointFormat: '<span style="color:{point.color}">{point.name}</span>: <b>{point.y:.2f}%</b> of total<br/>'
    },
    series: [{
        name: "Traffic Sources",
        colorByPoint: true,
        // NOTE(review): each point sets a "drilldown" id but no drilldown
        // series are configured in view — confirm the drilldown module and
        // data exist, otherwise clicking a column does nothing.
        data: [
            { name: "Organic Search", y: 62.74, drilldown: "Organic Search" },
            { name: "Direct", y: 40.57, drilldown: "Direct" },
            { name: "Referral", y: 25.23, drilldown: "Referral" },
            { name: "Others", y: 10.58, drilldown: "Others" }
        ]
    }],
});
// chart 14 — Highcharts column chart: visitors by age group (#chart14)
Highcharts.chart('chart14', {
    chart: { height: 360, type: 'column', styledMode: true },
    credits: { enabled: false },
    title: { text: 'Visitor Age Group Status' },
    accessibility: { announceNewData: { enabled: true } },
    xAxis: { type: 'category' },
    yAxis: { title: { text: 'Age Group Status' } },
    legend: { enabled: false },
    plotOptions: {
        series: {
            borderWidth: 0,
            dataLabels: { enabled: true, format: '{point.y:.1f}K' }
        }
    },
    tooltip: {
        headerFormat: '<span style="font-size:11px">{series.name}</span><br>',
        pointFormat: '<span style="color:{point.color}">{point.name}</span>: <b>{point.y:.2f}%</b> of total<br/>'
    },
    series: [{
        name: "Age Group",
        colorByPoint: true,
        data: [
            { name: "18-24", y: 35.74 },
            { name: "25-34", y: 65.57 },
            { name: "35-44", y: 30.23 },
            { name: "45-54", y: 20.58 },
            { name: "55-64", y: 15.58 },
            { name: "65-80", y: 8.58 }
        ]
    }],
});
// world map — jVectorMap of highlighted visitor regions (#geographic-map-2)
jQuery('#geographic-map-2').vectorMap({
    map: 'world_mill_en',
    backgroundColor: 'transparent',
    borderColor: '#818181',
    borderOpacity: 0.25,
    borderWidth: 1,
    zoomOnScroll: false,
    color: '#009efb',
    regionStyle: { initial: { fill: 'rgba(255, 255, 255, 1)' } },
    markerStyle: {
        initial: {
            r: 9,
            'fill': '#fff',
            'fill-opacity': 1,
            'stroke': '#000',
            'stroke-width': 5,
            'stroke-opacity': 0.4
        },
    },
    enableZoom: true,
    hoverColor: '#fff',
    markers: [{ latLng: [21.00, 78.00], name: 'I Love My India' }],
    series: {
        // Tint a fixed set of countries with a semi-transparent overlay.
        regions: [{
            values: {
                IN: 'rgba(255, 255, 255, 0.50)',
                US: 'rgba(255, 255, 255, 0.50)',
                CN: 'rgba(255, 255, 255, 0.50)',
                CA: 'rgba(255, 255, 255, 0.50)',
                AU: 'rgba(255, 255, 255, 0.50)'
            }
        }]
    },
    hoverOpacity: null,
    normalizeFunction: 'linear',
    scaleColors: ['#b6d6ff', '#005ace'],
    selectedColor: '#c9dfaf',
    selectedRegions: [],
    showTooltip: true,
    // Announce which region was clicked.
    onRegionClick: function (element, code, region) {
        var clickInfo = 'You clicked "' + region + '" which has the code: ' + code.toUpperCase();
        alert(clickInfo);
    }
});
});
|
const gps = require('.');

// Table-driven variant of the gps() suite: each case supplies the sample
// count `s`, the timestamps `x`, and the expected floored speed `u`.
describe("gps fn test", () => {
    const cases = [
        { name: "passes testing fn1", s: 20, u: 41,
          x: [0.0, 0.23, 0.46, 0.69, 0.92, 1.15, 1.38, 1.61] },
        { name: "passes testing fn2", s: 12, u: 219,
          x: [0.0, 0.11, 0.22, 0.33, 0.44, 0.65, 1.08, 1.26, 1.68, 1.89, 2.1, 2.31, 2.52, 3.25] },
        { name: "passes testing fn3", s: 20, u: 80,
          x: [0.0, 0.18, 0.36, 0.54, 0.72, 1.05, 1.26, 1.47, 1.92, 2.16, 2.4, 2.64, 2.88, 3.12, 3.36, 3.6, 3.84] },
        { name: "passes testing fn4", s: 14, u: 90,
          x: [0.0, 0.01, 0.36, 0.6, 0.84, 1.05, 1.26, 1.47, 1.68, 1.89, 2.1, 2.31, 2.52, 2.73, 2.94, 3.15] },
        { name: "passes testing fn5", s: 17, u: 72,
          x: [0.0, 0.02, 0.36, 0.54, 0.72, 0.9, 1.08, 1.26, 1.44, 1.62, 1.8] },
    ];
    cases.forEach(({ name, s, x, u }) => {
        it(name, () => {
            expect(Math.floor(gps(s, x))).toBe(u);
        });
    });
});
|
from django.conf.urls import url
from django.contrib.auth.views import login,logout
from appPortas.views import *

# URL routes for the doors ("portas") app: CRUD for doors and groups,
# access management, and access-frequency reports.
urlpatterns = [
    url(r'^porta/list$', porta_list, name='porta_list'),
    url(r'^porta/detail/(?P<pk>\d+)$',porta_detail, name='porta_detail'),
    url(r'^porta/new/$', porta_new, name='porta_new'),
    url(r'^porta/update/(?P<pk>\d+)$',porta_update, name='porta_update'),
    url(r'^porta/delete/(?P<pk>\d+)$',porta_delete, name='porta_delete'),
    # BUG FIX: this route previously reused name='porta_delete', so
    # reverse('porta_delete') resolved to /porta/usuarios/<pk> instead of the
    # delete URL. It now has a unique name. NOTE(review): it still points at
    # the porta_delete view — this looks like a copy-paste; confirm the
    # intended view (e.g. a "porta_usuarios" view) and swap it in.
    url(r'^porta/usuarios/(?P<pk>\d+)$', porta_delete, name='porta_usuarios'),
    url(r'^grupo/list$', grupo_list, name='grupo_list'),
    url(r'^grupo/detail/(?P<pk>\d+)$',grupo_detail, name='grupo_detail'),
    url(r'^grupo/new/$', grupo_new, name='grupo_new'),
    url(r'^grupo/update/(?P<pk>\d+)$',grupo_update, name='grupo_update'),
    url(r'^grupo/delete/(?P<pk>\d+)$',grupo_delete, name='grupo_delete'),
    url(r'^edit/grupo/$', edit_grupo, name='edit_grupo'),
    url(r'^usuario/acesso/grupo/(?P<pk>\d+)$', usuario_acesso_grupo, name='usuario_acesso_grupo'),
    url(r'^usuario/sem_acesso/grupo/(?P<pk>\d+)$', usuario_sem_acesso_grupo, name='usuario_sem_acesso_grupo'),
    url(r'^porta/no_grupo/(?P<pk>\d+)$', porta_no_grupo, name='porta_no_grupo'),
    url(r'^porta/nao_grupo/(?P<pk>\d+)$', porta_nao_grupo, name='porta_nao_grupo'),
    url(r'^portas/$', portas, name='portas'),
    url(r'^porta/busca/(?P<pk>\d+)$', busca_porta, name='busca_porta'),
    # NOTE(review): route name 'busca_frequencia_porta' does not match the
    # URL prefix 'busca/porta_frequencia' — kept as-is since templates may
    # already reverse() this name.
    url(r'^busca/porta_frequencia/$', busca_porta_frequencia, name='busca_frequencia_porta'),
    url(r'^frequencia_porta_acesso/$', frequencia_porta_acesso, name='frequencia_porta_acesso'),
    url(r'^porta/frequencia_acesso/(?P<pk>\d+)$', porta_frequencias, name='porta_frequencias'),
]
|
class Solution:
    def XXX(self, root: TreeNode) -> List[List[int]]:
        """Return the level-order traversal of a binary tree: one list of
        node values per depth level, top to bottom.

        Uses an explicit stack of (node, depth) pairs instead of a queue:
        children are pushed right-before-left so the left subtree is visited
        first, which keeps each level's values in left-to-right order.
        """
        ans = []
        if not root:
            return ans
        stack = [(root, 0)]
        while stack:
            node, depth = stack.pop()
            # First time this depth is reached: open a new level bucket.
            if len(ans) == depth:
                ans.append([])
            ans[depth].append(node.val)
            # Push right first so the left child is popped (visited) first.
            if node.right:
                stack.append((node.right, depth + 1))
            if node.left:
                stack.append((node.left, depth + 1))
            # BUG FIX: removed the dead trailing `i += 1` — the counter was
            # overwritten by the tuple unpack at the top of every iteration.
        return ans
undefined
for (i = 0; i < document.getElementsByTagName("code").length; i++) { console.log(document.getElementsByTagName("code")[i].innerText); }
|
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/awstransfer/Transfer_EXPORTS.h>
#include <aws/awstransfer/TransferRequest.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/awstransfer/model/HomeDirectoryType.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/awstransfer/model/HomeDirectoryMapEntry.h>
#include <aws/awstransfer/model/Tag.h>
#include <utility>
namespace Aws
{
namespace Transfer
{
namespace Model
{
/**
*/
class AWS_TRANSFER_API CreateUserRequest : public TransferRequest
{
public:
CreateUserRequest();
// Service request name is the Operation name which will send this request out,
// each operation should has unique request name, so that we can get operation's name from this request.
// Note: this is not true for response, multiple operations may have the same response name,
// so we can not get operation's name from response.
inline virtual const char* GetServiceRequestName() const override { return "CreateUser"; }
Aws::String SerializePayload() const override;
Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override;
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline const Aws::String& GetHomeDirectory() const{ return m_homeDirectory; }
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline bool HomeDirectoryHasBeenSet() const { return m_homeDirectoryHasBeenSet; }
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline void SetHomeDirectory(const Aws::String& value) { m_homeDirectoryHasBeenSet = true; m_homeDirectory = value; }
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline void SetHomeDirectory(Aws::String&& value) { m_homeDirectoryHasBeenSet = true; m_homeDirectory = std::move(value); }
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline void SetHomeDirectory(const char* value) { m_homeDirectoryHasBeenSet = true; m_homeDirectory.assign(value); }
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline CreateUserRequest& WithHomeDirectory(const Aws::String& value) { SetHomeDirectory(value); return *this;}
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline CreateUserRequest& WithHomeDirectory(Aws::String&& value) { SetHomeDirectory(std::move(value)); return *this;}
/**
* <p>The landing directory (folder) for a user when they log in to the server
* using the client.</p> <p>An example is <i>
* <code>your-Amazon-S3-bucket-name>/home/username</code> </i>.</p>
*/
inline CreateUserRequest& WithHomeDirectory(const char* value) { SetHomeDirectory(value); return *this;}
/**
* <p>The type of landing directory (folder) you want your users' home directory to
* be when they log into the server. If you set it to <code>PATH</code>, the user
* will see the absolute Amazon S3 bucket paths as is in their file transfer
* protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
* mappings in the <code>HomeDirectoryMappings</code> for how you want to make
* Amazon S3 paths visible to your users.</p>
*/
inline const HomeDirectoryType& GetHomeDirectoryType() const{ return m_homeDirectoryType; }
/**
* <p>The type of landing directory (folder) you want your users' home directory to
* be when they log into the server. If you set it to <code>PATH</code>, the user
* will see the absolute Amazon S3 bucket paths as is in their file transfer
* protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
* mappings in the <code>HomeDirectoryMappings</code> for how you want to make
* Amazon S3 paths visible to your users.</p>
*/
inline bool HomeDirectoryTypeHasBeenSet() const { return m_homeDirectoryTypeHasBeenSet; }
/**
* <p>The type of landing directory (folder) you want your users' home directory to
* be when they log into the server. If you set it to <code>PATH</code>, the user
* will see the absolute Amazon S3 bucket paths as is in their file transfer
* protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
* mappings in the <code>HomeDirectoryMappings</code> for how you want to make
* Amazon S3 paths visible to your users.</p>
*/
inline void SetHomeDirectoryType(const HomeDirectoryType& value) { m_homeDirectoryTypeHasBeenSet = true; m_homeDirectoryType = value; }
/**
* <p>The type of landing directory (folder) you want your users' home directory to
* be when they log into the server. If you set it to <code>PATH</code>, the user
* will see the absolute Amazon S3 bucket paths as is in their file transfer
* protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
* mappings in the <code>HomeDirectoryMappings</code> for how you want to make
* Amazon S3 paths visible to your users.</p>
*/
inline void SetHomeDirectoryType(HomeDirectoryType&& value) { m_homeDirectoryTypeHasBeenSet = true; m_homeDirectoryType = std::move(value); }
/**
* <p>The type of landing directory (folder) you want your users' home directory to
* be when they log into the server. If you set it to <code>PATH</code>, the user
* will see the absolute Amazon S3 bucket paths as is in their file transfer
* protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
* mappings in the <code>HomeDirectoryMappings</code> for how you want to make
* Amazon S3 paths visible to your users.</p>
*/
inline CreateUserRequest& WithHomeDirectoryType(const HomeDirectoryType& value) { SetHomeDirectoryType(value); return *this;}
/**
* <p>The type of landing directory (folder) you want your users' home directory to
* be when they log into the server. If you set it to <code>PATH</code>, the user
* will see the absolute Amazon S3 bucket paths as is in their file transfer
* protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
* mappings in the <code>HomeDirectoryMappings</code> for how you want to make
* Amazon S3 paths visible to your users.</p>
*/
inline CreateUserRequest& WithHomeDirectoryType(HomeDirectoryType&& value) { SetHomeDirectoryType(std::move(value)); return *this;}
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline const Aws::Vector<HomeDirectoryMapEntry>& GetHomeDirectoryMappings() const{ return m_homeDirectoryMappings; }
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline bool HomeDirectoryMappingsHasBeenSet() const { return m_homeDirectoryMappingsHasBeenSet; }
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline void SetHomeDirectoryMappings(const Aws::Vector<HomeDirectoryMapEntry>& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings = value; }
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline void SetHomeDirectoryMappings(Aws::Vector<HomeDirectoryMapEntry>&& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings = std::move(value); }
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline CreateUserRequest& WithHomeDirectoryMappings(const Aws::Vector<HomeDirectoryMapEntry>& value) { SetHomeDirectoryMappings(value); return *this;}
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline CreateUserRequest& WithHomeDirectoryMappings(Aws::Vector<HomeDirectoryMapEntry>&& value) { SetHomeDirectoryMappings(std::move(value)); return *this;}
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline CreateUserRequest& AddHomeDirectoryMappings(const HomeDirectoryMapEntry& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings.push_back(value); return *this; }
/**
* <p>Logical directory mappings that specify what Amazon S3 paths and keys should
* be visible to your user and how you want to make them visible. You will need to
* specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
* <code>Entry</code> shows how the path is made visible and <code>Target</code> is
* the actual Amazon S3 path. If you only specify a target, it will be displayed as
* is. You will need to also make sure that your IAM role provides access to paths
* in <code>Target</code>. The following is an example.</p> <p> <code>'[
* "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
* "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
* most cases, you can use this value instead of the scope-down policy to lock your
* user down to the designated home directory ("chroot"). To do this, you can set
* <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
* parameter value.</p> <p>If the target of a logical directory entry does
* not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
* the Amazon S3 API to create 0 byte objects as place holders for your directory.
* If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
* you can use the put-object operation. For example, you use the following:
* <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
* Make sure that the end of the key name ends in a '/' for it to be considered a
* folder.</p>
*/
inline CreateUserRequest& AddHomeDirectoryMappings(HomeDirectoryMapEntry&& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings.push_back(std::move(value)); return *this; }
    /**
     * <p>A scope-down policy for your user so you can use the same IAM role across
     * multiple users. This policy scopes down user access to portions of their Amazon
     * S3 bucket. Variables that you can use inside this policy include
     * <code>${Transfer:UserName}</code>, <code>${Transfer:HomeDirectory}</code>, and
     * <code>${Transfer:HomeBucket}</code>.</p> <p>For scope-down policies, AWS
     * Transfer Family stores the policy as a JSON blob, instead of the Amazon Resource
     * Name (ARN) of the policy. You save the policy as a JSON blob and pass it in the
     * <code>Policy</code> argument.</p> <p>For an example of a scope-down policy, see
     * <a
     * href="https://docs.aws.amazon.com/transfer/latest/userguide/users.html#users-policies-scope-down">Creating
     * a scope-down policy</a>.</p> <p>For more information, see <a
     * href="https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html">AssumeRole</a>
     * in the <i>AWS Security Token Service API Reference</i>.</p>
     */
    inline const Aws::String& GetPolicy() const{ return m_policy; }

    /** Returns true once any SetPolicy/WithPolicy overload has been called. @see GetPolicy */
    inline bool PolicyHasBeenSet() const { return m_policyHasBeenSet; }

    /** Sets the scope-down policy (copy). @see GetPolicy for semantics. */
    inline void SetPolicy(const Aws::String& value) { m_policyHasBeenSet = true; m_policy = value; }

    /** Sets the scope-down policy (move). @see GetPolicy for semantics. */
    inline void SetPolicy(Aws::String&& value) { m_policyHasBeenSet = true; m_policy = std::move(value); }

    /** Sets the scope-down policy from a C string. @see GetPolicy for semantics. */
    inline void SetPolicy(const char* value) { m_policyHasBeenSet = true; m_policy.assign(value); }

    /** Fluent setter (copy). @see GetPolicy for semantics. */
    inline CreateUserRequest& WithPolicy(const Aws::String& value) { SetPolicy(value); return *this;}

    /** Fluent setter (move). @see GetPolicy for semantics. */
    inline CreateUserRequest& WithPolicy(Aws::String&& value) { SetPolicy(std::move(value)); return *this;}

    /** Fluent setter from a C string. @see GetPolicy for semantics. */
    inline CreateUserRequest& WithPolicy(const char* value) { SetPolicy(value); return *this;}
    /**
     * <p>The IAM role that controls your users' access to your Amazon S3 bucket. The
     * policies attached to this role will determine the level of access you want to
     * provide your users when transferring files into and out of your Amazon S3 bucket
     * or buckets. The IAM role should also contain a trust relationship that allows
     * the server to access your resources when servicing your users' transfer
     * requests.</p>
     */
    inline const Aws::String& GetRole() const{ return m_role; }

    /** Returns true once any SetRole/WithRole overload has been called. @see GetRole */
    inline bool RoleHasBeenSet() const { return m_roleHasBeenSet; }

    /** Sets the user's IAM role ARN (copy). @see GetRole for semantics. */
    inline void SetRole(const Aws::String& value) { m_roleHasBeenSet = true; m_role = value; }

    /** Sets the user's IAM role ARN (move). @see GetRole for semantics. */
    inline void SetRole(Aws::String&& value) { m_roleHasBeenSet = true; m_role = std::move(value); }

    /** Sets the user's IAM role ARN from a C string. @see GetRole for semantics. */
    inline void SetRole(const char* value) { m_roleHasBeenSet = true; m_role.assign(value); }

    /** Fluent setter (copy). @see GetRole for semantics. */
    inline CreateUserRequest& WithRole(const Aws::String& value) { SetRole(value); return *this;}

    /** Fluent setter (move). @see GetRole for semantics. */
    inline CreateUserRequest& WithRole(Aws::String&& value) { SetRole(std::move(value)); return *this;}

    /** Fluent setter from a C string. @see GetRole for semantics. */
    inline CreateUserRequest& WithRole(const char* value) { SetRole(value); return *this;}
    /**
     * <p>A system-assigned unique identifier for a server instance. This is the
     * specific server that you added your user to.</p>
     */
    inline const Aws::String& GetServerId() const{ return m_serverId; }

    /** Returns true once any SetServerId/WithServerId overload has been called. @see GetServerId */
    inline bool ServerIdHasBeenSet() const { return m_serverIdHasBeenSet; }

    /** Sets the server identifier (copy). @see GetServerId for semantics. */
    inline void SetServerId(const Aws::String& value) { m_serverIdHasBeenSet = true; m_serverId = value; }

    /** Sets the server identifier (move). @see GetServerId for semantics. */
    inline void SetServerId(Aws::String&& value) { m_serverIdHasBeenSet = true; m_serverId = std::move(value); }

    /** Sets the server identifier from a C string. @see GetServerId for semantics. */
    inline void SetServerId(const char* value) { m_serverIdHasBeenSet = true; m_serverId.assign(value); }

    /** Fluent setter (copy). @see GetServerId for semantics. */
    inline CreateUserRequest& WithServerId(const Aws::String& value) { SetServerId(value); return *this;}

    /** Fluent setter (move). @see GetServerId for semantics. */
    inline CreateUserRequest& WithServerId(Aws::String&& value) { SetServerId(std::move(value)); return *this;}

    /** Fluent setter from a C string. @see GetServerId for semantics. */
    inline CreateUserRequest& WithServerId(const char* value) { SetServerId(value); return *this;}
    /**
     * <p>The public portion of the Secure Shell (SSH) key used to authenticate the
     * user to the server.</p>
     */
    inline const Aws::String& GetSshPublicKeyBody() const{ return m_sshPublicKeyBody; }

    /** Returns true once any SetSshPublicKeyBody/WithSshPublicKeyBody overload has been called. */
    inline bool SshPublicKeyBodyHasBeenSet() const { return m_sshPublicKeyBodyHasBeenSet; }

    /** Sets the SSH public key body (copy). @see GetSshPublicKeyBody */
    inline void SetSshPublicKeyBody(const Aws::String& value) { m_sshPublicKeyBodyHasBeenSet = true; m_sshPublicKeyBody = value; }

    /** Sets the SSH public key body (move). @see GetSshPublicKeyBody */
    inline void SetSshPublicKeyBody(Aws::String&& value) { m_sshPublicKeyBodyHasBeenSet = true; m_sshPublicKeyBody = std::move(value); }

    /** Sets the SSH public key body from a C string. @see GetSshPublicKeyBody */
    inline void SetSshPublicKeyBody(const char* value) { m_sshPublicKeyBodyHasBeenSet = true; m_sshPublicKeyBody.assign(value); }

    /** Fluent setter (copy). @see GetSshPublicKeyBody */
    inline CreateUserRequest& WithSshPublicKeyBody(const Aws::String& value) { SetSshPublicKeyBody(value); return *this;}

    /** Fluent setter (move). @see GetSshPublicKeyBody */
    inline CreateUserRequest& WithSshPublicKeyBody(Aws::String&& value) { SetSshPublicKeyBody(std::move(value)); return *this;}

    /** Fluent setter from a C string. @see GetSshPublicKeyBody */
    inline CreateUserRequest& WithSshPublicKeyBody(const char* value) { SetSshPublicKeyBody(value); return *this;}
    /**
     * <p>Key-value pairs that can be used to group and search for users. Tags are
     * metadata attached to users for any purpose.</p>
     */
    inline const Aws::Vector<Tag>& GetTags() const{ return m_tags; }

    /** Returns true once any SetTags/WithTags/AddTags overload has been called. @see GetTags */
    inline bool TagsHasBeenSet() const { return m_tagsHasBeenSet; }

    /** Replaces the tag list (copy). @see GetTags */
    inline void SetTags(const Aws::Vector<Tag>& value) { m_tagsHasBeenSet = true; m_tags = value; }

    /** Replaces the tag list (move). @see GetTags */
    inline void SetTags(Aws::Vector<Tag>&& value) { m_tagsHasBeenSet = true; m_tags = std::move(value); }

    /** Fluent tag-list setter (copy). @see GetTags */
    inline CreateUserRequest& WithTags(const Aws::Vector<Tag>& value) { SetTags(value); return *this;}

    /** Fluent tag-list setter (move). @see GetTags */
    inline CreateUserRequest& WithTags(Aws::Vector<Tag>&& value) { SetTags(std::move(value)); return *this;}

    /** Appends one tag (copy). @see GetTags */
    inline CreateUserRequest& AddTags(const Tag& value) { m_tagsHasBeenSet = true; m_tags.push_back(value); return *this; }

    /** Appends one tag (move). @see GetTags */
    inline CreateUserRequest& AddTags(Tag&& value) { m_tagsHasBeenSet = true; m_tags.push_back(std::move(value)); return *this; }
    /**
     * <p>A unique string that identifies a user and is associated with a server as
     * specified by the <code>ServerId</code>. This user name must be a minimum of 3
     * and a maximum of 100 characters long. The following are valid characters: a-z,
     * A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user
     * name can't start with a hyphen, period, or at sign.</p>
     */
    inline const Aws::String& GetUserName() const{ return m_userName; }

    /** Returns true once any SetUserName/WithUserName overload has been called. @see GetUserName */
    inline bool UserNameHasBeenSet() const { return m_userNameHasBeenSet; }

    /** Sets the user name (copy). @see GetUserName for the allowed format. */
    inline void SetUserName(const Aws::String& value) { m_userNameHasBeenSet = true; m_userName = value; }

    /** Sets the user name (move). @see GetUserName for the allowed format. */
    inline void SetUserName(Aws::String&& value) { m_userNameHasBeenSet = true; m_userName = std::move(value); }

    /** Sets the user name from a C string. @see GetUserName for the allowed format. */
    inline void SetUserName(const char* value) { m_userNameHasBeenSet = true; m_userName.assign(value); }

    /** Fluent setter (copy). @see GetUserName for the allowed format. */
    inline CreateUserRequest& WithUserName(const Aws::String& value) { SetUserName(value); return *this;}

    /** Fluent setter (move). @see GetUserName for the allowed format. */
    inline CreateUserRequest& WithUserName(Aws::String&& value) { SetUserName(std::move(value)); return *this;}

    /** Fluent setter from a C string. @see GetUserName for the allowed format. */
    inline CreateUserRequest& WithUserName(const char* value) { SetUserName(value); return *this;}
  private:

    // Each request field is paired with a *HasBeenSet flag; the setters above
    // all flip the flag, recording whether the caller explicitly set the field.

    Aws::String m_homeDirectory;
    bool m_homeDirectoryHasBeenSet;

    HomeDirectoryType m_homeDirectoryType;
    bool m_homeDirectoryTypeHasBeenSet;

    Aws::Vector<HomeDirectoryMapEntry> m_homeDirectoryMappings;
    bool m_homeDirectoryMappingsHasBeenSet;

    Aws::String m_policy;
    bool m_policyHasBeenSet;

    Aws::String m_role;
    bool m_roleHasBeenSet;

    Aws::String m_serverId;
    bool m_serverIdHasBeenSet;

    Aws::String m_sshPublicKeyBody;
    bool m_sshPublicKeyBodyHasBeenSet;

    Aws::Vector<Tag> m_tags;
    bool m_tagsHasBeenSet;

    Aws::String m_userName;
    bool m_userNameHasBeenSet;
};
} // namespace Model
} // namespace Transfer
} // namespace Aws
|
# Generated by Django 4.0 on 2021-12-13 15:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 4.0): repoints Post.author at the built-in
    # auth User model with a reverse accessor of user.posts.

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
        ('main', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='author',
            # CASCADE: deleting a user also deletes that user's posts.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='auth.user'),
        ),
    ]
|
# Copyright 2012 Jeff Trawick, http://emptyhammock.com/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import re
import sys
from stacktraces.native import collect
import stacktraces.process_model
class Gdb:
    """Parses gdb backtrace output into a stacktraces.process_model.Process.

    The raw text is either supplied via the ``debuglog`` kwarg or collected
    on demand from a live pid / core file via ``get_output()``.
    """

    def __init__(self, **kwargs):
        """Accepted kwargs: corefile, exe, debuglog (list of lines), proc."""
        self.corefile = kwargs.get('corefile')
        self.exe = kwargs.get('exe')
        self.hdr = None
        self.gdbout = kwargs.get('debuglog')
        self.proc = kwargs.get('proc')
        if not self.proc:
            self.proc = stacktraces.process_model.Process()
        self.pid = self.proc.get_pid()

    def parse(self):
        """Walk the gdb output line by line, building threads and frames.

        Exits the process with status 1 when a frame line cannot be parsed.
        """
        if not self.gdbout:
            self.get_output()
        thr = None
        fr = None
        pending = None  # partial frame line awaiting its continuation
        if collect.is_hdr(self.gdbout[0]):
            self.hdr = self.gdbout[0]
            self.gdbout = self.gdbout[1:]
            if not self.pid:
                self.pid = collect.get_pid(self.hdr)
            if not self.exe:
                self.exe = collect.get_exe(self.hdr)
        for line in self.gdbout:
            if line:
                line = line.rstrip('\r\n')
            if not line:
                continue
            if '---Type <return' in line:
                # gdb pagination prompt, not backtrace content
                continue
            if pending:
                line = pending + line
                pending = None
            # Frame lines can be wrapped by gdb; stash the fragment and glue
            # the next physical line onto it.
            if line[0] == '#' and (line[-1:] == ',' or line[-2:] == ', ' or
                                   line[-17:] == 'is not available.' or line[-2:] == ' ('):
                if line[-2:] == ' (':
                    pending = line
                else:
                    pending = line[:-1]
                continue
            if 'Attaching to program:' in line:
                m = re.search(r'Attaching to program: .([^\']+)\', process (\d+)', line)
                if m:
                    if not self.exe:
                        self.exe = m.group(1)
                    if not self.pid:
                        self.pid = m.group(2)
                continue
            if line[:7] == 'Thread ':
                m = re.search(r'Thread (\d+) ', line)
                gdbtid = m.group(1)
                thr = self.proc.find_thread(gdbtid)
                if not thr:
                    thr = stacktraces.process_model.Thread(gdbtid)
                    self.proc.add_thread(thr)
                fr = None
            elif thr and line[:1] == '#':
                m = re.search(r'signal handler called', line)
                if m:
                    # XXX Mark thread as crashed.
                    continue
                m = re.search(r'#(\d+) +((0x[\da-f]+) in )?([^ ]+) (\([^)]*\))', line)
                if m:
                    frameno = m.group(1)
                    addr = m.group(3)
                    fn = m.group(4)
                    fnargs = m.group(5)
                    # filter out frames with address 0 (seen on both Linux and FreeBSD)
                    if addr and int(addr, 16) == 0:
                        continue
                    fr = stacktraces.process_model.Frame(frameno, fn, fnargs)
                    thr.add_frame(fr)
                    continue
                # try again; make sure to handle
                # #5  0xdeadbeef in Foo::Parse(SynTree&, int&) () from /path/to/lib
                m = re.search(r'#(\d+) +((0x[\da-f]+) in )?(.*)$', line)
                if m:
                    frameno = m.group(1)
                    addr = m.group(3)
                    rest = m.group(4)
                    # filter out frames with address 0 (seen on both Linux and FreeBSD)
                    if addr and int(addr, 16) == 0:
                        continue
                    m = re.match(r'(.*) (\([^)]*\)) (from .*)?', rest)
                    if m:
                        fn = m.group(1)
                        fnargs = m.group(2)
                        fr = stacktraces.process_model.Frame(frameno, fn, fnargs)
                        thr.add_frame(fr)
                        continue
                print('could not parse >%s<' % line, file=sys.stderr)
                sys.exit(1)
            elif fr:
                # "  name = value" lines are local variables of the current frame
                m = re.search(r'^[ \t]+([^ ]+) = (.*)$', line)
                if m:
                    fr.add_var(m.group(1), m.group(2))
        if self.pid and not self.proc.pid:
            self.proc.pid = self.pid
        if self.exe and not self.proc.exe:
            self.proc.exe = self.exe

    def get_output(self):
        """Collect backtrace text from the live pid or core file via gdb."""
        self.gdbout = collect.gdb_collect(None, self.pid, self.corefile, self.exe)
|
require('./bootstrap');

// Expose Vue globally so Blade templates and legacy scripts can reach it.
window.Vue = require('vue');

// Vue.component('example-component', require('./components/ExampleComponent.vue').default);

// Globally registered components, usable in any template under #app.
Vue.component('sign-component', require('./components/SignComponent.vue').default);
Vue.component('ce-component', require('./components/CEComponent.vue').default);
Vue.component('c1m-component', require('./components/C1MComponent.vue').default);

// Root Vue instance mounted on the page's #app element.
const app = new Vue({
    el: '#app',
});
|
import Component from "@ember/component";
import { computed } from '@ember/object';
export default Component.extend({
    // Lazily-initialized list of threads for the template. Using computed()
    // with no dependent keys gives each component instance its own cached
    // empty array, rather than one shared array on the prototype.
    threads: computed(function(){ return []; })
});
|
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Sachin Mane and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class BackgroundJobConfig(Document):
    """Frappe doctype for background-job configuration.

    Invalidates its cached form whenever the document is saved or deleted.
    """

    @staticmethod
    def get_invalidate_key():
        """Return the cache key under which this config is invalidated."""
        # was an f-string with no placeholders; a plain literal is equivalent
        return 'invalidate_background_job_config'

    def invalidate(self):
        """Drop the cached config so the next read sees the new values."""
        # imported lazily to avoid a module-load cycle with latte.utils.caching
        from latte.utils.caching import invalidate
        invalidate(self.get_invalidate_key())

    # Frappe document-event hooks: refresh the cache on every change.
    on_update = invalidate
    on_trash = invalidate
|
/* Copyright (C) 2011-2014 Povilas Kanapickas <povilas@radix.lt>
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef LIBSIMDPP_SIMDPP_DETAIL_INSN_ZIP_HI_H
#define LIBSIMDPP_SIMDPP_DETAIL_INSN_ZIP_HI_H
#ifndef LIBSIMDPP_SIMD_H
#error "This file must be included through simd.h"
#endif
#include <simdpp/types.h>
#include <simdpp/detail/neon/shuffle.h>
#include <simdpp/detail/null/shuffle.h>
namespace simdpp {
namespace SIMDPP_ARCH_NAMESPACE {
namespace detail {
namespace insn {
// Interleaves ("zips") the high halves of two 16x8-bit vectors:
// r = [ a8 b8  a9 b9  ...  a15 b15 ]
static SIMDPP_INL
uint8x16 i_zip16_hi(const uint8x16& a, const uint8x16& b)
{
#if SIMDPP_USE_NULL
    return detail::null::zip16_hi(a, b);
#elif SIMDPP_USE_SSE2
    return _mm_unpackhi_epi8(a.native(), b.native());
#elif SIMDPP_USE_NEON
    // the compiler will optimize multiple vzip instructions if both zip_lo
    // and zip_hi are used on the same arguments
    return vzipq_u8(a.native(), b.native()).val[1];
#elif SIMDPP_USE_ALTIVEC
    return vec_mergel(a.native(), b.native());
#elif SIMDPP_USE_MSA
    return (v16u8) __msa_ilvl_b((v16i8) b.native(), (v16i8) a.native());
#endif
}

#if SIMDPP_USE_AVX2
// 256-bit AVX2 variant (note: unpacks within each 128-bit lane).
static SIMDPP_INL
uint8x32 i_zip16_hi(const uint8x32& a, const uint8x32& b)
{
    return _mm256_unpackhi_epi8(a.native(), b.native());
}
#endif

#if SIMDPP_USE_AVX512BW
// 512-bit AVX-512BW variant.
SIMDPP_INL uint8<64> i_zip16_hi(const uint8<64>& a, const uint8<64>& b)
{
    return _mm512_unpackhi_epi8(a.native(), b.native());
}
#endif

// Generic width: apply the native-width implementation element-wise over
// the vector array.
template<unsigned N> SIMDPP_INL
uint8<N> i_zip16_hi(const uint8<N>& a, const uint8<N>& b)
{
    SIMDPP_VEC_ARRAY_IMPL2(uint8<N>, i_zip16_hi, a, b)
}
// -----------------------------------------------------------------------------
// Interleaves the high halves of two 8x16-bit vectors:
// r = [ a4 b4  a5 b5  a6 b6  a7 b7 ]
static SIMDPP_INL
uint16x8 i_zip8_hi(const uint16x8& a, const uint16x8& b)
{
#if SIMDPP_USE_NULL
    return detail::null::zip8_hi(a, b);
#elif SIMDPP_USE_SSE2
    return _mm_unpackhi_epi16(a.native(), b.native());
#elif SIMDPP_USE_NEON
    return vzipq_u16(a.native(), b.native()).val[1];
#elif SIMDPP_USE_ALTIVEC
    return vec_mergel(a.native(), b.native());
#elif SIMDPP_USE_MSA
    return (v8u16) __msa_ilvl_h((v8i16) b.native(), (v8i16) a.native());
#endif
}

#if SIMDPP_USE_AVX2
// 256-bit AVX2 variant (per-128-bit-lane unpack).
static SIMDPP_INL
uint16x16 i_zip8_hi(const uint16x16& a, const uint16x16& b)
{
    return _mm256_unpackhi_epi16(a.native(), b.native());
}
#endif

#if SIMDPP_USE_AVX512BW
// 512-bit AVX-512BW variant.
SIMDPP_INL uint16<32> i_zip8_hi(const uint16<32>& a, const uint16<32>& b)
{
    return _mm512_unpackhi_epi16(a.native(), b.native());
}
#endif

// Generic width: element-wise over the vector array.
template<unsigned N> SIMDPP_INL
uint16<N> i_zip8_hi(const uint16<N>& a, const uint16<N>& b)
{
    SIMDPP_VEC_ARRAY_IMPL2(uint16<N>, i_zip8_hi, a, b)
}
// -----------------------------------------------------------------------------
// Interleaves the high halves of two 4x32-bit integer vectors:
// r = [ a2 b2  a3 b3 ]
static SIMDPP_INL
uint32x4 i_zip4_hi(const uint32x4& a, const uint32x4& b)
{
#if SIMDPP_USE_NULL
    return detail::null::zip4_hi(a, b);
#elif SIMDPP_USE_SSE2
    return _mm_unpackhi_epi32(a.native(), b.native());
#elif SIMDPP_USE_NEON
    return vzipq_u32(a.native(), b.native()).val[1];
#elif SIMDPP_USE_ALTIVEC
    return vec_mergel(a.native(), b.native());
#elif SIMDPP_USE_MSA
    return (v4u32) __msa_ilvl_w((v4i32) b.native(), (v4i32) a.native());
#endif
}

#if SIMDPP_USE_AVX2
// 256-bit AVX2 variant (per-128-bit-lane unpack).
static SIMDPP_INL
uint32x8 i_zip4_hi(const uint32x8& a, const uint32x8& b)
{
    return _mm256_unpackhi_epi32(a.native(), b.native());
}
#endif

#if SIMDPP_USE_AVX512F
// 512-bit AVX-512F variant.
static SIMDPP_INL
uint32<16> i_zip4_hi(const uint32<16>& a, const uint32<16>& b)
{
    return _mm512_unpackhi_epi32(a.native(), b.native());
}
#endif

// Generic width: element-wise over the vector array.
template<unsigned N> SIMDPP_INL
uint32<N> i_zip4_hi(const uint32<N>& a, const uint32<N>& b)
{
    SIMDPP_VEC_ARRAY_IMPL2(uint32<N>, i_zip4_hi, a, b)
}
// -----------------------------------------------------------------------------
// Interleaves the high halves of two 2x64-bit integer vectors: r = [ a1 b1 ]
static SIMDPP_INL
uint64x2 i_zip2_hi(const uint64x2& a, const uint64x2& b)
{
#if SIMDPP_USE_SSE2
    return _mm_unpackhi_epi64(a.native(), b.native());
#elif SIMDPP_USE_NEON
    return neon::zip2_hi(a, b);
#elif SIMDPP_USE_VSX_207
    return vec_mergel(a.native(), b.native());
#elif SIMDPP_USE_NULL || SIMDPP_USE_ALTIVEC
    // no 64-bit vec_mergel before VSX 2.07 -- fall back to the scalar reference
    return detail::null::zip2_hi(a, b);
#elif SIMDPP_USE_MSA
    return (v2u64) __msa_ilvl_d((v2i64) b.native(), (v2i64) a.native());
#endif
}

#if SIMDPP_USE_AVX2
// 256-bit AVX2 variant (per-128-bit-lane unpack).
static SIMDPP_INL
uint64x4 i_zip2_hi(const uint64x4& a, const uint64x4& b)
{
    return _mm256_unpackhi_epi64(a.native(), b.native());
}
#endif

#if SIMDPP_USE_AVX512F
// 512-bit AVX-512F variant.
static SIMDPP_INL
uint64<8> i_zip2_hi(const uint64<8>& a, const uint64<8>& b)
{
    return _mm512_unpackhi_epi64(a.native(), b.native());
}
#endif

// Generic width: element-wise over the vector array.
template<unsigned N> SIMDPP_INL
uint64<N> i_zip2_hi(const uint64<N>& a, const uint64<N>& b)
{
    SIMDPP_VEC_ARRAY_IMPL2(uint64<N>, i_zip2_hi, a, b)
}
// -----------------------------------------------------------------------------
// Interleaves the upper two float32 elements of a and b: r = [ a2 b2 a3 b3 ].
static SIMDPP_INL
float32x4 i_zip4_hi(const float32x4& a, const float32x4& b)
{
#if SIMDPP_USE_NULL || SIMDPP_USE_NEON_NO_FLT_SP
    return detail::null::zip4_hi(a, b);
#elif SIMDPP_USE_SSE2
    return _mm_unpackhi_ps(a.native(), b.native());
#elif SIMDPP_USE_NEON
    // vzipq produces both interleaved halves; val[1] is the high half.
    return vzipq_f32(a.native(), b.native()).val[1];
#elif SIMDPP_USE_ALTIVEC
    return vec_mergel(a.native(), b.native());
#elif SIMDPP_USE_MSA
    // Interleave via the integer form; float lanes are moved bit-for-bit.
    return (v4f32) __msa_ilvl_w((v4i32) b.native(), (v4i32) a.native());
#endif
}
#if SIMDPP_USE_AVX
// AVX: unpack performed independently within each 128-bit lane.
static SIMDPP_INL
float32x8 i_zip4_hi(const float32x8& a, const float32x8& b)
{
    return _mm256_unpackhi_ps(a.native(), b.native());
}
#endif
#if SIMDPP_USE_AVX512F
// AVX-512: per-128-bit-lane unpack, as with AVX.
static SIMDPP_INL
float32<16> i_zip4_hi(const float32<16>& a, const float32<16>& b)
{
    return _mm512_unpackhi_ps(a.native(), b.native());
}
#endif
// Generic width: apply the operation to each native-sized sub-vector.
template<unsigned N> SIMDPP_INL
float32<N> i_zip4_hi(const float32<N>& a, const float32<N>& b)
{
    SIMDPP_VEC_ARRAY_IMPL2(float32<N>, i_zip4_hi, a, b)
}
// -----------------------------------------------------------------------------
// Interleaves the upper float64 elements of a and b: r = [ a1 b1 ].
static SIMDPP_INL
float64x2 i_zip2_hi(const float64x2& a, const float64x2& b)
{
#if SIMDPP_USE_SSE2
    // movehl(b, a) moves the high double of a into the low slot and keeps
    // the high double of b in the high slot, i.e. r = [ a1 b1 ]. The float
    // casts are bit-pattern reinterpretations only.
    return _mm_castps_pd(_mm_movehl_ps(_mm_castpd_ps(b.native()),
                                       _mm_castpd_ps(a.native())));
#elif SIMDPP_USE_NEON64
    return vtrn2q_f64(a.native(), b.native());
#elif SIMDPP_USE_VSX_206
    // Merge via the 64-bit integer form; doubles are moved bit-for-bit.
    return (__vector double) vec_mergel((__vector uint64_t)a.native(),
                                        (__vector uint64_t)b.native());
#elif SIMDPP_USE_NULL || SIMDPP_USE_ALTIVEC || SIMDPP_USE_NEON
    // No native f64 support on these targets; use the scalar fallback.
    return detail::null::zip2_hi(a, b);
#elif SIMDPP_USE_MSA
    // MSA "interleave left" takes operands as (b, a) to yield this layout.
    return (v2f64) __msa_ilvl_d((v2i64) b.native(), (v2i64) a.native());
#endif
}
#if SIMDPP_USE_AVX
// AVX: unpack performed independently within each 128-bit lane.
static SIMDPP_INL
float64x4 i_zip2_hi(const float64x4& a, const float64x4& b)
{
    return _mm256_unpackhi_pd(a.native(), b.native());
}
#endif
#if SIMDPP_USE_AVX512F
// AVX-512: per-128-bit-lane unpack, as with AVX.
static SIMDPP_INL
float64<8> i_zip2_hi(const float64<8>& a, const float64<8>& b)
{
    return _mm512_unpackhi_pd(a.native(), b.native());
}
#endif
// Generic width: apply the operation to each native-sized sub-vector.
template<unsigned N> SIMDPP_INL
float64<N> i_zip2_hi(const float64<N>& a, const float64<N>& b)
{
    SIMDPP_VEC_ARRAY_IMPL2(float64<N>, i_zip2_hi, a, b)
}
} // namespace insn
} // namespace detail
} // namespace SIMDPP_ARCH_NAMESPACE
} // namespace simdpp
#endif
|
// Standard Google Analytics (analytics.js) bootstrap snippet: defines the
// ga() command queue and asynchronously injects the analytics.js loader
// before the first <script> element on the page. Do not reformat — this is
// Google's canonical minified loader.
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
// The {{ ... }} placeholders are Liquid template tags filled in by the
// static-site generator at build time.
ga('create', '{{ site.google_analytics }}', 'auto');
// Anonymize the visitor IP before it is stored.
ga('set', 'anonymizeIp', true);
// Record the page view with the generator-supplied URL and title; the
// Liquid 'replace' filter escapes single quotes in the title.
ga('send', 'pageview', {
  'page': '{{ page.url }}',
  'title': '{{ page.title | replace: "'", "\\'" }}'
});
|
"""Build a small TODS pipeline (dataset -> dataframe -> parse -> extract
attributes -> standard scaling) and serialize it to example_pipeline.json."""
from d3m import index
from d3m.metadata.base import ArgumentType
from d3m.metadata.pipeline import Pipeline, PrimitiveStep

# Creating pipeline; 'inputs' is the single pipeline-level input (a Dataset).
pipeline_description = Pipeline()
pipeline_description.add_input(name='inputs')

# Step 0: dataset_to_dataframe — convert the input Dataset container into a
# DataFrame for the downstream primitives.
primitive_0 = index.get_primitive('d3m.primitives.tods.data_processing.dataset_to_dataframe')
step_0 = PrimitiveStep(primitive=primitive_0)
step_0.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='inputs.0')
step_0.add_output('produce')
pipeline_description.add_step(step_0)

# Step 1: column_parser (presumably parses raw string columns into typed
# values — see the TODS primitive docs to confirm).
primitive_1 = index.get_primitive('d3m.primitives.tods.data_processing.column_parser')
step_1 = PrimitiveStep(primitive=primitive_1)
step_1.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.0.produce')
step_1.add_output('produce')
pipeline_description.add_step(step_1)

# Step 2: extract_columns_by_semantic_types(attributes) — keep only the
# columns tagged with the 'Attribute' semantic type.
step_2 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.extract_columns_by_semantic_types'))
step_2.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.1.produce')
step_2.add_output('produce')
step_2.add_hyperparameter(name='semantic_types', argument_type=ArgumentType.VALUE,
                          data=['https://metadata.datadrivendiscovery.org/types/Attribute'])
pipeline_description.add_step(step_2)

# Step 3: standard_scaler — restricted to column index 2 (use_columns);
# return_result='append' adds the scaled column instead of replacing it.
step_3 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.timeseries_processing.transformation.standard_scaler'))
step_3.add_hyperparameter(name='use_semantic_types', argument_type=ArgumentType.VALUE, data=True)
step_3.add_hyperparameter(name='use_columns', argument_type=ArgumentType.VALUE, data=(2,))
step_3.add_hyperparameter(name='return_result', argument_type=ArgumentType.VALUE, data='append')
step_3.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.2.produce')
step_3.add_output('produce')
pipeline_description.add_step(step_3)

# Final Output: expose step 3's produce output as the pipeline result.
pipeline_description.add_output(name='output predictions', data_reference='steps.3.produce')

# Output to JSON for later execution/inspection.
data = pipeline_description.to_json()
with open('example_pipeline.json', 'w') as f:
    f.write(data)
print(data)
|
# Copyright (c) 2015 Aptira Pty Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import math
import time
from xml.dom import minidom
from xml.parsers import expat
from lxml import etree
from oslo_log import log as logging
from oslo_log import versionutils
from oslo_serialization import jsonutils
from oslo_utils import excutils
import six
import webob
from guts import exception
from guts import i18n
from guts.i18n import _, _LE, _LI
from guts import utils
from guts.wsgi import common as wsgi
# XML namespace for v1 of the migration API payloads.
XML_NS_V1 = 'http://docs.openstack.org/api/openstack-migration/1.0/content'
# Module-level flag; presumably gates a one-time warning about XML usage —
# TODO(review): confirm at the use site (not visible in this chunk).
XML_WARNING = False
LOG = logging.getLogger(__name__)
# Content types this API accepts/emits, used for Accept-header matching.
SUPPORTED_CONTENT_TYPES = (
    'application/json',
    'application/vnd.openstack.migration+json',
    'application/xml',
    'application/vnd.openstack.migration+xml',
)
# Maps full MIME types onto the short (de)serializer keys ('json'/'xml').
_MEDIA_TYPE_MAP = {
    'application/vnd.openstack.migration+json': 'json',
    'application/json': 'json',
    'application/vnd.openstack.migration+xml': 'xml',
    'application/xml': 'xml',
    'application/atom+xml': 'atom',
}
class Request(webob.Request):
    """Add some OpenStack API-specific logic to the base webob.Request."""

    def __init__(self, *args, **kwargs):
        super(Request, self).__init__(*args, **kwargs)
        # Per-request cache: {resource name: {resource id: resource}}.
        self._resource_cache = {}

    def cache_resource(self, resource_to_cache, id_attribute='id', name=None):
        """Cache the given resource.

        Allow API methods to cache objects, such as results from a DB query,
        to be used by API extensions within the same API request.

        The resource_to_cache can be a list or an individual resource,
        but ultimately resources are cached individually using the given
        id_attribute.

        Different resources types might need to be cached during the same
        request, they can be cached using the name parameter. For example:

            Controller 1:
                request.cache_resource(db_migrations, name='migrations')
            Controller 2:
                db_migrations = request.cached_resource('migrations')

        If no name is given, a default name will be used for the resource.

        An instance of this class only lives for the lifetime of a
        single API request, so there's no need to implement full
        cache management.
        """
        if not isinstance(resource_to_cache, list):
            resource_to_cache = [resource_to_cache]
        if not name:
            name = self.path
        cached_resources = self._resource_cache.setdefault(name, {})
        for resource in resource_to_cache:
            cached_resources[resource[id_attribute]] = resource

    def cached_resource(self, name=None):
        """Get the cached resources cached under the given resource name.

        Allow an API extension to get previously stored objects within
        the same API request.

        Note that the object data will be slightly stale.

        :returns: a dict of id_attribute to the resource from the cached
                  resources, an empty map if an empty collection was cached,
                  or None if nothing has been cached yet under this name
        """
        if not name:
            name = self.path
        if name not in self._resource_cache:
            # Nothing has been cached for this key yet
            return None
        return self._resource_cache[name]

    def cached_resource_by_id(self, resource_id, name=None):
        """Get a resource by ID cached under the given resource name.

        Allow an API extension to get a previously stored object
        within the same API request. This is basically a convenience method
        to lookup by ID on the dictionary of all cached resources.

        Note that the object data will be slightly stale.

        :returns: the cached resource or None if the item is not in the cache
        """
        resources = self.cached_resource(name)
        if not resources:
            # Nothing has been cached for this key yet
            return None
        return resources.get(resource_id)

    def cache_db_items(self, key, items, item_key='id'):
        """Cache database items under the given key.

        Allow API methods to store objects from a DB query to be
        used by API extensions within the same API request.

        An instance of this class only lives for the lifetime of a
        single API request, so there's no need to implement full
        cache management.
        """
        self.cache_resource(items, item_key, key)

    def get_db_items(self, key):
        """Get database items.

        Allow an API extension to get previously stored objects within
        the same API request.

        Note that the object data will be slightly stale.
        """
        return self.cached_resource(key)

    def get_db_item(self, key, item_key):
        """Get database item.

        Allow an API extension to get a previously stored object
        within the same API request.

        Note that the object data will be slightly stale.
        """
        return self.get_db_items(key).get(item_key)

    # Convenience wrappers caching specific resource kinds.

    def cache_db_migrations(self, migrations):
        # NOTE(mgagne) Cache it twice for backward compatibility reasons
        self.cache_db_items('migrations', migrations, 'id')
        self.cache_db_items(self.path, migrations, 'id')

    def cache_db_migration(self, migration):
        # NOTE(mgagne) Cache it twice for backward compatibility reasons
        self.cache_db_items('migrations', [migration], 'id')
        self.cache_db_items(self.path, [migration], 'id')

    def get_db_migrations(self):
        # Prefer the fixed key, fall back to the legacy per-path key.
        return (self.get_db_items('migrations') or
                self.get_db_items(self.path))

    def get_db_migration(self, migration_id):
        # Prefer the fixed key, fall back to the legacy per-path key.
        return (self.get_db_item('migrations', migration_id) or
                self.get_db_item(self.path, migration_id))

    def cache_db_migration_types(self, migration_types):
        self.cache_db_items('migration_types', migration_types, 'id')

    def cache_db_migration_type(self, migration_type):
        self.cache_db_items('migration_types', [migration_type], 'id')

    def get_db_migration_types(self):
        return self.get_db_items('migration_types')

    def get_db_migration_type(self, migration_type_id):
        return self.get_db_item('migration_types', migration_type_id)

    def cache_db_snapshots(self, snapshots):
        self.cache_db_items('snapshots', snapshots, 'id')

    def cache_db_snapshot(self, snapshot):
        self.cache_db_items('snapshots', [snapshot], 'id')

    def get_db_snapshots(self):
        return self.get_db_items('snapshots')

    def get_db_snapshot(self, snapshot_id):
        return self.get_db_item('snapshots', snapshot_id)

    def cache_db_backups(self, backups):
        self.cache_db_items('backups', backups, 'id')

    def cache_db_backup(self, backup):
        self.cache_db_items('backups', [backup], 'id')

    def get_db_backups(self):
        return self.get_db_items('backups')

    def get_db_backup(self, backup_id):
        return self.get_db_item('backups', backup_id)

    def best_match_content_type(self):
        """Determine the requested response content-type.

        The result is computed once and memoized in the WSGI environ.
        A supported URL suffix (e.g. '.json') wins over the Accept header;
        the final fallback is 'application/json'.
        """
        if 'guts.best_content_type' not in self.environ:
            # Calculate the best MIME type
            content_type = None
            # Check URL path suffix
            parts = self.path.rsplit('.', 1)
            if len(parts) > 1:
                possible_type = 'application/' + parts[1]
                if possible_type in SUPPORTED_CONTENT_TYPES:
                    content_type = possible_type
            if not content_type:
                content_type = self.accept.best_match(SUPPORTED_CONTENT_TYPES)
            self.environ['guts.best_content_type'] = (content_type or
                                                      'application/json')
        return self.environ['guts.best_content_type']

    def get_content_type(self):
        """Determine content type of the request body.

        Does not do any body introspection, only checks header

        :returns: the Content-Type header value, or None if absent
        :raises exception.InvalidContentType: if the type is unsupported
        """
        if "Content-Type" not in self.headers:
            return None
        allowed_types = SUPPORTED_CONTENT_TYPES
        content_type = self.content_type
        if content_type not in allowed_types:
            raise exception.InvalidContentType(content_type=content_type)
        return content_type

    def best_match_language(self):
        """Determines best available locale from the Accept-Language header.

        :returns: the best language match or None if the 'Accept-Language'
                  header was not available in the request.
        """
        if not self.accept_language:
            return None
        all_languages = i18n.get_available_languages()
        return self.accept_language.best_match(all_languages)
class ActionDispatcher(object):
    """Maps method name to local methods through action name."""

    def dispatch(self, *args, **kwargs):
        """Invoke the method named by the 'action' kwarg (default: default)."""
        name = kwargs.pop('action', 'default')
        handler = getattr(self, str(name), None)
        if handler is None:
            # No method with that name: fall back to the default handler.
            handler = self.default
        return handler(*args, **kwargs)

    def default(self, data):
        """Subclasses must supply a fallback handler."""
        raise NotImplementedError()
class TextDeserializer(ActionDispatcher):
    """Default request body deserialization."""

    def deserialize(self, datastring, action='default'):
        """Deserialize ``datastring`` via the handler named by ``action``."""
        result = self.dispatch(datastring, action=action)
        return result

    def default(self, datastring):
        """Fallback: an unrecognized body deserializes to an empty dict."""
        return {}
class JSONDeserializer(TextDeserializer):
    """Deserializer for JSON request bodies."""

    def _from_json(self, datastring):
        """Parse a JSON document, mapping parse errors to API errors.

        :raises exception.MalformedRequestBody: if the body is not JSON
        """
        try:
            parsed = jsonutils.loads(datastring)
        except ValueError:
            msg = _("cannot understand JSON")
            raise exception.MalformedRequestBody(reason=msg)
        return parsed

    def default(self, datastring):
        """Wrap the parsed JSON under the conventional 'body' key."""
        return {'body': self._from_json(datastring)}
class XMLDeserializer(TextDeserializer):
    """Deserializer for XML request bodies, driven by a metadata dict."""

    def __init__(self, metadata=None):
        """Initialize XMLDeserializer.

        :param metadata: information needed to deserialize xml into
                         a dictionary.
        """
        super(XMLDeserializer, self).__init__()
        self.metadata = metadata or {}

    def _from_xml(self, datastring):
        """Parse an XML document into a {root tag: converted value} dict.

        :raises exception.MalformedRequestBody: if the body is not XML
        """
        # Node names listed in metadata['plurals'] deserialize to lists.
        plurals = set(self.metadata.get('plurals', {}))
        try:
            node = utils.safe_minidom_parse_string(datastring).childNodes[0]
            return {node.nodeName: self._from_xml_node(node, plurals)}
        except expat.ExpatError:
            msg = _("cannot understand XML")
            raise exception.MalformedRequestBody(reason=msg)

    def _from_xml_node(self, node, listnames):
        """Convert a minidom node to a simple Python type.

        :param listnames: list of XML node names whose subnodes should
                          be considered list items.
        """
        # 3 is minidom's TEXT_NODE: a lone text child collapses to its value.
        if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
            return node.childNodes[0].nodeValue
        elif node.nodeName in listnames:
            return [self._from_xml_node(n, listnames) for n in node.childNodes]
        else:
            # Generic element: attributes and non-text children merge into a
            # dict; interleaved text nodes are dropped.
            result = dict()
            for attr in node.attributes.keys():
                result[attr] = node.attributes[attr].nodeValue
            for child in node.childNodes:
                if child.nodeType != node.TEXT_NODE:
                    result[child.nodeName] = self._from_xml_node(child,
                                                                 listnames)
            return result

    def find_first_child_named_in_namespace(self, parent, namespace, name):
        """Search a nodes children for the first child with a given name."""
        for node in parent.childNodes:
            if (node.localName == name and
                    node.namespaceURI and
                    node.namespaceURI == namespace):
                return node
        return None

    def find_first_child_named(self, parent, name):
        """Search a nodes children for the first child with a given name."""
        for node in parent.childNodes:
            if node.nodeName == name:
                return node
        return None

    def find_children_named(self, parent, name):
        """Return all of a nodes children who have the given name."""
        for node in parent.childNodes:
            if node.nodeName == name:
                yield node

    def extract_text(self, node):
        """Get the text field contained by the given node."""
        text = []
        # Cannot assume entire text will be in a single child node because SAX
        # parsers may split contiguous character data into multiple chunks
        for child in node.childNodes:
            if child.nodeType == child.TEXT_NODE:
                text.append(child.nodeValue)
        return ''.join(text)

    def default(self, datastring):
        """Wrap the parsed XML under the conventional 'body' key."""
        return {'body': self._from_xml(datastring)}
class MetadataXMLDeserializer(XMLDeserializer):
    """XML deserializer that understands the common <metadata> element."""

    def extract_metadata(self, metadata_node):
        """Marshal the metadata attribute of a parsed request."""
        if metadata_node is None:
            return {}
        # Each <meta key="..."> child contributes one key/value pair;
        # duplicate keys keep the last occurrence, as before.
        return {meta.getAttribute("key"): self.extract_text(meta)
                for meta in self.find_children_named(metadata_node, "meta")}
class DictSerializer(ActionDispatcher):
    """Default request body serialization."""

    def serialize(self, data, action='default'):
        """Serialize ``data`` using the handler registered for ``action``."""
        serialized = self.dispatch(data, action=action)
        return serialized

    def default(self, data):
        """Fallback serializer: produce an empty body."""
        return ""
class JSONDictSerializer(DictSerializer):
    """Default JSON request body serialization."""

    def default(self, data):
        """Render ``data`` as a JSON document string."""
        document = jsonutils.dumps(data)
        return document
class XMLDictSerializer(DictSerializer):
    """Serializes dicts to XML, driven by a metadata dict."""

    def __init__(self, metadata=None, xmlns=None):
        """Initialize XMLDictSerializer.

        :param metadata: information needed to deserialize xml into
                         a dictionary.
        :param xmlns: XML namespace to include with serialized xml
        """
        super(XMLDictSerializer, self).__init__()
        self.metadata = metadata or {}
        self.xmlns = xmlns

    def default(self, data):
        """Serialize a single-key dict; the key becomes the root tag."""
        # We expect data to contain a single key which is the XML root.
        root_key = list(data.keys())[0]
        doc = minidom.Document()
        node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
        return self.to_xml_string(node)

    def to_xml_string(self, node, has_atom=False):
        # Attach namespace declarations before rendering the tree.
        self._add_xmlns(node, has_atom)
        return node.toxml('UTF-8')

    def _add_xmlns(self, node, has_atom=False):
        if self.xmlns is not None:
            node.setAttribute('xmlns', self.xmlns)
        if has_atom:
            node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")

    def _to_xml_node(self, doc, metadata, nodename, data):
        """Recursive method to convert data members to XML nodes."""
        result = doc.createElement(nodename)
        # Set the xml namespace if one is specified
        # TODO(justinsb): We could also use prefixes on the keys
        xmlns = metadata.get('xmlns', None)
        if xmlns:
            result.setAttribute('xmlns', xmlns)
        # TODO(bcwaldon): accomplish this without a type-check
        if isinstance(data, list):
            # Lists either render as attribute-only item elements
            # (list_collections) or recurse under a singularized tag name.
            collections = metadata.get('list_collections', {})
            if nodename in collections:
                metadata = collections[nodename]
                for item in data:
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(item))
                    result.appendChild(node)
                return result
            singular = metadata.get('plurals', {}).get(nodename, None)
            if singular is None:
                # Naive singularization fallback ('things' -> 'thing').
                if nodename.endswith('s'):
                    singular = nodename[:-1]
                else:
                    singular = 'item'
            for item in data:
                node = self._to_xml_node(doc, metadata, singular, item)
                result.appendChild(node)
        elif isinstance(data, dict):
            # Dicts either render as <item key="k">v</item> children
            # (dict_collections) or split into attributes and child elements.
            collections = metadata.get('dict_collections', {})
            if nodename in collections:
                metadata = collections[nodename]
                for k, v in data.items():
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(k))
                    text = doc.createTextNode(str(v))
                    node.appendChild(text)
                    result.appendChild(node)
                return result
            attrs = metadata.get('attributes', {}).get(nodename, {})
            for k, v in data.items():
                if k in attrs:
                    result.setAttribute(k, str(v))
                else:
                    node = self._to_xml_node(doc, metadata, k, v)
                    result.appendChild(node)
        else:
            # Type is atom
            node = doc.createTextNode(str(data))
            result.appendChild(node)
        return result

    def _create_link_nodes(self, xml_doc, links):
        """Build <atom:link> elements for the given link dicts."""
        link_nodes = []
        for link in links:
            link_node = xml_doc.createElement('atom:link')
            link_node.setAttribute('rel', link['rel'])
            link_node.setAttribute('href', link['href'])
            if 'type' in link:
                link_node.setAttribute('type', link['type'])
            link_nodes.append(link_node)
        return link_nodes

    def _to_xml(self, root):
        """Convert the xml object to an xml string."""
        return etree.tostring(root, encoding='UTF-8', xml_declaration=True)
def serializers(**serializers):
    """Attaches serializers to a method.

    This decorator associates a dictionary of serializers with a
    method. Note that the function attributes are directly
    manipulated; the method is not wrapped.
    """
    def decorator(func):
        registry = getattr(func, 'wsgi_serializers', None)
        if registry is None:
            registry = {}
            func.wsgi_serializers = registry
        registry.update(serializers)
        return func
    return decorator
def deserializers(**deserializers):
    """Attaches deserializers to a method.

    This decorator associates a dictionary of deserializers with a
    method. Note that the function attributes are directly
    manipulated; the method is not wrapped.
    """
    def decorator(func):
        registry = getattr(func, 'wsgi_deserializers', None)
        if registry is None:
            registry = {}
            func.wsgi_deserializers = registry
        registry.update(deserializers)
        return func
    return decorator
def response(code):
    """Attaches response code to a method.

    This decorator associates a response code with a method. Note
    that the function attributes are directly manipulated; the method
    is not wrapped.
    """
    def _attach(func):
        func.wsgi_code = code
        return func
    return _attach
class ResponseObject(object):
    """Bundles a response object with appropriate serializers.

    Object that app methods may return in order to bind alternate
    serializers with a response object to be serialized. Its use is
    optional.
    """

    def __init__(self, obj, code=None, **serializers):
        """Binds serializers with an object.

        Takes keyword arguments akin to the @serializer() decorator
        for specifying serializers. Serializers specified will be
        given preference over default serializers or method-specific
        serializers on return.
        """
        self.obj = obj
        self.serializers = serializers
        self._default_code = 200
        self._code = code
        # Header names are stored lower-cased for case-insensitive access.
        self._headers = {}
        self.serializer = None
        self.media_type = None

    def __getitem__(self, key):
        """Retrieves a header with the given name."""
        return self._headers[key.lower()]

    def __setitem__(self, key, value):
        """Sets a header with the given name to the given value."""
        self._headers[key.lower()] = value

    def __delitem__(self, key):
        """Deletes the header with the given name."""
        del self._headers[key.lower()]

    def _bind_method_serializers(self, meth_serializers):
        """Binds method serializers with the response object.

        Binds the method serializers with the response object.
        Serializers specified to the constructor will take precedence
        over serializers specified to this method.

        :param meth_serializers: A dictionary with keys mapping to
                                 response types and values containing
                                 serializer objects.
        """
        # We can't use update because that would be the wrong
        # precedence
        for mtype, serializer in meth_serializers.items():
            self.serializers.setdefault(mtype, serializer)

    def get_serializer(self, content_type, default_serializers=None):
        """Returns the serializer for the wrapped object.

        Returns the serializer for the wrapped object subject to the
        indicated content type. If no serializer matching the content
        type is attached, an appropriate serializer drawn from the
        default serializers will be used. If no appropriate
        serializer is available, raises InvalidContentType.
        """
        default_serializers = default_serializers or {}
        try:
            # Normalize the full MIME type to its short key ('json'/'xml').
            mtype = _MEDIA_TYPE_MAP.get(content_type, content_type)
            if mtype in self.serializers:
                return mtype, self.serializers[mtype]
            else:
                return mtype, default_serializers[mtype]
        except (KeyError, TypeError):
            raise exception.InvalidContentType(content_type=content_type)

    def preserialize(self, content_type, default_serializers=None):
        """Prepares the serializer that will be used to serialize.

        Determines the serializer that will be used and prepares an
        instance of it for later call. This allows the serializer to
        be accessed by extensions for, e.g., template extension.
        """
        mtype, serializer = self.get_serializer(content_type,
                                                default_serializers)
        self.media_type = mtype
        # Instantiate now so serialize() can reuse the prepared instance.
        self.serializer = serializer()

    def attach(self, **kwargs):
        """Attach slave templates to serializers."""
        if self.media_type in kwargs:
            self.serializer.attach(kwargs[self.media_type])

    def serialize(self, request, content_type, default_serializers=None):
        """Serializes the wrapped object.

        Utility method for serializing the wrapped object. Returns a
        webob.Response object.
        """
        # Reuse a serializer prepared via preserialize(), if any.
        if self.serializer:
            serializer = self.serializer
        else:
            _mtype, _serializer = self.get_serializer(content_type,
                                                      default_serializers)
            serializer = _serializer()
        response = webob.Response()
        response.status_int = self.code
        for hdr, value in self._headers.items():
            response.headers[hdr] = value
        response.headers['Content-Type'] = content_type
        if self.obj is not None:
            body = serializer.serialize(self.obj)
            # webob response bodies must be bytes.
            if isinstance(body, six.text_type):
                body = body.encode('utf-8')
            response.body = body
        return response

    @property
    def code(self):
        """Retrieve the response status."""
        return self._code or self._default_code

    @property
    def headers(self):
        """Retrieve the headers."""
        return self._headers.copy()
def action_peek_json(body):
    """Determine action to invoke from a JSON action body."""
    try:
        decoded = jsonutils.loads(body)
    except ValueError:
        raise exception.MalformedRequestBody(
            reason=_("cannot understand JSON"))
    # The action body must be a single-key mapping: {action: parameters}.
    if len(decoded) != 1:
        raise exception.MalformedRequestBody(reason=_("too many body keys"))
    # The lone key names the action.
    (action_name,) = decoded.keys()
    return action_name
def action_peek_xml(body):
    """Determine action to invoke."""
    # The root element name of the XML body names the action.
    dom = utils.safe_minidom_parse_string(body)
    action_node = dom.childNodes[0]
    return action_node.tagName
class ResourceExceptionHandler(object):
    """Context manager to handle Resource exceptions.

    Used when processing exceptions generated by API implementation
    methods (or their extensions). Converts most exceptions to Fault
    exceptions, with the appropriate logging.
    """

    def __enter__(self):
        return None

    def __exit__(self, ex_type, ex_value, ex_traceback):
        # No exception occurred: nothing to translate.
        if not ex_value:
            return True
        if isinstance(ex_value, exception.NotAuthorized):
            # Authorization failures become HTTP 403 Faults.
            raise Fault(webob.exc.HTTPForbidden(explanation=ex_value.msg))
        elif isinstance(ex_value, exception.Invalid):
            # Validation failures keep their own status code.
            raise Fault(exception.ConvertedException(
                code=ex_value.code, explanation=ex_value.msg))
        elif isinstance(ex_value, TypeError):
            # Likely a programming error: log full traceback, return 400.
            exc_info = (ex_type, ex_value, ex_traceback)
            LOG.error(_LE(
                'Exception handling resource: %s'),
                ex_value, exc_info=exc_info)
            raise Fault(webob.exc.HTTPBadRequest())
        elif isinstance(ex_value, Fault):
            LOG.info(_LI("Fault thrown: %s"), ex_value)
            raise ex_value
        elif isinstance(ex_value, webob.exc.HTTPException):
            LOG.info(_LI("HTTP exception thrown: %s"), ex_value)
            raise Fault(ex_value)
        # We didn't handle the exception
        return False
class Resource(wsgi.Application):
"""WSGI app that handles (de)serialization and controller dispatch.
WSGI app that reads routing information supplied by RoutesMiddleware
and calls the requested action method upon its controller. All
controller action methods must accept a 'req' argument, which is the
incoming wsgi.Request. If the operation is a PUT or POST, the controller
method must also accept a 'body' argument (the deserialized request body).
They may raise a webob.exc exception or return a dict, which will be
serialized by requested content type.
Exceptions derived from webob.exc.HTTPException will be automatically
wrapped in Fault() to provide API friendly error responses.
"""
def __init__(self, controller, action_peek=None, **deserializers):
    """Initialize Resource.

    :param controller: object that implement methods created by routes lib
    :param action_peek: dictionary of routines for peeking into an action
                        request body to determine the desired action
    :param deserializers: overrides for the per-format default
                          deserializers (keyed 'json'/'xml')
    """
    self.controller = controller
    default_deserializers = dict(xml=XMLDeserializer,
                                 json=JSONDeserializer)
    default_deserializers.update(deserializers)
    self.default_deserializers = default_deserializers
    self.default_serializers = dict(xml=XMLDictSerializer,
                                    json=JSONDictSerializer)
    # Per-format helpers that sniff the action name out of a request body.
    self.action_peek = dict(xml=action_peek_xml,
                            json=action_peek_json)
    self.action_peek.update(action_peek or {})
    # Copy over the actions dictionary
    self.wsgi_actions = {}
    if controller:
        self.register_actions(controller)
    # Save a mapping of extensions
    self.wsgi_extensions = {}
    self.wsgi_action_extensions = {}
def register_actions(self, controller):
    """Registers controller actions with this resource."""
    # Each entry maps an action name to a controller method name.
    for action_key, method_name in getattr(controller,
                                           'wsgi_actions', {}).items():
        self.wsgi_actions[action_key] = getattr(controller, method_name)
def register_extensions(self, controller):
    """Registers controller extensions with this resource."""
    for method_name, action_name in getattr(controller,
                                            'wsgi_extensions', []):
        # Look up the extending method on the controller.
        extension = getattr(controller, method_name)
        if action_name:
            # Extending an action.
            self.wsgi_action_extensions.setdefault(
                action_name, []).append(extension)
        else:
            # Extending a regular method.
            self.wsgi_extensions.setdefault(
                method_name, []).append(extension)
def get_action_args(self, request_environment):
    """Parse dictionary created by routes library."""
    # The controller may supply its own argument extraction.
    if hasattr(self.controller, 'get_action_args'):
        return self.controller.get_action_args(request_environment)
    try:
        args = request_environment['wsgiorg.routing_args'][1].copy()
    except (KeyError, IndexError, AttributeError):
        return {}
    # Routing bookkeeping keys are not action arguments.
    for routing_key in ('controller', 'format'):
        args.pop(routing_key, None)
    return args
def get_body(self, request):
    """Return (content type, body) for the request.

    A missing, empty, or unsupported Content-Type yields (None, '') so
    callers can treat the request as having no usable body.
    """
    if len(request.body) == 0:
        LOG.debug("Empty body provided in request")
        return None, ''
    try:
        content_type = request.get_content_type()
    except exception.InvalidContentType:
        LOG.debug("Unrecognized Content-Type provided in request")
        return None, ''
    if not content_type:
        LOG.debug("No Content-Type provided in request")
        return None, ''
    return content_type, request.body
def deserialize(self, meth, content_type, body):
    """Deserialize ``body`` with the deserializer for ``content_type``.

    Method-specific deserializers (attached via the @deserializers
    decorator) take precedence over the resource defaults.

    :raises exception.InvalidContentType: if no deserializer is
        registered for the given content type.
    """
    meth_deserializers = getattr(meth, 'wsgi_deserializers', {})
    try:
        # Normalize the full MIME type to its short key ('json'/'xml').
        mtype = _MEDIA_TYPE_MAP.get(content_type, content_type)
        if mtype in meth_deserializers:
            deserializer = meth_deserializers[mtype]
        else:
            deserializer = self.default_deserializers[mtype]
    except (KeyError, TypeError):
        raise exception.InvalidContentType(content_type=content_type)
    return deserializer().deserialize(body)
def pre_process_extensions(self, extensions, request, action_args):
    """Run the pre-processing half of the registered extensions.

    Generator-function extensions are started and run up to their first
    ``yield``; plain callables are deferred entirely to post-processing.

    :returns: a (response, post) pair — ``response`` is a short-circuit
        response (or None to continue), ``post`` is an iterable of
        post-processing callables/generators in reverse order.
    """
    # List of callables for post-processing extensions
    post = []
    for ext in extensions:
        if inspect.isgeneratorfunction(ext):
            response = None
            # If it's a generator function, the part before the
            # yield is the preprocessing stage
            try:
                with ResourceExceptionHandler():
                    gen = ext(req=request, **action_args)
                    response = next(gen)
            except Fault as ex:
                response = ex
            # We had a response...
            if response:
                return response, []
            # No response, queue up generator for post-processing
            post.append(gen)
        else:
            # Regular functions only perform post-processing
            post.append(ext)
    # Run post-processing in the reverse order
    return None, reversed(post)
def post_process_extensions(self, extensions, resp_obj, request,
                            action_args):
    """Run the post-processing half of the registered extensions.

    Generators (already started in pre-processing) receive ``resp_obj``
    via ``send``; plain callables are invoked directly.

    :returns: a replacement response if any extension produced one,
        otherwise None.
    """
    for ext in extensions:
        response = None
        if inspect.isgenerator(ext):
            # If it's a generator, run the second half of
            # processing
            try:
                with ResourceExceptionHandler():
                    response = ext.send(resp_obj)
            except StopIteration:
                # Normal exit of generator
                continue
            except Fault as ex:
                response = ex
        else:
            # Regular functions get post-processing...
            try:
                with ResourceExceptionHandler():
                    response = ext(req=request, resp_obj=resp_obj,
                                   **action_args)
            except Fault as ex:
                response = ex
        # We had a response...
        if response:
            return response
    return None
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, request):
    """WSGI method that controls (de)serialization and method dispatch."""
    LOG.info(_LI("%(method)s %(url)s"),
             {"method": request.method,
              "url": request.url})
    # Identify the action, its arguments, and the requested
    # content type
    action_args = self.get_action_args(request.environ)
    action = action_args.pop('action', None)
    content_type, body = self.get_body(request)
    accept = request.best_match_content_type()
    # Hand off to the full processing stack (deserialize, dispatch,
    # run extensions, serialize).
    return self._process_stack(request, action, action_args,
                               content_type, body, accept)
def _process_stack(self, request, action, action_args,
                   content_type, body, accept):
    """Implement the processing stack.

    Pipeline: resolve the action method and its extensions, deserialize
    the request body, validate the URL project id against the auth
    context, run pre-processing extensions, dispatch the action, run
    post-processing extensions, and finally serialize the result.

    :param request: the incoming Request object.
    :param action: name of the action to dispatch.
    :param action_args: dict of arguments extracted from the route;
        mutated in place with the deserialized body contents.
    :param content_type: MIME type of the request body (may be falsy).
    :param body: raw request body.
    :param accept: content type the client wants for the response.
    :returns: a serialized response, or a Fault describing the error.
    """
    # Get the implementing method
    try:
        meth, extensions = self.get_method(request, action,
                                           content_type, body)
    except (AttributeError, TypeError):
        return Fault(webob.exc.HTTPNotFound())
    except KeyError as ex:
        msg = _("There is no such action: %s") % ex.args[0]
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))
    except exception.MalformedRequestBody:
        msg = _("Malformed request body")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    # Now, deserialize the request body...
    try:
        if content_type:
            contents = self.deserialize(meth, content_type, body)
        else:
            contents = {}
    except exception.InvalidContentType:
        msg = _("Unsupported Content-Type")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))
    except exception.MalformedRequestBody:
        msg = _("Malformed request body")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    # Update the action args
    action_args.update(contents)

    # Reject requests whose URL project id disagrees with the
    # authenticated context's project id.
    project_id = action_args.pop("project_id", None)
    context = request.environ.get('guts.context')
    if (context and project_id and (project_id != context.project_id)):
        msg = _("Malformed request url")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    # Run pre-processing extensions; a truthy response short-circuits
    # the actual dispatch below.
    response, post = self.pre_process_extensions(extensions,
                                                 request, action_args)

    if not response:
        try:
            with ResourceExceptionHandler():
                action_result = self.dispatch(meth, request, action_args)
        except Fault as ex:
            response = ex

    if not response:
        # No exceptions; convert action_result into a
        # ResponseObject
        resp_obj = None
        if type(action_result) is dict or action_result is None:
            resp_obj = ResponseObject(action_result)
        elif isinstance(action_result, ResponseObject):
            resp_obj = action_result
        else:
            # Anything else (e.g. an already-built webob response) is
            # used as-is.
            response = action_result

        # Run post-processing extensions
        if resp_obj:
            _set_request_id_header(request, resp_obj)
            # Do a preserialize to set up the response object
            serializers = getattr(meth, 'wsgi_serializers', {})
            resp_obj._bind_method_serializers(serializers)
            if hasattr(meth, 'wsgi_code'):
                resp_obj._default_code = meth.wsgi_code
            resp_obj.preserialize(accept, self.default_serializers)

            # Process post-processing extensions
            response = self.post_process_extensions(post, resp_obj,
                                                    request, action_args)

        if resp_obj and not response:
            response = resp_obj.serialize(request, accept,
                                          self.default_serializers)

    # Log the outcome; Faults without status_int fall into the
    # AttributeError branch.
    try:
        msg_dict = dict(url=request.url, status=response.status_int)
        msg = _LI("%(url)s returned with HTTP %(status)d")
    except AttributeError as e:
        msg_dict = dict(url=request.url, e=e)
        msg = _LI("%(url)s returned a fault: %(e)s")

    LOG.info(msg, msg_dict)

    return response
def get_method(self, request, action, content_type, body):
    """Look up the action-specific method and its extensions.

    First tries a regular attribute lookup on the controller (or on
    ``self`` when no controller is set); on AttributeError, falls back
    to the registered ``wsgi_actions`` table.  For the generic
    ``action`` action, the concrete action name is peeked out of the
    request body.

    :returns: a ``(method, extensions)`` tuple.
    :raises KeyError: when the action is not in ``wsgi_actions``.
    """
    try:
        if not self.controller:
            meth = getattr(self, action)
        else:
            meth = getattr(self.controller, action)
    except AttributeError as e:
        with excutils.save_and_reraise_exception(e) as ctxt:
            # Only swallow the AttributeError (falling through to the
            # wsgi_actions lookup below) when action-style dispatch can
            # apply; otherwise log and let it re-raise.
            if (not self.wsgi_actions or action not in ['action',
                                                        'create',
                                                        'delete',
                                                        'update']):
                LOG.exception(_LE('Get method error.'))
            else:
                ctxt.reraise = False
    else:
        return meth, self.wsgi_extensions.get(action, [])

    if action == 'action':
        # OK, it's an action; figure out which action...
        mtype = _MEDIA_TYPE_MAP.get(content_type)
        action_name = self.action_peek[mtype](body)
        LOG.debug("Action body: %s", body)
    else:
        action_name = action

    # Look up the action method
    return (self.wsgi_actions[action_name],
            self.wsgi_action_extensions.get(action_name, []))
def dispatch(self, method, request, action_args):
    """Invoke the matched action method, passing the request as ``req``
    and the collected action arguments as keywords.
    """
    return method(req=request, **action_args)
def action(name):
    """Mark a function as an action.

    The given name will be taken as the action key in the body.

    This is also overloaded to allow extensions to provide
    non-extending definitions of create and delete operations.
    """
    def _tag(func):
        # Record the action key so ControllerMetaclass can pick it up.
        func.wsgi_action = name
        return func
    return _tag
def extends(*args, **kwargs):
    """Indicate a function extends an operation.

    Can be used as either::

        @extends
        def index(...):
            pass

    or as::

        @extends(action='resize')
        def _action_resize(...):
            pass
    """
    def _mark(func):
        # Store enough information to find what we're extending.
        func.wsgi_extends = (func.__name__, kwargs.get('action'))
        return func

    # Bare usage (@extends) supplies the function positionally; apply
    # the decorator immediately.  Otherwise return it for later use.
    return _mark(*args) if args else _mark
class ControllerMetaclass(type):
    """Controller metaclass.

    Automates assembling the action tables: every method marked with
    ``wsgi_action`` lands in a ``wsgi_actions`` mapping (action key ->
    method name) and every method marked with ``wsgi_extends`` lands in
    a ``wsgi_extensions`` list; both are stored on the new class.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Adds the wsgi_actions dictionary to the class."""
        # Inherit action tables from base classes first, so entries in
        # this class can override them.
        actions = {}
        for base in bases:
            actions.update(getattr(base, 'wsgi_actions', {}))

        extensions = []
        for attr_name, attr in cls_dict.items():
            if not callable(attr):
                continue
            action_key = getattr(attr, 'wsgi_action', None)
            if action_key:
                actions[action_key] = attr_name
            elif getattr(attr, 'wsgi_extends', None):
                extensions.append(attr.wsgi_extends)

        # Add the actions and extensions to the class dict
        cls_dict['wsgi_actions'] = actions
        cls_dict['wsgi_extensions'] = extensions

        return super(ControllerMetaclass, mcs).__new__(
            mcs, name, bases, cls_dict)
@six.add_metaclass(ControllerMetaclass)
class Controller(object):
    """Default controller.

    Subclasses get ``wsgi_actions``/``wsgi_extensions`` tables assembled
    by ControllerMetaclass, plus a set of static request-validation
    helpers.
    """

    # Subclasses set this to a view-builder class; an instance is
    # created in __init__ when no explicit view_builder is passed.
    _view_builder_class = None

    def __init__(self, view_builder=None):
        """Initialize controller with a view builder instance."""
        if view_builder:
            self._view_builder = view_builder
        elif self._view_builder_class:
            self._view_builder = self._view_builder_class()
        else:
            self._view_builder = None

    @staticmethod
    def is_valid_body(body, entity_name):
        # True only when body is truthy, contains entity_name, and the
        # named entry is dict-like (i.e. supports .get()).
        if not (body and entity_name in body):
            return False

        def is_dict(d):
            # Duck-typed dict check: anything with a working .get()
            # counts as a dict here.
            try:
                d.get(None)
                return True
            except AttributeError:
                return False

        if not is_dict(body[entity_name]):
            return False

        return True

    @staticmethod
    def assert_valid_body(body, entity_name):
        # NOTE: After v1 api is deprecated need to merge 'is_valid_body' and
        #       'assert_valid_body' in to one method. Right now it is not
        #       possible to modify 'is_valid_body' to raise exception because
        #       in case of V1 api when 'is_valid_body' return False,
        #       'HTTPUnprocessableEntity' exception is getting raised and in
        #       V2 api 'HTTPBadRequest' exception is getting raised.
        if not Controller.is_valid_body(body, entity_name):
            raise webob.exc.HTTPBadRequest(
                explanation=_("Missing required element '%s' in "
                              "request body.") % entity_name)

    @staticmethod
    def validate_name_and_description(body):
        """Strip and length-check the optional 'name' and 'description'
        entries of a request body, raising HTTPBadRequest on failure.
        """
        name = body.get('name')
        if name is not None:
            if isinstance(name, six.string_types):
                body['name'] = name.strip()
            try:
                utils.check_string_length(body['name'], 'Name',
                                          min_length=0, max_length=255)
            except exception.InvalidInput as error:
                raise webob.exc.HTTPBadRequest(explanation=error.msg)

        description = body.get('description')
        if description is not None:
            try:
                utils.check_string_length(description, 'Description',
                                          min_length=0, max_length=255)
            except exception.InvalidInput as error:
                raise webob.exc.HTTPBadRequest(explanation=error.msg)

    @staticmethod
    def validate_string_length(value, entity_name, min_length=0,
                               max_length=None, remove_whitespaces=False):
        """Check the length of specified string.

        :param value: the value of the string
        :param entity_name: the name of the string
        :param min_length: the min_length of the string
        :param max_length: the max_length of the string
        :param remove_whitespaces: True if trimming whitespaces is needed
                                   else False
        :raises webob.exc.HTTPBadRequest: when the check fails.
        """
        if isinstance(value, six.string_types) and remove_whitespaces:
            value = value.strip()
        try:
            utils.check_string_length(value, entity_name,
                                      min_length=min_length,
                                      max_length=max_length)
        except exception.InvalidInput as error:
            raise webob.exc.HTTPBadRequest(explanation=error.msg)

    @staticmethod
    def validate_integer(value, name, min_value=None, max_value=None):
        """Make sure that value is a valid integer, potentially within range.

        :param value: the value of the integer
        :param name: the name of the integer
        :param min_value: the minimum allowed value (inclusive), or None
        :param max_value: the maximum allowed value (inclusive), or None
        :returns: integer
        :raises webob.exc.HTTPBadRequest: when value is not an integer
            or falls outside the allowed range.
        """
        try:
            value = int(value)
        except (TypeError, ValueError, UnicodeEncodeError):
            raise webob.exc.HTTPBadRequest(explanation=(
                _('%s must be an integer.') % name))

        if min_value is not None and value < min_value:
            raise webob.exc.HTTPBadRequest(
                explanation=(_('%(value_name)s must be >= %(min_value)d') %
                             {'value_name': name, 'min_value': min_value}))
        if max_value is not None and value > max_value:
            raise webob.exc.HTTPBadRequest(
                explanation=(_('%(value_name)s must be <= %(max_value)d') %
                             {'value_name': name, 'max_value': max_value}))

        return value
class Fault(webob.exc.HTTPException):
    """Wrap webob.exc.HTTPException to provide API friendly response."""

    # Map of HTTP status code -> fault element name used in the payload;
    # anything else falls back to "computeFault".
    _fault_names = {400: "badRequest",
                    401: "unauthorized",
                    403: "forbidden",
                    404: "itemNotFound",
                    405: "badMethod",
                    409: "conflictingRequest",
                    413: "overLimit",
                    415: "badMediaType",
                    501: "notImplemented",
                    503: "serviceUnavailable"}

    def __init__(self, exception):
        """Create a Fault for the given webob.exc.exception."""
        self.wrapped_exc = exception
        self.status_int = exception.status_int

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        """Generate a WSGI response based on the exception passed to ctor."""
        # Replace the body with fault details.
        locale = req.best_match_language()
        code = self.wrapped_exc.status_int
        fault_name = self._fault_names.get(code, "computeFault")
        explanation = self.wrapped_exc.explanation
        fault_data = {
            fault_name: {
                'code': code,
                'message': i18n.translate(explanation, locale)}}
        if code == 413:
            # Rate-limit faults advertise when the client may retry.
            retry = self.wrapped_exc.headers.get('Retry-After', None)
            if retry:
                fault_data[fault_name]['retryAfter'] = retry

        # 'code' is an attribute on the fault tag itself
        metadata = {'attributes': {fault_name: 'code'}}

        xml_serializer = XMLDictSerializer(metadata, XML_NS_V1)

        content_type = req.best_match_content_type()
        serializer = {
            'application/xml': xml_serializer,
            'application/json': JSONDictSerializer(),
        }[content_type]

        if content_type == 'application/xml':
            # Warn once per process that XML support is going away.
            global XML_WARNING
            if not XML_WARNING:
                msg = _('XML support has been deprecated and will be removed '
                        'in the N release.')
                versionutils.report_deprecated_feature(LOG, msg)
                XML_WARNING = True

        body = serializer.serialize(fault_data)
        if isinstance(body, six.text_type):
            body = body.encode('utf-8')

        self.wrapped_exc.body = body
        self.wrapped_exc.content_type = content_type
        _set_request_id_header(req, self.wrapped_exc.headers)

        return self.wrapped_exc

    def __str__(self):
        return self.wrapped_exc.__str__()
def _set_request_id_header(req, headers):
context = req.environ.get('guts.context')
if context:
headers['x-compute-request-id'] = context.request_id
class OverLimitFault(webob.exc.HTTPException):
    """Rate-limited request response."""

    def __init__(self, message, details, retry_time):
        """Initialize new `OverLimitFault` with relevant information."""
        retry_headers = OverLimitFault._retry_after(retry_time)
        self.wrapped_exc = webob.exc.HTTPRequestEntityTooLarge(
            headers=retry_headers)
        self.content = {
            "overLimitFault": {
                "code": self.wrapped_exc.status_int,
                "message": message,
                "details": details,
            },
        }

    @staticmethod
    def _retry_after(retry_time):
        # Never advertise a negative delay to the client.
        remaining = int(math.ceil(retry_time - time.time()))
        if remaining < 0:
            remaining = 0
        return {'Retry-After': '%d' % remaining}

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, request):
        """Serializes the wrapped exception conforming to our error format."""
        content_type = request.best_match_content_type()
        metadata = {"attributes": {"overLimitFault": "code"}}

        def translate(msg):
            locale = request.best_match_language()
            return i18n.translate(msg, locale)

        # Localize the user-visible parts of the payload.
        fault = self.content['overLimitFault']
        fault['message'] = translate(fault['message'])
        fault['details'] = translate(fault['details'])

        serializer = {
            'application/xml': XMLDictSerializer(metadata, XML_NS_V1),
            'application/json': JSONDictSerializer(),
        }[content_type]

        self.wrapped_exc.body = serializer.serialize(self.content)
        return self.wrapped_exc
|
(function () {
"use strict";
/**
* Callback function when the environment is ready.
* @callback ready
* @param {Object} envObj - environment object (in environment.js).
*/
/**
* Callback function when the environment is failing.
* @callback fail
* @param {string} message - the reason why the environment is failing.
*/
/**
* Class for setting the environment.
* This class is used for PERISCOPE tool specific settings.
* @public
* @class
* @param {Object.<string, *>} [settings] - environment settings.
* @param {ready} [settings.ready] - callback function when the environment is ready.
* @param {fail} [settings.fail] - callback function when the environment is failing.
*/
var Environment = function (settings) {
settings = safeGet(settings, {});
var ready = settings["ready"];
var fail = settings["fail"];
var thisObj = this;
var userToken;
var userData;
var tracker;
/**
 * A helper for getting data safely with a default value.
 * @private
 * @param {*} v - the original value.
 * @param {*} defaultVal - the default value to return when the original one is undefined.
 * @returns {*} - the original value (if not undefined) or the default value.
 */
function safeGet(v, defaultVal) {
    var fallback = (typeof defaultVal === "undefined") ? "" : defaultVal;
    if (typeof v === "undefined") {
        return fallback;
    }
    return v;
}
/**
 * Get the payload part of a JWT (JSON Web Token).
 * @private
 * @param {string} jwt - the JSON Web Token (header.payload.signature).
 * @returns {Object.<string, *>} - the payload part of the JWT.
 */
function getJwtPayload(jwt) {
    // The payload is the middle, base64-encoded segment of the token.
    var encodedPayload = jwt.split(".")[1];
    return JSON.parse(window.atob(encodedPayload));
}
/**
 * Get the user data.
 * @public
 * @returns {Object.<string, *>} - the user data (i.e., payload of the decoded
 * user JWT), or undefined when no user token has been obtained yet.
 */
this.getUserData = function () {
    return userData;
};
/**
 * Get the API root URL.
 * The back-end is chosen from the current page's hostname.
 * @public
 * @returns {string} - the back-end API root URL.
 */
var getApiRootUrl = function () {
    var host = window.location.hostname;
    var apiRoot;
    if (host.indexOf("145.38.198.35") !== -1) {
        // staging back-end (addressed by IP)
        apiRoot = "http://145.38.198.35/api";
    } else if (host.indexOf("staging") !== -1) {
        // staging back-end
        apiRoot = "https://staging.api.periscope.io.tudelft.nl";
    } else if (host.indexOf("periscope.io.tudelft.nl") !== -1) {
        // production back-end
        apiRoot = "https://api.periscope.io.tudelft.nl";
    } else if (host.indexOf("localhost") !== -1) {
        // development back-end
        apiRoot = "http://localhost:5000";
    }
    return apiRoot;
};
this.getApiRootUrl = getApiRootUrl;
/**
 * Initialize the UI for the account dialog.
 * Wires the Google sign-in/sign-out callbacks and opens the account
 * dialog when the "#sign-in-prompt" element is clicked.
 * NOTE(review): getUserTokenWrapper, handleGoogleSignInSuccessUI and
 * handleGoogleSignOutSuccessUI are defined elsewhere in this file —
 * confirm their contracts there.
 * @private
 * @returns {Object} - the created periscope.Account object.
 */
function initAccountUI() {
    var accountObj = new periscope.Account({
        "signInSuccess": function (accountObj, googleUserObj) {
            // Exchange the Google token for a back-end user token,
            // then update the UI.
            getUserTokenWrapper(googleUserObj, function () {
                handleGoogleSignInSuccessUI(accountObj);
            });
        },
        "signOutSuccess": function (accountObj) {
            // Re-request a token without Google credentials (anonymous),
            // then update the UI.
            getUserTokenWrapper(undefined, function () {
                handleGoogleSignOutSuccessUI(accountObj);
            });
        }
    });
    $("#sign-in-prompt").on("click", function () {
        accountObj.getDialog().dialog("open");
    });
    return accountObj;
}
/**
 * Get user token from the back-end.
 * On success, caches the token and its decoded payload in the
 * enclosing scope (userToken / userData).
 * @private
 * @param {Object.<string, *>} data - the data object to give to the back-end.
 * @param {string} [data.google_id_token] - the token returned by the Google Sign-In API.
 * @param {string} [data.client_id] - the returned Google Analytics client ID or created by the tracker object.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
function getUserToken(data, success, error) {
    var onSuccess = function (returnData) {
        // Cache the token and its decoded payload for later requests.
        userToken = returnData["user_token"];
        userData = getJwtPayload(userToken);
        if (typeof success === "function") {
            success(returnData);
        }
    };
    var onError = function () {
        console.error("ERROR when getting user token.");
        if (typeof error === "function") {
            error();
        }
    };
    generalRequest("POST", "/login/", data, onSuccess, onError);
}
/**
 * General function for the HTTP request.
 * Builds a jQuery ajax settings object and fires the request; non-GET
 * requests carry a JSON-encoded body.
 * @private
 * @param {string} requestType - type for the request ("GET", "POST", "PATCH", or "DELETE").
 * @param {string} path - path for the request.
 * @param {Object.<string, *>} [data] - data for the request (ignored for GET).
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
function generalRequest(requestType, path, data, success, error) {
    var request = {
        "url": getApiRootUrl() + path,
        "type": requestType,
        "dataType": "json",
        "success": function (returnData) {
            if (typeof success === "function") {
                success(returnData);
            }
        },
        "error": function (xhr) {
            console.error(xhr);
            if (typeof error === "function") {
                error();
            }
            showErrorPage();
        }
    };
    // Only non-GET requests get a JSON body (dataType is already set
    // above; the previous duplicate assignment was removed).
    if (requestType !== "GET") {
        request["data"] = JSON.stringify(data);
        request["contentType"] = "application/json";
    }
    $.ajax(request);
}
/**
 * General function for the GET request.
 * GET requests carry no body and no user token.
 * @private
 * @param {string} path - path for the GET request.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
function generalGet(path, success, error) {
    generalRequest("GET", path, undefined, success, error);
}
/**
 * General function for the DELETE request.
 * Attaches the cached user token to the payload for authentication.
 * @private
 * @param {string} path - path for the DELETE request.
 * @param {Object.<string, *>} data - data for the DELETE request (mutated: user_token is added).
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
function generalDelete(path, data, success, error) {
    data["user_token"] = userToken;
    generalRequest("DELETE", path, data, success, error);
}
/**
 * General function for the POST request.
 * Attaches the cached user token to the payload for authentication.
 * @private
 * @param {string} path - path for the POST request.
 * @param {Object.<string, *>} data - data for the POST request (mutated: user_token is added).
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
function generalPost(path, data, success, error) {
    data["user_token"] = userToken;
    generalRequest("POST", path, data, success, error);
}
/**
 * General function for the PATCH request.
 * Attaches the cached user token to the payload for authentication.
 * @private
 * @param {string} path - path for the PATCH request.
 * @param {Object.<string, *>} data - data for the PATCH request (mutated: user_token is added).
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
function generalPatch(path, data, success, error) {
    data["user_token"] = userToken;
    generalRequest("PATCH", path, data, success, error);
}
/**
 * Get a list of all topics.
 * @public
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getAllTopic = function (success, error) {
    generalGet("/topic/", success, error);
};
/**
 * Get a topic by ID.
 * @public
 * @param {number} topicId - ID of the topic that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getTopicById = function (topicId, success, error) {
    generalGet("/topic/?topic_id=" + topicId, success, error);
};
/**
 * Create a topic.
 * @public
 * @param {string} title - title of the topic.
 * @param {string} description - description of the topic.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.createTopic = function (title, description, success, error) {
    generalPost("/topic/", {
        "title": title,
        "description": description
    }, success, error);
};
/**
 * Update a topic; only the fields that are provided get sent.
 * @public
 * @param {number} topicId - ID of the topic that we wish to update.
 * @param {string} [title] - title of the topic.
 * @param {string} [description] - description of the topic.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.updateTopic = function (topicId, title, description, success, error) {
    var payload = {
        "topic_id": topicId
    };
    if (typeof title !== "undefined") payload["title"] = title;
    if (typeof description !== "undefined") payload["description"] = description;
    generalPatch("/topic/", payload, success, error);
};
/**
 * Delete a topic by ID.
 * @public
 * @param {number} topicId - ID of the topic.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.deleteTopic = function (topicId, success, error) {
    generalDelete("/topic/", {
        "topic_id": topicId
    }, success, error);
};
/**
 * Get a list of all scenarios.
 * @public
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getAllScenario = function (success, error) {
    generalGet("/scenario/", success, error);
};
/**
 * Get a list of scenarios by topic ID.
 * @public
 * @param {number} topicId - topic ID of scenarios that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getScenarioByTopicId = function (topicId, success, error) {
    generalGet("/scenario/?topic_id=" + topicId, success, error);
};
/**
 * Get a scenario by ID.
 * @public
 * @param {number} scenarioId - ID of the scenario that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getScenarioById = function (scenarioId, success, error) {
    generalGet("/scenario/?scenario_id=" + scenarioId, success, error);
};
/**
 * Create a scenario.
 * @public
 * @param {string} title - title of the scenario.
 * @param {string} description - description of the scenario.
 * @param {string} image - image URL of the scenario.
 * @param {number} topicId - topic ID that the scenario is in.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.createScenario = function (title, description, image, topicId, success, error) {
    generalPost("/scenario/", {
        "title": title,
        "description": description,
        "image": image,
        "topic_id": topicId
    }, success, error);
};
/**
 * Update a scenario; only the fields that are provided get sent.
 * @public
 * @param {number} scenarioId - ID of the scenario that we wish to update.
 * @param {string} [title] - title of the scenario.
 * @param {string} [description] - description of the scenario.
 * @param {string} [image] - image URL of the scenario.
 * @param {string} [topicId] - topic ID that the scenario is in.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.updateScenario = function (scenarioId, title, description, image, topicId, success, error) {
    var payload = {
        "scenario_id": scenarioId
    };
    if (typeof title !== "undefined") payload["title"] = title;
    if (typeof description !== "undefined") payload["description"] = description;
    if (typeof image !== "undefined") payload["image"] = image;
    if (typeof topicId !== "undefined") payload["topic_id"] = topicId;
    generalPatch("/scenario/", payload, success, error);
};
/**
 * Delete a scenario by ID.
 * @public
 * @param {number} scenarioId - ID of the scenario.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.deleteScenario = function (scenarioId, success, error) {
    generalDelete("/scenario/", {
        "scenario_id": scenarioId
    }, success, error);
};
/**
 * Get a list of all questions.
 * @public
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getAllQuestion = function (success, error) {
    generalGet("/question/", success, error);
};
/**
 * Get a list of questions by topic ID.
 * @public
 * @param {number} topicId - topic ID of questions that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getQuestionByTopicId = function (topicId, success, error) {
    generalGet("/question/?topic_id=" + topicId, success, error);
};
/**
 * Get a list of questions by scenario ID.
 * @public
 * @param {number} scenarioId - scenario ID of questions that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getQuestionByScenarioId = function (scenarioId, success, error) {
    generalGet("/question/?scenario_id=" + scenarioId, success, error);
};
/**
 * Get a question by ID.
 * @public
 * @param {number} questionId - ID of the question that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getQuestionById = function (questionId, success, error) {
    generalGet("/question/?question_id=" + questionId, success, error);
};
/**
 * The object for the "Choice" database table.
 * @typedef {Object} Choice
 * @property {string} text - text of the choice.
 * @property {number} value - value of the choice.
 */
/**
 * Create a question; only the fields that are provided get sent.
 * @public
 * @param {string} text - text of the question.
 * @param {Choice[]} [choices] - choices of the question.
 * @param {string} [topicId] - topic ID that the question is in (for demographic questions).
 * @param {string} [scenarioId] - scenario ID that the question is in (for scenario questions).
 * @param {boolean} [isMulitpleChoice] - indicate if the question allows multiple choices.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.createQuestion = function (text, choices, topicId, scenarioId, isMulitpleChoice, success, error) {
    var payload = {
        "text": text
    };
    if (typeof choices !== "undefined") payload["choices"] = choices;
    if (typeof topicId !== "undefined") payload["topic_id"] = topicId;
    if (typeof scenarioId !== "undefined") payload["scenario_id"] = scenarioId;
    // NOTE(review): the misspelled key "is_mulitple_choice" presumably
    // matches the back-end API field — verify before renaming.
    if (typeof isMulitpleChoice !== "undefined") payload["is_mulitple_choice"] = isMulitpleChoice;
    generalPost("/question/", payload, success, error);
};
/**
 * Update a question; only the fields that are provided get sent.
 * @public
 * @param {number} questionId - ID of the question.
 * @param {string} [text] - text of the question.
 * @param {Choice[]} [choices] - choices of the question.
 * @param {string} [topicId] - topic ID that the question is in (for demographic questions).
 * @param {string} [scenarioId] - scenario ID that the question is in (for scenario questions).
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.updateQuestion = function (questionId, text, choices, topicId, scenarioId, success, error) {
    var payload = {
        "question_id": questionId
    };
    if (typeof text !== "undefined") payload["text"] = text;
    if (typeof choices !== "undefined") payload["choices"] = choices;
    if (typeof topicId !== "undefined") payload["topic_id"] = topicId;
    if (typeof scenarioId !== "undefined") payload["scenario_id"] = scenarioId;
    generalPatch("/question/", payload, success, error);
};
/**
 * Delete a question by ID.
 * @public
 * @param {number} questionId - ID of the question.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.deleteQuestion = function (questionId, success, error) {
    generalDelete("/question/", {
        "question_id": questionId
    }, success, error);
};
/**
 * Get a list of all moods.
 * @public
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getAllMood = function (success, error) {
    generalGet("/mood/", success, error);
};
/**
 * Get a mood by ID.
 * @public
 * @param {number} moodId - ID of the mood that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getMoodById = function (moodId, success, error) {
    generalGet("/mood/?mood_id=" + moodId, success, error);
};
/**
 * Create a mood; the image is only sent when provided.
 * @public
 * @param {string} name - name of the mood.
 * @param {string} [image] - image of the mood.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.createMood = function (name, image, success, error) {
    var payload = {
        "name": name
    };
    if (typeof image !== "undefined") payload["image"] = image;
    generalPost("/mood/", payload, success, error);
};
/**
 * Update a mood; only the fields that are provided get sent.
 * @public
 * @param {number} moodId - ID of the mood that we wish to update.
 * @param {string} [name] - name of the mood.
 * @param {string} [image] - image of the mood.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.updateMood = function (moodId, name, image, success, error) {
    var payload = {
        "mood_id": moodId
    };
    if (typeof name !== "undefined") payload["name"] = name;
    if (typeof image !== "undefined") payload["image"] = image;
    generalPatch("/mood/", payload, success, error);
};
/**
 * Delete a mood by ID.
 * @public
 * @param {number} moodId - ID of the mood.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.deleteMood = function (moodId, success, error) {
    generalDelete("/mood/", {
        "mood_id": moodId
    }, success, error);
};
/**
 * Get a list of all visions (unpaginated).
 * @public
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getAllVision = function (success, error) {
    generalGet("/vision/?paginate=0", success, error);
};
/**
 * Get a list of visions by scenario ID (unpaginated).
 * @public
 * @param {number} scenarioId - scenario ID of visions that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getVisionByScenarioId = function (scenarioId, success, error) {
    generalGet("/vision/?paginate=0&scenario_id=" + scenarioId, success, error);
};
/**
 * Get a list of visions by user ID (unpaginated).
 * @public
 * @param {number} userId - user ID of visions that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getVisionByUserId = function (userId, success, error) {
    generalGet("/vision/?paginate=0&user_id=" + userId, success, error);
};
/**
 * Get a vision by ID.
 * @public
 * @param {number} visionId - ID of the vision that we wish to get.
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.getVisionById = function (visionId, success, error) {
    generalGet("/vision/?vision_id=" + visionId, success, error);
};
/**
 * Create a vision with a single IMAGE media entry.
 * @public
 * @param {number} moodId - mood ID of a vision.
 * @param {number} scenarioId - scenario ID of a vision.
 * @param {string} description - description of a vision.
 * @param {string} url - image URL of a vision.
 * @param {string} unsplashImageId - image ID of the unsplash image (https://unsplash.com/photos/[unsplashImageId]).
 * @param {string} unsplashCreatorName - creator Name of the unsplash image.
 * @param {string} unsplashCreatorUrl - creator URL of the unsplash image (e.g., https://unsplash.com/@xxx).
 * @param {function} [success] - callback function when the operation is successful.
 * @param {function} [error] - callback function when the operation is failing.
 */
this.createVision = function (moodId, scenarioId, description, url, unsplashImageId, unsplashCreatorName, unsplashCreatorUrl, success, error) {
    var media = {
        "description": description,
        "type": "IMAGE",
        "url": url,
        "unsplash_image_id": unsplashImageId,
        "unsplash_creator_name": unsplashCreatorName,
        "unsplash_creator_url": unsplashCreatorUrl
    };
    var payload = {
        "mood_id": moodId,
        "medias": [media],
        "scenario_id": scenarioId
    };
    generalPost("/vision/", payload, success, error);
};
/**
* Update a vision.
* @public
* @param {number} visionId - ID of the vision that we wish to update.
* @param {string} [moodId] - mood ID of a vision.
* @param {string} [description] - description of a vision.
* @param {string} [url] - image URL of a vision.
* @param {string} [unsplashImageId] - image ID of the unsplash image (https://unsplash.com/photos/[unsplashImageId]).
* @param {string} [unsplashCreatorName] - creator Name of the unsplash image.
* @param {string} [unsplashCreatorUrl] - creator URL of the unsplash image (e.g., https://unsplash.com/@xxx).
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.updateVision = function (visionId, moodId, description, url, unsplashImageId, unsplashCreatorName, unsplashCreatorUrl, success, error) {
var data = {
"vision_id": visionId
};
if (typeof moodId !== "undefined") {
data["mood_id"] = moodId;
}
if (typeof description !== "undefined" && typeof url !== "undefined" && typeof unsplashImageId !== "undefined" && typeof unsplashCreatorName !== "undefined" && typeof unsplashCreatorUrl !== "undefined") {
data["medias"] = [{
"description": description,
"type": "IMAGE",
"url": url,
"unsplash_image_id": unsplashImageId,
"unsplash_creator_name": unsplashCreatorName,
"unsplash_creator_url": unsplashCreatorUrl
}];
} else {
console.warn("Field 'description' is ignored.");
console.warn("Field 'url' is ignored.");
console.warn("Field 'unsplashImageId' is ignored.");
console.warn("Field 'unsplashCreatorName' is ignored.");
console.warn("Field 'unsplashCreatorUrl' is ignored.");
console.warn("Must have all of the above ignored fields.");
}
generalPatch("/vision/", data, success, error);
};
/**
* Delete a vision by ID.
* @public
* @param {number} visionId - ID of the vision.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.deleteVision = function (visionId, success, error) {
var data = {
"vision_id": visionId
};
generalDelete("/vision/", data, success, error);
};
/**
* Get a list of all answers.
* @public
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAllAnswer = function (success, error) {
generalGet("/answer/", success, error);
};
/**
* Get a list of answers by scenario ID.
* @public
* @param {number} scenarioId - scenario ID of answers that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerByScenarioId = function (scenarioId, success, error) {
generalGet("/answer/?scenario_id=" + scenarioId, success, error);
};
/**
* Get a list of answers by question ID.
* @public
* @param {number} questionId - question ID of answers that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerByQuestionId = function (questionId, success, error) {
generalGet("/answer/?question_id=" + questionId, success, error);
};
/**
* Get a list of answers of the current user by scenario ID.
* @public
* @param {number} scenarioId - scenario ID of answers that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerOfCurrentUserByScenarioId = function (scenarioId, success, error) {
generalGet("/answer/?scenario_id=" + scenarioId + "&user_id=" + userData["user_id"], success, error);
};
/**
* Get a list of answers by topic ID.
* @public
* @param {number} topicId - topic ID of answers that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerByTopicId = function (topicId, success, error) {
generalGet("/answer/?topic_id=" + topicId, success, error);
};
/**
* Get a list of answers of the current user by topic ID.
* @public
* @param {number} topicId - topic ID of answers that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerOfCurrentUserByTopicId = function (topicId, success, error) {
generalGet("/answer/?topic_id=" + topicId + "&user_id=" + userData["user_id"], success, error);
};
/**
* Get a list of answers by user ID.
* @public
* @param {number} userId - user ID of answers that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerByUserId = function (userId, success, error) {
generalGet("/answer/?user_id=" + userId, success, error);
};
/**
* Get a list of answers by the current user ID.
* @public
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerByCurrentUserId = function (success, error) {
generalGet("/answer/?user_id=" + userData["user_id"], success, error);
};
/**
* Get an answer by its ID.
* @public
* @param {number} answerId - answer ID that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAnswerById = function (answerId, success, error) {
generalGet("/answer/?answer_id=" + answerId, success, error);
};
/**
* The object for the "Answer" database table.
* @typedef {Object} Answer
* @param {number} questionId - ID of the question that we want to fill in the answer.
* @param {string} [text] - text of the answer.
* @param {number[]} [choiceIdList] - array of the IDs of the selected choice objects.
*/
/**
* Create answers in the specified order.
* @private
* @param {Object} envObj - environment object (in environment.js).
* @param {Answer[]} answers - list of answers that we want to create.
* @param {Object[]} answerList - a list to collect the answer objects returned from the server.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
var createAnswersInOrder = function (envObj, answers, answerList, success, error) {
if (answers.length == 0) {
if (typeof success === "function") success(answerList);
return true;
} else {
var a = answers[0];
envObj.createAnswer(a["questionId"], a["text"], a["choiceIdList"], function (data) {
answerList.push(data["data"]);
createAnswersInOrder(envObj, answers.slice(1), answerList, success, error);
}, function () {
if (typeof error === "function") error();
return false;
});
}
};
this.createAnswersInOrder = createAnswersInOrder;
/**
* Create an answer.
* @private
* @param {number} questionId - ID of the question that we want to fill in the answer.
* @param {string} [text] - text of the answer.
* @param {number[]} [choiceIdList] - array of the IDs of the selected choice objects.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.createAnswer = function (questionId, text, choiceIdList, success, error) {
var data = {
"question_id": questionId
};
if (typeof text !== "undefined") {
data["text"] = text;
}
if (typeof choiceIdList !== "undefined") {
data["choices"] = choiceIdList;
}
generalPost("/answer/", data, success, error);
};
/**
* Delete an answer by ID.
* @public
* @param {number} answerId - ID of the answer.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.deleteAnswer = function (answerId, success, error) {
var data = {
"answer_id": answerId
};
generalDelete("/answer/", data, success, error);
};
/**
* Get a list of all games.
* @public
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getAllGame = function (success, error) {
generalGet("/game/", success, error);
};
/**
* Get a list of games by user ID.
* @public
* @param {number} userId - user ID of games that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getGameByUserId = function (userId, success, error) {
generalGet("/game/?user_id=" + userId, success, error);
};
/**
* Get a list of games by vision ID.
* @public
* @param {number} visionId - vision ID of games that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getGameByVisionId = function (visionId, success, error) {
generalGet("/game/?vision_id=" + visionId, success, error);
};
/**
* Get a game by its ID.
* @public
* @param {number} gameId - game ID that we wish to get.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.getGameById = function (gameId, success, error) {
generalGet("/game/?game_id=" + gameId, success, error);
};
/**
* Create a random game.
* @private
* @param {number} [scenarioId] - ID of the scenario that we wish to get the visions for the game.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.createRandomGame = function (scenarioId, success, error) {
var data = {};
if (typeof scenarioId !== "undefined") {
data["scenario_id"] = scenarioId;
}
generalPost("/game/", data, success, error);
};
/**
* Submit and update a game.
* @public
* @param {number} gameId - ID of the game that we wish to update.
* @param {number[]} [moods] - list of mood IDs that the user guesses.
* @param {string} [feedback] - text feedback of the vision in a game.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.updateGame = function (gameId, moods, feedback, success, error) {
var data = {
"game_id": gameId
};
if (typeof moods !== "undefined") {
data["moods"] = moods;
}
if (typeof feedback !== "undefined") {
data["feedback"] = feedback;
}
generalPatch("/game/", data, success, error);
};
/**
* Delete a game by its ID.
* @public
* @param {number} gameId - ID of the game.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
this.deleteGame = function (gameId, success, error) {
var data = {
"game_id": gameId
};
generalDelete("/game/", data, success, error);
};
/**
* Send a Google Analytics tracker event.
* @public
* @param {string} action - the action of the tracker (e.g., "page_view").
* @param {Object.<string, string>} [data] - the data of the tracker (e.g., {"user_id": "1"}).
*/
var sendTrackerEvent = function (action, data) {
if (typeof tracker !== "undefined") {
tracker.sendEvent(action, data);
}
};
this.sendTrackerEvent = sendTrackerEvent;
/**
* Handle the UI changes for a successful Google sign-in.
* @private
* @param {Object} accountObj - account object (in account.js).
*/
function handleGoogleSignInSuccessUI(accountObj) {
// Change the text of the sign-in button and remove the pulsing effect from it
var $signInPrompt = $("#sign-in-prompt");
if ($signInPrompt.length > 0) {
$signInPrompt.find("span").text("Sign Out");
if ($signInPrompt.hasClass("pulse-primary")) {
$signInPrompt.removeClass("pulse-primary");
}
}
// Update the user ID
if (typeof userData !== "undefined") {
accountObj.updateUserId(userData["user_id"]);
}
// Send a login event
sendTrackerEvent("login", {
"method": "GoogleLogIn"
});
}
/**
* Handle the UI changes for a successful Google sign-out.
* @private
* @param {Object} accountObj - account object (in account.js).
*/
function handleGoogleSignOutSuccessUI(accountObj) {
// Change the text of the sign-in button and add the pulsing effect to it
var $signInPrompt = $("#sign-in-prompt");
if ($signInPrompt.length > 0) {
$signInPrompt.find("span").text("Sign In");
if (!$signInPrompt.hasClass("pulse-primary")) {
$signInPrompt.addClass("pulse-primary")
}
}
// Hide the user ID
accountObj.updateUserId();
// Send a logout event
sendTrackerEvent("login", {
"method": "GoogleLogOut"
});
}
/**
* A wrapper of the getUserToken function to make it easier to use.
* @private
* @param {Object} googleUserObj - user object returned by the Google Sign-In API.
* @param {function} [success] - callback function when the operation is successful.
* @param {function} [error] - callback function when the operation is failing.
*/
function getUserTokenWrapper(googleUserObj, success, error) {
if (typeof periscope.Tracker === "undefined") {
// This means that some plugin blocks the tracker.js file so that the tracker object cannot be created
console.warn("Failed to initialize the tracker object (maybe blocked by a third-party plugin).");
if (typeof googleUserObj === "undefined") {
// This means that the user did not sign in with Google
// In this case, we need to manually generate the client ID to log in to the back-end
getUserToken({
"client_id": "custom.cid." + new Date().getTime() + "." + Math.random().toString(36).substring(2)
}, success, error);
} else {
// This means that the user has signed in with Google
// In this case, we need to use the Google user token to log in to the back-end
getUserToken({
"google_id_token": googleUserObj.getAuthResponse().id_token
}, success, error);
}
} else {
// This means that we can create the tracker (and it is not blocked)
// The tracker object will handle the case if the Google Analytics script is blocked
if (typeof tracker === "undefined") {
if (typeof googleUserObj === "undefined") {
// This means that the tracker is not created yet
// And the user did not sign in with Google
// For example, initially when loading the application without Google sign-in
// In this case, we need to use the Google Analytics client ID to log in to the back-end
// We also need to create the tracker
tracker = new periscope.Tracker({
"ready": function (trackerObj) {
getUserToken({
"client_id": trackerObj.getClientId()
}, success, error);
}
});
} else {
// This means that the tracker is not created yet
// And the user has signed in with Google
// For example, initially when loading the application with Google sign-in
// In this case, we need to use the Google user token to log in to the back-end
// We also need to create the tracker
tracker = new periscope.Tracker({
"ready": function () {
getUserToken({
"google_id_token": googleUserObj.getAuthResponse().id_token
}, success, error);
}
});
}
} else {
if (typeof googleUserObj === "undefined") {
// This means that the tracker is already created
// And the user did not sign in with Google
// For example, when user signed out with Google on the account dialog
// In this case, we need to use the Google Analytics client ID to log in to the back-end
getUserToken({
"client_id": tracker.getClientId()
}, success, error);
} else {
// This means that the tracker is already created
// And the user has signed in with Google
// For example, when user signed in with Google on the account dialog
// In this case, we need to use the Google user token to log in to the back-end
getUserToken({
"google_id_token": googleUserObj.getAuthResponse().id_token
}, success, error);
}
}
}
}
/**
* Create the html elements when there is an error on the back-end server.
* @private
* @param {string} errorMessage - the error message to show on the page.
* @returns {Object} - a jQuery DOM object.
*/
function createErrorHTML(errorMessage) {
var html = "";
html += '<img src="https://images.unsplash.com/photo-1555861496-0666c8981751?ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&ixlib=rb-1.2.1&auto=format&fit=crop&w=1350&q=80" />';
html += '<p class="server-error-text">';
if (typeof errorMessage === "undefined") {
html += ' Something is wrong (sad face)';
} else {
html += errorMessage;
}
html += '</p>';
return $(html);
}
/**
* Show an error page.
* @public
*/
var showErrorPage = function (errorMessage) {
var $container = $("#main-content-container");
if (!$container.hasClass("error")) {
$("#main-content-container").addClass("error").empty().append(createErrorHTML(errorMessage)).show();
}
};
this.showErrorPage = showErrorPage;
/**
* Show the normal page.
* @public
*/
var showPage = function () {
$("#main-content-container").show();
};
this.showPage = showPage;
/**
* Class constructor.
* @constructor
* @private
*/
function Environment() {
var accountObj = initAccountUI();
var userTokenSuccess = function () {
ready(thisObj);
};
var userTokenError = function () {
fail("Back-end server error.");
};
accountObj.silentSignInWithGoogle(function (isUserSignedInWithGoogle, googleUserObj) {
if (isUserSignedInWithGoogle) {
getUserTokenWrapper(googleUserObj, function () {
handleGoogleSignInSuccessUI(accountObj);
userTokenSuccess();
}, userTokenError);
} else {
getUserTokenWrapper(undefined, function () {
userTokenSuccess();
}, userTokenError);
}
});
}
Environment();
};
// Register the class to window
if (window.periscope) {
window.periscope.Environment = Environment;
} else {
window.periscope = {};
window.periscope.Environment = Environment;
}
})();
|
(window["webpackJsonp"]=window["webpackJsonp"]||[]).push([["pages-user-login"],{"173f":function(e,t,n){"use strict";n.r(t);var i=n("429f"),a=n("208d");for(var o in a)"default"!==o&&function(e){n.d(t,e,(function(){return a[e]}))}(o);n("a338");var c,s=n("f0c5"),r=Object(s["a"])(a["default"],i["b"],i["c"],!1,null,"f44ce6e8",null,!1,i["a"],c);t["default"]=r.exports},"208d":function(e,t,n){"use strict";n.r(t);var i=n("805b"),a=n.n(i);for(var o in i)"default"!==o&&function(e){n.d(t,e,(function(){return i[e]}))}(o);t["default"]=a.a},"429f":function(e,t,n){"use strict";n.d(t,"b",(function(){return a})),n.d(t,"c",(function(){return o})),n.d(t,"a",(function(){return i}));var i={uImage:n("fcd4").default,uButton:n("dc54").default,uCheckbox:n("f438").default},a=function(){var e=this,t=e.$createElement,n=e._self._c||t;return n("v-uni-view",{staticClass:"container"},[n("v-uni-view",{staticClass:"logo"},[n("u-image",{attrs:{shape:"square",width:"300",height:"300",mode:"widthFix",src:e.siteConfig.website_logo}}),e._v(e._s(e.siteConfig.website_name))],1),n("v-uni-view",{staticClass:"tools"},[n("u-button",{attrs:{disabled:e.disabled,type:"primary"},on:{click:function(t){arguments[0]=t=e.$handleEvent(t),e.wechatLogin.apply(void 0,arguments)}}},[e._v("微信用户快捷登录")]),n("u-checkbox",{attrs:{shape:"circle","label-size":"24"},model:{value:e.privacy.checked,callback:function(t){e.$set(e.privacy,"checked",t)},expression:"privacy.checked"}},[e._v("点击“注册/登录”即表示您同意《"+e._s(e.siteConfig.website_name)+"用户服务条款》和《隐私政策》")]),n("u-button",{attrs:{"custom-style":{marginTop:"250rpx"},type:"info",plain:!0},on:{click:function(t){arguments[0]=t=e.$handleEvent(t),e.navigateTo("/pages/index/index",!0)}}},[e._v("暂不登录")])],1)],1)},o=[]},"805b":function(e,t,n){"use strict";n("4d63"),n("ac1f"),n("25f0"),n("466d"),n("841c"),Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;var 
i={data:function(){return{code:"",loading:!1,disabled:!1,privacy:{checked:!0},siteConfig:{}}},onLoad:function(){this.siteConfig=uni.getStorageSync("siteConfig"),this.getH5Auth()},methods:{getH5Auth:function(){var e=this,t=this.getUrlParam("code");if(null!==t){var n={code:t};this.$u.api.wechatH5Auth(n).then((function(t){uni.setStorageSync("userInfo",t.data),e.navigateTo("/pages/index/index")}))}},getUrlParam:function(e){var t=new RegExp("(^|&)"+e+"=([^&]*)(&|$)"),n=window.location.search.substr(1).match(t);return null!=n?unescape(n[2]):null},getCode:function(){this.$nextTick((function(){var e=this;uni.login({provider:uni.getProvider,success:function(t){e.code=t.code}})}))},wechatLogin:function(){var e=this.siteConfig.weixinh5_appid,t=this.siteConfig.website_url+"h5/#pages/user/login",n="https://open.weixin.qq.com/connect/oauth2/authorize?appid="+e+"&redirect_uri="+encodeURIComponent(t)+"&response_type=code&scope=snsapi_userinfo#wechat_redirect";window.location.href=n}}};t.default=i},a338:function(e,t,n){"use strict";var i=n("fcca"),a=n.n(i);a.a},c211:function(e,t,n){var i=n("24fb");t=i(!1),t.push([e.i,'@charset "UTF-8";\r\n/**\r\n * 下方引入的为uView UI的集成样式文件,为scss预处理器,其中包含了一些"u-"开头的自定义变量\r\n * uView自定义的css类名和scss变量,均以"u-"开头,不会造成冲突,请放心使用 \r\n */.container[data-v-f44ce6e8]{text-align:center;width:90%;margin:auto}.container .logo[data-v-f44ce6e8]{width:%?300?%;height:%?300?%;margin:%?100?% auto}.container .tools[data-v-f44ce6e8]{width:100%;text-align:left;margin-top:%?400?%}',""]),e.exports=t},fcca:function(e,t,n){var i=n("c211");"string"===typeof i&&(i=[[e.i,i,""]]),i.locals&&(e.exports=i.locals);var a=n("4f06").default;a("0a9ac0d5",i,!0,{sourceMap:!1,shadowMode:!1})}}]);
|
# Generated by Django 3.0.1 on 2020-02-04 07:05
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 3.0.1): creates the custom User
    # model plus the SubscriptionLog, ShopAccountInfo and Review tables.
    # NOTE(review): generated migrations should not be hand-edited once applied.
    initial = True
    dependencies = [
        ('auth', '0011_update_proxy_permissions'),
    ]
    operations = [
        # Custom user model: standard auth fields plus a unique "phone" column.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
                ('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
                ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
                ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('phone', models.CharField(max_length=20, unique=True)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Subscription periods per user (flag + start/end timestamps).
        migrations.CreateModel(
            name='SubscriptionLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('flag', models.BooleanField(default=False)),
                ('start_at', models.DateTimeField()),
                ('end_at', models.DateTimeField()),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Per-user credentials/session data for external shop accounts.
        # NOTE(review): login_pw is a plain CharField — presumably stored in
        # clear text; confirm whether it is encrypted elsewhere.
        migrations.CreateModel(
            name='ShopAccountInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('shop', models.SmallIntegerField(choices=[(0, '네이버 스마트스토어'), (1, '쿠팡'), (2, '11번가'), (3, '옥션'), (4, '지마켓(지9)'), (5, '옥션, 지마켓(지9) 통합')])),
                ('login_id', models.CharField(max_length=255)),
                ('login_pw', models.CharField(max_length=255)),
                ('logo_uri', models.TextField(default='')),
                ('session', models.TextField(default='')),
                ('extra_data', models.TextField(default='')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        # User-submitted reviews (grade + free text).
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('write_at', models.DateTimeField()),
                ('grade', models.SmallIntegerField()),
                ('review', models.CharField(max_length=200)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
/*"use strict";*/
// NOTE(review): this module's entire export is the magic constant below; its
// meaning is not evident from this file — confirm against the consumers that
// require() it before changing or removing it.
module.exports = 0xFB0BD1E5;
|
"""
EVM Instruction Encoding (Opcodes)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. contents:: Table of Contents
:backlinks: none
:local:
Introduction
------------
Machine readable representations of EVM instructions, and a mapping to their
implementations.
"""
import enum
from typing import Callable, Dict
from . import arithmetic as arithmetic_instructions
from . import bitwise as bitwise_instructions
from . import block as block_instructions
from . import comparison as comparison_instructions
from . import control_flow as control_flow_instructions
from . import environment as environment_instructions
from . import keccak as keccak_instructions
from . import log as log_instructions
from . import memory as memory_instructions
from . import stack as stack_instructions
from . import storage as storage_instructions
from . import system as system_instructions
class Ops(enum.Enum):
    """
    Enum for EVM Opcodes

    Each member's value is the single byte that encodes the instruction in
    EVM bytecode. Members are grouped by instruction family below; within a
    family the byte values are contiguous, but the families themselves are
    not listed in strict numeric order (e.g. STOP is 0x00 but is grouped with
    the other control-flow ops).
    """

    # Arithmetic Ops
    ADD = 0x01
    MUL = 0x02
    SUB = 0x03
    DIV = 0x04
    SDIV = 0x05
    MOD = 0x06
    SMOD = 0x07
    ADDMOD = 0x08
    MULMOD = 0x09
    EXP = 0x0A
    SIGNEXTEND = 0x0B

    # Comparison Ops
    LT = 0x10
    GT = 0x11
    SLT = 0x12
    SGT = 0x13
    EQ = 0x14
    ISZERO = 0x15

    # Bitwise Ops
    AND = 0x16
    OR = 0x17
    XOR = 0x18
    NOT = 0x19
    BYTE = 0x1A
    SHL = 0x1B
    SHR = 0x1C
    SAR = 0x1D

    # Keccak Op
    KECCAK = 0x20

    # Environmental Ops
    ADDRESS = 0x30
    BALANCE = 0x31
    ORIGIN = 0x32
    CALLER = 0x33
    CALLVALUE = 0x34
    CALLDATALOAD = 0x35
    CALLDATASIZE = 0x36
    CALLDATACOPY = 0x37
    CODESIZE = 0x38
    CODECOPY = 0x39
    GASPRICE = 0x3A
    EXTCODESIZE = 0x3B
    EXTCODECOPY = 0x3C
    RETURNDATASIZE = 0x3D
    RETURNDATACOPY = 0x3E
    EXTCODEHASH = 0x3F

    # Block Ops
    BLOCKHASH = 0x40
    COINBASE = 0x41
    TIMESTAMP = 0x42
    NUMBER = 0x43
    DIFFICULTY = 0x44
    GASLIMIT = 0x45

    # Control Flow Ops
    STOP = 0x00
    JUMP = 0x56
    JUMPI = 0x57
    PC = 0x58
    GAS = 0x5A
    JUMPDEST = 0x5B

    # Storage Ops
    SLOAD = 0x54
    SSTORE = 0x55

    # Pop Operation
    POP = 0x50

    # Push Operations (PUSH1..PUSH32 push 1..32 immediate bytes)
    PUSH1 = 0x60
    PUSH2 = 0x61
    PUSH3 = 0x62
    PUSH4 = 0x63
    PUSH5 = 0x64
    PUSH6 = 0x65
    PUSH7 = 0x66
    PUSH8 = 0x67
    PUSH9 = 0x68
    PUSH10 = 0x69
    PUSH11 = 0x6A
    PUSH12 = 0x6B
    PUSH13 = 0x6C
    PUSH14 = 0x6D
    PUSH15 = 0x6E
    PUSH16 = 0x6F
    PUSH17 = 0x70
    PUSH18 = 0x71
    PUSH19 = 0x72
    PUSH20 = 0x73
    PUSH21 = 0x74
    PUSH22 = 0x75
    PUSH23 = 0x76
    PUSH24 = 0x77
    PUSH25 = 0x78
    PUSH26 = 0x79
    PUSH27 = 0x7A
    PUSH28 = 0x7B
    PUSH29 = 0x7C
    PUSH30 = 0x7D
    PUSH31 = 0x7E
    PUSH32 = 0x7F

    # Dup operations
    DUP1 = 0x80
    DUP2 = 0x81
    DUP3 = 0x82
    DUP4 = 0x83
    DUP5 = 0x84
    DUP6 = 0x85
    DUP7 = 0x86
    DUP8 = 0x87
    DUP9 = 0x88
    DUP10 = 0x89
    DUP11 = 0x8A
    DUP12 = 0x8B
    DUP13 = 0x8C
    DUP14 = 0x8D
    DUP15 = 0x8E
    DUP16 = 0x8F

    # Swap operations
    SWAP1 = 0x90
    SWAP2 = 0x91
    SWAP3 = 0x92
    SWAP4 = 0x93
    SWAP5 = 0x94
    SWAP6 = 0x95
    SWAP7 = 0x96
    SWAP8 = 0x97
    SWAP9 = 0x98
    SWAP10 = 0x99
    SWAP11 = 0x9A
    SWAP12 = 0x9B
    SWAP13 = 0x9C
    SWAP14 = 0x9D
    SWAP15 = 0x9E
    SWAP16 = 0x9F

    # Memory Operations
    MLOAD = 0x51
    MSTORE = 0x52
    MSTORE8 = 0x53
    MSIZE = 0x59

    # Log Operations (LOG0..LOG4 carry 0..4 topics)
    LOG0 = 0xA0
    LOG1 = 0xA1
    LOG2 = 0xA2
    LOG3 = 0xA3
    LOG4 = 0xA4

    # System Operations
    CREATE = 0xF0
    RETURN = 0xF3
    CALL = 0xF1
    CALLCODE = 0xF2
    DELEGATECALL = 0xF4
    STATICCALL = 0xFA
    REVERT = 0xFD
    SELFDESTRUCT = 0xFF
    CREATE2 = 0xF5
# Mapping from each opcode to the function implementing it. Every Ops member
# defined above has exactly one entry here; the per-family implementations
# live in the sibling instruction modules imported at the top of this file.
op_implementation: Dict[Ops, Callable] = {
    Ops.STOP: control_flow_instructions.stop,
    Ops.ADD: arithmetic_instructions.add,
    Ops.MUL: arithmetic_instructions.mul,
    Ops.SUB: arithmetic_instructions.sub,
    Ops.DIV: arithmetic_instructions.div,
    Ops.SDIV: arithmetic_instructions.sdiv,
    Ops.MOD: arithmetic_instructions.mod,
    Ops.SMOD: arithmetic_instructions.smod,
    Ops.ADDMOD: arithmetic_instructions.addmod,
    Ops.MULMOD: arithmetic_instructions.mulmod,
    Ops.EXP: arithmetic_instructions.exp,
    Ops.SIGNEXTEND: arithmetic_instructions.signextend,
    Ops.LT: comparison_instructions.less_than,
    Ops.GT: comparison_instructions.greater_than,
    Ops.SLT: comparison_instructions.signed_less_than,
    Ops.SGT: comparison_instructions.signed_greater_than,
    Ops.EQ: comparison_instructions.equal,
    Ops.ISZERO: comparison_instructions.is_zero,
    Ops.AND: bitwise_instructions.bitwise_and,
    Ops.OR: bitwise_instructions.bitwise_or,
    Ops.XOR: bitwise_instructions.bitwise_xor,
    Ops.NOT: bitwise_instructions.bitwise_not,
    Ops.BYTE: bitwise_instructions.get_byte,
    Ops.SHL: bitwise_instructions.bitwise_shl,
    Ops.SHR: bitwise_instructions.bitwise_shr,
    Ops.SAR: bitwise_instructions.bitwise_sar,
    Ops.KECCAK: keccak_instructions.keccak,
    Ops.SLOAD: storage_instructions.sload,
    Ops.BLOCKHASH: block_instructions.block_hash,
    Ops.COINBASE: block_instructions.coinbase,
    Ops.TIMESTAMP: block_instructions.timestamp,
    Ops.NUMBER: block_instructions.number,
    Ops.DIFFICULTY: block_instructions.difficulty,
    Ops.GASLIMIT: block_instructions.gas_limit,
    Ops.MLOAD: memory_instructions.mload,
    Ops.MSTORE: memory_instructions.mstore,
    Ops.MSTORE8: memory_instructions.mstore8,
    Ops.MSIZE: memory_instructions.msize,
    Ops.ADDRESS: environment_instructions.address,
    Ops.BALANCE: environment_instructions.balance,
    Ops.ORIGIN: environment_instructions.origin,
    Ops.CALLER: environment_instructions.caller,
    Ops.CALLVALUE: environment_instructions.callvalue,
    Ops.CALLDATALOAD: environment_instructions.calldataload,
    Ops.CALLDATASIZE: environment_instructions.calldatasize,
    Ops.CALLDATACOPY: environment_instructions.calldatacopy,
    Ops.CODESIZE: environment_instructions.codesize,
    Ops.CODECOPY: environment_instructions.codecopy,
    Ops.GASPRICE: environment_instructions.gasprice,
    Ops.EXTCODESIZE: environment_instructions.extcodesize,
    Ops.EXTCODECOPY: environment_instructions.extcodecopy,
    Ops.RETURNDATASIZE: environment_instructions.returndatasize,
    Ops.RETURNDATACOPY: environment_instructions.returndatacopy,
    Ops.EXTCODEHASH: environment_instructions.extcodehash,
    Ops.SSTORE: storage_instructions.sstore,
    Ops.JUMP: control_flow_instructions.jump,
    Ops.JUMPI: control_flow_instructions.jumpi,
    Ops.PC: control_flow_instructions.pc,
    Ops.GAS: control_flow_instructions.gas_left,
    Ops.JUMPDEST: control_flow_instructions.jumpdest,
    Ops.POP: stack_instructions.pop,
    Ops.PUSH1: stack_instructions.push1,
    Ops.PUSH2: stack_instructions.push2,
    Ops.PUSH3: stack_instructions.push3,
    Ops.PUSH4: stack_instructions.push4,
    Ops.PUSH5: stack_instructions.push5,
    Ops.PUSH6: stack_instructions.push6,
    Ops.PUSH7: stack_instructions.push7,
    Ops.PUSH8: stack_instructions.push8,
    Ops.PUSH9: stack_instructions.push9,
    Ops.PUSH10: stack_instructions.push10,
    Ops.PUSH11: stack_instructions.push11,
    Ops.PUSH12: stack_instructions.push12,
    Ops.PUSH13: stack_instructions.push13,
    Ops.PUSH14: stack_instructions.push14,
    Ops.PUSH15: stack_instructions.push15,
    Ops.PUSH16: stack_instructions.push16,
    Ops.PUSH17: stack_instructions.push17,
    Ops.PUSH18: stack_instructions.push18,
    Ops.PUSH19: stack_instructions.push19,
    Ops.PUSH20: stack_instructions.push20,
    Ops.PUSH21: stack_instructions.push21,
    Ops.PUSH22: stack_instructions.push22,
    Ops.PUSH23: stack_instructions.push23,
    Ops.PUSH24: stack_instructions.push24,
    Ops.PUSH25: stack_instructions.push25,
    Ops.PUSH26: stack_instructions.push26,
    Ops.PUSH27: stack_instructions.push27,
    Ops.PUSH28: stack_instructions.push28,
    Ops.PUSH29: stack_instructions.push29,
    Ops.PUSH30: stack_instructions.push30,
    Ops.PUSH31: stack_instructions.push31,
    Ops.PUSH32: stack_instructions.push32,
    Ops.DUP1: stack_instructions.dup1,
    Ops.DUP2: stack_instructions.dup2,
    Ops.DUP3: stack_instructions.dup3,
    Ops.DUP4: stack_instructions.dup4,
    Ops.DUP5: stack_instructions.dup5,
    Ops.DUP6: stack_instructions.dup6,
    Ops.DUP7: stack_instructions.dup7,
    Ops.DUP8: stack_instructions.dup8,
    Ops.DUP9: stack_instructions.dup9,
    Ops.DUP10: stack_instructions.dup10,
    Ops.DUP11: stack_instructions.dup11,
    Ops.DUP12: stack_instructions.dup12,
    Ops.DUP13: stack_instructions.dup13,
    Ops.DUP14: stack_instructions.dup14,
    Ops.DUP15: stack_instructions.dup15,
    Ops.DUP16: stack_instructions.dup16,
    Ops.SWAP1: stack_instructions.swap1,
    Ops.SWAP2: stack_instructions.swap2,
    Ops.SWAP3: stack_instructions.swap3,
    Ops.SWAP4: stack_instructions.swap4,
    Ops.SWAP5: stack_instructions.swap5,
    Ops.SWAP6: stack_instructions.swap6,
    Ops.SWAP7: stack_instructions.swap7,
    Ops.SWAP8: stack_instructions.swap8,
    Ops.SWAP9: stack_instructions.swap9,
    Ops.SWAP10: stack_instructions.swap10,
    Ops.SWAP11: stack_instructions.swap11,
    Ops.SWAP12: stack_instructions.swap12,
    Ops.SWAP13: stack_instructions.swap13,
    Ops.SWAP14: stack_instructions.swap14,
    Ops.SWAP15: stack_instructions.swap15,
    Ops.SWAP16: stack_instructions.swap16,
    Ops.LOG0: log_instructions.log0,
    Ops.LOG1: log_instructions.log1,
    Ops.LOG2: log_instructions.log2,
    Ops.LOG3: log_instructions.log3,
    Ops.LOG4: log_instructions.log4,
    Ops.CREATE: system_instructions.create,
    Ops.RETURN: system_instructions.return_,
    Ops.CALL: system_instructions.call,
    Ops.CALLCODE: system_instructions.callcode,
    Ops.DELEGATECALL: system_instructions.delegatecall,
    Ops.SELFDESTRUCT: system_instructions.selfdestruct,
    Ops.STATICCALL: system_instructions.staticcall,
    Ops.REVERT: system_instructions.revert,
    Ops.CREATE2: system_instructions.create2,
}
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.fluid.optimizer import GradientMergeOptimizer as GM
from .meta_optimizer_base import MetaOptimizerBase
__all__ = ["GradientMergeOptimizer"]
# amp + gradient merge + lamb
class GradientMergeOptimizer(MetaOptimizerBase):
    """Meta-optimizer that wraps the user's optimizer with fluid's
    GradientMergeOptimizer: gradients are accumulated over ``k_steps``
    mini-batches before a single parameter update is applied."""

    def __init__(self, optimizer):
        super(GradientMergeOptimizer, self).__init__(optimizer)
        self.inner_opt = optimizer
        # The fluid implementation that performs the actual accumulation.
        self.wrapped_opt = GM(optimizer)
        # Meta-optimizers that may be stacked together with gradient merge.
        self.meta_optimizers_white_list = [
            "LarsOptimizer",
            "LambOptimizer",
            "GraphExecutionOptimizer",
        ]
        self.meta_optimizers_black_list = []

    def _set_basic_info(self, loss, role_maker, user_defined_optimizer,
                        user_defined_strategy):
        super(GradientMergeOptimizer, self)._set_basic_info(
            loss, role_maker, user_defined_optimizer, user_defined_strategy)
        # Forward the user's accumulation settings to the wrapped optimizer.
        configs = self.user_defined_strategy.gradient_merge_configs
        self.wrapped_opt._set_k_steps(configs["k_steps"])
        self.wrapped_opt._set_avg(configs["avg"])

    def _can_apply(self):
        # Applicable only when enabled and actually accumulating (> 1 step).
        strategy = self.user_defined_strategy
        return (strategy.gradient_merge == True and
                strategy.gradient_merge_configs["k_steps"] > 1)

    def _disable_strategy(self, dist_strategy):
        dist_strategy.gradient_merge = False
        dist_strategy.gradient_merge_configs = {}

    def minimize_impl(self,
                      loss,
                      startup_program=None,
                      parameter_list=None,
                      no_grad_set=None):
        # Delegate entirely to the wrapped GradientMergeOptimizer.
        return self.wrapped_opt.minimize(loss, startup_program,
                                         parameter_list, no_grad_set)
|
/* Definition of `struct stat' used in the kernel.
Copyright (C) 1997, 2000, 2002 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, write to the Free
Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA. */
/* Kernel-side `struct stat` layout for this ABI.  Field order, widths and
   padding must match the kernel exactly; glibc's xstat conversion code
   translates it into the user-visible `struct stat`.  Only comments are
   added here — do not reorder or retype members.  */
struct kernel_stat
{
unsigned int st_dev;		/* ID of device containing the file */
unsigned int st_ino;		/* inode number */
unsigned int st_mode;		/* file type and permission bits */
unsigned short st_nlink;	/* number of hard links */
unsigned int st_uid;		/* owning user ID */
unsigned int st_gid;		/* owning group ID */
unsigned int st_rdev;		/* device ID (if special file) */
unsigned long int st_size;	/* total size in bytes */
unsigned long int st_blksize;	/* preferred I/O block size */
unsigned long int st_blocks;	/* number of blocks allocated */
struct timespec st_atim;	/* last access time (ns resolution) */
struct timespec st_mtim;	/* last modification time (ns resolution) */
struct timespec st_ctim;	/* last status-change time (ns resolution) */
unsigned long int __unused4;	/* reserved padding */
#define _HAVE___UNUSED4
unsigned long int __unused5;	/* reserved padding */
#define _HAVE___UNUSED5
};
/* Advertise to the generic stat-conversion code which optional members
   and features this layout provides.  */
#define _HAVE_STAT___UNUSED4
#define _HAVE_STAT___UNUSED5
#define _HAVE_STAT___PAD1
#define _HAVE_STAT___PAD2
#define _HAVE_STAT_NSEC
#define _HAVE_STAT64___UNUSED4
#define _HAVE_STAT64___UNUSED5
#define _HAVE_STAT64___PAD2
#define _HAVE_STAT64_NSEC
|
############################################
# Project: MCT-TFE
# File: TFE.py
# By: ProgrammingIncluded
# Website: ProgrammingIncluded.com
############################################
import numpy as np
import random as rnd
# Game Settings
# Probability of 4 appearing
# Game Settings
# Probability (in percent) that a newly spawned tile is a 4 instead of a 2.
FOUR_PROB = 10
# Tile value that counts as a win.
MAX_VALUE = 2048

# Supported move directions (down, up, left, right); cannot be changed for now.
MOV_OPT = ["d", "u", "l", "r"]

# 2048 game engine: board state plus the slide/merge rules.
class TFE:
    def __init__(self, board_width):
        # Square board_width x board_width board; 0 denotes an empty cell.
        self.board_width = board_width
        self.grid = np.zeros((self.board_width, self.board_width), np.int64)

    def copy(self):
        """Return an independent deep copy of this game state."""
        cp = TFE(self.board_width)
        cp.grid = self.grid.copy()
        return cp

    def setGrid(self, grid):
        """Replace the board with `grid` (no validation is performed).

        Bugfix: removed a leftover debug print of the board width.
        """
        self.grid = grid

    def putNewAt(self, posx, posy, value):
        """Place `value` directly at grid[posx, posy]."""
        self.grid[posx, posy] = value

    def putNew(self):
        """Spawn a new tile (a 2, or a 4 with FOUR_PROB% chance) on a
        random empty cell.

        Returns:
            (position, value) on success, or False when the board is full.
        """
        grid = self.grid
        zero = np.argwhere(grid == 0)
        if zero.size == 0:
            return False
        sel = rnd.randint(0, zero.shape[0] - 1)
        selK = zero[sel, :]
        # Bugfix: use the FOUR_PROB setting instead of a hard-coded 10.
        val = 2 if rnd.randint(0, 100) > FOUR_PROB else 4
        grid[selK[0], selK[1]] = val
        return selK, val

    def moveCell(self, x, y, dir):
        """Slide the tile at column x / row y as far as possible in `dir`
        ("u"/"d"/"l"/"r"), merging once into an equal neighbour."""
        grid = self.grid
        if grid[y, x] == 0:
            return
        # Boundary case: tile already flush against the target wall.
        if x <= 0 and dir == "l":
            return
        elif x >= (self.board_width - 1) and dir == "r":
            return
        elif y <= 0 and dir == "u":
            return
        elif y >= (self.board_width - 1) and dir == "d":
            return
        # Per-direction step vector and in-bounds predicate.
        if dir == "l":
            xval = -1
            yval = 0
            bound = lambda v, u: v >= 0
        elif dir == "r":
            xval = 1
            yval = 0
            bound = lambda v, u: v < self.board_width
        elif dir == "d":
            xval = 0
            yval = 1
            bound = lambda v, u: u < self.board_width
        else:
            xval = 0
            yval = -1
            bound = lambda v, u: u >= 0
        dx = x + xval
        dy = y + yval
        # Scan toward the wall until blocked; merge on the first equal tile.
        while bound(dx, dy):
            if grid[dy, dx] == 0:
                dx += xval
                dy += yval
            elif grid[dy, dx] == grid[y, x]:
                grid[dy, dx] *= 2
                grid[y, x] = 0
                # merged — all done
                return
            else:
                break
        # Land on the last empty cell before the obstacle/wall.
        grid[dy - yval, dx - xval] = grid[y, x]
        if dy - yval != y or dx - xval != x:
            grid[y, x] = 0

    def moveGrid(self, dir):
        """Apply a full-board move in `dir`, visiting cells nearest the
        target wall first so merges resolve in the correct order."""
        grid = self.grid
        if dir == "l":
            evalO = lambda v, u: u < self.board_width
            evalI = lambda v, u: v < self.board_width
            x, y = 0, 0
            incI = lambda v, u: (v + 1, u)
            incO = lambda v, u: (v, u + 1)
        elif dir == "r":
            evalO = lambda v, u: u >= 0
            evalI = lambda v, u: v >= 0
            x, y = (self.board_width - 1), (self.board_width - 1)
            incI = lambda v, u: (v - 1, u)
            incO = lambda v, u: (v, u - 1)
        elif dir == "d":
            evalO = lambda v, u: v >= 0
            evalI = lambda v, u: u >= 0
            x, y = (self.board_width - 1), (self.board_width - 1)
            incI = lambda v, u: (v, u - 1)
            incO = lambda v, u: (v - 1, u)
        else:
            evalO = lambda v, u: v < self.board_width
            evalI = lambda v, u: u < self.board_width
            x, y = 0, 0
            incI = lambda v, u: (v, u + 1)
            incO = lambda v, u: (v + 1, u)
        # Reset the inner coordinate at the start of each row/column sweep.
        reset = lambda dx, dy, x, y: (x, dy) if dir == "l" or dir == "r" else (dx, y)
        dx, dy = x, y
        while evalO(dx, dy):
            dx, dy = reset(dx, dy, x, y)
            while evalI(dx, dy):
                self.moveCell(dx, dy, dir)
                dx, dy = incI(dx, dy)
            dx, dy = incO(dx, dy)

    def restart(self):
        """Reset the board to all-empty.

        Bugfix: the original assigned the fresh grid to a *local* variable,
        leaving the board unchanged; it now replaces self.grid (and keeps
        the int64 dtype used by __init__).
        """
        self.grid = np.zeros((self.board_width, self.board_width), np.int64)

    def isWin(self):
        """True when any tile has reached MAX_VALUE."""
        return self.grid.max() >= MAX_VALUE

    # Check if losing state. Expensive! Calls availDir.
    def isLose(self):
        return (len(self.availDir()) == 0)

    # Check available directions. Expensive! Takes O(n^2 * 4).
    # Saves a snapshot of each resulting grid, keyed by direction.
    def availDir(self):
        """Return {direction: resulting grid} for every move that changes
        the board; the current grid is restored before returning."""
        choice = ["u", "d", "l", "r"]
        # Empty board: every direction is trivially available.
        if self.grid.max() == 0:
            return {k: np.copy(self.grid) for k in choice}
        result = {}
        gridDup = np.copy(self.grid)
        for c in choice:
            self.moveGrid(c)
            if not np.array_equal(self.grid, gridDup):
                result[c] = self.grid
            self.grid = np.copy(gridDup)
        return result
|
// Barrel file: re-export the page component so consumers can import it
// from the directory path directly.
export { default } from './ShowBuilderPage';
|
/*-
* Copyright (c) 1989 Stephen Deering.
* Copyright (c) 1992, 1993
* The Regents of the University of California. All rights reserved.
*
* This code is derived from software contributed to Berkeley by
* Stephen Deering of Stanford University.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 4. Neither the name of the University nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* @(#)ip_mroute.h 8.1 (Berkeley) 6/10/93
* $FreeBSD$
*/
#ifndef _NETINET_IP_MROUTE_H_
#define _NETINET_IP_MROUTE_H_
/*
* Definitions for IP multicast forwarding.
*
* Written by David Waitzman, BBN Labs, August 1988.
* Modified by Steve Deering, Stanford, February 1989.
* Modified by Ajit Thyagarajan, PARC, August 1993.
* Modified by Ajit Thyagarajan, PARC, August 1994.
* Modified by Ahmed Helmy, SGI, June 1996.
* Modified by Pavlin Radoslavov, ICSI, October 2002.
*
* MROUTING Revision: 3.3.1.3
* and PIM-SMv2 and PIM-DM support, advanced API support,
* bandwidth metering and signaling.
*/
/*
* Multicast Routing set/getsockopt commands.
*/
#define MRT_INIT 100 /* initialize forwarder */
#define MRT_DONE 101 /* shut down forwarder */
#define MRT_ADD_VIF 102 /* create virtual interface */
#define MRT_DEL_VIF 103 /* delete virtual interface */
#define MRT_ADD_MFC 104 /* insert forwarding cache entry */
#define MRT_DEL_MFC 105 /* delete forwarding cache entry */
#define MRT_VERSION 106 /* get kernel version number */
#define MRT_ASSERT 107 /* enable assert processing */
#define MRT_PIM MRT_ASSERT /* enable PIM processing */
#define MRT_API_SUPPORT 109 /* supported MRT API */
#define MRT_API_CONFIG 110 /* config MRT API */
#define MRT_ADD_BW_UPCALL 111 /* create bandwidth monitor */
#define MRT_DEL_BW_UPCALL 112 /* delete bandwidth monitor */
/*
* Types and macros for handling bitmaps with one bit per virtual interface.
*/
#define MAXVIFS 32
typedef u_long vifbitmap_t;
typedef u_short vifi_t; /* type of a vif index */
#define ALL_VIFS (vifi_t)-1
#define VIFM_SET(n, m) ((m) |= (1 << (n)))
#define VIFM_CLR(n, m) ((m) &= ~(1 << (n)))
#define VIFM_ISSET(n, m) ((m) & (1 << (n)))
#define VIFM_CLRALL(m) ((m) = 0x00000000)
#define VIFM_COPY(mfrom, mto) ((mto) = (mfrom))
#define VIFM_SAME(m1, m2) ((m1) == (m2))
struct mfc;
/*
* Argument structure for MRT_ADD_VIF.
* (MRT_DEL_VIF takes a single vifi_t argument.)
*/
struct vifctl {
vifi_t vifc_vifi; /* the index of the vif to be added */
u_char vifc_flags; /* VIFF_ flags defined below */
u_char vifc_threshold; /* min ttl required to forward on vif */
u_int vifc_rate_limit; /* max rate */
struct in_addr vifc_lcl_addr; /* local interface address */
struct in_addr vifc_rmt_addr; /* remote address (tunnels only) */
};
#define VIFF_TUNNEL 0x1 /* no-op; retained for old source */
#define VIFF_SRCRT 0x2 /* no-op; retained for old source */
#define VIFF_REGISTER 0x4 /* used for PIM Register encap/decap */
/*
* Argument structure for MRT_ADD_MFC and MRT_DEL_MFC
* XXX if you change this, make sure to change struct mfcctl2 as well.
*/
struct mfcctl {
struct in_addr mfcc_origin; /* ip origin of mcasts */
struct in_addr mfcc_mcastgrp; /* multicast group associated*/
vifi_t mfcc_parent; /* incoming vif */
u_char mfcc_ttls[MAXVIFS]; /* forwarding ttls on vifs */
};
/*
* The new argument structure for MRT_ADD_MFC and MRT_DEL_MFC overlays
* and extends the old struct mfcctl.
*/
struct mfcctl2 {
/* the mfcctl fields */
struct in_addr mfcc_origin; /* ip origin of mcasts */
struct in_addr mfcc_mcastgrp; /* multicast group associated*/
vifi_t mfcc_parent; /* incoming vif */
u_char mfcc_ttls[MAXVIFS]; /* forwarding ttls on vifs */
/* extension fields */
uint8_t mfcc_flags[MAXVIFS]; /* the MRT_MFC_FLAGS_* flags */
struct in_addr mfcc_rp; /* the RP address */
};
/*
* The advanced-API flags.
*
* The MRT_MFC_FLAGS_XXX API flags are also used as flags
* for the mfcc_flags field.
*/
#define MRT_MFC_FLAGS_DISABLE_WRONGVIF (1 << 0) /* disable WRONGVIF signals */
#define MRT_MFC_FLAGS_BORDER_VIF (1 << 1) /* border vif */
#define MRT_MFC_RP (1 << 8) /* enable RP address */
#define MRT_MFC_BW_UPCALL (1 << 9) /* enable bw upcalls */
#define MRT_MFC_FLAGS_ALL (MRT_MFC_FLAGS_DISABLE_WRONGVIF | \
MRT_MFC_FLAGS_BORDER_VIF)
#define MRT_API_FLAGS_ALL (MRT_MFC_FLAGS_ALL | \
MRT_MFC_RP | \
MRT_MFC_BW_UPCALL)
/*
* Structure for installing or delivering an upcall if the
* measured bandwidth is above or below a threshold.
*
* User programs (e.g. daemons) may have a need to know when the
* bandwidth used by some data flow is above or below some threshold.
* This interface allows the userland to specify the threshold (in
* bytes and/or packets) and the measurement interval. Flows are
* all packet with the same source and destination IP address.
* At the moment the code is only used for multicast destinations
* but there is nothing that prevents its use for unicast.
*
* The measurement interval cannot be shorter than some Tmin (currently, 3s).
* The threshold is set in packets and/or bytes per_interval.
*
* Measurement works as follows:
*
* For >= measurements:
* The first packet marks the start of a measurement interval.
* During an interval we count packets and bytes, and when we
* pass the threshold we deliver an upcall and we are done.
* The first packet after the end of the interval resets the
* count and restarts the measurement.
*
* For <= measurement:
* We start a timer to fire at the end of the interval, and
* then for each incoming packet we count packets and bytes.
* When the timer fires, we compare the value with the threshold,
* schedule an upcall if we are below, and restart the measurement
* (reschedule timer and zero counters).
*/
/* One bandwidth measurement: counters accumulated since b_time.  Used both
   as the configured threshold and as the measured value in bw_upcall.  */
struct bw_data {
	struct timeval	b_time;		/* start of the measurement interval */
	uint64_t	b_packets;	/* packets counted in the interval */
	uint64_t	b_bytes;	/* bytes counted in the interval */
};
struct bw_upcall {
struct in_addr bu_src; /* source address */
struct in_addr bu_dst; /* destination address */
uint32_t bu_flags; /* misc flags (see below) */
#define BW_UPCALL_UNIT_PACKETS (1 << 0) /* threshold (in packets) */
#define BW_UPCALL_UNIT_BYTES (1 << 1) /* threshold (in bytes) */
#define BW_UPCALL_GEQ (1 << 2) /* upcall if bw >= threshold */
#define BW_UPCALL_LEQ (1 << 3) /* upcall if bw <= threshold */
#define BW_UPCALL_DELETE_ALL (1 << 4) /* delete all upcalls for s,d*/
struct bw_data bu_threshold; /* the bw threshold */
struct bw_data bu_measured; /* the measured bw */
};
/* max. number of upcalls to deliver together */
#define BW_UPCALLS_MAX 128
/* min. threshold time interval for bandwidth measurement */
#define BW_UPCALL_THRESHOLD_INTERVAL_MIN_SEC 3
#define BW_UPCALL_THRESHOLD_INTERVAL_MIN_USEC 0
/*
* The kernel's multicast routing statistics.
*/
struct mrtstat {
uint64_t mrts_mfc_lookups; /* # forw. cache hash table hits */
uint64_t mrts_mfc_misses; /* # forw. cache hash table misses */
uint64_t mrts_upcalls; /* # calls to multicast routing daemon */
uint64_t mrts_no_route; /* no route for packet's origin */
uint64_t mrts_bad_tunnel; /* malformed tunnel options */
uint64_t mrts_cant_tunnel; /* no room for tunnel options */
uint64_t mrts_wrong_if; /* arrived on wrong interface */
uint64_t mrts_upq_ovflw; /* upcall Q overflow */
uint64_t mrts_cache_cleanups; /* # entries with no upcalls */
uint64_t mrts_drop_sel; /* pkts dropped selectively */
uint64_t mrts_q_overflow; /* pkts dropped - Q overflow */
uint64_t mrts_pkt2large; /* pkts dropped - size > BKT SIZE */
uint64_t mrts_upq_sockfull; /* upcalls dropped - socket full */
};
#ifdef _KERNEL
#define MRTSTAT_ADD(name, val) \
VNET_PCPUSTAT_ADD(struct mrtstat, mrtstat, name, (val))
#define MRTSTAT_INC(name) MRTSTAT_ADD(name, 1)
#endif
/*
* Argument structure used by mrouted to get src-grp pkt counts
*/
struct sioc_sg_req {
	struct in_addr	src;		/* source address of the flow */
	struct in_addr	grp;		/* multicast group address */
	u_long		pktcnt;		/* packets forwarded for (src, grp) */
	u_long		bytecnt;	/* bytes forwarded for (src, grp) */
	u_long		wrong_if;	/* packets that arrived on the wrong vif */
};
/*
* Argument structure used by mrouted to get vif pkt counts
*/
struct sioc_vif_req {
vifi_t vifi; /* vif number */
u_long icount; /* Input packet count on vif */
u_long ocount; /* Output packet count on vif */
u_long ibytes; /* Input byte count on vif */
u_long obytes; /* Output byte count on vif */
};
/*
* The kernel's virtual-interface structure.
*/
struct vif {
u_char v_flags; /* VIFF_ flags defined above */
u_char v_threshold; /* min ttl required to forward on vif*/
struct in_addr v_lcl_addr; /* local interface address */
struct in_addr v_rmt_addr; /* remote address (tunnels only) */
struct ifnet *v_ifp; /* pointer to interface */
u_long v_pkt_in; /* # pkts in on interface */
u_long v_pkt_out; /* # pkts out on interface */
u_long v_bytes_in; /* # bytes in on interface */
u_long v_bytes_out; /* # bytes out on interface */
};
#ifdef _KERNEL
/*
* The kernel's multicast forwarding cache entry structure
*/
struct mfc {
LIST_ENTRY(mfc) mfc_hash;
struct in_addr mfc_origin; /* IP origin of mcasts */
struct in_addr mfc_mcastgrp; /* multicast group associated*/
vifi_t mfc_parent; /* incoming vif */
u_char mfc_ttls[MAXVIFS]; /* forwarding ttls on vifs */
u_long mfc_pkt_cnt; /* pkt count for src-grp */
u_long mfc_byte_cnt; /* byte count for src-grp */
u_long mfc_wrong_if; /* wrong if for src-grp */
int mfc_expire; /* time to clean entry up */
struct timeval mfc_last_assert; /* last time I sent an assert*/
uint8_t mfc_flags[MAXVIFS]; /* the MRT_MFC_FLAGS_* flags */
struct in_addr mfc_rp; /* the RP address */
struct bw_meter *mfc_bw_meter; /* list of bandwidth meters */
u_long mfc_nstall; /* # of packets awaiting mfc */
TAILQ_HEAD(, rtdetq) mfc_stall; /* q of packets awaiting mfc */
};
#endif /* _KERNEL */
/*
* Struct used to communicate from kernel to multicast router
* note the convenient similarity to an IP packet
*/
struct igmpmsg {
uint32_t unused1;
uint32_t unused2;
u_char im_msgtype; /* what type of message */
#define IGMPMSG_NOCACHE 1 /* no MFC in the kernel */
#define IGMPMSG_WRONGVIF 2 /* packet came from wrong interface */
#define IGMPMSG_WHOLEPKT 3 /* PIM pkt for user level encap. */
#define IGMPMSG_BW_UPCALL 4 /* BW monitoring upcall */
u_char im_mbz; /* must be zero */
u_char im_vif; /* vif rec'd on */
u_char unused3;
struct in_addr im_src, im_dst;
};
#ifdef _KERNEL
/*
* Argument structure used for pkt info. while upcall is made
*/
struct rtdetq {
TAILQ_ENTRY(rtdetq) rte_link;
struct mbuf *m; /* A copy of the packet */
struct ifnet *ifp; /* Interface pkt came in on */
vifi_t xmt_vif; /* Saved copy of imo_multicast_vif */
};
#define MAX_UPQ 4 /* max. no of pkts in upcall Q */
#endif /* _KERNEL */
/*
* Structure for measuring the bandwidth and sending an upcall if the
* measured bandwidth is above or below a threshold.
*/
struct bw_meter {
struct bw_meter *bm_mfc_next; /* next bw meter (same mfc) */
struct bw_meter *bm_time_next; /* next bw meter (same time) */
uint32_t bm_time_hash; /* the time hash value */
struct mfc *bm_mfc; /* the corresponding mfc */
uint32_t bm_flags; /* misc flags (see below) */
#define BW_METER_UNIT_PACKETS (1 << 0) /* threshold (in packets) */
#define BW_METER_UNIT_BYTES (1 << 1) /* threshold (in bytes) */
#define BW_METER_GEQ (1 << 2) /* upcall if bw >= threshold */
#define BW_METER_LEQ (1 << 3) /* upcall if bw <= threshold */
#define BW_METER_USER_FLAGS (BW_METER_UNIT_PACKETS | \
BW_METER_UNIT_BYTES | \
BW_METER_GEQ | \
BW_METER_LEQ)
#define BW_METER_UPCALL_DELIVERED (1 << 24) /* upcall was delivered */
struct bw_data bm_threshold; /* the upcall threshold */
struct bw_data bm_measured; /* the measured bw */
struct timeval bm_start_time; /* abs. time */
};
#ifdef _KERNEL
struct sockopt;
extern int (*ip_mrouter_set)(struct socket *, struct sockopt *);
extern int (*ip_mrouter_get)(struct socket *, struct sockopt *);
extern int (*ip_mrouter_done)(void);
extern int (*mrt_ioctl)(u_long, caddr_t, int);
#endif /* _KERNEL */
#endif /* _NETINET_IP_MROUTE_H_ */
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\rick.towler\Work\AFSCGit\SurveyApps\MaceFunctions3\QImageViewer\ui\imageAdjustmentsDlg.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_imageAdjustmentsDlg(object):
def setupUi(self, imageAdjustmentsDlg):
imageAdjustmentsDlg.setObjectName("imageAdjustmentsDlg")
imageAdjustmentsDlg.resize(414, 660)
imageAdjustmentsDlg.setMinimumSize(QtCore.QSize(316, 660))
self.verticalLayout_2 = QtWidgets.QVBoxLayout(imageAdjustmentsDlg)
self.verticalLayout_2.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_2.setSpacing(4)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.gbBrightnessContrast = QtWidgets.QGroupBox(imageAdjustmentsDlg)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gbBrightnessContrast.setFont(font)
self.gbBrightnessContrast.setCheckable(True)
self.gbBrightnessContrast.setChecked(False)
self.gbBrightnessContrast.setObjectName("gbBrightnessContrast")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.gbBrightnessContrast)
self.verticalLayout_4.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_4.setSpacing(3)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.label_8 = QtWidgets.QLabel(self.gbBrightnessContrast)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_8.setFont(font)
self.label_8.setAlignment(QtCore.Qt.AlignCenter)
self.label_8.setObjectName("label_8")
self.verticalLayout_4.addWidget(self.label_8)
self.brightnessSlider = QtWidgets.QSlider(self.gbBrightnessContrast)
self.brightnessSlider.setMinimum(-100)
self.brightnessSlider.setMaximum(100)
self.brightnessSlider.setOrientation(QtCore.Qt.Horizontal)
self.brightnessSlider.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.brightnessSlider.setTickInterval(50)
self.brightnessSlider.setObjectName("brightnessSlider")
self.verticalLayout_4.addWidget(self.brightnessSlider)
self.bcAutomatic = QtWidgets.QRadioButton(self.gbBrightnessContrast)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.bcAutomatic.setFont(font)
self.bcAutomatic.setChecked(False)
self.bcAutomatic.setObjectName("bcAutomatic")
self.verticalLayout_4.addWidget(self.bcAutomatic)
self.gbAutoBC = QtWidgets.QGroupBox(self.gbBrightnessContrast)
self.gbAutoBC.setTitle("")
self.gbAutoBC.setObjectName("gbAutoBC")
self.formLayout_4 = QtWidgets.QFormLayout(self.gbAutoBC)
self.formLayout_4.setFieldGrowthPolicy(QtWidgets.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_4.setObjectName("formLayout_4")
self.label_10 = QtWidgets.QLabel(self.gbAutoBC)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_10.setFont(font)
self.label_10.setObjectName("label_10")
self.formLayout_4.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_10)
self.bcClipLimit = QtWidgets.QSlider(self.gbAutoBC)
self.bcClipLimit.setMinimum(5)
self.bcClipLimit.setMaximum(50)
self.bcClipLimit.setProperty("value", 17)
self.bcClipLimit.setOrientation(QtCore.Qt.Horizontal)
self.bcClipLimit.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.bcClipLimit.setTickInterval(5)
self.bcClipLimit.setObjectName("bcClipLimit")
self.formLayout_4.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.bcClipLimit)
self.verticalLayout_4.addWidget(self.gbAutoBC)
self.bcManual = QtWidgets.QRadioButton(self.gbBrightnessContrast)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.bcManual.setFont(font)
self.bcManual.setChecked(True)
self.bcManual.setObjectName("bcManual")
self.verticalLayout_4.addWidget(self.bcManual)
self.gbManualBC = QtWidgets.QGroupBox(self.gbBrightnessContrast)
self.gbManualBC.setTitle("")
self.gbManualBC.setObjectName("gbManualBC")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.gbManualBC)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.verticalLayout_6 = QtWidgets.QVBoxLayout()
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.label_7 = QtWidgets.QLabel(self.gbManualBC)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_7.setFont(font)
self.label_7.setAlignment(QtCore.Qt.AlignCenter)
self.label_7.setObjectName("label_7")
self.verticalLayout_6.addWidget(self.label_7)
self.contrastSlider = QtWidgets.QSlider(self.gbManualBC)
self.contrastSlider.setMinimum(-100)
self.contrastSlider.setMaximum(100)
self.contrastSlider.setProperty("value", 0)
self.contrastSlider.setSliderPosition(0)
self.contrastSlider.setOrientation(QtCore.Qt.Horizontal)
self.contrastSlider.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.contrastSlider.setTickInterval(50)
self.contrastSlider.setObjectName("contrastSlider")
self.verticalLayout_6.addWidget(self.contrastSlider)
self.verticalLayout_5.addLayout(self.verticalLayout_6)
self.verticalLayout_4.addWidget(self.gbManualBC)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.pbBCReset = QtWidgets.QPushButton(self.gbBrightnessContrast)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.pbBCReset.setFont(font)
self.pbBCReset.setObjectName("pbBCReset")
self.horizontalLayout.addWidget(self.pbBCReset)
self.verticalLayout_4.addLayout(self.horizontalLayout)
self.verticalLayout_2.addWidget(self.gbBrightnessContrast)
self.gbColorCorrection = QtWidgets.QGroupBox(imageAdjustmentsDlg)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.gbColorCorrection.setFont(font)
self.gbColorCorrection.setCheckable(True)
self.gbColorCorrection.setChecked(False)
self.gbColorCorrection.setObjectName("gbColorCorrection")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.gbColorCorrection)
self.verticalLayout_3.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_3.setSpacing(3)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.cbAWB = QtWidgets.QCheckBox(self.gbColorCorrection)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.cbAWB.setFont(font)
self.cbAWB.setObjectName("cbAWB")
self.verticalLayout_3.addWidget(self.cbAWB)
self.ccSimpleBalance = QtWidgets.QRadioButton(self.gbColorCorrection)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.ccSimpleBalance.setFont(font)
self.ccSimpleBalance.setObjectName("ccSimpleBalance")
self.verticalLayout_3.addWidget(self.ccSimpleBalance)
self.gbAutoCC = QtWidgets.QGroupBox(self.gbColorCorrection)
self.gbAutoCC.setEnabled(False)
self.gbAutoCC.setTitle("")
self.gbAutoCC.setObjectName("gbAutoCC")
self.formLayout_3 = QtWidgets.QFormLayout(self.gbAutoCC)
self.formLayout_3.setObjectName("formLayout_3")
self.label_6 = QtWidgets.QLabel(self.gbAutoCC)
self.label_6.setMinimumSize(QtCore.QSize(125, 0))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_6)
self.ccSatLevel = QtWidgets.QSlider(self.gbAutoCC)
self.ccSatLevel.setEnabled(False)
self.ccSatLevel.setMinimum(1)
self.ccSatLevel.setMaximum(16)
self.ccSatLevel.setPageStep(2)
self.ccSatLevel.setProperty("value", 6)
self.ccSatLevel.setOrientation(QtCore.Qt.Horizontal)
self.ccSatLevel.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.ccSatLevel.setTickInterval(4)
self.ccSatLevel.setObjectName("ccSatLevel")
self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.ccSatLevel)
self.verticalLayout_3.addWidget(self.gbAutoCC)
self.ccAdaptive = QtWidgets.QRadioButton(self.gbColorCorrection)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.ccAdaptive.setFont(font)
self.ccAdaptive.setChecked(False)
self.ccAdaptive.setObjectName("ccAdaptive")
self.verticalLayout_3.addWidget(self.ccAdaptive)
self.gbAdaptiveCC = QtWidgets.QGroupBox(self.gbColorCorrection)
self.gbAdaptiveCC.setEnabled(False)
self.gbAdaptiveCC.setTitle("")
self.gbAdaptiveCC.setObjectName("gbAdaptiveCC")
self.formLayout_2 = QtWidgets.QFormLayout(self.gbAdaptiveCC)
self.formLayout_2.setFieldGrowthPolicy(QtWidgets.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_2.setObjectName("formLayout_2")
self.label_5 = QtWidgets.QLabel(self.gbAdaptiveCC)
self.label_5.setMinimumSize(QtCore.QSize(80, 0))
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_5)
self.ccClipLimit = QtWidgets.QSlider(self.gbAdaptiveCC)
self.ccClipLimit.setMinimum(5)
self.ccClipLimit.setMaximum(30)
self.ccClipLimit.setProperty("value", 10)
self.ccClipLimit.setOrientation(QtCore.Qt.Horizontal)
self.ccClipLimit.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.ccClipLimit.setTickInterval(5)
self.ccClipLimit.setObjectName("ccClipLimit")
self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.ccClipLimit)
self.verticalLayout_3.addWidget(self.gbAdaptiveCC)
self.ccManual = QtWidgets.QRadioButton(self.gbColorCorrection)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.ccManual.setFont(font)
self.ccManual.setChecked(True)
self.ccManual.setAutoExclusive(True)
self.ccManual.setObjectName("ccManual")
self.verticalLayout_3.addWidget(self.ccManual)
self.gbManualCC = QtWidgets.QGroupBox(self.gbColorCorrection)
self.gbManualCC.setTitle("")
self.gbManualCC.setObjectName("gbManualCC")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.gbManualCC)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.formLayout = QtWidgets.QFormLayout()
self.formLayout.setFieldGrowthPolicy(QtWidgets.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName("formLayout")
self.label = QtWidgets.QLabel(self.gbManualCC)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label.setFont(font)
self.label.setObjectName("label")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label)
self.redSlider = QtWidgets.QSlider(self.gbManualCC)
self.redSlider.setMinimum(-50)
self.redSlider.setMaximum(50)
self.redSlider.setOrientation(QtCore.Qt.Horizontal)
self.redSlider.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.redSlider.setTickInterval(10)
self.redSlider.setObjectName("redSlider")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.redSlider)
self.label_2 = QtWidgets.QLabel(self.gbManualCC)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_2)
self.greenSlider = QtWidgets.QSlider(self.gbManualCC)
self.greenSlider.setMinimum(-50)
self.greenSlider.setMaximum(50)
self.greenSlider.setOrientation(QtCore.Qt.Horizontal)
self.greenSlider.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.greenSlider.setTickInterval(10)
self.greenSlider.setObjectName("greenSlider")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.greenSlider)
self.label_3 = QtWidgets.QLabel(self.gbManualCC)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_3)
self.blueSlider = QtWidgets.QSlider(self.gbManualCC)
self.blueSlider.setMinimum(-50)
self.blueSlider.setMaximum(50)
self.blueSlider.setOrientation(QtCore.Qt.Horizontal)
self.blueSlider.setTickPosition(QtWidgets.QSlider.TicksAbove)
self.blueSlider.setTickInterval(10)
self.blueSlider.setObjectName("blueSlider")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.blueSlider)
self.verticalLayout_7.addLayout(self.formLayout)
self.verticalLayout_3.addWidget(self.gbManualCC)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem1)
self.pbColorReset = QtWidgets.QPushButton(self.gbColorCorrection)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(False)
font.setWeight(50)
self.pbColorReset.setFont(font)
self.pbColorReset.setObjectName("pbColorReset")
self.horizontalLayout_3.addWidget(self.pbColorReset)
self.verticalLayout_3.addLayout(self.horizontalLayout_3)
self.verticalLayout_2.addWidget(self.gbColorCorrection)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem2)
self.pbApply = QtWidgets.QPushButton(imageAdjustmentsDlg)
font = QtGui.QFont()
font.setPointSize(10)
self.pbApply.setFont(font)
self.pbApply.setObjectName("pbApply")
self.horizontalLayout_2.addWidget(self.pbApply)
self.pbCancel = QtWidgets.QPushButton(imageAdjustmentsDlg)
font = QtGui.QFont()
font.setPointSize(10)
self.pbCancel.setFont(font)
self.pbCancel.setObjectName("pbCancel")
self.horizontalLayout_2.addWidget(self.pbCancel)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
self.retranslateUi(imageAdjustmentsDlg)
QtCore.QMetaObject.connectSlotsByName(imageAdjustmentsDlg)
def retranslateUi(self, imageAdjustmentsDlg):
_translate = QtCore.QCoreApplication.translate
imageAdjustmentsDlg.setWindowTitle(_translate("imageAdjustmentsDlg", "Image Adjustments:"))
self.gbBrightnessContrast.setTitle(_translate("imageAdjustmentsDlg", "Brightness and Contrast"))
self.label_8.setText(_translate("imageAdjustmentsDlg", "Brightness"))
self.bcAutomatic.setText(_translate("imageAdjustmentsDlg", "Automatic Contrast (CLAHE)"))
self.label_10.setText(_translate("imageAdjustmentsDlg", "Clip Limit"))
self.bcManual.setText(_translate("imageAdjustmentsDlg", "Manual Contrast"))
self.label_7.setText(_translate("imageAdjustmentsDlg", "Contrast"))
self.pbBCReset.setText(_translate("imageAdjustmentsDlg", "Reset"))
self.gbColorCorrection.setTitle(_translate("imageAdjustmentsDlg", "Color Correction"))
self.cbAWB.setText(_translate("imageAdjustmentsDlg", "Automatic White Balance"))
self.ccSimpleBalance.setText(_translate("imageAdjustmentsDlg", "Auto Levels"))
self.label_6.setText(_translate("imageAdjustmentsDlg", "Saturation Level"))
self.ccAdaptive.setText(_translate("imageAdjustmentsDlg", "Adaptive Equalization"))
self.label_5.setText(_translate("imageAdjustmentsDlg", "Clip Limit"))
self.ccManual.setText(_translate("imageAdjustmentsDlg", "Manual"))
self.label.setText(_translate("imageAdjustmentsDlg", "R"))
self.label_2.setText(_translate("imageAdjustmentsDlg", "G"))
self.label_3.setText(_translate("imageAdjustmentsDlg", "B"))
self.pbColorReset.setText(_translate("imageAdjustmentsDlg", "Reset"))
self.pbApply.setText(_translate("imageAdjustmentsDlg", "Apply"))
self.pbCancel.setText(_translate("imageAdjustmentsDlg", "Cancel"))
|
var hasOwn = Object.prototype.hasOwnProperty;
var toString = Object.prototype.toString;
module.exports = function forEach (obj, fn, ctx) {
if (toString.call(fn) !== '[object Function]') {
throw new TypeError('iterator must be a function');
}
var l = obj.length;
if (l === +l) {
for (var i = 0; i < l; i++) {
fn.call(ctx, obj[i], i, obj);
}
} else {
for (var k in obj) {
if (hasOwn.call(obj, k)) {
fn.call(ctx, obj[k], k, obj);
}
}
}
};
|
#!/Library/Frameworks/Python.framework/Versions/3.6/bin/python3
# Dump device records that are enrolled, are not in the 'Disabled' location
# group, and carry the 'iOS AirWatch Agent Policy' compliance policy.
from toolbox.AirWatchAPI import AirWatchAPI as airwatch
api = airwatch()
# NOTE(review): `search` is never defined in this script, so as written this
# raises a NameError. Presumably a device-search call on `api` (returning a
# dict with a 'Devices' list) belongs here -- confirm against AirWatchAPI.
for device in search['Devices']:
    if device['EnrollmentStatus'] == 'Enrolled' and device['LocationGroupName'] != 'Disabled':
        for policy in device['ComplianceSummary']['DeviceCompliance']:
            if policy['PolicyName'] == 'iOS AirWatch Agent Policy':
                print(api.prettyJSON(device))
                # Pause after each matching device until Enter is pressed.
                input()
|
// Doxygen-generated search index fragment: each entry is
// [key, [display name, [target html anchor, visibility flag, owner], ...]].
// Do not edit by hand -- regenerated by Doxygen.
var searchData=
[
  ['t_0',['T',['../classnetdem_1_1_s_d_f_calculator.html#adcfdb7972eb8e7126390fd912c5ed078',1,'netdem::SDFCalculator']]],
  ['t_5fstart_1',['t_start',['../classnetdem_1_1_d_e_m_profiler.html#ab7d2947cb98fab1a0ad2d96be95cc8b4',1,'netdem::DEMProfiler']]],
  ['target_5fpressure_2',['target_pressure',['../classnetdem_1_1_wall_servo_control.html#a32b386998006c5274bdef26cf8acab8f',1,'netdem::WallServoControl']]],
  ['tetmesh_3',['tetmesh',['../classnetdem_1_1_tet_mesh_splittor.html#aadcaf925603219062219b52462777ef6',1,'netdem::TetMeshSplittor']]],
  ['tetrahedra_4',['tetrahedra',['../classnetdem_1_1_tet_mesh.html#ac1617d42b2556ffa145064048754f5fa',1,'netdem::TetMesh']]],
  ['theta_5fn_5',['theta_n',['../classnetdem_1_1_bond_geometries.html#a3edc10da51db37efd3bda0f43c469652',1,'netdem::BondGeometries']]],
  ['theta_5fs_6',['theta_s',['../classnetdem_1_1_bond_geometries.html#aa319e0eafe7e26b6753226add49ec8e1',1,'netdem::BondGeometries']]],
  ['theta_5ft_7',['theta_t',['../classnetdem_1_1_bond_geometries.html#ac9e76692ab238245ee3fc1b1703e7fe2',1,'netdem::BondGeometries']]],
  ['time_5fdepedent_8',['time_depedent',['../classnetdem_1_1_peri_digm_boundary_condition.html#aab4d0daf1123ed9444885d5d7ddd45d0',1,'netdem::PeriDigmBoundaryCondition']]],
  ['time_5finterval_9',['time_interval',['../classnetdem_1_1_breakage_analysis_p_d.html#a0c90b6900a6626bd5048999c9099c167',1,'netdem::BreakageAnalysisPD::time_interval()'],['../classnetdem_1_1_data_dumper.html#a3ec988616691dd755f5faa872b772343',1,'netdem::DataDumper::time_interval()']]],
  ['time_5fprevious_10',['time_previous',['../classnetdem_1_1_breakage_analysis_p_d.html#af5e3eac9ae86c0ed771e671e269a4552',1,'netdem::BreakageAnalysisPD::time_previous()'],['../classnetdem_1_1_data_dumper.html#ae2162acdef3f24f333893cc2342c8343',1,'netdem::DataDumper::time_previous()']]],
  ['timer_5flist_11',['timer_list',['../classnetdem_1_1_d_e_m_profiler.html#a1807f89997ec1340772b0dc4cd99db6c',1,'netdem::DEMProfiler']]],
  ['timer_5fstarted_12',['timer_started',['../classnetdem_1_1_d_e_m_profiler.html#a1e1255ea4691e9da8cb53e4a155f27a7',1,'netdem::DEMProfiler']]],
  ['timestep_13',['timestep',['../classnetdem_1_1_d_e_m_solver.html#a425c71e883bed7ce636ce51f50f42f7f',1,'netdem::DEMSolver::timestep()'],['../classnetdem_1_1_elasticity_dynamic.html#a71ca762f569e50e71b4bbfd1aed21b30',1,'netdem::ElasticityDynamic::timestep()'],['../classnetdem_1_1_peri_digm_settings.html#a7f22220557a8e3b24f4959e4a62b4a20',1,'netdem::PeriDigmSettings::timestep()']]],
  ['timestep_5ffactor_14',['timestep_factor',['../classnetdem_1_1_peri_digm_settings.html#a8cc00d7f112552edbd975f870c067759',1,'netdem::PeriDigmSettings']]],
  ['tol_15',['tol',['../classnetdem_1_1_wall_servo_control.html#ad8e63e46098327cc5d3ca4f0eb2c6f12',1,'netdem::WallServoControl::tol()'],['../classnetdem_1_1_w_s_c_v_t_sampler.html#a5dcdaf6bcfb21f13fab98626e0fe377d',1,'netdem::WSCVTSampler::tol()']]],
  ['total_16',['total',['../structnetdem_1_1_particle_energy.html#a61c8072778ba415de6fd166f10b20e2c',1,'netdem::ParticleEnergy']]],
  ['translational_17',['translational',['../structnetdem_1_1_particle_energy.html#a4dac7292192355a4732cff2266ee962b',1,'netdem::ParticleEnergy']]],
  ['tree_18',['tree',['../classnetdem_1_1_s_d_f_calculator.html#af00a0e39a11dde13a169d5e38249c729',1,'netdem::SDFCalculator']]],
  ['triangle_19',['triangle',['../classnetdem_1_1_solver_sphere_triangle.html#a6b664eb127912566c6c98ecb7b3508e7',1,'netdem::SolverSphereTriangle']]],
  ['trimesh_20',['trimesh',['../classnetdem_1_1_deformable_particle.html#a60bdd11588944a909ef3981b8936a450',1,'netdem::DeformableParticle']]],
  ['type_21',['type',['../classnetdem_1_1_peri_digm_boundary_condition.html#a382a2e3a8c56b0de62d452415ffb5ea6',1,'netdem::PeriDigmBoundaryCondition::type()'],['../classnetdem_1_1_peri_digm_damage_model.html#ac5a12f3415fed0363b44230bae0e800b',1,'netdem::PeriDigmDamageModel::type()'],['../classnetdem_1_1_peri_digm_discretization.html#a865c73f0a32196682323b0acb86ddb62',1,'netdem::PeriDigmDiscretization::type()'],['../classnetdem_1_1_peri_digm_material.html#a00dd07484fd4de87da54bfead91ac6ab',1,'netdem::PeriDigmMaterial::type()']]]
];
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
__author__ = 'Andreas Bader'
__version__ = "1.00"
import logging
import logging.config
import argparse
from fabric.api import *
import os
import time
import Util
import subprocess
import threading
import Vm
import ConfigParser
import datetime
import re
import platform
# Helper script used to turn YCSB logs into html/pdf reports.
pyYcsbPdfGenPath="ProcessYcsbLog.py"
# Pseudo databases exercised when 'basic'/'test' is requested.
testDBs=['basicdb','basicjdbc','basickairosdb','basicopentsdb']
# Credential/config files that must exist in at least one vagrant folder.
vagrantCredFiles=["vagrantconf.rb", "vagrantconf_gen.rb", "vagrantconf_db.rb", "aws_commands.txt"]
vagrantBasicFilesFolder="basic"
# Per-run debug log file, timestamped at import time.
logFile="debug_log_%s.log" % (time.strftime("%Y%m%d%H%M%S", time.localtime()))
logConfigFile="logging.conf"
availProviders=['virtualbox', 'vsphere', 'openstack', 'digital_ocean', 'aws'] # First one is default
def run_workload(genDict, dbDict, dbName, workloadName, timeseries, granularity, bucket, test, onlyPrerun, debug, logger):
    """Launch RunWorkload.py on the first generator VM.

    genDict/dbDict map VM names to Vm wrapper objects; the DB VMs' IPs and
    hostnames are concatenated and handed to the generator. In non-test mode
    the command is detached with nohup. Returns the result of
    run_without_output().
    """
    if test:
        command = ""
    else:
        command = "nohup "
    ipStr = ""
    hnStr = ""
    for dbKey in sorted(dbDict.keys()):
        # BUGFIX: the original compared the Vm wrapper object itself against
        # None/"" and then dereferenced it anyway (guaranteed AttributeError
        # had the condition ever been true). What can actually be missing is
        # the resolved IP -- mirror the ip0 fallback below.
        if dbDict[dbKey].ip == None or dbDict[dbKey].ip == "":
            ipStr += "%s " %(dbDict[dbKey].vm.hostname())
            logger.warning("IP of vm %s is None or an empty string, using hostname instead. This does not work on some providers (e.g. OpenStack)!" %(dbKey))
        else:
            ipStr += "%s " %(dbDict[dbKey].ip)
        hnStr += "%s " % (dbDict[dbKey].name) #.vm.hostname() does not work here!
    # Representative IP of the first DB VM, used only for log messages.
    ip0 = dbDict[dbDict.keys()[0]].ip
    if ip0 == None or ip0 == "":
        ip0 = dbDict[dbDict.keys()[0]].vm.hostname()
    logger.info("BEGIN: Running workload '%s' on %s with ip string %s and hostname string %s." %(workloadName,ip0,ipStr,hnStr))
    command+="python2 /home/vagrant/files/RunWorkload.py -d %s -w %s -i %s -s %s" %(dbName,workloadName,ipStr,hnStr)
    if timeseries:
        command+=" -t"
    if granularity:
        command+=" -g %s" % (granularity)
    if onlyPrerun:
        command+=" -p"
    if not test:
        command += " -n"
    if bucket:
        command+=" -b %s" % (bucket)
    if debug:
        command += " --debug"
    # Detach stdin in background mode; test mode gets -n at the end instead.
    if not test:
        command += " </dev/null"
    else:
        command += " -n"
    # here we expect to get an error and return code 255, seems to be normal when starting a backround process!
    ret = genDict[genDict.keys()[0]].run_without_output(True, command, True,True,test)
    logger.info("END: Running workload '%s' on %s." %(workloadName,ip0))
    return ret
def wait_for_vm(vms, logger, timeout=3600, noshutdown=False):
    """Wait (wall-clock) until the first VM in `vms` leaves the 'running'
    state, i.e. the workload shut it down.

    Returns False on empty input, timeout, or unparsable vagrant status;
    True otherwise. With noshutdown=True all VMs are booted up again
    afterwards.
    """
    # BUGFIX: time.clock() measured CPU time of this process (sleeps do not
    # advance it) and was removed in Python 3; use wall-clock time.time().
    timerBegin = time.time()
    if len(vms.keys()) < 1:
        logger.error("DB VM Dict has zero keys.")
        return False
    keyOfFirst = sorted(vms.keys())[0]
    try:
        while vms[keyOfFirst].vm.status()[0].state == "running":
            time.sleep(10)
            # BUGFIX: honour the `timeout` parameter (was hard-coded 3600).
            if time.time() - timerBegin > timeout:
                # BUGFIX: broken "%" placeholder and wrong receiver
                # (Vm.hostname() referenced the module, not the instance).
                logger.error("VM %s is still up, waiting for it to shutdown timeouted after %s seconds." %(vms[keyOfFirst].vm.hostname(),timeout))
                return False
    except IndexError:
        logger.error("Python-Vagrant could not parse the output of vagrant status --machine-readable, try check it for "
                     "yourself. The output should be parsable CSV. Sometimes the \"plugin outdated\" message causes "
                     "this error. Check that all vagrant plugins are uptodate.", exc_info=True)
        return False
    if noshutdown:
        logger.info("Noshutdown is activated, trying to boot it up again.")
        for vmKey in sorted(vms.keys()):
            vms[vmKey].vm.up()
    return True
def get_remote_file(vm,remotePath,localPath,logger):
    # Copy remotePath (may contain wildcards) from the VM to localPath using
    # fabric's get() with all console output suppressed; warn_only keeps a
    # failed transfer from aborting the whole run.
    # Returns the list of local paths fabric reports (possibly empty).
    with hide('output','running', 'warnings', 'stdout', 'stderr'),\
        settings(host_string= vm.user_hostname_port(),
        key_filename = vm.keyfile(),
        disable_known_hosts = True, warn_only=True):
        ret = get(remote_path=remotePath, local_path=localPath)
        # A wildcard can match several files; callers usually expect one.
        if len(ret) > 1:
            logger.warning("More than one file copied from %s to %s: %s." %(remotePath, localPath, ret))
        if len(ret) < 1:
            logger.error("No files copied from %s to %s." %(remotePath, localPath))
        return ret
def rm_remote_file(vm,remotePath,logger):
    # Best-effort removal of a file on the VM via fabric's run();
    # warn_only=True means a missing file does not raise.
    with hide('output','running', 'stdout'),\
        settings(host_string= vm.user_hostname_port(),
        key_filename = vm.keyfile(),
        disable_known_hosts = True,
        warn_only = True):
        run ("rm %s" %(remotePath))
def get_ycsb_file(vm,dbName,workloadName,logger):
    """Fetch the YCSB result log for a db/workload pair from the generator VM.

    Returns the local path of the (first) copied file, or None when nothing
    matched the remote wildcard.
    """
    copied = get_remote_file(vm, "/home/vagrant/ycsb_%s_%s_*.log" %(dbName,workloadName), ".", logger)
    if not copied:
        return None
    if len(copied) > 1:
        # Several logs matched -- keep only the first, as before.
        logger.warning("More than one file copied for %s %s: %s. Taking first one." %(dbName, workloadName, copied))
    return copied[0]
# returns True when errors are found
def check_result_file(path, logger):
    """Scan a YCSB result log for lines containing 'warn', 'error' or
    'exception' (case-insensitive).

    Returns True when any such line is found or the file is missing,
    False when the log is clean.  (The original fell off the end and
    returned None for a clean file; False is equivalent for callers
    that test truthiness.)
    """
    if not Util.check_file_exists(path):
        logger.error("%s not found, can't check for errors." %(path))
        return True
    errors = []
    warnings = []
    exceptions = []
    # 'with' guarantees the handle is closed; the original never closed it
    # on the early-return paths. Also avoid shadowing the builtin 'file'.
    with open(path, "r") as resultFile:
        for line in resultFile:
            lowered = line.lower()
            if "warn" in lowered:
                warnings.append(line)
            if "error" in lowered:
                errors.append(line)
            if "exception" in lowered:
                exceptions.append(line)
    # Same precedence as before: errors win over warnings over exceptions.
    if errors:
        logger.error("The following errors occurred: ")
        for error in errors:
            logger.error(error)
        return True
    if warnings:
        logger.warning("The following warnings occurred: ")
        for warning in warnings:
            logger.warning(warning)
        return True
    if exceptions:
        logger.error("The following exceptions occurred: ")
        for exception in exceptions:
            logger.error(exception)
        return True
    return False
# returns True when not all queries are executed
# only possible for testworkload and testworkloadb
# matches two lines:
# [INSERT], Operations, 1000
# and
# [INSERT], Return=0, 1000
# both numbers on the end of the line must be the same
def check_result_file_extended(path, workload, logger):
    # Only the two test workloads have a fixed, known query mix to verify.
    if workload not in ["testworkloada", "testworkloadb"]:
        return False
    if Util.check_file_exists(path):
        # NOTE(review): the handle is never closed on the early-return
        # paths below -- harmless for a short-lived script, but worth fixing.
        file = open(path, "r")
        # resultDict[queryType] = [expected operation count, summed Return= counts]
        resultDict={}
        error = False
        # Per query type: was at least one plausible Return=0 (success) line seen?
        atLeastOneReturnedZeroDict = {}
        for line in file:
            if re.match("\[(INSERT|READ|SCAN|AVG|COUNT|SUM)\],\s*(Return=|Operations).+$", line) != None:
                splitters = line.split(",")
                queryType = splitters[0].replace("[","").replace("]","")
                lineType = splitters[1]
                amount = int(splitters[2].replace(" ",""))
                if "Operations" in lineType:
                    if queryType in resultDict.keys():
                        error = True # nothing should be found twice
                    else:
                        resultDict[queryType] = [amount,0]
                elif "Return=" in lineType:
                    # check if at least a few non-INSERT queries returned 0 (=succesful)
                    # INSERT queries must return 0, -1 is not allowed
                    if queryType not in atLeastOneReturnedZeroDict.keys():
                        atLeastOneReturnedZeroDict[queryType] = False
                    if "Return=0" in lineType and "INSERT" in queryType and amount == resultDict[queryType][0]:
                        atLeastOneReturnedZeroDict[queryType] = True
                    elif "Return=0" in lineType and amount > 0:
                        atLeastOneReturnedZeroDict[queryType] = True
                    if queryType not in resultDict.keys():
                        error = True # should already be found in operations line
                    else:
                        resultDict[queryType][1]+=amount
        # Cross-check: every query type's Return= total must equal its
        # announced operation count; 'sum' collects all non-INSERT returns.
        sum = 0
        for key in resultDict:
            if key != "INSERT":
                sum += resultDict[key][1]
            if resultDict[key][0] != resultDict[key][1]:
                return True
        for key in atLeastOneReturnedZeroDict:
            if not atLeastOneReturnedZeroDict[key]:
                return True
        # Workload-specific sanity check on the number of query types seen.
        if (workload == "testworkloada" and len(resultDict.keys()) != 2 and sum != resultDict["INSERT"][1]) or \
                (workload == "testworkloadb" and len(resultDict.keys()) != 5 and sum != resultDict["INSERT"][1]) :
            return True
        return error
    else:
        logger.error("%s not found, can't check for errors." % (path))
        return True
def generate_html(paths, pdf, overwrite):
    """Run ProcessYcsbLog.py over the given YCSB logs to build an html
    (and optionally pdf) report.

    paths: local ycsb log files; pdf: also produce a pdf; overwrite: pass -o
    so existing output is replaced. Timeseries mode is still taken from the
    module-level `args`. Returns False on bad input, otherwise None.
    """
    if Util.check_file_exists(pyYcsbPdfGenPath):
        tsString = ""
        if args.timeseries:
            tsString=" -t"
        overwriteString = ""
        if overwrite:
            overwriteString=" -o"
        ycsbFileString = "-f"
        if len(paths) < 1:
            logger.error("Can't create html or pdf, paths is empty." )
            return False
        for path in paths:
            ycsbFileString += " %s" %(path)
        pdfString = ""
        # BUGFIX: honour the `pdf` parameter; the original read the global
        # args.pdf and silently ignored what the caller asked for.
        if pdf:
            pdfString = " -p"
        multiStr = ""
        if len(paths) > 1:
            multiStr = " -s"
        try:
            retcode = subprocess.call("python2 %s %s%s%s%s%s" %(pyYcsbPdfGenPath,ycsbFileString,tsString,pdfString,overwriteString,multiStr), shell=True)
            if retcode != 0:
                logger.error("Generation of pdf/html returned with %s." %(retcode))
            else:
                logger.info("Successfully generated pdf/html file.")
        except OSError:
            # BUGFIX: 'except OSError, e' is Python-2-only syntax and the
            # bound name was unused (exc_info logs the traceback anyway).
            logger.error("Errors occured while running pdf/html creation process.", exc_info=True)
    else:
        logger.error("Can't create html or pdf, %s does not exist." %(pyYcsbPdfGenPath))
def cleanup_vm(name, vm, pathFolder, pathVagrantfile, logger, linear):
    """Destroy one VM (linear mode only) and remove its temp artifacts.

    Returns False as soon as removing the folder or the Vagrantfile fails,
    True otherwise.
    """
    logger.info("Cleaning up %s." %(name))
    # In parallel mode destruction is handled by the Vm threads themselves.
    if linear and vm is not None:
        vm.destroy()
    if pathFolder is not None and pathFolder != "":
        if not Util.delete_folder(pathFolder,logger,True):
            logger.warning("Error while cleaning up %s." %(name))
            return False
    if pathVagrantfile is not None and pathVagrantfile != "":
        if not Util.delete_file(pathVagrantfile,logger,True):
            logger.warning("Error while cleaning up %s." %(name))
            return False
    return True
def cleanup_vms(vmDict,logger, linear):
    """Tear down every VM in vmDict and empty the dict.

    In parallel mode each Vm thread is joined (creation), re-started for
    destruction, and joined again before the on-disk artifacts are removed.
    """
    logger.info("Begin Cleaning up.")
    if not linear:
        logger.info("Waiting to finish creation if not finished...")
        for key in list(vmDict.keys()):
            # Wait for Creation to finish if unfinished
            vmDict[key].join()
            if vmDict[key].created:
                # Start Destroying if created :)
                # NOTE(review): re-start()ing a joined threading.Thread
                # raises RuntimeError; presumably Vm manages its own
                # run state -- confirm against Vm.py.
                vmDict[key].start()
            # Wait for Destroying to finish if unfinished
            vmDict[key].join()
    # BUGFIX: iterate over a copy of the keys -- the dict is mutated via
    # pop() inside the loop, which raises RuntimeError on Python 3 and is
    # fragile even on Python 2.
    for key in list(vmDict.keys()):
        cleanup_vm(key, vmDict[key].vm,vmDict[key].pathFolder,vmDict[key].pathVagrantfile, logger, linear)
        vmDict.pop(key)
# Wall-clock timer for the whole run, reported in the final statistics.
overallTime=datetime.datetime.now()
# Configure ArgumentParser
parser = argparse.ArgumentParser(prog="TSDBBench.py",version=__version__,description="A tool for automated bencharming of time series databases.", formatter_class=argparse.RawDescriptionHelpFormatter, epilog="")
parser.add_argument("-l", "--log", action='store_true', help="Be more verbose, log vagrant output.")
parser.add_argument("-t", "--tmpfolder", metavar="TMP", required=True, help="Path to Temp Space")
parser.add_argument("-f", "--vagrantfolders", metavar="VAGRANT", nargs='+', required=True, help="Path to folder(s) with Vagrantfiles. Files from additional folder(s) overwrite existing files from preceding folder(s).")
parser.add_argument("-w", "--workload", metavar="WORKLOAD", help="Only process workload WORKLOAD")
parser.add_argument("-d", "--databases", metavar="DATABASES", nargs='+', help="Only process workloads for all machines for DATABASE (Generator will always be created!), Set to 'all' for all DATABASES, set to 'test' for some special test DB set.)")
parser.add_argument("-n", "--nodestroy", action='store_true', help="Do not destroy VMs")
parser.add_argument("-o", "--noshutdown", action='store_true', help="Do not shutdown db vms, leave them running. Remember: After finishing workload they are rebooted!")
parser.add_argument("-s", "--timeseries", action='store_true', help="Force workload to do timeseries output")
parser.add_argument("-g", "--granularity", metavar="GRANULARITY", type=int, default=1000, help="If forcing to do timeseries output, use granularity GRANULARITY. Default:1000")
parser.add_argument("-b", "--bucket", metavar="BUCKET", type=int, default=100000, help="Use BUCKET bucket size for measurement histograms. Default:100000")
parser.add_argument("-m", "--html", action='store_true', help="Generate html output (ProcessYcsbLog.py required!")
parser.add_argument("-p", "--pdf", action='store_true', help="Generate pdf output (ProcessYcsbLog.py required!")
parser.add_argument("-u", "--nohup", action='store_true', help="Also fetch nohup output (for debugging only)")
parser.add_argument("-c", "--linear", action='store_true', help="Create VMs linear, do not use parallelisation.")
parser.add_argument("-r", "--provider", metavar="PROVIDER", type=str, default=availProviders[0], choices=availProviders, help="Which provider to use. Available: %s" %(availProviders))
parser.add_argument("-z", "--test", action='store_true', help="Test mode. Goes through all or the given databases with the given workload and tests each database. When using testworkloada or testworkloadb it is also checked if the amount of queries matches.")
args = parser.parse_args()
# Configure Logging
# In test mode shell logging is suppressed and everything goes to the
# per-run debug log file instead.
logLevel = logging.WARN
if args.log and not args.test:
    logLevel = logging.DEBUG
try:
    logging.config.fileConfig(logConfigFile)
except ConfigParser.NoSectionError:
    print("Error: Can't load logging config from '%s'." %(logConfigFile))
    exit(-1)
logger = logging.getLogger("TSDBBench")
if not args.test:
    for handler in logger.handlers:
        handler.setLevel(logLevel)
else:
    # Test mode: drop all console handlers; a file handler is added below.
    logger.handlers = []
if not Util.delete_file(logFile,logger,True):
    exit(-1)
if args.log or args.test:
    handler = logging.FileHandler(logFile)
    if args.test:
        handler.setLevel(logging.DEBUG)
    else:
        handler.setLevel(logLevel)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s: %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
if args.test:
    # Human-readable run header on stdout.
    print("Executing in test mode.")
    print("Python: %s" %(platform.python_version()))
    print("Platform: %s" %(platform.platform()))
    print("Databases: %s" % (args.databases))
    print("Workload: %s" %(args.workload))
    if args.workload == "testworkloada" or args.workload == "testworkloadb":
        print("Result checking is used for this workload.")
    else:
        print("Result checking is NOT used for this workload.")
    print("Provider: %s" %(args.provider))
    print("Parallel creation of VMs: %s" %(not args.linear))
    print("Log is written to '%s'." %(logFile))
    print("Logging to shell is disabled (except fabric warnings).")
# Provider/flag compatibility fixes applied up-front.
if args.provider == "digital_ocean" and not args.linear:
    logger.warning("Provider '%s' does not support parallel creation of VMs. Linear creation is automatically enabled. See https://github.com/devopsgroup-io/vagrant-digitalocean/pull/230 for further details." % args.provider)
    args.linear = True
if len(args.databases) > 1 and (args.nodestroy or args.noshutdown):
    logger.warning("The arguments --noshutdown and --nodestroy do not work with multiple databases at one run. Both are automatically disabled.")
    args.nodestroy = False
    args.noshutdown = False
# File checks and deletions (if necessary)
for folder in args.vagrantfolders:
    if not Util.check_folder(folder,logger):
        exit(-1)
# Every credential/config file must exist in at least one vagrant folder.
for vagrantCredFile in vagrantCredFiles:
    found_cred_file = False
    for folder in args.vagrantfolders:
        if Util.check_file_exists(os.path.join(folder,vagrantCredFile)):
            found_cred_file = True
    if not found_cred_file:
        logger.error("%s not found in any of the given vagrantfolders (%s)." %(vagrantCredFile, args.vagrantfolders))
        exit(-1)
if not Util.check_folder(args.tmpfolder,logger):
    exit(-1)
generators={} # list of generator vms
dbs={} # dictinary of db vms
# format "name" = {"path_folder" : "/bla/tmppath", "path_vagrantfile":"/bla/tmppath/file", "vm": vm}
creationTimesGenerators=datetime.datetime.now()
termSize = Util.get_terminal_size(logger)
# Generating Generator VMs
if args.test:
    print(Util.multiply_string("-", termSize))
    print("Stage 1: Creation of generator VMs.")
generatorFound=False
for path, dir in Util.unsorted_paths(args.vagrantfolders,logger,"",True):
    if os.path.isdir(os.path.join(path, dir)) and dir == "generator":
        generatorFound=True
        found=0 # how many .vagrant files are found, At least 1 is needed!
        # search in all generator folders
        for path, file in Util.unsorted_paths(args.vagrantfolders,logger, "generator", True):
            if os.path.isfile(os.path.join(path, file)):
                split = file.rsplit(".vagrant", 1)
                # if rsplit is used on bla.vagrant, the result should be ["bla",""]
                if len(split)>1 and split[1] == "":
                    if split[0] in generators.keys():
                        continue
                    found+=1
                    # check if Generator, generator, Generator_1, etcpp. as machine is used, but always create if
                    # something else than Generator is given (Generator is always created!)
                    if not args.databases or args.databases == None or args.databases == [] \
                            or not Util.check_if_in_databases("generator", args.databases) \
                            or (args.databases and Util.check_if_eq_databases(split[0], args.databases)) \
                            or (args.databases and Util.check_if_eq_databases(split[0].rsplit("_",1)[0], args.databases)):
                        if args.linear:
                            # Linear mode: create synchronously and fail fast.
                            if args.test:
                                Util.print_wo_nl(split[0] + Util.multiply_string(".", termSize-len(split[0])-len("[ERROR]")))
                            virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
                            virtMachine.create_vm()
                            generators[virtMachine.name] = virtMachine
                            if not virtMachine.created:
                                if args.test:
                                    print("[ERROR]")
                                else:
                                    logger.error("VM %s could not be created." %(split[0]))
                                if not args.nodestroy:
                                    cleanup_vms(generators,logger, args.linear)
                                exit(-1)
                            if args.test:
                                print("[OK]")
                        else:
                            # Parallel mode: Vm is started as a worker thread.
                            virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
                            virtMachine.start()
                            Util.sleep_random(2.5,5.0) # needed for openstack, otherwise two vms get the same floating ip
                            generators[virtMachine.name] = virtMachine
        if found == 0:
            logger.error("No .vagrant files found in %s." %(Util.unsorted_paths(args.vagrantfolders, logger, "generator")))
            exit(-1)
        break
if args.linear:
    creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
if not generatorFound:
    logger.error("No Generator found, %s does not exist." %(Util.unsorted_paths(args.vagrantfolders, logger, "generator")))
    exit(-1)
# Special case: only the generator itself was requested -- wait for it,
# optionally clean up, and stop here.
if args.databases and (Util.check_if_eq_databases("generator",args.databases) or Util.check_if_eq_databases_rsplit("generator",args.databases)):
    if not args.linear:
        for generatorKey in generators.keys():
            if args.test:
                Util.print_wo_nl(generatorKey + Util.multiply_string(".", termSize - len(generatorKey) - len("[ERROR]")))
            logger.info("Wait for creation of %s to finish." %(generators[generatorKey].name))
            generators[generatorKey].join()
            if not generators[generatorKey].created:
                if args.test:
                    print("[ERROR]")
                else:
                    logger.error("VM %s could not be created." %(generators[generatorKey].name))
                if not args.nodestroy:
                    cleanup_vms(generators, logger, args.linear)
                exit(-1)
            if args.test:
                print("[OK]")
        creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
    if not args.nodestroy:
        cleanup_vms(generators, logger, args.linear)
    exit(0)
if args.test and args.linear:
    print(Util.multiply_string("-", termSize))
    print("Stage 2: Creation of database VMs and execution of workloads.")
# Accumulated run state for the main loop and the final statistics.
ycsbfiles=[]                        # local paths of fetched ycsb result logs
processedDatabaseVMs=[] # for multi-vagrantfolder-function
processedDatabases=[]               # db folders already handled
failedDatabases=[]                  # test mode: dbs whose result check failed
workingDatabases=[]                 # test mode: dbs whose result check passed
notTestedDatabases=list(args.databases)
creationTimesDB={}                  # db name -> VM creation duration
workloadTimes={}                    # db name -> workload duration
# Doing Tests if basic or test is in given dbs
if args.databases and (Util.check_if_eq_databases("basic", args.databases) or Util.check_if_eq_databases("test", args.databases)):
    # 'basic'/'test' path: the workload runs against the generator itself
    # using the pseudo databases in testDBs -- no db VMs are created.
    if not args.linear:
        for generatorKey in generators.keys():
            if args.test:
                Util.print_wo_nl(
                    generatorKey + Util.multiply_string(".", termSize - len(generatorKey) - len("[ERROR]")))
            logger.info("Wait for creation of %s to finish." %(generators[generatorKey].name))
            generators[generatorKey].join()
            if not generators[generatorKey].created:
                if args.test:
                    print("[ERROR]")
                else:
                    logger.error("VM %s could not be created." %(generators[generatorKey].name))
                if not args.nodestroy:
                    cleanup_vms(generators, logger, args.linear)
                exit(-1)
            if args.test:
                print("[OK]")
        creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
    logger.info("Processing Test Databases")
    for database in testDBs:
        if args.workload:
            logger.info("Starting workload '%s' on Generator %s." %(database,generators[generators.keys()[0]].vm.hostname()))
            run_workload(generators, generators, database, args.workload, args.timeseries, args.granularity, args.bucket, True, False, args.log, logger)
            ycsbFile = get_ycsb_file(generators[generators.keys()[0]].vm,database.lower(),args.workload.lower(),logger)
            ycsbfiles.append(ycsbFile)
            check_result_file(ycsbFile, logger)
            if (args.html or args.pdf) and len(ycsbfiles) == 1:
                generate_html([ycsbFile],args.pdf,False)
        else:
            logger.info("No Workload given, doing nothing.")
    if not args.nodestroy:
        cleanup_vms(generators, logger, args.linear)
else:
    # Generating Database VMs
    # Regular path: for each requested db folder create its VM(s), run the
    # workload from the generator, fetch results, then tear the db VMs down.
    logger.info("Processing Database VMs" )
    for path, dir in Util.unsorted_paths(args.vagrantfolders, logger, "", False):
        if os.path.isdir(os.path.join(path, dir)):
            if dir== "generator" or dir.find(".")==0 or dir in processedDatabases:
                continue
            found=0 # how many .vagrant files are found, At least 1 is needed!
            if not args.databases or args.databases == "" \
                    or re.match("basic.*", dir) != None \
                    or (args.databases and not Util.check_if_eq_databases(dir, args.databases) and not Util.check_if_eq_databases("all", args.databases)):
                continue
            if Util.check_if_eq_databases("all", args.databases):
                # With 'all', track every discovered db in the not-yet-tested list.
                if "all" in notTestedDatabases:
                    notTestedDatabases.remove("all")
                if dir not in notTestedDatabases and dir not in workingDatabases and dir not in failedDatabases:
                    notTestedDatabases.append(dir)
            logger.info("Processing %s." % (dir))
            creationTimesDB[dir]=datetime.datetime.now()
            for path2, file in Util.unsorted_paths(args.vagrantfolders, logger, dir, True):
                if os.path.isfile(os.path.join(path, dir, file)):
                    split = file.rsplit(".vagrant", 1)
                    # if rsplit is used on bla.vagrant, the result should be ["bla",""]
                    if len(split)>1 and split[1] == "":
                        found+=1
                        if args.databases and args.databases != None and args.databases != [] \
                                and split[0] not in processedDatabaseVMs \
                                and (Util.check_if_eq_databases(split[0], args.databases) \
                                or Util.check_if_eq_databases(split[0].rsplit("_",1)[0], args.databases) \
                                or Util.check_if_eq_databases("all", args.databases)):
                            processedDatabaseVMs.append(split[0])
                            if args.linear:
                                # Linear mode: create each db VM synchronously.
                                if args.test:
                                    Util.print_wo_nl(dir + Util.multiply_string(".", termSize - len(dir) - len("[ERROR]")))
                                virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
                                virtMachine.create_vm()
                                dbs[virtMachine.name] = virtMachine
                                if not virtMachine.created:
                                    if args.test:
                                        print("[ERROR]")
                                    else:
                                        logger.error("VM %s could not be created." %(split[0]))
                                    if not args.nodestroy:
                                        cleanup_vms(generators, logger, args.linear)
                                        cleanup_vms(dbs, logger, args.linear)
                                    exit(-1)
                            else:
                                # Parallel mode: start the Vm worker thread.
                                virtMachine = Vm.Vm(args.vagrantfolders, vagrantCredFiles, vagrantBasicFilesFolder, args.tmpfolder, split[0], logger, args.provider, args.log)
                                virtMachine.start()
                                Util.sleep_random(2.5,5.0) # needed for openstack, otherwise two vms get the same floating ip
                                dbs[virtMachine.name] = virtMachine
            if args.linear:
                creationTimesDB[dir] = datetime.datetime.now() - creationTimesDB[dir]
            processedDatabases.append(dir)
            if not args.linear:
                # Parallel mode: now wait for generators (first db only) and
                # for this db's VMs to finish creation.
                for generatorKey in generators.keys():
                    if args.test and len(workingDatabases) == 0: # only before first database
                        Util.print_wo_nl(generatorKey + Util.multiply_string(".", termSize - len(generatorKey) - len("[ERROR]")))
                    logger.info("Wait for creation of %s to finish." %(generators[generatorKey].name))
                    generators[generatorKey].join()
                    if not generators[generatorKey].created:
                        if args.test and len(workingDatabases) == 0: # only before first database
                            print("[ERROR]")
                        else:
                            logger.error("VM %s could not be created." %(generators[generatorKey].name))
                        if not args.nodestroy:
                            cleanup_vms(generators, logger, args.linear)
                            cleanup_vms(dbs, logger, args.linear)
                        exit(-1)
                    if args.test and len(workingDatabases) == 0: # only before first database
                        print("[OK]")
                if args.test:
                    if len(workingDatabases) == 0: # only before first database, after last generator VM in parellel mode
                        creationTimesGenerators = datetime.datetime.now() - creationTimesGenerators
                        print(Util.multiply_string("-", termSize))
                        print("Stage 2: Creation of database VMs and execution of workloads.")
                    Util.print_wo_nl(dir + Util.multiply_string(".", termSize - len(dir) - len("[ERROR]")))
                for dbKey in dbs.keys():
                    logger.info("Wait for creation of %s to finish." %(dbs[dbKey].name))
                    dbs[dbKey].join()
                    if not dbs[dbKey].created:
                        if args.test:
                            print("[ERROR]")
                        else:
                            logger.error("VM %s could not be created." %(dbs[dbKey].name))
                        if not args.nodestroy:
                            cleanup_vms(generators, logger, args.linear)
                            cleanup_vms(dbs, logger, args.linear)
                        exit(-1)
                creationTimesDB[dir] = datetime.datetime.now() - creationTimesDB[dir]
            if found == 0:
                logger.error("No .vagrant files found in %s." % (Util.unsorted_paths(args.vagrantfolders, logger, dir)))
            if args.workload:
                workloadTimes[dir] = datetime.datetime.now()
                logger.info("Starting workload '%s' on %s on Generator %s." %(args.workload,dbs[dbs.keys()[0]].vm.hostname(),generators[generators.keys()[0]].vm.hostname()))
                run_workload(generators, dbs, dir, args.workload, args.timeseries, args.granularity, args.bucket, False, False, args.log, logger)
                logger.info("Waiting for workload to finish...")
                # The db VM shutting down signals workload completion.
                wait_for_vm(dbs, logger, 3600, args.noshutdown)
                ycsbFile = get_ycsb_file(generators[generators.keys()[0]].vm, dir.lower(), args.workload.lower(), logger)
                ycsbfiles.append(ycsbFile)
                if args.nohup:
                    logger.info("Trying to fetch nohup files from generators.")
                    nohupCounter=0
                    for generatorKey in generators.keys():
                        get_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out","./nohup_%s_%s_%s.out" % (dir.lower(), args.workload.lower(), nohupCounter), logger)
                        rm_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out",logger)
                        nohupCounter+=1;
                workloadTimes[dir] = datetime.datetime.now() - workloadTimes[dir]
                checkResult=check_result_file(ycsbFile, logger)
                if args.test:
                    # Extended check only verifies the two test workloads.
                    checkRestul2 = check_result_file_extended(ycsbFile, args.workload, logger)
                    if checkResult or checkRestul2:
                        print("[ERROR]")
                        failedDatabases.append(dir)
                        notTestedDatabases.remove(dir)
                    else:
                        print("[OK]")
                        workingDatabases.append(dir)
                        notTestedDatabases.remove(dir)
                if (args.html or args.pdf) and len(args.databases) == 1 and len(ycsbfiles) == 1:
                    generate_html([ycsbFile],args.pdf,False)
            else:
                logger.info("No Workload given, just running Prerun commands.")
                run_workload(generators, dbs, dir, args.workload, args.timeseries, args.granularity, args.bucket, False, True, args.log, logger)
                if args.nohup:
                    logger.info("Trying to fetch nohup files from generators.")
                    nohupCounter=0
                    for generatorKey in generators.keys():
                        get_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out","./nohup_%s_%s_%s.out" % (dir.lower(), "none", nohupCounter), logger)
                        rm_remote_file(generators[generatorKey].vm,"/home/vagrant/nohup.out",logger)
                        nohupCounter+=1;
            if not args.nodestroy and not args.noshutdown:
                cleanup_vms(dbs,logger, args.linear)
    if not args.nodestroy and not args.noshutdown:
        cleanup_vms(dbs, logger, args.linear)
    cleanup_vms(generators, logger , args.linear)
# Combined report across all processed databases.
if (args.html or args.pdf) and len(ycsbfiles) > 1:
    if args.test:
        print(Util.multiply_string("-", termSize))
        print("Stage 3: Creation ofcombined PDF file.")
    logger.info("More than one DB given, also generating combined html/pdf file.")
    generate_html(ycsbfiles,args.pdf,True)
overallTime = datetime.datetime.now() - overallTime
if args.test:
    # Final test-mode summary: per-db outcomes and timing statistics.
    print(Util.multiply_string("-", termSize))
    print("Statistics:")
    print("Failed databases: %s" %(failedDatabases))
    print("Not tested databases: %s" % (notTestedDatabases))
    print("Working databases: %s" % (workingDatabases))
    print("Amount of time needed overall: %s" %(Util.timedelta_to_string(overallTime)))
    print("Amount of time needed to create generator VMs: %s" %(Util.timedelta_to_string(creationTimesGenerators)))
    print("Amount of time needed to create database VMs:")
    for key in creationTimesDB.keys():
        timedelta_str = Util.timedelta_to_string(creationTimesDB[key])
        print(key + Util.multiply_string("-", termSize-len(key)-len(timedelta_str)) + timedelta_str)
    print("Amount of time needed to complete %s:" %(args.workload))
    for key in workloadTimes.keys():
        timedelta_str = Util.timedelta_to_string(workloadTimes[key])
        print(key + Util.multiply_string("-", termSize - len(key) - len(timedelta_str)) + timedelta_str)
    print("Ending with return code 0.")
exit(0)
|
// Derived from Inferno utils/5c/swt.c
// http://code.google.com/p/inferno-os/source/browse/utils/5c/swt.c
//
// Copyright © 1994-1999 Lucent Technologies Inc. All rights reserved.
// Portions Copyright © 1995-1997 C H Forsyth (forsyth@terzarima.net)
// Portions Copyright © 1997-1999 Vita Nuova Limited
// Portions Copyright © 2000-2007 Vita Nuova Holdings Limited (www.vitanuova.com)
// Portions Copyright © 2004,2006 Bruce Ellis
// Portions Copyright © 2005-2007 C H Forsyth (forsyth@terzarima.net)
// Revisions Copyright © 2000-2007 Lucent Technologies Inc. and others
// Portions Copyright © 2009 The Go Authors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#include <u.h>
#include <libc.h>
#include "gg.h"
// dsname emits a DATA pseudo-instruction storing the n bytes at t into
// sym+off, and returns the offset just past the stored bytes.
int
dsname(Sym *sym, int off, char *t, int n)
{
	Prog *p;

	p = gins(ADATA, N, N);
	// Destination: the named symbol at byte offset off.
	p->from.type = D_OREG;
	p->from.name = D_EXTERN;
	p->from.etype = TINT32;
	p->from.offset = off;
	p->from.reg = NREG;
	p->from.sym = linksym(sym);
	// Width of the data item, in bytes.
	p->reg = n;
	// Source: an inline string constant carrying the raw bytes.
	p->to.type = D_SCONST;
	p->to.name = D_NONE;
	p->to.reg = NREG;
	p->to.offset = 0;
	memmove(p->to.u.sval, t, n);
	return off + n;
}
/*
* make a refer to the data s, s+len
* emitting DATA if needed.
*/
// datastring points a at the byte payload of the interned string (s, len),
// emitting the backing DATA symbol via stringsym if not already present.
// The offset skips the string header so a refers to the raw bytes.
void
datastring(char *s, int len, Addr *a)
{
	Sym *sym;

	sym = stringsym(s, len);
	a->type = D_OREG;
	a->name = D_EXTERN;
	a->etype = TINT32;
	a->offset = widthptr+4;  // skip header
	a->reg = NREG;
	a->sym = linksym(sym);
	a->node = sym->def;
}
/*
* make a refer to the string sval,
* emitting DATA if needed.
*/
// datagostring points a at the Go string header for sval (offset 0),
// unlike datastring, which points past the header at the raw bytes.
void
datagostring(Strlit *sval, Addr *a)
{
	Sym *sym;

	sym = stringsym(sval->s, sval->len);
	a->type = D_OREG;
	a->name = D_EXTERN;
	a->etype = TSTRING;
	a->offset = 0;  // header
	a->reg = NREG;
	a->sym = linksym(sym);
	a->node = sym->def;
}
// gdata emits DATA initializing the location named by nam with the
// wid-byte value nr. Complex and string literals are delegated to their
// specialized emitters.
void
gdata(Node *nam, Node *nr, int wid)
{
	Prog *p;
	vlong v;

	if(nr->op == OLITERAL) {
		switch(nr->val.ctype) {
		case CTCPLX:
			gdatacomplex(nam, nr->val.u.cval);
			return;
		case CTSTR:
			gdatastring(nam, nr->val.u.sval);
			return;
		}
	}
	// A 64-bit value is split into two 32-bit DATA entries: the low
	// word at the original offset, the high word 4 bytes later.
	if(wid == 8 && is64(nr->type)) {
		v = mpgetfix(nr->val.u.xval);
		p = gins(ADATA, nam, nodintconst(v));
		p->reg = 4;
		p = gins(ADATA, nam, nodintconst(v>>32));
		p->reg = 4;
		p->from.offset += 4;
		return;
	}
	p = gins(ADATA, nam, nr);
	p->reg = wid;
}
// gdatacomplex emits DATA for a complex literal: the real part at the
// node's offset, then the imaginary part one component-width later.
void
gdatacomplex(Node *nam, Mpcplx *cval)
{
	Prog *p;
	int w;

	// Width of one floating-point component (real or imaginary).
	w = cplxsubtype(nam->type->etype);
	w = types[w]->width;

	p = gins(ADATA, nam, N);
	p->reg = w;
	p->to.type = D_FCONST;
	p->to.u.dval = mpgetflt(&cval->real);

	p = gins(ADATA, nam, N);
	p->reg = w;
	p->from.offset += w;
	p->to.type = D_FCONST;
	p->to.u.dval = mpgetflt(&cval->imag);
}
// gdatastring emits DATA initializing a Go string value at nam: first the
// pointer to the interned bytes, then the length as a 32-bit integer at
// the slot one pointer-width later.
void
gdatastring(Node *nam, Strlit *sval)
{
	Prog *p;
	Node nod1;

	// Pointer word: address of the string's byte data.
	p = gins(ADATA, nam, N);
	datastring(sval->s, sval->len, &p->to);
	p->reg = types[tptr]->width;
	p->to.type = D_CONST;
	p->to.etype = TINT32;

	// Length word, stored just past the pointer.
	nodconst(&nod1, types[TINT32], sval->len);
	p = gins(ADATA, nam, &nod1);
	p->reg = types[TINT32]->width;
	p->from.offset += types[tptr]->width;
}
// dstringptr writes, at s+off (rounded up to pointer alignment), a pointer
// to the NUL-terminated C string str, and returns the advanced offset.
int
dstringptr(Sym *s, int off, char *str)
{
	Prog *p;

	off = rnd(off, widthptr);
	p = gins(ADATA, N, N);
	p->from.type = D_OREG;
	p->from.name = D_EXTERN;
	p->from.sym = linksym(s);
	p->from.offset = off;
	p->reg = widthptr;

	// +1 keeps the trailing NUL in the emitted data.
	datastring(str, strlen(str)+1, &p->to);
	p->to.type = D_CONST;
	p->to.etype = TINT32;
	off += widthptr;

	return off;
}
// dgostrlitptr writes, at s+off (rounded up to pointer alignment), a
// pointer to the Go string literal lit; a nil literal is stored as a zero
// uintptr. Returns the advanced offset.
int
dgostrlitptr(Sym *s, int off, Strlit *lit)
{
	Prog *p;

	if(lit == nil)
		return duintptr(s, off, 0);

	off = rnd(off, widthptr);
	p = gins(ADATA, N, N);
	p->from.type = D_OREG;
	p->from.name = D_EXTERN;
	p->from.sym = linksym(s);
	p->from.offset = off;
	p->reg = widthptr;
	datagostring(lit, &p->to);
	p->to.type = D_CONST;
	p->to.etype = TINT32;
	off += widthptr;

	return off;
}
// dgostringptr wraps the C string str in a freshly allocated Strlit and
// defers to dgostrlitptr; nil is stored as a zero uintptr.
int
dgostringptr(Sym *s, int off, char *str)
{
	int n;
	Strlit *lit;

	if(str == nil)
		return duintptr(s, off, 0);

	n = strlen(str);
	// Strlit has a flexible byte array at the end; allocate room for n bytes.
	lit = mal(sizeof *lit + n);
	strcpy(lit->s, str);
	lit->len = n;
	return dgostrlitptr(s, off, lit);
}
// dsymptr writes, at s+off (rounded up to pointer alignment), the address
// x+xoff, and returns the advanced offset.
int
dsymptr(Sym *s, int off, Sym *x, int xoff)
{
	Prog *p;

	off = rnd(off, widthptr);

	p = gins(ADATA, N, N);
	p->from.type = D_OREG;
	p->from.name = D_EXTERN;
	p->from.sym = linksym(s);
	p->from.offset = off;
	p->reg = widthptr;
	p->to.type = D_CONST;
	p->to.name = D_EXTERN;
	p->to.sym = linksym(x);
	p->to.offset = xoff;
	off += widthptr;

	return off;
}
// nopout turns p into a no-op so later passes ignore it.
void
nopout(Prog *p)
{
	p->as = ANOP;
}
|
import time
LABELS = [
    'brightpixel',
    'narrowband',
    'narrowbanddrd',
    'noise',
    'squarepulsednarrowband',
    'squiggle',
    'squigglesquarepulsednarrowband'
]

# Map each label to its integer class id (its index in LABELS).
LABEL_TO_ID = {label: label_i for label_i, label in enumerate(LABELS)}


def tprint(msg):
    """Print `msg` prefixed with the current Unix timestamp (whole seconds)."""
    print('%s: %s' % (int(time.time()), msg))


def stats(conf_mat):
    """
    Compute per-class precision/recall/F1 from a confusion matrix.

    Args:
        conf_mat: square matrix indexable as ``conf_mat[i, j]`` (e.g. a
            numpy array), rows being true classes and columns predictions,
            ordered like LABELS.

    Returns:
        A list of rows: a header row, one row per label, and a final 'avg'
        row averaged over the classes whose scores were computable. A score
        whose denominator is zero is reported as None.
    """
    ret = [[None, 'precision', 'recall', 'f1']]
    prec_acc = 0.0
    recall_acc = 0.0
    f1_acc = 0.0
    successful = 0
    for i in range(len(LABELS)):
        true_pos = float(conf_mat[i, i])
        false_pos = float(sum(conf_mat[:, i]) - conf_mat[i, i])
        false_neg = float(sum(conf_mat[i]) - conf_mat[i, i])
        try:
            precision = true_pos / (true_pos + false_pos)
            recall = true_pos / (true_pos + false_neg)
            f1_score = 2 * precision * recall / (precision + recall)
            prec_acc += precision
            recall_acc += recall
            f1_acc += f1_score
            successful += 1
        except ZeroDivisionError:
            # No predictions and/or no true samples for this class.
            recall = None
            precision = None
            f1_score = None
        ret.append([LABELS[i], precision, recall, f1_score])
    if successful:
        ret.append([
            'avg',
            prec_acc / successful,
            recall_acc / successful,
            f1_acc / successful])
    else:
        # Previously this divided by zero when no class had a defined score
        # (e.g. an all-zero confusion matrix).
        ret.append(['avg', None, None, None])
    return ret
|
"""
Plotting for nirvana outputs.
.. include:: ../include/links.rst
"""
import numpy as np
from matplotlib import pyplot as plt
import matplotlib
from mpl_toolkits.axes_grid1 import make_axes_locatable as mal
import re
import os
import traceback
import multiprocessing as mp
from functools import partial
import dynesty
import corner
import pickle
from glob import glob
from tqdm import tqdm
from astropy.io import fits
from ..models.higher_order import bisym_model
from ..models.beam import smear, ConvolveFFTW
from ..models.geometry import projected_polar
from ..data.manga import MaNGAStellarKinematics, MaNGAGasKinematics
from ..data.kinematics import Kinematics
from ..data.util import unpack
from .fits_prep import fileprep, dynmeds, profs
def summaryplot(f, plate=None, ifu=None, smearing=True, stellar=False, maxr=None, cen=True,
                fixcent=True, save=False, clobber=False, remotedir=None, gal=None,
                relative_pab=False):
    """
    Make a summary plot for a `nirvana` output file with MaNGA velocity
    field.

    Shows the values for the global parameters of the galaxy, the rotation
    curves (with 1-sigma lower and upper bounds) for the different velocity
    components, then comparisons of the MaNGA data, the model, and the
    residuals for the rotational velocity and the velocity dispersion.

    Args:
        f (:obj:`str`, `dynesty.NestedSampler`_, `dynesty.results.Results`_):
            `.fits` file, sampler, results, `.nirv` file of dumped results
            from :func:`~nirvana.fitting.fit`. If this is in the regular
            format from the automatic outfile generator in
            :func:`~nirvana.scripts.nirvana.main` then it will fill in most
            of the rest of the parameters by itself.
        plate (:obj:`int`, optional):
            MaNGA plate number for desired galaxy. Can be auto filled by `f`.
        ifu (:obj:`int`, optional):
            MaNGA IFU number for desired galaxy. Can be auto filled by `f`.
        smearing (:obj:`bool`, optional):
            Whether or not to apply beam smearing to models. Can be auto
            filled by `f`.
        stellar (:obj:`bool`, optional):
            Whether or not to use stellar velocity data instead of gas. Can
            be auto filled by `f`.
        maxr (:obj:`float`, optional):
            Maximum radius to make edges go out to in units of effective
            radii. Can be auto filled by `f`.
        cen (:obj:`bool`, optional):
            Whether the position of the center was fit. Can be auto filled by
            `f`.
        fixcent (:obj:`bool`, optional):
            Whether the center velocity bin was held at 0 in the fit. Can be
            auto filled by `f`.
        save (:obj:`bool`, optional):
            Flag for whether to save the plot. Will save as a pdf in the same
            directory as `f` is in but inside a folder called `plots`.
        clobber (:obj:`bool`, optional):
            Flag to overwrite plot file if it already exists. Only matters if
            `save=True`
        remotedir (:obj:`str`, optional):
            Directory to load MaNGA data files from, or save them if they are
            not found and are remotely downloaded.
        gal (:class:`~nirvana.data.fitargs.FitArgs`, optional):
            Pre existing galaxy object to use instead of loading from scratch

    Returns:
        `matplotlib.figure.Figure`_: the assembled summary figure.
    """

    #check if plot file already exists
    if save and not clobber:
        path = f[:f.rfind('/')+1]
        fname = f[f.rfind('/')+1:-5]
        # keep this path in sync with the savefig call at the bottom
        if os.path.isfile(f'{path}plots/{fname}.pdf'):
            raise ValueError('Plot file already exists')

    #unpack input file into useful objects
    args, resdict = fileprep(f, plate, ifu, smearing, stellar, maxr, cen, fixcent,
                             remotedir=remotedir, gal=gal)

    #generate velocity models
    velmodel, sigmodel = bisym_model(args, resdict, plot=True, relative_pab=relative_pab)
    vel_r = args.kin.remap('vel')
    sig_r = np.sqrt(args.kin.remap('sig_phys2')) if hasattr(args.kin, 'sig_phys2') \
            else args.kin.remap('sig')

    #fall back on uniform errors if no inverse variance is available
    if args.kin.vel_ivar is None: args.kin.vel_ivar = np.ones_like(args.kin.vel)
    if args.kin.sig_ivar is None: args.kin.sig_ivar = np.ones_like(args.kin.sig)

    #calculate chisq maps
    vel_ivar = args.kin.remap('vel_ivar')
    sig_ivar = args.kin.remap('sig_phys2_ivar')**.5
    if args.scatter:
        #fold the fitted intrinsic-scatter terms into the error budget
        vel_ivar = 1/(1/vel_ivar + resdict['vel_scatter']**2)
        sig_ivar = 1/(1/sig_ivar + resdict['sig_scatter']**2)
    velchisq = (vel_r - velmodel)**2 * vel_ivar
    sigchisq = (sig_r - sigmodel)**2 * sig_ivar

    #print global parameters on figure
    fig = plt.figure(figsize = (12,9))
    plt.subplot(3,4,1)
    ax = plt.gca()
    infobox(ax, resdict, args, cen, relative_pab)

    #image
    plt.subplot(3,4,2)
    if args.kin.image is not None: plt.imshow(args.kin.image)
    else: plt.text(.5,.5, 'No image found', horizontalalignment='center',
            transform=plt.gca().transAxes, size=14)
    plt.axis('off')

    #Radial velocity profiles
    plt.subplot(3,4,3)
    ls = [r'$V_t$',r'$V_{2t}$',r'$V_{2r}$']
    for i,v in enumerate(['vt', 'v2t', 'v2r']):
        plt.plot(args.edges, resdict[v], label=ls[i])

    errors = [[resdict['vtl'], resdict['vtu']], [resdict['v2tl'], resdict['v2tu']],
              [resdict['v2rl'], resdict['v2ru']]]
    for i,p in enumerate(errors):
        plt.fill_between(args.edges, p[0], p[1], alpha=.5)

    plt.ylim(bottom=0)
    plt.legend(loc=2)
    plt.xlabel('Radius (arcsec)')
    plt.ylabel(r'$v$ (km/s)')
    plt.title('Velocity Profiles')

    #dispersion profile
    plt.subplot(3,4,4)
    plt.plot(args.edges, resdict['sig'])
    plt.fill_between(args.edges, resdict['sigl'], resdict['sigu'], alpha=.5)
    plt.ylim(bottom=0)
    plt.title('Velocity Dispersion Profile')
    plt.xlabel('Radius (arcsec)')
    plt.ylabel(r'$v$ (km/s)')

    #MaNGA Ha velocity field
    plt.subplot(3,4,5)
    plt.title(f"{resdict['type']} Velocity Data")
    vmax = min(np.max(np.abs(vel_r)), 300)
    plt.imshow(vel_r, cmap='jet', origin='lower', vmin=-vmax, vmax=vmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    cb = plt.colorbar(cax=cax)
    cb.set_label('km/s', labelpad=-10)

    #Vel model from dynesty fit
    plt.subplot(3,4,6)
    plt.title('Velocity Model')
    plt.imshow(velmodel,'jet', origin='lower', vmin=-vmax, vmax=vmax)
    plt.tick_params(left=False, bottom=False,labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    #single colorbar per panel (was previously drawn twice on the same cax)
    cb = plt.colorbar(cax=cax)
    cb.set_label('km/s', labelpad=-10)

    #Residuals from vel fit
    plt.subplot(3,4,7)
    plt.title('Velocity Residuals')
    vmax = min(np.abs(vel_r-velmodel).max(), 50)
    plt.imshow(vel_r-velmodel, 'jet', origin='lower', vmin=-vmax, vmax=vmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    cb = plt.colorbar(cax=cax)
    cb.set_label('km/s', labelpad=-10)

    #Chisq from vel fit
    plt.subplot(3,4,8)
    plt.title('Velocity Chi Squared')
    plt.imshow(velchisq, 'jet', origin='lower', vmin=0, vmax=50)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    plt.colorbar(cax=cax)

    #MaNGA Ha velocity disp
    plt.subplot(3,4,9)
    plt.title(f"{resdict['type']} Dispersion Data")
    vmax = min(np.max(sig_r), 200)
    plt.imshow(sig_r, cmap='jet', origin='lower', vmax=vmax, vmin=0)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    cb = plt.colorbar(cax=cax)
    cb.set_label('km/s', labelpad=0)

    #disp model from dynesty fit
    plt.subplot(3,4,10)
    plt.title('Dispersion Model')
    plt.imshow(sigmodel, 'jet', origin='lower', vmin=0, vmax=vmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    cb = plt.colorbar(cax=cax)
    cb.set_label('km/s', labelpad=0)

    #Residuals from disp fit
    plt.subplot(3,4,11)
    plt.title('Dispersion Residuals')
    vmax = min(np.abs(sig_r - sigmodel).max(), 50)
    plt.imshow(sig_r-sigmodel, 'jet', origin='lower', vmin=-vmax, vmax=vmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    cb = plt.colorbar(cax=cax)
    cb.set_label('km/s', labelpad=-10)

    #Chisq from sig fit
    plt.subplot(3,4,12)
    plt.title('Dispersion Chi Squared')
    plt.imshow(sigchisq, 'jet', origin='lower', vmin=0, vmax=50)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('right', size='5%', pad=.05)
    plt.colorbar(cax=cax)

    plt.tight_layout()

    if save:
        path = f[:f.rfind('/')+1]
        fname = f[f.rfind('/')+1:-5]
        plt.savefig(f'{path}plots/{fname}.pdf', format='pdf')
        plt.close()

    return fig
def separate_components(f, plate=None, ifu=None, smearing=True, stellar=False, maxr=None,
                        cen=True, fixcent=True, save=False, clobber=False, remotedir=None,
                        gal=None, relative_pab=False, cmap='RdBu', mock=None):
    """
    Make a plot of a `nirvana` output file with the different velocity
    components separated.

    Plot the first order velocity component and the two second order
    velocity components next to each other along with the full model, data,
    image, and global parameters.

    The created plot contains the global parameters of the galaxy, the image
    of the galaxy, and the data of the velocity field on the first row. The
    second row is the full model followed by the different components broken
    out, and the third row shows the residuals of the data against the model
    and against various partial sums of the components.

    Args:
        f (:class:`dynesty.NestedSampler`, :obj:`str`, :class:`dynesty.results.Results`):
            Sampler, results, or file of dumped results from `dynesty` fit.
        plate (:obj:`int`, optional):
            MaNGA plate number for desired galaxy. Must be specified if
            `auto=False`.
        ifu (:obj:`int`, optional):
            MaNGA IFU design number for desired galaxy. Must be specified if
            `auto=False`.
        smearing (:obj:`bool`, optional):
            Flag for whether or not to apply beam smearing to models.
        stellar (:obj:`bool`, optional):
            Flag for whether or not to use stellar velocity data instead of
            gas.
        maxr (:obj:`float`, optional):
            Maximum radius (same units as the projected radii) to keep;
            spaxels beyond it are masked out.
        cen (:obj:`bool`, optional):
            Flag for whether the position of the center was fit.
        fixcent (:obj:`bool`, optional):
            Whether the center velocity bin was held at 0 in the fit.
        save (:obj:`bool`, optional):
            Save the figure as a pdf in a `plots` folder next to `f`.
        clobber (:obj:`bool`, optional):
            Allow overwriting an existing plot file (only used with `save`).
        remotedir (:obj:`str`, optional):
            Directory to load MaNGA data files from.
        gal (optional):
            Pre existing galaxy object to use instead of loading from scratch.
        relative_pab (:obj:`bool`, optional):
            Passed through to :func:`bisym_model`.
        cmap (:obj:`str`, optional):
            Colormap used for the velocity maps.
        mock (:obj:`str`, optional):
            Output file of a mock galaxy whose profiles/parameters are
            overplotted for comparison.
    """

    args, resdict = fileprep(f, plate, ifu, smearing, stellar, maxr, cen, fixcent,
                             remotedir=remotedir, gal=gal)

    #zero out each velocity component in turn to isolate the others
    z = np.zeros(len(resdict['vt']))
    vtdict, v2tdict, v2rdict = [resdict.copy(), resdict.copy(), resdict.copy()]
    vtdict['v2t'] = z
    vtdict['v2r'] = z
    v2tdict['vt'] = z
    v2tdict['v2r'] = z
    v2rdict['vt'] = z
    v2rdict['v2t'] = z

    #mask out spaxels beyond the requested maximum radius
    if maxr is not None:
        r,th = projected_polar(args.kin.x, args.kin.y,
                               *np.radians((resdict['pa'], resdict['inc'])))
        rmask = r > maxr
        args.kin.vel_mask |= rmask
        args.kin.sig_mask |= rmask

    #full model and one model per isolated component
    #(relative_pab was previously accepted but never forwarded)
    velmodel, sigmodel = bisym_model(args, resdict, plot=True, relative_pab=relative_pab)
    vtmodel, sigmodel = bisym_model(args, vtdict, plot=True, relative_pab=relative_pab)
    v2tmodel, sigmodel = bisym_model(args, v2tdict, plot=True, relative_pab=relative_pab)
    v2rmodel, sigmodel = bisym_model(args, v2rdict, plot=True, relative_pab=relative_pab)
    vel_r = args.kin.remap('vel')

    #must set all masked areas to 0 or else vmax calculations barf
    for v in [vel_r, velmodel, vtmodel, v2tmodel, v2rmodel]:
        v.data[v.mask] = 0
        v -= resdict['vsys'] #recenter at 0

    v2model = v2tmodel + v2rmodel
    v2model.data[v2model.mask] = 0

    #residuals of the data against the model and partial models
    velresid = vel_r - velmodel
    vtresid = vel_r - v2tmodel - v2rmodel
    v2tresid = vel_r - vtmodel - v2rmodel
    v2rresid = vel_r - vtmodel - v2tmodel
    v2resid = vel_r - vtmodel

    #symmetric color ranges per panel pair, capped at 300 km/s
    datavmax = min(np.max(np.abs([vel_r, velmodel])), 300)
    velvmax = min(np.max(np.abs(velresid)), 300)
    vtvmax = min(np.max(np.abs([vtmodel, vtresid])), 300)
    v2tvmax = min(np.max(np.abs([v2tmodel, v2tresid])), 300)
    v2rvmax = min(np.max(np.abs([v2rmodel, v2rresid])), 300)
    v2vmax = min(np.max(np.abs([v2model, v2resid])), 300)

    plt.figure(figsize = (15,9))

    plt.subplot(3,5,1)
    ax = plt.gca()
    infobox(ax, resdict, args)

    #image (guarded like summaryplot; previously crashed on a missing image)
    plt.subplot(3,5,2)
    if args.kin.image is not None:
        plt.imshow(args.kin.image)
    else:
        plt.text(.5, .5, 'No image found', horizontalalignment='center',
                 transform=plt.gca().transAxes, size=14)
    plt.axis('off')

    #MaNGA Ha velocity field
    plt.subplot(3,5,3)
    plt.title(r'Velocity Data')
    plt.imshow(vel_r, cmap=cmap, origin='lower', vmin=-datavmax, vmax=datavmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    #Radial velocity profiles
    plt.subplot(3,5,4)
    ls = [r'$V_t$',r'$V_{2t}$',r'$V_{2r}$']
    for i,v in enumerate(['vt', 'v2t', 'v2r']):
        plt.plot(args.edges, resdict[v], label=ls[i])

    errors = [[resdict['vtl'], resdict['vtu']], [resdict['v2tl'], resdict['v2tu']],
              [resdict['v2rl'], resdict['v2ru']]]
    for i,p in enumerate(errors):
        plt.fill_between(args.edges, p[0], p[1], alpha=.5)

    plt.ylim(bottom=0)
    plt.legend(loc=2)
    plt.xlabel('Radius (arcsec)', labelpad=-1)
    plt.ylabel(r'$v$ (km/s)')
    plt.title('Velocity Profiles')
    plt.gca().tick_params(direction='in')

    #overplot the profiles and parameters of a mock galaxy, if given
    if mock is not None:
        margs, mresdict = fileprep(mock)
        ls = [r'$V_t$',r'$V_{2t}$',r'$V_{2r}$']
        cs = ['C0', 'C1', 'C2']
        for i,v in enumerate(['vt', 'v2t', 'v2r']):
            plt.plot(margs.edges, mresdict[v], label=ls[i], ls='--',c=cs[i])

        plt.ylim(bottom=0)
        plt.legend(loc=2)
        plt.xlabel('Radius (arcsec)', labelpad=-1)
        plt.ylabel(r'$v$ (km/s)')
        plt.title('Mock Velocity Profiles')
        plt.gca().tick_params(direction='in')

        plt.subplot(3,5,5)
        ax = plt.gca()
        infobox(ax, mresdict, margs, title='Mock Params')

    plt.subplot(3,5,6)
    plt.imshow(velmodel, cmap=cmap, origin='lower', vmin=-datavmax, vmax=datavmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'Model', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,7)
    plt.imshow(vtmodel, cmap=cmap, origin='lower', vmin=-vtvmax, vmax=vtvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'$V_t$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,8)
    plt.imshow(v2tmodel, cmap=cmap, origin='lower', vmin=-v2tvmax, vmax=v2tvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'$V_{2t}$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,9)
    plt.imshow(v2rmodel, cmap=cmap, origin='lower', vmin=-v2rvmax, vmax=v2rvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'$V_{2r}$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,10)
    plt.imshow(v2model, cmap=cmap, origin='lower', vmin=-v2vmax, vmax=v2vmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'$V_{2t} + V_{2r}$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,11)
    plt.imshow(velresid, cmap=cmap, origin='lower', vmin=-velvmax, vmax=velvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'Data $-$ Model', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,12)
    plt.imshow(vtresid, cmap=cmap, origin='lower', vmin=-vtvmax, vmax=vtvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'Data$- (V_{2t} + V_{2r})$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,13)
    plt.imshow(v2tresid, cmap=cmap, origin='lower', vmin=-v2tvmax, vmax=v2tvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'Data$- (V_t + V_{2r})$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,14)
    plt.imshow(v2rresid, cmap=cmap, origin='lower', vmin=-v2rvmax, vmax=v2rvmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'Data$- (V_t + V_{2t})$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.subplot(3,5,15)
    plt.imshow(v2resid, cmap=cmap, origin='lower', vmin=-v2vmax, vmax=v2vmax)
    plt.tick_params(left=False, bottom=False, labelleft=False, labelbottom=False)
    plt.title(r'Data$- V_t$', fontsize=16)
    cax = mal(plt.gca()).append_axes('bottom', size='5%', pad=0)
    cb = plt.colorbar(cax=cax, orientation='horizontal')
    cax.tick_params(direction='in')
    cb.set_label('km/s', labelpad=-2)

    plt.tight_layout(pad=-.025)

    if save:
        path = f[:f.rfind('/')+1]
        fname = f[f.rfind('/')+1:-5]
        plt.savefig(f'{path}plots/sepcomp_{fname}.pdf', format='pdf')
        plt.close()
def sinewave(f, plate=None, ifu=None, smearing=True, stellar=False, maxr=None, cen=True,
             fixcent=True):
    """
    Compare the `nirvana` fit to the data azimuthally in radial bins.

    Breaks down the data into radial bins and plots the velocity data points
    in each bin azimuthally, overplotting the fit for each bin. These are
    separated out into a Joy Division style plot.

    The plot provides the data points in each radial bin plotted azimuthally,
    color coded and separated on an arbitrary y axis. The curve the fit
    generated is plotted on top.

    Args:
        f (:class:`dynesty.NestedSampler`, :obj:`str`, :class:`dynesty.results.Results`):
            Sampler, results, or file of dumped results from `dynesty` fit.
        plate (:obj:`int`, optional):
            MaNGA plate number for desired galaxy. Must be specified if
            `auto=False`.
        ifu (:obj:`int`, optional):
            MaNGA IFU design number for desired galaxy. Must be specified if
            `auto=False`.
        smearing (:obj:`bool`, optional):
            Flag for whether or not to apply beam smearing to models.
        stellar (:obj:`bool`, optional):
            Flag for whether or not to use stellar velocity data instead of
            gas.
        maxr (:obj:`float`, optional):
            Maximum radius to use, passed through to :func:`fileprep`.
        cen (:obj:`bool`, optional):
            Flag for whether the position of the center was fit.
        fixcent (:obj:`bool`, optional):
            Whether the center velocity bin was held at 0 in the fit.
            (Previously referenced but missing from the signature, which
            raised a NameError on every call.)
    """

    #prep the data, parameters, and coordinates
    #(fileprep returns two values, matching its use elsewhere in this module)
    args, resdict = fileprep(f, plate, ifu, smearing, stellar, maxr, cen, fixcent)
    inc, pa, pab = np.radians([resdict['inc'], resdict['pa'], resdict['pab']])
    r, th = projected_polar(args.kin.x, args.kin.y, pa, inc)

    plt.figure(figsize=(4, len(args.edges)*.75))
    c = plt.cm.jet(np.linspace(0, 1, len(args.edges)-1))
    plt.title(f"{resdict['plate']}-{resdict['ifu']} {resdict['type']}")

    #for each radial bin, plot data points and model
    for i in range(len(args.edges)-1):
        cut = (r > args.edges[i]) * (r < args.edges[i+1])
        sort = np.argsort(th[cut])
        thcs = th[cut][sort]
        #offset each bin by 100 km/s so the curves stack vertically
        plt.plot(np.degrees(thcs), args.kin.vel[cut][sort]+100*i, '.', c=c[i])

        #generate model from fit parameters
        velmodel = resdict['vsys'] + np.sin(inc) * (resdict['vt'][i] * np.cos(thcs) \
                - resdict['v2t'][i] * np.cos(2 * (thcs - pab)) * np.cos(thcs) \
                - resdict['v2r'][i] * np.sin(2 * (thcs - pab)) * np.sin(thcs))
        plt.plot(np.degrees(thcs), velmodel+100*i, 'k--')

    plt.tick_params(left=False, labelleft=False)
    plt.xlabel('Azimuth (deg)')
    plt.tight_layout()
def safeplot(f, func='sum', **kwargs):
    '''
    Call :func:`~nirvana.plotting.summaryplot` in a safe way.

    Really should be a decorator but I couldn't figure it out.

    Args:
        f (:obj:`str`):
            Name of the `.fits` file you want to plot.
        func (:obj:`str`, optional):
            Use `sum` to plot data with
            :func:`~nirvana.util.plotting.summaryplot` or `sep` to plot using
            :func:`~nirvana.util.plotting.separate_components`.
        kwargs (optional):
            Arguments for `~nirvana.plotting.summaryplot`.
    '''

    #validate up front; the old fallback branch was unreachable and raised an
    #undefined name (UserInputError)
    if func not in ['sum', 'sep']:
        raise ValueError('Please provide a valid plotting function: sum or sep')

    try:
        if func == 'sum':
            summaryplot(f, save=True, **kwargs)
        else:
            separate_components(f, save=True, **kwargs)
    except Exception:
        #report and continue so one bad file doesn't abort a batch run
        print(f, 'failed')
        print(traceback.format_exc())
def plotdir(directory='/data/manga/digiorgio/nirvana/', fname='*-*_*.nirv', cores=20, func='sum', **kwargs):
    '''
    Generate summary plots for every output file in a directory.

    Looks for files matching the standard nirvana output naming pattern
    unless a different glob is supplied.

    CAUTION: If you use too many cores and don't call `plt.ioff()` before
    this, this function may crash the desktop environment of your operating
    system because it tries to open too many windows at once.

    Args:
        directory (:obj:`str`, optional):
            Directory to search for output files.
        fname (:obj:`str`, optional):
            Glob pattern (with wildcards) selecting the files to plot.
        cores (:obj:`int`, optional):
            Number of worker processes for multiprocessing (see CAUTION).
        func (:obj:`str`, optional):
            `sum` for :func:`~nirvana.util.plotting.summaryplot` or `sep`
            for :func:`~nirvana.util.plotting.separate_components`.
        kwargs (optional):
            Extra arguments forwarded to the plotting function.
    '''

    plt.ioff()  # suppress interactive windows (may not matter in a script)
    matches = glob(directory + fname)
    if not matches:
        raise FileNotFoundError('No files found')
    print(len(matches), 'files found')

    with mp.Pool(cores) as pool:
        pool.map(partial(safeplot, func=func), matches)
def infobox(plot, resdict, args, cen=True, relative_pab=False, title=None):
    """
    Fill an axis with a text summary of the global fit parameters.

    Writes inclination, position angles, systemic velocity, reduced
    chi-squares, asymmetry, and (optionally) the fitted center and scatter
    terms into `plot`, turning its frame off.

    Args:
        plot (`matplotlib.axes.Axes`_):
            Axis to write the text into.
        resdict (:obj:`dict`):
            Fit results dictionary, as returned by :func:`fileprep`.
        args:
            Galaxy/fit object with a `.kin` kinematics attribute.
        cen (:obj:`bool`, optional):
            Whether center coordinates were fit and should be printed.
        relative_pab (:obj:`bool`, optional):
            Passed through to :func:`bisym_model`.
        title (:obj:`str`, optional):
            Box title; defaults to "<plate>-<ifu> <type>".
    """

    #generate velocity models
    velmodel, sigmodel = bisym_model(args, resdict, plot=True, relative_pab=relative_pab)
    vel_r = args.kin.remap('vel')
    #BUG FIX: the attribute lives on args.kin, not args (matches summaryplot)
    sig_r = np.sqrt(args.kin.remap('sig_phys2')) if hasattr(args.kin, 'sig_phys2') \
            else args.kin.remap('sig')

    #calculate number of free parameters in the fit
    if 'velmask' in resdict:
        fill = len(resdict['velmask'])
        fixcent = resdict['vt'][0] == 0
        lenmeds = 6 + 3*(fill - resdict['velmask'].sum() - fixcent) \
                + (fill - resdict['sigmask'].sum())
    else:
        lenmeds = len(resdict['vt'])
    nvar = len(args.kin.vel) + len(args.kin.sig) - lenmeds

    #calculate reduced chisq for vel and sig
    vel_ivar = args.kin.remap('vel_ivar')
    sig_ivar = args.kin.remap('sig_phys2_ivar')**.5
    if args.scatter:
        vel_ivar = 1/(1/vel_ivar + resdict['vel_scatter']**2)
        sig_ivar = 1/(1/sig_ivar + resdict['sig_scatter']**2)
    rchisqv = np.sum((vel_r - velmodel)**2 * vel_ivar) / nvar
    rchisqs = np.sum((sig_r - sigmodel)**2 * sig_ivar) / nvar

    #print global parameters on figure
    plot.axis('off')
    #number of text lines and a font size that shrinks as lines are added
    ny = 6 + 2*cen + args.scatter
    fontsize = 14 - 2*cen - args.scatter
    ys = np.linspace(1 - .01*fontsize, 0, ny)
    title = f"{resdict['plate']}-{resdict['ifu']} {resdict['type']}" if title is None else title
    plot.set_title(title, size=18)
    plot.text(.1, ys[0], r'$i$: %0.1f$^{+%0.1f}_{-%0.1f}$ deg. (phot: %0.1f$^\circ$)'
            %(resdict['inc'], resdict['incu'] - resdict['inc'],
            resdict['inc'] - resdict['incl'], args.kin.phot_inc),
            transform=plot.transAxes, size=fontsize)
    plot.text(.1, ys[1], r'$\phi$: %0.1f$^{+%0.1f}_{-%0.1f}$ deg.'
            %(resdict['pa'], resdict['pau'] - resdict['pa'],
            resdict['pa'] - resdict['pal']), transform=plot.transAxes, size=fontsize)
    plot.text(.1, ys[2], r'$\phi_b$: %0.1f$^{+%0.1f}_{-%0.1f}$ deg.'
            %(resdict['pab'], resdict['pabu'] - resdict['pab'],
            resdict['pab'] - resdict['pabl']), transform=plot.transAxes, size=fontsize)
    plot.text(.1, ys[3], r'$v_{{sys}}$: %0.1f$^{+%0.1f}_{-%0.1f}$ km/s'
            %(resdict['vsys'], resdict['vsysu'] - resdict['vsys'],
            resdict['vsys'] - resdict['vsysl']),transform=plot.transAxes, size=fontsize)
    plot.text(.1, ys[4], r'$\chi_v^2$: %0.1f, $\chi_s^2$: %0.1f' % (rchisqv, rchisqs),
            transform=plot.transAxes, size=fontsize)
    plot.text(.1, ys[5], 'Asymmetry: %0.3f' % args.arc,
            transform=plot.transAxes, size=fontsize)
    if cen:
        plot.text(.1, ys[6], r'$x_c: %0.1f$" $ ^{+%0.1f}_{-%0.1f}$' %
                (resdict['xc'], abs(resdict['xcu'] - resdict['xc']),
                abs(resdict['xcl'] - resdict['xc'])),
                transform=plot.transAxes, size=fontsize)
        plot.text(.1, ys[7], r'$y_c: %0.1f$" $ ^{+%0.1f}_{-%0.1f}$' %
                (resdict['yc'], abs(resdict['ycu'] - resdict['yc']),
                abs(resdict['ycl'] - resdict['yc'])),
                transform=plot.transAxes, size=fontsize)
    if args.scatter:
        plot.text(.1, ys[8], r'$\sigma_v$: %0.1f, $\sigma_s^2$: %0.1f'
                % (resdict['vel_scatter'], resdict['sig_scatter']),
                transform=plot.transAxes, size=fontsize)
|
'use strict';
const Generator = require('yeoman-generator');
const chalk = require('chalk');
const yosay = require('yosay');
module.exports = class extends Generator {
prompting() {
// Have Yeoman greet the user.
this.log(
yosay(
`Welcome to the premium ${chalk.red('generator-micrub')} generator!`
)
);
const prompts = [
{
type: 'confirm',
name: 'someAnswer',
message: 'Would you like to enable this option?',
default: true
}
];
return this.prompt(prompts).then(props => {
// To access props later use this.props.someAnswer;
this.props = props;
});
}
writing() {
this.fs.copy(
this.templatePath('dummyfile.txt'),
this.destinationPath('dummyfile.txt')
);
}
install() {
this.installDependencies();
}
};
|
#!/usr/bin/env python
import subprocess
import sys
def restart_until_success(cmd):
    """Run `cmd` repeatedly until it exits with status 0."""
    while True:
        if subprocess.call(cmd) == 0:
            break
if __name__ == '__main__':
    # Everything after the script name is taken verbatim as the command to
    # retry until it succeeds.
    cmd = sys.argv[1:]
    print ('Executing:', ' '.join(cmd))
    restart_until_success(cmd)
|
# Generated by Django 2.2.2 on 2021-05-02 06:25
import account.models
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the ``avatar`` ImageField to the Account model. Existing rows fall
    # back to the bundled placeholder image; new uploads are routed through
    # account.models.upload_location.

    dependencies = [
        ('account', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='account',
            name='avatar',
            field=models.ImageField(default='avatar/default.png', upload_to=account.models.upload_location),
        ),
    ]
|
import sys, os
# Make the pygbutton module in the parent directory importable when this
# demo is run from inside the examples folder.
sys.path.insert(0, os.path.abspath('..'))
import pygame, pygbutton
from pygame.locals import *

FPS = 30                  # frame-rate cap for the main loop
WINDOWWIDTH = 640         # window width in pixels
WINDOWHEIGHT = 480        # window height in pixels
WHITE = (255, 255, 255)   # RGB background colour
def main():
    """Show an image button that grows by 10px per click; run until quit."""
    pygame.init()
    clock = pygame.time.Clock()
    surface = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
    pygame.display.set_caption('PygButton Test 3')
    cat_button = pygbutton.PygButton(
        (50, 100, 200, 30),
        normal='catbutton_normal.png',
        down='catbutton_down.png',
        highlight='catbutton_highlight.png')
    while True:  # main game loop
        for event in pygame.event.get():  # event handling loop
            quit_requested = event.type == QUIT or (
                event.type == KEYDOWN and event.key == K_ESCAPE)
            if quit_requested:
                pygame.quit()
                sys.exit()
            if 'click' in cat_button.handleEvent(event):
                # Grow the button by 10px in each dimension per click.
                cat_button.rect = pygame.Rect((cat_button.rect.left,
                                               cat_button.rect.top,
                                               cat_button.rect.width + 10,
                                               cat_button.rect.height + 10))
        surface.fill(WHITE)
        cat_button.draw(surface)
        pygame.display.update()
        clock.tick(FPS)
if __name__ == '__main__':
    # Run the demo only when executed directly as a script.
    main()
|
import SPViewElement from '/js/controls/view.js';
export default class SPConfigViewElement extends SPViewElement {
    /**
     * Lazily builds the view DOM the first time the element is attached;
     * the `created2` flag guards against rebuilding on re-attachment.
     */
    connectedCallback() {
        super.connectedCallback();
        if (this.created2) {
            return;
        }
        this.create();
        this.created2 = true;
    }

    /** Renders the settings form and wires up its event handlers. */
    create() {
        this.classList.add('sp-view');
        this.innerHTML = '<form>' +
            '<h1>' + _('Settings') + '</h1>' +
            '<fieldset><legend>' + _('Appearance') + '</legend><sp-theme></sp-theme></fieldset>' +
            '<button type="submit">Apply</button>' +
            '</form>';

        const form = this.querySelector('form');
        // Apply and persist the selected theme on form submission.
        form.addEventListener('submit', (e) => {
            e.preventDefault();
            GlobalChromeElement.theme = this.querySelector('sp-theme').theme;
            GlobalChromeElement.saveTheme(GlobalChromeElement.theme);
            return false;
        });

        const themePicker = this.querySelector('sp-theme');
        themePicker.theme = GlobalChromeElement.theme;
        // In "instant" mode, persist as soon as the picker value changes …
        themePicker.addEventListener('change', (e) => {
            if (this.getAttribute('instant') == 'true') {
                GlobalChromeElement.saveTheme(GlobalChromeElement.theme);
                return false;
            }
        });
        // … and live-preview hue/saturation while the user drags.
        themePicker.addEventListener('drag', (e) => {
            if (this.getAttribute('instant') == 'true') {
                GlobalChromeElement.hue = this.querySelector('sp-theme').theme.hue;
                GlobalChromeElement.saturation = this.querySelector('sp-theme').theme.saturation;
                return false;
            }
        });
    }

    activate() {
        super.activate();
    }
}
|
import email
import boto3
import logging
from botocore.exceptions import ClientError
logger = logging.getLogger()
def download_email(message_id):
    """
    This method downloads full email MIME content from WorkMailMessageFlow and uses email.parser class
    for parsing it into Python email.message.EmailMessage class.

    Reference:
    https://docs.python.org/3.6/library/email.message.html#email.message.EmailMessage
    https://docs.python.org/3/library/email.parser.html

    Parameters
    ----------
    message_id: string, required
        message_id of the email to download

    Returns
    -------
    email.message.Message
        EmailMessage representation the downloaded email.

    Raises
    ------
    botocore.exceptions.ClientError:
        When email message cannot be downloaded.
    email.errors.MessageParseError
        When email message cannot be parsed.
    """
    workmail_message_flow = boto3.client('workmailmessageflow')
    response = None
    try:
        response = workmail_message_flow.get_raw_message_content(messageId=message_id)
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            # Messages in transit expire after one day; log the likely cause
            # before re-raising.
            logger.error(f"Message {message_id} does not exist. Messages in transit are no longer accessible after 1 day. \
                See: https://docs.aws.amazon.com/workmail/latest/adminguide/lambda-content.html for more details.")
        # NOTE(review): every ClientError is re-raised here (not only the
        # logged ResourceNotFoundException) so the caller sees the failure.
        raise(e)
    email_content = response['messageContent'].read()
    return email.message_from_bytes(email_content)
def extract_email_body(parsed_email):
    """
    Return the decoded "text/plain" body of a parsed email.

    Parameters
    ----------
    parsed_email: email.message.Message, required
        The parsed email as returned by download_email

    Returns
    -------
    str
        The text/plain payload, decoded first per its
        Content-Transfer-Encoding and then per its declared charset.
    None
        When no usable "text/plain" content (with a charset) is present.
    """
    body_bytes = None
    body_charset = None
    if parsed_email.is_multipart():
        # Scan the MIME tree for the first inline text/plain part,
        # skipping attachments.
        for part in parsed_email.walk():
            if part.get_content_type() != 'text/plain':
                continue
            if 'attachment' in str(part.get_content_disposition()):
                continue
            body_bytes = part.get_payload(decode=True)
            body_charset = part.get_content_charset()
            break
    else:
        # Single-part message: the whole payload is the body.
        body_bytes = parsed_email.get_payload(decode=True)
        body_charset = parsed_email.get_content_charset()
    if body_bytes and body_charset:
        return body_bytes.decode(body_charset)
    return None
def search_active_words(subject, active_words):
    """
    Case-insensitively check whether *subject* contains any active word.

    Parameters
    ----------
    subject: str, required
        Email subject line to inspect.
    active_words: str, required
        Comma-delimited list of trigger words; a falsy value disables
        filtering entirely.

    Returns
    -------
    bool
        True when any active word occurs in the subject, or when no
        active words are configured; False otherwise.
    """
    if not active_words:
        # No filter configured: treat every subject as a match.
        return True
    subject_lower = subject.lower()
    # Split on commas, drop empty tokens, normalise the remainder.
    tokens = (token.strip().lower() for token in active_words.split(',') if token)
    return any(token in subject_lower for token in tokens)
|
import torch._C
from contextlib import contextmanager
from typing import Iterator
from torch.utils import set_module
# These are imported so users can access them from the `torch.jit` module
from torch._jit_internal import (
Final,
Future,
_IgnoreContextManager,
_overload,
_overload_method,
ignore,
_isinstance,
is_scripting,
export,
unused,
)
from torch.jit._script import (
script,
_script_pdt,
Attribute,
ScriptModule,
script_method,
RecursiveScriptModule,
ScriptWarning,
interface,
CompilationUnit,
ScriptFunction,
_unwrap_optional,
)
from torch.jit._trace import (
trace,
trace_module,
TracedModule,
TracerWarning,
TracingCheckError,
is_tracing,
ONNXTracedModule,
TopLevelTracedModule,
_unique_state_dict,
_flatten,
_script_if_tracing,
_get_trace_graph,
)
from torch.jit._async import fork, wait
from torch.jit._serialization import save, load
from torch.jit._fuser import optimized_execution, fuser, last_executed_optimized_graph
from torch.jit._freeze import freeze, optimize_for_inference, run_frozen_optimizations
# For backwards compatibility
_fork = fork
_wait = wait


def export_opnames(m):
    r"""
    Generates new bytecode for a Script module and returns what the op list
    would be for a Script Module based off the current code base. If you
    have a LiteScriptModule and want to get the currently present
    list of ops call _export_operator_list instead.
    """
    # Delegate to the C++ binding; ``m._c`` is the underlying C++ ScriptModule.
    return torch._C._export_opnames(m._c)


# torch.jit.Error
Error = torch._C.JITException
set_module(Error, "torch.jit")
# This is not perfect but works in common cases
Error.__name__ = "Error"
Error.__qualname__ = "Error"
# for use in python if using annotate
def annotate(the_type, the_value):
    """
    This method is a pass-through function that returns `the_value`, used to hint TorchScript
    compiler the type of `the_value`. It is a no-op when running outside of TorchScript.

    Though TorchScript can infer correct type for most Python expressions, there are some cases where
    type inference can be wrong, including:

    - Empty containers like `[]` and `{}`, which TorchScript assumes to be container of `Tensor`s
    - Optional types like `Optional[T]` but assigned a valid value of type `T`, TorchScript would assume
      it is type `T` rather than `Optional[T]`

    Note that `annotate()` does not help in `__init__` method of `torch.nn.Module` subclasses because it
    is executed in eager mode. To annotate types of `torch.nn.Module` attributes,
    use :meth:`~torch.jit.Annotate` instead.

    Example:

    .. testcode::

        import torch
        from typing import Dict

        @torch.jit.script
        def fn():
            # Telling TorchScript that this empty dictionary is a (str -> int) dictionary
            # instead of default dictionary type of (str -> Tensor).
            d = torch.jit.annotate(Dict[str, int], {})

            # Without `torch.jit.annotate` above, following statement would fail because of
            # type mismatch.
            d["name"] = 20

    .. testcleanup::

        del fn

    Args:
        the_type: Python type that should be passed to TorchScript compiler as type hint for `the_value`
        the_value: Value or expression to hint type for.

    Returns:
        `the_value` is passed back as return value.
    """
    # Intentionally a no-op at runtime: only the TorchScript compiler
    # interprets the type argument.
    return the_value
def script_if_tracing(fn):
    """
    Compiles ``fn`` when it is first called during tracing. ``torch.jit.script``
    has a non-negligible start up time when it is first called due to
    lazy-initializations of many compiler builtins. Therefore you should not use
    it in library code. However, you may want to have parts of your library work
    in tracing even if they use control flow. In these cases, you should use
    ``@torch.jit.script_if_tracing`` to substitute for
    ``torch.jit.script``.

    Args:
        fn: A function to compile.

    Returns:
        If called during tracing, a :class:`ScriptFunction` created by `torch.jit.script` is returned.
        Otherwise, the original function `fn` is returned.
    """
    # The real implementation lives in torch.jit._trace; this is a thin
    # public alias around it.
    return _script_if_tracing(fn)
# for torch.jit.isinstance
def isinstance(obj, target_type):
    """
    This function provides for container type refinement in TorchScript. It can refine
    parameterized containers of the List, Dict, Tuple, and Optional types. E.g. ``List[str]``,
    ``Dict[str, List[torch.Tensor]]``, ``Optional[Tuple[int,str,int]]``. It can also
    refine basic types such as bools and ints that are available in TorchScript.

    Args:
        obj: object to refine the type of
        target_type: type to try to refine obj to

    Returns:
        ``bool``: True if obj was successfully refined to the type of target_type,
            False otherwise with no new type refinement

    Example (using ``torch.jit.isinstance`` for type refinement):

    .. testcode::

        import torch
        from typing import Any, Dict, List

        class MyModule(torch.nn.Module):
            def __init__(self):
                super(MyModule, self).__init__()

            def forward(self, input: Any): # note the Any type
                if torch.jit.isinstance(input, List[torch.Tensor]):
                    for t in input:
                        y = t.clamp(0, 0.5)
                elif torch.jit.isinstance(input, Dict[str, str]):
                    for val in input.values():
                        print(val)

        m = torch.jit.script(MyModule())
        x = [torch.rand(3,3), torch.rand(4,3)]
        m(x)
        y = {"key1":"val1","key2":"val2"}
        m(y)
    """
    # Delegates to the internal implementation shared with the compiler.
    return _isinstance(obj, target_type)
# Context manager for globally hiding source ranges when printing graphs.
# Note that these functions are exposed to Python as static members of the
# Graph class, so mypy checks need to be skipped.
@contextmanager
def _hide_source_ranges() -> Iterator[None]:
    old_enable_source_ranges = torch._C.Graph.global_print_source_ranges  # type: ignore[attr-defined]
    try:
        torch._C.Graph.set_global_print_source_ranges(False)  # type: ignore[attr-defined]
        yield
    finally:
        # Restore the previous setting even if the body raised.
        torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges)  # type: ignore[attr-defined]


# Fail fast at import time if the JIT C++ backend cannot be initialized.
if not torch._C._jit_init():
    raise RuntimeError("JIT initialization failed")
|
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras
process = cms.Process("L1TStage2DQM", eras.Run2_2018)

#--------------------------------------------------
# Event Source and Condition

# Live Online DQM in P5
process.load("DQM.Integration.config.inputsource_cfi")

# # Testing in lxplus
# process.load("DQM.Integration.config.fileinputsource_cfi")
# process.load("FWCore.MessageLogger.MessageLogger_cfi")
# process.MessageLogger.cerr.FwkReport.reportEvery = 1

# Required to load Global Tag
process.load("DQM.Integration.config.FrontierCondition_GT_cfi")

# # Condition for lxplus: change and possibly customise the GT
# from Configuration.AlCa.GlobalTag import GlobalTag as gtCustomise
# process.GlobalTag = gtCustomise(process.GlobalTag, 'auto:run2_data', '')

# Required to load EcalMappingRecord
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")

#--------------------------------------------------
# DQM Environment

process.load("DQM.Integration.config.environment_cfi")

process.dqmEnv.subSystemFolder = "L1T"
process.dqmSaver.tag = "L1T"

process.DQMStore.referenceFileName = "/dqmdata/dqm/reference/l1t_reference.root"

process.dqmEndPath = cms.EndPath(process.dqmEnv * process.dqmSaver)

#--------------------------------------------------
# Standard Unpacking Path

process.load("Configuration.StandardSequences.RawToDigi_Data_cff")

# remove unneeded unpackers: only L1T-relevant digis are required online
process.RawToDigi.remove(process.ecalPreshowerDigis)
process.RawToDigi.remove(process.muonCSCDigis)
process.RawToDigi.remove(process.muonDTDigis)
process.RawToDigi.remove(process.muonRPCDigis)
process.RawToDigi.remove(process.siPixelDigis)
process.RawToDigi.remove(process.siStripDigis)
process.RawToDigi.remove(process.castorDigis)
process.RawToDigi.remove(process.scalersRawToDigi)
process.RawToDigi.remove(process.tcdsDigis)
process.RawToDigi.remove(process.totemTriggerRawToDigi)
process.RawToDigi.remove(process.totemRPRawToDigi)
process.RawToDigi.remove(process.ctppsDiamondRawToDigi)
process.RawToDigi.remove(process.ctppsPixelDigis)

process.rawToDigiPath = cms.Path(process.RawToDigi)

#--------------------------------------------------
# Stage2 Unpacker and DQM Path

# Filter fat events
from HLTrigger.HLTfilters.hltHighLevel_cfi import hltHighLevel
process.hltFatEventFilter = hltHighLevel.clone()
process.hltFatEventFilter.throw = cms.bool(False)
# HLT_Physics now has the event % 107 filter as well as L1FatEvents
process.hltFatEventFilter.HLTPaths = cms.vstring('HLT_L1FatEvents_v*', 'HLT_Physics_v*')

# This can be used if HLT filter not available in a run
process.selfFatEventFilter = cms.EDFilter("HLTL1NumberFilter",
        invert = cms.bool(False),
        period = cms.uint32(107),
        rawInput = cms.InputTag("rawDataCollector"),
        fedId = cms.int32(1024)
        )
process.load("DQM.L1TMonitor.L1TStage2_cff")

process.l1tMonitorPath = cms.Path(
    process.l1tStage2OnlineDQM +
    process.hltFatEventFilter +
#    process.selfFatEventFilter +
    process.l1tStage2OnlineDQMValidationEvents
)

# Remove DQM Modules
#process.l1tStage2OnlineDQM.remove(process.l1tStage2CaloLayer1)
#process.l1tStage2OnlineDQM.remove(process.l1tStage2CaloLayer2)
#process.l1tStage2OnlineDQM.remove(process.l1tStage2Bmtf)
#process.l1tStage2OnlineDQM.remove(process.l1tStage2Emtf)
#process.l1tStage2OnlineDQM.remove(process.l1tStage2uGMT)
#process.l1tStage2OnlineDQM.remove(process.l1tStage2uGt)

#--------------------------------------------------
# Stage2 Quality Tests

process.load("DQM.L1TMonitorClient.L1TStage2MonitorClient_cff")
process.l1tStage2MonitorClientPath = cms.Path(process.l1tStage2MonitorClient)

#--------------------------------------------------
# Customize for other type of runs

# Cosmic run
if (process.runType.getRunType() == process.runType.cosmic_run):
    process.DQMStore.referenceFileName = "/dqmdata/dqm/reference/l1t_reference_cosmic.root"
    # Remove Quality Tests for L1T Muon Subsystems since they are not optimized yet for cosmics
    process.l1tStage2MonitorClient.remove(process.l1TStage2uGMTQualityTests)
    process.l1tStage2MonitorClient.remove(process.l1TStage2EMTFQualityTests)
    #process.l1tStage2MonitorClient.remove(process.l1TStage2OMTFQualityTests)
    process.l1tStage2MonitorClient.remove(process.l1TStage2BMTFQualityTests)
    process.l1tStage2MonitorClient.remove(process.l1TStage2MuonQualityTestsCollisions)
    process.l1tStage2EventInfoClient.DisableL1Systems = cms.vstring("EMTF", "OMTF", "BMTF", "uGMT")
# Heavy-Ion run
if (process.runType.getRunType() == process.runType.hi_run):
    process.DQMStore.referenceFileName = "/dqmdata/dqm/reference/l1t_reference_hi.root"
    # In heavy-ion running the event content is repacked, so every consumer
    # of raw data must read "rawDataRepacker" instead of the default
    # "rawDataCollector".
    process.onlineMetaDataDigis.onlineMetaDataInputLabel = cms.InputTag("rawDataRepacker")
    process.onlineMetaDataRawToDigi.onlineMetaDataInputLabel = cms.InputTag("rawDataRepacker")
    process.castorDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.ctppsDiamondRawToDigi.rawDataTag = cms.InputTag("rawDataRepacker")
    process.ctppsPixelDigis.inputLabel = cms.InputTag("rawDataRepacker")
    process.ecalDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.ecalPreshowerDigis.sourceTag = cms.InputTag("rawDataRepacker")
    process.hcalDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.muonCSCDigis.InputObjects = cms.InputTag("rawDataRepacker")
    process.muonDTDigis.inputLabel = cms.InputTag("rawDataRepacker")
    process.muonRPCDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.muonGEMDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.scalersRawToDigi.scalersInputTag = cms.InputTag("rawDataRepacker")
    process.siPixelDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.siStripDigis.ProductLabel = cms.InputTag("rawDataRepacker")
    process.tcdsDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.tcdsRawToDigi.InputLabel = cms.InputTag("rawDataRepacker")
    process.totemRPRawToDigi.rawDataTag = cms.InputTag("rawDataRepacker")
    process.totemTriggerRawToDigi.rawDataTag = cms.InputTag("rawDataRepacker")
    process.totemTimingRawToDigi.rawDataTag = cms.InputTag("rawDataRepacker")
    process.csctfDigis.producer = cms.InputTag("rawDataRepacker")
    process.dttfDigis.DTTF_FED_Source = cms.InputTag("rawDataRepacker")
    process.gctDigis.inputLabel = cms.InputTag("rawDataRepacker")
    process.gtDigis.DaqGtInputTag = cms.InputTag("rawDataRepacker")
    process.twinMuxStage2Digis.DTTM7_FED_Source = cms.InputTag("rawDataRepacker")
    process.RPCTwinMuxRawToDigi.inputTag = cms.InputTag("rawDataRepacker")
    process.bmtfDigis.InputLabel = cms.InputTag("rawDataRepacker")
    process.omtfStage2Digis.inputLabel = cms.InputTag("rawDataRepacker")
    process.emtfStage2Digis.InputLabel = cms.InputTag("rawDataRepacker")
    process.gmtStage2Digis.InputLabel = cms.InputTag("rawDataRepacker")
    process.caloLayer1Digis.InputLabel = cms.InputTag("rawDataRepacker")
    process.caloStage1Digis.InputLabel = cms.InputTag("rawDataRepacker")
    process.caloStage2Digis.InputLabel = cms.InputTag("rawDataRepacker")
    process.gtStage2Digis.InputLabel = cms.InputTag("rawDataRepacker")
    process.l1tStage2CaloLayer1.fedRawDataLabel = cms.InputTag("rawDataRepacker")
    process.l1tStage2uGMTZeroSupp.rawData = cms.InputTag("rawDataRepacker")
    process.l1tStage2uGMTZeroSuppFatEvts.rawData = cms.InputTag("rawDataRepacker")
    process.l1tStage2BmtfZeroSupp.rawData = cms.InputTag("rawDataRepacker")
    process.l1tStage2BmtfZeroSuppFatEvts.rawData = cms.InputTag("rawDataRepacker")
    process.selfFatEventFilter.rawInput = cms.InputTag("rawDataRepacker")

#--------------------------------------------------
# L1T Online DQM Schedule

process.schedule = cms.Schedule(
    process.rawToDigiPath,
    process.l1tMonitorPath,
    process.l1tStage2MonitorClientPath,
#    process.l1tMonitorEndPath,
    process.dqmEndPath
)

#--------------------------------------------------
# Process Customizations

from DQM.Integration.config.online_customizations_cfi import *
process = customise(process)
|
/*-
* Copyright (c) 1989, 1993
* The Regents of the University of California. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by the University of
* California, Berkeley and its contributors.
* 4. Neither the name of the University nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#if defined(LIBC_SCCS) && !defined(lint)
static char sccsid[] = "@(#)termios.c 8.2 (Berkeley) 2/21/94";
#endif /* LIBC_SCCS and not lint */
#include <sys/cdefs.h>
__FBSDID("$FreeBSD: src/lib/libc/gen/termios.c,v 1.13 2002/05/28 16:59:39 alfred Exp $");
#include "namespace.h"
#include <sys/types.h>
#include <sys/fcntl.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <errno.h>
#include <termios.h>
#include <unistd.h>
#include "un-namespace.h"
/*
 * tcgetattr(): fetch the current terminal attributes for fd into *t.
 */
int
tcgetattr(fd, t)
	int fd;
	struct termios *t;
{
	return (_ioctl(fd, TIOCGETA, t));
}

/*
 * tcsetattr(): apply terminal attributes.  The BSD-specific TCSASOFT
 * flag requests that hardware state (c_cflag/speeds) be left alone;
 * this is implemented by setting CIGNORE in a private copy of *t so
 * the caller's structure is never modified.
 */
int
tcsetattr(fd, opt, t)
	int fd, opt;
	const struct termios *t;
{
	struct termios localterm;

	if (opt & TCSASOFT) {
		localterm = *t;
		localterm.c_cflag |= CIGNORE;
		t = &localterm;
	}
	switch (opt & ~TCSASOFT) {
	case TCSANOW:
		return (_ioctl(fd, TIOCSETA, t));
	case TCSADRAIN:
		return (_ioctl(fd, TIOCSETAW, t));
	case TCSAFLUSH:
		return (_ioctl(fd, TIOCSETAF, t));
	default:
		errno = EINVAL;
		return (-1);
	}
}

/*
 * tcsetpgrp(): set the terminal's foreground process group.  The pgrp
 * is copied into an int because TIOCSPGRP takes a pointer to int.
 */
int
tcsetpgrp(int fd, pid_t pgrp)
{
	int s;

	s = pgrp;
	return (_ioctl(fd, TIOCSPGRP, &s));
}

/*
 * tcgetpgrp(): return the terminal's foreground process group, or
 * (pid_t)-1 on error.
 */
pid_t
tcgetpgrp(fd)
	int fd;
{
	int s;

	if (_ioctl(fd, TIOCGPGRP, &s) < 0)
		return ((pid_t)-1);
	return ((pid_t)s);
}
/* cfgetospeed(): report the output baud rate stored in *t. */
speed_t
cfgetospeed(t)
	const struct termios *t;
{
	return (t->c_ospeed);
}

/* cfgetispeed(): report the input baud rate stored in *t. */
speed_t
cfgetispeed(t)
	const struct termios *t;
{
	return (t->c_ispeed);
}

/*
 * cfsetospeed(): record the output baud rate in *t.  Only the structure
 * is updated; nothing reaches the terminal until tcsetattr() is called.
 */
int
cfsetospeed(t, speed)
	struct termios *t;
	speed_t speed;
{
	t->c_ospeed = speed;
	return (0);
}

/* cfsetispeed(): record the input baud rate in *t (see cfsetospeed). */
int
cfsetispeed(t, speed)
	struct termios *t;
	speed_t speed;
{
	t->c_ispeed = speed;
	return (0);
}

/* cfsetspeed(): record both input and output baud rates in *t. */
int
cfsetspeed(t, speed)
	struct termios *t;
	speed_t speed;
{
	t->c_ispeed = t->c_ospeed = speed;
	return (0);
}
/*
 * Make a pre-existing termios structure into "raw" mode: character-at-a-time
 * mode with no characters interpreted, 8-bit data path.
 */
void
cfmakeraw(t)
	struct termios *t;
{
	/* No input translation, software flow control or parity/break mangling. */
	t->c_iflag &= ~(IMAXBEL|IXOFF|INPCK|BRKINT|PARMRK|ISTRIP|INLCR|IGNCR|ICRNL|IXON|IGNPAR);
	t->c_iflag |= IGNBRK;
	/* No output post-processing. */
	t->c_oflag &= ~OPOST;
	/* No echo, canonical editing, signal generation or extensions. */
	t->c_lflag &= ~(ECHO|ECHOE|ECHOK|ECHONL|ICANON|ISIG|IEXTEN|NOFLSH|TOSTOP|PENDIN);
	/* 8-bit characters, no parity, receiver enabled. */
	t->c_cflag &= ~(CSIZE|PARENB);
	t->c_cflag |= CS8|CREAD;
	/* read() returns as soon as a single byte is available. */
	t->c_cc[VMIN] = 1;
	t->c_cc[VTIME] = 0;
}
/*
 * tcsendbreak(): assert a break condition on the line for ~0.4 s.
 * The 'len' parameter is required by POSIX but unused here.  _select()
 * with all fd sets empty is used purely as a sub-second sleep.
 */
int
tcsendbreak(fd, len)
	int fd, len;
{
	struct timeval sleepytime;

	sleepytime.tv_sec = 0;
	sleepytime.tv_usec = 400000;
	if (_ioctl(fd, TIOCSBRK, 0) == -1)
		return (-1);
	(void)_select(0, 0, 0, 0, &sleepytime);
	if (_ioctl(fd, TIOCCBRK, 0) == -1)
		return (-1);
	return (0);
}

/*
 * __tcdrain(): block until all queued output has been transmitted.
 * Exported as tcdrain/_tcdrain via weak references so a threaded
 * library can interpose a cancellation-aware wrapper.
 */
int
__tcdrain(fd)
	int fd;
{
	return (_ioctl(fd, TIOCDRAIN, 0));
}

__weak_reference(__tcdrain, tcdrain);
__weak_reference(__tcdrain, _tcdrain);
/*
 * tcflush(): discard untransmitted output and/or unread input.
 * 'which' selects the queue(s) and is mapped onto the FREAD/FWRITE
 * bits expected by the TIOCFLUSH ioctl.
 */
int
tcflush(fd, which)
	int fd, which;
{
	int com;

	switch (which) {
	case TCIFLUSH:
		com = FREAD;
		break;
	case TCOFLUSH:
		com = FWRITE;
		break;
	case TCIOFLUSH:
		com = FREAD | FWRITE;
		break;
	default:
		errno = EINVAL;
		return (-1);
	}
	return (_ioctl(fd, TIOCFLUSH, &com));
}

/*
 * tcflow(): suspend or restart terminal I/O.
 * TCOOFF/TCOON stop and start output directly via ioctl; TCIOFF/TCION
 * write the terminal's configured STOP/START character (if any) so the
 * peer stops or resumes sending.
 */
int
tcflow(fd, action)
	int fd, action;
{
	struct termios term;
	u_char c;

	switch (action) {
	case TCOOFF:
		return (_ioctl(fd, TIOCSTOP, 0));
	case TCOON:
		return (_ioctl(fd, TIOCSTART, 0));
	case TCION:
	case TCIOFF:
		if (tcgetattr(fd, &term) == -1)
			return (-1);
		c = term.c_cc[action == TCIOFF ? VSTOP : VSTART];
		if (c != _POSIX_VDISABLE && _write(fd, &c, sizeof(c)) == -1)
			return (-1);
		return (0);
	default:
		errno = EINVAL;
		return (-1);
	}
	/* NOTREACHED */
}
|
'use strict'
const path = require('path')
const defaultSettings = require('./src/settings.js')

/** Resolve a path relative to this config file's directory. */
function resolve(dir) {
  return path.join(__dirname, dir)
}

const name = defaultSettings.title || '火狐一卡通系统' // page title (with fallback)
const port = process.env.port || process.env.npm_config_port || 80 // dev-server port

// vue.config.js configuration notes
// Official vue.config.js reference: https://cli.vuejs.org/zh/config/#css-loaderoptions
// Only a subset of the options is listed here; see the docs for the rest.
module.exports = {
// 部署生产环境和开发环境下的URL。
// 默认情况下,Vue CLI 会假设你的应用是被部署在一个域名的根路径上
// 例如 https://www.ruoyi.vip/。如果应用被部署在一个子路径上,你就需要用这个选项指定这个子路径。例如,如果你的应用被部署在 https://www.ruoyi.vip/admin/,则设置 baseUrl 为 /admin/。
publicPath: process.env.NODE_ENV === "production" ? "/" : "/",
// 在npm run build 或 yarn build 时 ,生成文件的目录名称(要和baseUrl的生产环境路径一致)(默认dist)
outputDir: 'dist',
// 用于放置生成的静态资源 (js、css、img、fonts) 的;(项目打包之后,静态资源会放在这个文件夹下)
assetsDir: 'static',
// 是否开启eslint保存检测,有效值:ture | false | 'error'
lintOnSave: process.env.NODE_ENV === 'development',
// 如果你不需要生产环境的 source map,可以将其设置为 false 以加速生产环境构建。
productionSourceMap: false,
// webpack-dev-server 相关配置
devServer: {
host: '0.0.0.0',
port: port,
open: true,
proxy: {
// detail: https://cli.vuejs.org/config/#devserver-proxy
[process.env.VUE_APP_BASE_API]: {
target: `http://127.0.0.1:8088`,
changeOrigin: true,
pathRewrite: {
['^' + process.env.VUE_APP_BASE_API]: ''
}
}
},
disableHostCheck: true
},
configureWebpack: {
name: name,
resolve: {
alias: {
'@': resolve('src')
}
}
},
chainWebpack(config) {
config.plugins.delete('preload') // TODO: need test
config.plugins.delete('prefetch') // TODO: need test
// set svg-sprite-loader
config.module
.rule('svg')
.exclude.add(resolve('src/assets/icons'))
.end()
config.module
.rule('icons')
.test(/\.svg$/)
.include.add(resolve('src/assets/icons'))
.end()
.use('svg-sprite-loader')
.loader('svg-sprite-loader')
.options({
symbolId: 'icon-[name]'
})
.end()
config
.when(process.env.NODE_ENV !== 'development',
config => {
config
.plugin('ScriptExtHtmlWebpackPlugin')
.after('html')
.use('script-ext-html-webpack-plugin', [{
// `runtime` must same as runtimeChunk name. default is `runtime`
inline: /runtime\..*\.js$/
}])
.end()
config
.optimization.splitChunks({
chunks: 'all',
cacheGroups: {
libs: {
name: 'chunk-libs',
test: /[\\/]node_modules[\\/]/,
priority: 10,
chunks: 'initial' // only package third parties that are initially dependent
},
elementUI: {
name: 'chunk-elementUI', // split elementUI into a single package
priority: 20, // the weight needs to be larger than libs and app or it will be packaged into libs or app
test: /[\\/]node_modules[\\/]_?element-ui(.*)/ // in order to adapt to cnpm
},
commons: {
name: 'chunk-commons',
test: resolve('src/components'), // can customize your rules
minChunks: 3, // minimum common number
priority: 5,
reuseExistingChunk: true
}
}
})
config.optimization.runtimeChunk('single'),
{
from: path.resolve(__dirname, './public/robots.txt'),//防爬虫文件
to:'./',//到根目录下
}
}
)
}
}
|
"use strict";

// lodash provides the kebab-case conversion used below.
var _ = require("lodash");

/**
 * Convert a camelCase/PascalCase string to kebab-case (a "skewered" string).
 * @param input - the string to convert.
 * @returns the kebab-cased string.
 */
function skewer(input) {
    var output = _.kebabCase(input);
    return output;
}

// Demo usage (the missing "S" in "Javacript" is present in the original input).
var message = skewer("EnableJavacriptIntellisense");
console.log(message);
|