text stringlengths 1 1.05M |
|---|
"use strict";
class Pokemon {
constructor(
pokedex_number,
name,
species,
attack_iv,
defense_iv,
stamina_iv,
current_hp,
max_hp,
iv_percentage,
cp,
candy,
favorite,
family_name,
id,
move_1,
move_2,
caught_time,
level
)
{
this.pokedex_number = pokedex_number;
this.name = name;
this.species = species;
this.attack_iv = attack_iv;
this.defense_iv = defense_iv;
this.stamina_iv = stamina_iv;
this.current_hp = current_hp;
this.max_hp = max_hp;
this.iv_percentage = iv_percentage;
this.cp = cp;
this.candy = candy;
this.favorite = favorite;
this.family_name = family_name;
this.id = id;
this.move_1 = move_1;
this.move_2 = move_2;
this.caught_time = caught_time;
this.level = level;
}
}
module.exports = Pokemon;
|
<reponame>MortenHe/WebsocketVideoPlayer
import { Component, OnInit } from '@angular/core';
import { BackendService } from '../../services/backend.service';
import { CdkDragDrop, moveItemInArray } from '@angular/cdk/drag-drop';
@Component({
selector: 'playlist',
templateUrl: './playlist.component.html',
styleUrls: ['./playlist.component.scss']
})
/**
 * Playlist view: mirrors the playback state pushed by the backend websocket
 * service and lets the user jump to, remove and drag-reorder entries.
 */
export class PlaylistComponent implements OnInit {
// List of files being played
files: any[] = [];
// Total duration of the playlist
filesTotalTime: string = "";
// Current index in the track list
position: number = 0;
// Current playback time of the running item
time: string = "";
// Temporary value: the position currently being jumped to (-1 = none)
jumpPosition: number = -1;
// Inject the backend service
constructor(private bs: BackendService) { }
// On init: wire up all backend subscriptions
ngOnInit() {
// Subscribe to the current playback time and store it
this.bs.getTime().subscribe(time => this.time = time);
// Subscribe to the current file list and store it
this.bs.getFiles().subscribe(files => this.files = files);
// Subscribe to the current index in the track list (drives the CSS class)
this.bs.getPosition().subscribe(position => {
// Short delay so the spinner stays visible
setTimeout(() => {
this.position = position;
// Reset the temporary jump value (used for visual feedback)
this.jumpPosition = -1;
}, 1000);
});
// Subscribe to the total playlist duration
this.bs.getFilesTotalTime().subscribe(filesTotalTime => this.filesTotalTime = filesTotalTime);
}
// Remove a track from the playlist
removeItemFromPlaylist(position: number) {
this.bs.sendMessage({ type: "remove-from-playlist", value: position });
}
// Jump to a given track in the playlist
jumpTo(position: number) {
// Send the command to the websocket server
this.bs.sendMessage({ type: "jump-to", value: position });
// If the target differs from the currently playing track
if (this.position !== position) {
// Show a spinner on that entry until the track has loaded
this.jumpPosition = position;
}
}
// When a drag-sort finishes, inform the server about the new ordering
drop(event: CdkDragDrop<string[]>) {
// Reorder the local data first
moveItemInArray(this.files, event.previousIndex, event.currentIndex);
// Adjust position (= active video) if a video is currently active
if (this.position > -1) {
// The active video itself was moved -> its drop index becomes the new active position
if (event.previousIndex === this.position) {
this.position = event.currentIndex;
}
// A video before the active one was dropped onto or behind it -> active video moves up one slot
else if (event.previousIndex < this.position && event.currentIndex >= this.position) {
this.position--;
}
// A video behind the active one was dropped onto or before it -> active video moves down one slot
else if (event.previousIndex > this.position && event.currentIndex <= this.position) {
this.position++;
}
}
// Inform the server
this.bs.sendMessage({
type: "sort-playlist", value: {
from: event.previousIndex,
to: event.currentIndex
}
});
};
}
<reponame>sajadweb/msm-cli
/**
 * Builds the PHP source code of a Laravel service provider for one
 * microservice.
 *
 * @param {string} SERVICE_DIR root services directory (fUC() is a project
 *   String extension — presumably "first char uppercase"; defined elsewhere)
 * @param {string} service     microservice name
 * @param {string} name        provider class / repository base name
 * @returns {string} complete PHP file contents
 *
 * Fix: the container binding previously registered I${service}Repository /
 * ${service}Repository, which are never imported — the `use` block above it
 * imports the ${name}-based pair, and appendContext.setRepository appends
 * ${name}-based bindings. Bind the ${name} pair so the generated PHP is
 * internally consistent.
 * NOTE(review): the `require_once __DIR__ . "./../Helpers/..."` path mixes
 * "." and ".." — PHP resolves it, but confirm it is intentional.
 */
const useContext = ({ SERVICE_DIR, service, name }) => {
return `<?php
namespace ${SERVICE_DIR.fUC()}\\${service}\\Providers;
#region use
use Illuminate\\Support\\Facades\\Route;
use Illuminate\\Support\\ServiceProvider;
use Services\\${service}\\Repositories\\I${name}Repository;
use Services\\${service}\\Repositories\\${name}Repository;
#endregion
/**
* ${name}
* @author Sajadweb
* ${Date()}
*/
class ${name} extends ServiceProvider
{
/**
* Register any application services.
*
* @return void
*/
public function register()
{
#region Helper
require_once __DIR__ . "./../Helpers/${service}Helper.php";
#endregion
Route::middleware('api')
->prefix('api')
->namespace('${SERVICE_DIR.fUC()}\\${service}\\Controllers')
->group(base_path('${SERVICE_DIR}/${service}/Routes/api.php'));
#region Repository
$this->app->bind(I${name}Repository::class,${name}Repository::class);
#endregion
}
}`;
};
/**
 * Ensures the Providers directory of a microservice exists, then writes a
 * freshly generated <name>Provider.php into it (overwriting — the final
 * `true` flag is passed through to storeg.write).
 */
const setContext = async ({ SERVICE_DIR, storeg, micro, name }) => {
const providerDir = `${SERVICE_DIR}/${micro}/Providers`;
const providerFile = `${providerDir}/${name}Provider.php`;
storeg.directoryUpdateOrNew(providerDir);
await storeg.write(providerFile, useContext({ SERVICE_DIR, service: micro, name }), true);
};
const appendContext = {
// Appends the `use` imports and the container binding for one additional
// repository to an existing <micro>Provider.php, using the literal
// "#region use" / "#region Repository" markers as insertion anchors.
// fUC() is a project String extension (presumably "first char uppercase");
// defined elsewhere — confirm its exact behavior there.
setRepository: async ({ SERVICE_DIR, storeg, micro, name }) => {
const service = micro;
const fileRoute = `${SERVICE_DIR}/${micro}/Providers/${micro}Provider.php`;
// NOTE(review): directoryExists() is called on a *file* path here —
// presumably it answers for plain files too; verify against storeg.
if (storeg.directoryExists(fileRoute)) {
let file = await storeg.readSync(fileRoute);
let text= file.toString();
// Insert the two repository `use` statements right after the marker.
text=text.replace(
"#region use",
`#region use
use Services\\${service.fUC()}\\Repositories\\I${name.fUC()}Repository;
use Services\\${service.fUC()}\\Repositories\\${name.fUC()}Repository;`
)
// Insert the interface-to-implementation binding and write the file back
// (final `true` flag passed through to storeg.write).
await storeg.write(
fileRoute,
text.replace(
"#region Repository",
`#region Repository
$this->app->bind(I${name.fUC()}Repository::class,${name.fUC()}Repository::class);`
),
true
);
}
},
};
// Public API of this provider-template module: the PHP source generator
// plus the two file-writing helpers built on top of it.
module.exports = {
useContext,
setContext,
appendContext,
};
|
import React, {Component} from 'react';
import {Link} from 'react-router-dom';
import Head from "../../components/head";
import SideBar from "../../components/sidebar";
import {api, Axios} from "../../api/api";
import './index.scss'
function throttle (fn, wait, mustRun) {
let timeout;
let startTime = new Date();
return function() {
let context = this;
let args = arguments;
let curTime = new Date();
clearTimeout(timeout);
// 如果达到了规定的触发时间间隔,触发 handler
if (curTime - startTime >= mustRun) {
fn.apply(context, args);
startTime = curTime;
// 没达到触发间隔,重新设定定时器
} else {
timeout = setTimeout(fn, wait);
}
};
};
/**
 * Formats a timestamp according to a pattern containing any of the tokens
 * yyyy, MM, dd, HH, mm, ss (local time). Values below 10 are zero-padded.
 */
function formatTime(time, format) {
const d = new Date(time);
// Zero-pad single-digit values.
const pad = (n) => (n < 10 ? '0' : '') + n;
// Token -> numeric field of the date.
const fields = {
yyyy: d.getFullYear(),
MM: d.getMonth() + 1,
mm: d.getMinutes(),
dd: d.getDate(),
HH: d.getHours(),
ss: d.getSeconds(),
};
// The alternation order (MM before mm) makes the match unambiguous.
return format.replace(/yyyy|MM|dd|HH|mm|ss/g, (token) =>
token in fields ? pad(fields[token]) : ''
);
}
class Index extends Component {
constructor(props) {
super(props)
const menus = [{
value: '全部',
tab: '/topics/all',
}, {
value: '问答',
tab: '/topics/ask',
}, {
value: '分享',
tab: '/topics/share',
}, {
value: '招聘',
tab: '/topics/job',
}, {
value: '精华',
tab: '/topics/good',
}, {
value: '关于',
tab: '/about'
}]
const pathname = props.location.pathname
const menu = menus.find(_ => ~_.tab.indexOf(pathname))
this.state = {
open: false,
menus,
title: menu ? menu.value : '全部',
list: [],
}
this.page = 1
this.LOCK = false
this.reset = this.reset.bind(this)
this.toggle = this.toggle.bind(this)
}
componentWillMount() {
const tab = this.props.match ? this.props.match.params.tab : 'all'
this.getTopics(this.page, tab)
}
componentDidMount() {
window.addEventListener('scroll', throttle(this.handleScroll.bind(this), 300, 1000));
}
componentWillUnmount() {
window.removeEventListener('scroll', this.handleScroll.bind(this));
}
handleScroll(e) {
if (!this.LOCK) {
let totalH = parseInt(document.documentElement.scrollHeight)
let scrollH = parseInt(document.documentElement.scrollTop)
let winH = parseInt(window.innerHeight)
if (scrollH + winH + 200 > totalH) {
this.LOCK = true
this.getTopics(this.page + 1)
}
}
}
getTopics(page, tab) {
Axios.get(api.topics, {params: {page: page, limit: 20, mdrender: true, tab}}).then(res => {
if (res.data.length) {
this.LOCK = false
this.page = page
this.setState((state) => ({
list: state.list.concat(res.data)
}))
} else {
this.LOCK = true
}
}).catch(err => {
this.LOCK = false
})
}
toggle() {
this.setState((state) => ({
open: !state.open
}))
}
shouldComponentUpdate(nextProps, nextState, nextContext) {
if (nextProps.location.pathname !== this.props.location.pathname) {
this.reset()
this.getTopics(this.page, nextProps.match.params.tab)
}
return true
}
changeTitle(title) {
if (title === this.state.title) {
this.toggle()
} else {
this.setState({
open: false,
title: title
})
}
}
reset() {
this.setState({
list: []
})
this.page = 1
this.LOCK = false
}
render() {
const list = this.state.list
const topics = list.map((topic) => (
<li key={topic.id + Math.random()}>
<Link to={`/topic/${topic.id}`}>
<div className="topic-title">{topic.title}</div>
<div className="topic-content">
<div className="topic-avatar">
<img src={topic.author.avatar_url} alt="" className="avatar"/>
</div>
<div className="topic-info">
<p>
<span className="topic-author">{topic.author.loginname}</span>
<span className="topic-total"><b>{topic.reply_count}</b>/{topic.visit_count}</span>
</p>
<p>
<span className="topic-ctime">Post:{formatTime(topic.create_at, 'yyyy.MM.dd')}</span>
<span className="topic-rtime">Reply:{formatTime(topic.last_reply_at, 'yyyy.MM.dd')}</span>
</p>
</div>
</div>
</Link>
</li>
))
return (
<div className="page-topics">
<Head open={this.state.open} title={this.state.title} onClick={this.toggle}/>
<SideBar open={this.state.open} menus={this.state.menus} onChange={this.changeTitle.bind(this)} onClose={this.toggle}/>
<div className="page-cont">
<ul className="topic-list">
{topics}
</ul>
</div>
</div>
);
}
}
export default Index;
|
<gh_stars>1000+
package cmd
import (
"github.com/stretchr/testify/require"
"testing"
)
// Test_getReplaceSecretCommand verifies that the generated `kubectl create
// secret ... | kubectl replace` command contains the placeholder variables
// when no credentials are provided, and the literal values when they are.
func Test_getReplaceSecretCommand(t *testing.T) {
type args struct {
cmdParams configureBridgeCmdParams
}
testCases := []struct {
name string
args args
want string
}{
{
name: "print output with placeholders",
args: args{
cmdParams: configureBridgeCmdParams{},
},
want: "kubectl create secret -n keptn generic bridge-credentials --from-literal=\"BASIC_AUTH_USERNAME=${BRIDGE_USER}\" --from-literal=\"BASIC_AUTH_PASSWORD=${BRIDGE_PASSWORD}\" -oyaml --dry-run=client | kubectl replace -f -\n",
},
{
name: "print output with provided values",
args: args{
cmdParams: configureBridgeCmdParams{
User: stringp("my-user"),
Password: stringp("<PASSWORD>"),
},
},
want: "kubectl create secret -n keptn generic bridge-credentials --from-literal=\"BASIC_AUTH_USERNAME=my-user\" --from-literal=\"BASIC_AUTH_PASSWORD=<PASSWORD>\" -oyaml --dry-run=client | kubectl replace -f -\n",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
require.Contains(t, getReplaceSecretCommand(tc.args.cmdParams), tc.want)
})
}
}
|
// Id of the box row being edited; 0 means "create a new record".
let select_id = 0
// Submit handler of the box form: builds the payload from the form fields
// and POSTs it to the backend (insert, or update when select_id is set).
$('#box_add_form').submit( ( event ) => {
event.preventDefault();
// Collect the form fields into the shape the backend expects.
const obj = {
b_customer_id: $("#cname").val(),
b_product_id: $("#pname").val(),
b_count: $("#count").val(),
b_bno: $("#bNo").val(),
}
// A non-zero select_id turns the insert into an update.
if ( select_id != 0 ) {
obj["b_id"] = select_id;
}
$.ajax({
url: './box-post',
type: 'POST',
data: { obj: JSON.stringify(obj) },
dataType: 'JSON',
success: function (data) {
// Backend returns a positive number on success.
if ( data > 0 ) {
alert("İşlem Başarılı")
allBox(selectedCustomer);
}else {
alert("İşlem sırasında hata oluştu!");
}
},
error: function (err) {
console.log(err)
alert("Ekleme işlemi sırısında bir hata oluştu!");
}
})
})
// all cusomer list - start
// Fetches every box row for the given customer id and renders the table.
function allBox(cid) {
$.ajax({
url: './box-get?cid=' + cid,
type: 'GET',
dataType: 'Json',
success: (data) => createRow(data),
error: (err) => console.log(err)
})
}
// Most recently rendered rows, kept for later lookups by other handlers.
let globalArr = []
// Renders one <tr> per box record into #tableRow; each row carries a
// delete button wired to fncBoxDelete with the row's id.
function createRow( data ) {
globalArr = data;
let html = ``
for (let i = 0; i < data.length; i++) {
const itm = data[i];
html += `<tr role="row" class="odd">
<td>`+itm.b_id+`</td>
<td>`+itm.cu_name+" "+itm.cu_surname+`</td>
<td>`+itm.p_title+`</td>
<td>`+itm.p_sales_price+`</td>
<td>`+itm.b_count+`</td>
<td>`+itm.b_bno+`</td>
<td class="text-right" >
<div class="btn-group" role="group" aria-label="Basic mixed styles example">
<button onclick="fncBoxDelete(`+itm.b_id+`)" type="button" class="btn btn-outline-primary "><i class="far fa-trash-alt"></i></button>
</div>
</td>
</tr>`;
}
$('#tableRow').html(html);
}
// Derives a pseudo-unique code from the current epoch milliseconds
// (dropping the first four digits) and writes the same code into the
// customer, product and box-number fields.
function codeGenerator() {
const key = Date.now().toString().substring(4);
$('#ccode').val( key )
$('#pcode').val( key )
$('#bNo').val(key)
}
// NOTE(review): called without a customer id, so the initial request becomes
// './box-get?cid=undefined' — presumably this should be
// allBox(selectedCustomer) (declared further down); confirm how the backend
// treats the literal string "undefined".
allBox();
// box delete - start
// Asks the user for confirmation, then deletes the box row with the given
// id and refreshes the table for the currently selected customer.
function fncBoxDelete(b_id){
if ( !confirm("Silmek istediğinizden emin misniz?") ) {
return;
}
$.ajax({
url: './box-delete?b_id=' + b_id,
type: 'DELETE',
dataType: 'text',
success: function (data) {
// Backend answers "0" on failure, anything else on success.
if ( data != "0" ) {
allBox(selectedCustomer);
}else {
alert("Silme sırasında bir hata oluştu!");
}
},
error: function (err) {
console.log(err)
}
})
}
// box delete - end
// Currently selected customer id (0 until a customer is chosen).
let selectedCustomer = 0;
// Refresh the box table whenever the customer dropdown changes.
$("#cname").on("change",function (){
selectedCustomer = (this.value)
allBox(this.value)
})
<filename>dist/index.cjs.js
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var React = require('react');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var React__default = /*#__PURE__*/_interopDefaultLegacy(React);
function ownKeys(object, enumerableOnly) {
var keys = Object.keys(object);
if (Object.getOwnPropertySymbols) {
var symbols = Object.getOwnPropertySymbols(object);
enumerableOnly && (symbols = symbols.filter(function (sym) {
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
})), keys.push.apply(keys, symbols);
}
return keys;
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = null != arguments[i] ? arguments[i] : {};
i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
_defineProperty(target, key, source[key]);
}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
return target;
}
// Babel runtime helper (generated code): throws when a transpiled class
// constructor is invoked without `new`.
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
}
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
Object.defineProperty(Constructor, "prototype", {
writable: false
});
return Constructor;
}
// Babel runtime helper (generated code): assigns `key` on `obj`. When the
// key already exists (own or inherited) it is redefined via
// Object.defineProperty as enumerable/configurable/writable; otherwise a
// plain assignment is used.
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function _extends() {
_extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
return _extends.apply(this, arguments);
}
function _inherits(subClass, superClass) {
if (typeof superClass !== "function" && superClass !== null) {
throw new TypeError("Super expression must either be null or a function");
}
subClass.prototype = Object.create(superClass && superClass.prototype, {
constructor: {
value: subClass,
writable: true,
configurable: true
}
});
Object.defineProperty(subClass, "prototype", {
writable: false
});
if (superClass) _setPrototypeOf(subClass, superClass);
}
function _getPrototypeOf(o) {
_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
return o.__proto__ || Object.getPrototypeOf(o);
};
return _getPrototypeOf(o);
}
function _setPrototypeOf(o, p) {
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
o.__proto__ = p;
return o;
};
return _setPrototypeOf(o, p);
}
function _isNativeReflectConstruct() {
if (typeof Reflect === "undefined" || !Reflect.construct) return false;
if (Reflect.construct.sham) return false;
if (typeof Proxy === "function") return true;
try {
Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
return true;
} catch (e) {
return false;
}
}
function _construct(Parent, args, Class) {
if (_isNativeReflectConstruct()) {
_construct = Reflect.construct;
} else {
_construct = function _construct(Parent, args, Class) {
var a = [null];
a.push.apply(a, args);
var Constructor = Function.bind.apply(Parent, a);
var instance = new Constructor();
if (Class) _setPrototypeOf(instance, Class.prototype);
return instance;
};
}
return _construct.apply(null, arguments);
}
function _isNativeFunction(fn) {
return Function.toString.call(fn).indexOf("[native code]") !== -1;
}
function _wrapNativeSuper(Class) {
var _cache = typeof Map === "function" ? new Map() : undefined;
_wrapNativeSuper = function _wrapNativeSuper(Class) {
if (Class === null || !_isNativeFunction(Class)) return Class;
if (typeof Class !== "function") {
throw new TypeError("Super expression must either be null or a function");
}
if (typeof _cache !== "undefined") {
if (_cache.has(Class)) return _cache.get(Class);
_cache.set(Class, Wrapper);
}
function Wrapper() {
return _construct(Class, arguments, _getPrototypeOf(this).constructor);
}
Wrapper.prototype = Object.create(Class.prototype, {
constructor: {
value: Wrapper,
enumerable: false,
writable: true,
configurable: true
}
});
return _setPrototypeOf(Wrapper, Class);
};
return _wrapNativeSuper(Class);
}
function _objectWithoutPropertiesLoose(source, excluded) {
if (source == null) return {};
var target = {};
var sourceKeys = Object.keys(source);
var key, i;
for (i = 0; i < sourceKeys.length; i++) {
key = sourceKeys[i];
if (excluded.indexOf(key) >= 0) continue;
target[key] = source[key];
}
return target;
}
function _objectWithoutProperties(source, excluded) {
if (source == null) return {};
var target = _objectWithoutPropertiesLoose(source, excluded);
var key, i;
if (Object.getOwnPropertySymbols) {
var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
for (i = 0; i < sourceSymbolKeys.length; i++) {
key = sourceSymbolKeys[i];
if (excluded.indexOf(key) >= 0) continue;
if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
target[key] = source[key];
}
}
return target;
}
function _assertThisInitialized(self) {
if (self === void 0) {
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
return self;
}
function _possibleConstructorReturn(self, call) {
if (call && (typeof call === "object" || typeof call === "function")) {
return call;
} else if (call !== void 0) {
throw new TypeError("Derived constructors may only return object or undefined");
}
return _assertThisInitialized(self);
}
function _createSuper(Derived) {
var hasNativeReflectConstruct = _isNativeReflectConstruct();
return function _createSuperInternal() {
var Super = _getPrototypeOf(Derived),
result;
if (hasNativeReflectConstruct) {
var NewTarget = _getPrototypeOf(this).constructor;
result = Reflect.construct(Super, arguments, NewTarget);
} else {
result = Super.apply(this, arguments);
}
return _possibleConstructorReturn(this, result);
};
}
function _slicedToArray(arr, i) {
return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest();
}
function _toConsumableArray(arr) {
return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread();
}
function _arrayWithoutHoles(arr) {
if (Array.isArray(arr)) return _arrayLikeToArray(arr);
}
function _arrayWithHoles(arr) {
if (Array.isArray(arr)) return arr;
}
function _iterableToArray(iter) {
if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter);
}
function _iterableToArrayLimit(arr, i) {
var _i = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"];
if (_i == null) return;
var _arr = [];
var _n = true;
var _d = false;
var _s, _e;
try {
for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) {
_arr.push(_s.value);
if (i && _arr.length === i) break;
}
} catch (err) {
_d = true;
_e = err;
} finally {
try {
if (!_n && _i["return"] != null) _i["return"]();
} finally {
if (_d) throw _e;
}
}
return _arr;
}
function _unsupportedIterableToArray(o, minLen) {
if (!o) return;
if (typeof o === "string") return _arrayLikeToArray(o, minLen);
var n = Object.prototype.toString.call(o).slice(8, -1);
if (n === "Object" && o.constructor) n = o.constructor.name;
if (n === "Map" || n === "Set") return Array.from(o);
if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
}
function _arrayLikeToArray(arr, len) {
if (len == null || len > arr.length) len = arr.length;
for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
return arr2;
}
function _nonIterableSpread() {
throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
function _nonIterableRest() {
throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
function _createForOfIteratorHelper(o, allowArrayLike) {
var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"];
if (!it) {
if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") {
if (it) o = it;
var i = 0;
var F = function () {};
return {
s: F,
n: function () {
if (i >= o.length) return {
done: true
};
return {
done: false,
value: o[i++]
};
},
e: function (e) {
throw e;
},
f: F
};
}
throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
var normalCompletion = true,
didErr = false,
err;
return {
s: function () {
it = it.call(o);
},
n: function () {
var step = it.next();
normalCompletion = step.done;
return step;
},
e: function (e) {
didErr = true;
err = e;
},
f: function () {
try {
if (!normalCompletion && it.return != null) it.return();
} finally {
if (didErr) throw err;
}
}
};
}
var StageContext = /*#__PURE__*/React__default["default"].createContext(null);
// requestAnimationFrame-based throttle: the first call schedules `fn` with
// that call's arguments for the next animation frame; further calls made
// while a frame is pending are dropped entirely. (`tick = true` runs
// unconditionally, which is a no-op on the dropped calls.)
var throttle = function throttle(fn) {
var tick = false;
return function () {
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
if (!tick) {
requestAnimationFrame(function () {
fn.apply(void 0, args);
tick = false;
});
}
tick = true;
};
};
// Scale strategies for Stage: FIT picks the smaller of the two per-axis
// scales (letterbox), COVER the larger (crop).
var ScaleMode = {
SCALE_TO_FIT: Math.min,
SCALE_TO_COVER: Math.max
};
function Stage(_ref) {
var scaleMode = _ref.scaleMode,
_ref$width = _ref.width,
width = _ref$width === void 0 ? 300 : _ref$width,
_ref$height = _ref.height,
height = _ref$height === void 0 ? 300 : _ref$height,
_ref$backgroundColor = _ref.backgroundColor,
backgroundColor = _ref$backgroundColor === void 0 ? 'transparent' : _ref$backgroundColor,
children = _ref.children;
var stageElement = React.useRef(null);
var _useState = React.useState(1),
_useState2 = _slicedToArray(_useState, 2),
scale = _useState2[0],
setScale = _useState2[1];
React.useEffect(function () {
if (typeof scaleMode !== 'function') {
return;
}
var onWindowResize = throttle(function () {
var scaleX = window.innerWidth / width;
var scaleY = window.innerHeight / height;
setScale(scaleMode(scaleX, scaleY));
});
window.addEventListener('resize', onWindowResize);
onWindowResize();
return function () {
window.removeEventListener('resize', onWindowResize);
};
}, [scaleMode, width, height]);
return /*#__PURE__*/React__default["default"].createElement(StageContext.Provider, {
value: {
width: width,
height: height,
scale: scale
}
}, /*#__PURE__*/React__default["default"].createElement("div", {
style: {
width: "".concat(width * scale, "px"),
height: "".concat(height * scale, "px"),
backgroundColor: backgroundColor,
position: 'relative',
overflow: 'hidden'
}
}, /*#__PURE__*/React__default["default"].createElement("div", {
ref: stageElement,
style: {
width: "".concat(width, "px"),
height: "".concat(height, "px"),
transformOrigin: '0 0',
transform: "scale(".concat(scale, ")")
}
}, children)));
}
// Defines a custom element unless one with that name is already registered
// (re-defining the same name would throw).
var registerCustomElement = function registerCustomElement(name, constructor) {
customElements.get(name) || customElements.define(name, constructor);
};
// Converts a client-space pointer position (event.clientX/clientY) into the
// stage's local, unscaled coordinate system relative to `rect` (expected to
// expose x/y, e.g. a DOMRect from getBoundingClientRect).
var localCoordinatesFromEvent = function localCoordinatesFromEvent(rect, event, scale) {
return {
x: (event.clientX - rect.x) / scale,
y: (event.clientY - rect.y) / scale
};
};
// Creates a DOM element of `type` and copies each entry of `props` directly
// onto the element object (property assignment, not setAttribute).
var createElement = function createElement(type) {
var props = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var element = document.createElement(type);
for (var _i = 0, _Object$keys = Object.keys(props); _i < _Object$keys.length; _i++) {
var key = _Object$keys[_i];
element[key] = props[key];
}
return element;
};
var eventInit = function eventInit(sourceEvent) {
var bubbles = sourceEvent.bubbles,
cancelable = sourceEvent.cancelable,
composed = sourceEvent.composed;
return {
bubbles: bubbles,
cancelable: cancelable,
composed: composed
};
};
var uIEventInit = function uIEventInit(sourceEvent) {
var detail = sourceEvent.detail,
view = sourceEvent.view,
sourceCapabilities = sourceEvent.sourceCapabilities;
return _objectSpread2(_objectSpread2({}, eventInit(sourceEvent)), {}, {
detail: detail,
view: view,
sourceCapabilities: sourceCapabilities
});
};
var mouseEventInit = function mouseEventInit(sourceEvent) {
var screenX = sourceEvent.screenX,
screenY = sourceEvent.screenY,
clientX = sourceEvent.clientX,
clientY = sourceEvent.clientY,
ctrlKey = sourceEvent.ctrlKey,
shiftKey = sourceEvent.shiftKey,
altKey = sourceEvent.altKey,
metaKey = sourceEvent.metaKey,
button = sourceEvent.button,
buttons = sourceEvent.buttons,
relatedTarget = sourceEvent.relatedTarget,
region = sourceEvent.region;
return _objectSpread2(_objectSpread2({}, uIEventInit(sourceEvent)), {}, {
screenX: screenX,
screenY: screenY,
clientX: clientX,
clientY: clientY,
ctrlKey: ctrlKey,
shiftKey: shiftKey,
altKey: altKey,
metaKey: metaKey,
button: button,
buttons: buttons,
relatedTarget: relatedTarget,
region: region
});
};
var touchEventInit = function touchEventInit(sourceEvent) {
var touches = sourceEvent.touches,
targetTouches = sourceEvent.targetTouches,
changedTouches = sourceEvent.changedTouches,
ctrlKey = sourceEvent.ctrlKey,
shiftKey = sourceEvent.shiftKey,
altKey = sourceEvent.altKey,
metaKey = sourceEvent.metaKey;
return _objectSpread2(_objectSpread2({}, uIEventInit(sourceEvent)), {}, {
touches: touches,
targetTouches: targetTouches,
changedTouches: changedTouches,
ctrlKey: ctrlKey,
shiftKey: shiftKey,
altKey: altKey,
metaKey: metaKey
});
};
var touchInit = function touchInit(sourceTouch) {
var identifier = sourceTouch.identifier,
target = sourceTouch.target,
clientX = sourceTouch.clientX,
clientY = sourceTouch.clientY,
screenX = sourceTouch.screenX,
screenY = sourceTouch.screenY,
pageX = sourceTouch.pageX,
pageY = sourceTouch.pageY,
radiusX = sourceTouch.radiusX,
radiusY = sourceTouch.radiusY,
rotationAngle = sourceTouch.rotationAngle,
force = sourceTouch.force;
return {
identifier: identifier,
target: target,
clientX: clientX,
clientY: clientY,
screenX: screenX,
screenY: screenY,
pageX: pageX,
pageY: pageY,
radiusX: radiusX,
radiusY: radiusY,
rotationAngle: rotationAngle,
force: force
};
};
// Generates a sequence of distinct "rgb(r,g,b)" strings by stepping the red
// channel in `step`-sized increments and carrying over into green and blue.
// Used to assign each hit-test target a unique color on the hit canvas.
var ColorIncrementer = /*#__PURE__*/function () {
function ColorIncrementer() {
// Channel increment; defaults to 4 when omitted or undefined.
var step = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 4;
_classCallCheck(this, ColorIncrementer);
this.step = step;
this.rgb = [0, 0, 0];
}
_createClass(ColorIncrementer, [{
key: "reset",
value: function reset() {
// Restart the sequence from black.
this.rgb = [0, 0, 0];
}
}, {
key: "next",
value: function next() {
// Increment the lowest channel that still has headroom; channels that
// would overflow are zeroed and the carry moves to the next channel.
// Once blue would also overflow the sequence is exhausted.
for (var index = 0; index < 3; index++) {
if (this.rgb[index] + this.step < 256) {
this.rgb[index] += this.step;
return "rgb(".concat(this.rgb.join(','), ")");
} else if (index < 2) {
this.rgb[index] = 0;
}
}
throw new Error('Color incrementer overflow');
}
}]);
return ColorIncrementer;
}();
var _excluded$8 = ["children"];
var colorIncrementer = new ColorIncrementer();
var hitElementMap = new Map();
var notNullFilter = function notNullFilter(item) {
return item !== null;
};
function Layer(_ref) {
var children = _ref.children,
rest = _objectWithoutProperties(_ref, _excluded$8);
var _useContext = React.useContext(StageContext),
scale = _useContext.scale,
width = _useContext.width,
height = _useContext.height;
var hoveredElement = React.useRef(null);
var touchEntities = React.useRef({});
var canvasElement = React.useRef(null);
var hitCanvasElement = React.useRef(createElement('canvas', {
width: width,
height: height
}));
var lastSibling = React.useRef(null);
React.useEffect(function () {
lastSibling.current = canvasElement.current.parentNode.querySelector('canvas:last-of-type');
}, [canvasElement]);
var drawChildren = React.useCallback(function (ctx, children) {
var offset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {
x: 0,
y: 0,
opacity: 1,
rotation: 0,
scaleX: 1,
scaleY: 1
};
Array.from(children).sort(function (a, b) {
return a.zIndex - b.zIndex;
}).forEach(function (child) {
child.draw(ctx, offset); // Assume all children in top most layer might have mouse / touch event handlers
if (lastSibling.current && lastSibling.current === canvasElement.current) {
var _ctx = hitCanvasElement.current.getContext('2d');
var color = colorIncrementer.next();
hitElementMap.set(color, child);
child.drawHitArea(_ctx, offset, color);
}
if (child.children.length > 0) {
drawChildren(ctx, child.children, {
x: child.x + offset.x,
y: child.y + offset.y,
opacity: child.opacity * offset.opacity,
rotation: child.rotation + offset.rotation,
scaleX: child.scaleX * offset.scaleX,
scaleY: child.scaleY * offset.scaleY
});
}
});
}, [lastSibling]);
React.useEffect(function () {
var canvas = canvasElement.current;
var ctx = canvas.getContext('2d');
var hitCanvas = hitCanvasElement.current;
var hitCtx = hitCanvas.getContext('2d');
var onUpdate = throttle(function (event) {
ctx.clearRect(0, 0, canvas.width, canvas.height);
hitCtx.clearRect(0, 0, hitCanvas.width, hitCanvas.height);
colorIncrementer.reset();
drawChildren(ctx, canvasElement.current.children);
});
requestAnimationFrame(onUpdate);
canvas.addEventListener('attributeChange', onUpdate);
canvas.addEventListener('connect', onUpdate);
canvas.addEventListener('disconnect', onUpdate);
canvas.addEventListener('load', onUpdate);
return function () {
canvas.removeEventListener('attributeChange', onUpdate);
canvas.removeEventListener('connect', onUpdate);
canvas.removeEventListener('disconnect', onUpdate);
canvas.removeEventListener('load', onUpdate);
};
}, [drawChildren]);
var getEventTargetAt = function getEventTargetAt(point) {
var ctx = hitCanvasElement.current.getContext('2d');
var pixel = ctx.getImageData(point.x, point.y, 1, 1).data;
var color = "rgb(".concat(pixel[0], ",").concat(pixel[1], ",").concat(pixel[2], ")");
return hitElementMap.get(color);
};
// Translates a native touchstart on the canvas into synthetic 'touchstart'
// events dispatched to the child shape under each new touch point. Touches
// that do not land on a shape are dropped.
var onTouchStart = function onTouchStart(event) {
// Only handle events originating on this Layer's own canvas.
if (event.target !== canvasElement.current) {
return;
}
var rect = event.target.getBoundingClientRect();
var changedTouches = Array.from(event.changedTouches).map(function (changedTouch) {
// Convert viewport coordinates to canvas-local coordinates, then resolve
// the shape under the touch via the hit canvas.
var point = localCoordinatesFromEvent(rect, changedTouch, scale);
var target = getEventTargetAt(point);
if (!target) {
return null;
}
// Build a synthetic Touch retargeted at the shape, with local coordinates.
var touch = new Touch(_objectSpread2(_objectSpread2({}, touchInit(changedTouch)), {}, {
target: target,
clientX: point.x,
clientY: point.y
}));
// Remember it by identifier so later touchmove/touchend events keep
// targeting the same shape (see onTouchEvent).
touchEntities.current[touch.identifier] = touch;
return touch;
}).filter(notNullFilter);
changedTouches.forEach(function (changedTouch) {
// touches = all currently tracked synthetic touches;
// targetTouches = the subset on this particular shape.
changedTouch.target.dispatchEvent(new TouchEvent('touchstart', _objectSpread2(_objectSpread2({}, touchEventInit(event)), {}, {
touches: Object.values(touchEntities.current),
targetTouches: Object.values(touchEntities.current).filter(function (targetTouch) {
return targetTouch.target === changedTouch.target;
}),
changedTouches: changedTouches
})));
});
};
// Translates native touchmove/touchend/touchcancel events on the canvas into
// synthetic events dispatched to the shapes the touches originally started on
// (tracked in touchEntities by touch identifier). Ended/cancelled touches are
// forgotten afterwards.
var onTouchEvent = function onTouchEvent(event) {
// Only handle events originating on this Layer's own canvas.
if (event.target !== canvasElement.current) {
return;
}
var rect = event.target.getBoundingClientRect();
// Rebuilds a synthetic Touch with canvas-local coordinates, retargeted at the
// shape the touch started on; returns null for untracked touches.
var touchMapper = function touchMapper(touch) {
if (!touchEntities.current[touch.identifier]) {
return null;
}
var point = localCoordinatesFromEvent(rect, touch, scale);
return new Touch(_objectSpread2(_objectSpread2({}, touchInit(touch)), {}, {
target: touchEntities.current[touch.identifier].target,
clientX: point.x,
clientY: point.y
}));
};
var touches = Array.from(event.touches).map(touchMapper).filter(notNullFilter);
var changedTouches = Array.from(event.changedTouches).map(touchMapper).filter(notNullFilter);
// This will fire duplicate touchmove events if there are multiple touches,
// but hard to avoid...
var touchTargets = event.type === 'touchmove' ? touches : changedTouches;
touchTargets.forEach(function (touch) {
touch.target.dispatchEvent(new TouchEvent(event.type, _objectSpread2(_objectSpread2({}, touchEventInit(event)), {}, {
touches: touches,
// BUG FIX: targetTouches used to be filtered against event.target (the
// canvas), but synthetic touches always target child shapes, so the list
// was always empty. Filter against the shape being dispatched to instead,
// matching onTouchStart's behaviour.
targetTouches: touches.filter(function (targetTouch) {
return targetTouch.target === touch.target;
}),
changedTouches: changedTouches
})));
});
// A finished gesture releases its tracked touches.
if (event.type === 'touchend' || event.type === 'touchcancel') {
var _iterator = _createForOfIteratorHelper(event.changedTouches),
_step;
try {
for (_iterator.s(); !(_step = _iterator.n()).done;) {
var touch = _step.value;
delete touchEntities.current[touch.identifier];
}
} catch (err) {
_iterator.e(err);
} finally {
_iterator.f();
}
}
};
// Re-dispatches mouse events on the canvas in two directions: to the Layer's
// own onXxx props (matched case-insensitively against the event type), and to
// the child shape under the pointer (resolved via the hit canvas). Also
// synthesizes 'mouseover'/'mouseout' on shapes as the hovered shape changes.
var onMouseEvent = function onMouseEvent(event) {
// Only handle events originating on this Layer's own canvas.
if (event.target !== canvasElement.current) {
return;
}
var rect = event.target.getBoundingClientRect();
var point = localCoordinatesFromEvent(rect, event, scale);
var childTarget = getEventTargetAt(point);
var eventInit = _objectSpread2(_objectSpread2({}, mouseEventInit(event)), {}, {
clientX: point.x,
clientY: point.y
}); // Handle mouse event for Layer component by calling corresponding passed
// event handler
Object.keys(rest).forEach(function (key) {
// e.g. prop "onMouseMove" matches event.type "mousemove".
if (key.toLowerCase() === "on".concat(event.type)) {
rest[key](new MouseEvent(event.type, eventInit));
}
}); // Handle mouse events for child components
if (childTarget) {
childTarget.dispatchEvent(new MouseEvent(event.type, _objectSpread2(_objectSpread2({}, mouseEventInit(event)), {}, {
clientX: point.x,
clientY: point.y
})));
if (event.type === 'mousemove') {
// Pointer moved off the previously hovered shape onto another one.
if (hoveredElement.current && childTarget !== hoveredElement.current) {
hoveredElement.current.dispatchEvent(new MouseEvent('mouseout', eventInit));
}
// Pointer entered a new shape.
if (hoveredElement.current !== childTarget) {
hoveredElement.current = childTarget;
hoveredElement.current.dispatchEvent(new MouseEvent('mouseover', eventInit));
}
}
} else if (event.type === 'mouseout' && hoveredElement.current) {
// Pointer left the canvas entirely while a shape was hovered.
hoveredElement.current.dispatchEvent(new MouseEvent('mouseout', eventInit));
hoveredElement.current = null;
}
};
// The visible canvas element. All mouse and touch handlers are attached here
// and re-dispatched to the shape elements found via the hit canvas. `children`
// (the <canvas-*> custom elements) are rendered inside the canvas so they form
// the DOM tree that the redraw effect walks via canvasElement.current.children.
return /*#__PURE__*/React__default["default"].createElement("canvas", {
style: {
position: 'absolute'
},
width: width,
height: height,
ref: canvasElement,
onClick: onMouseEvent,
onMouseMove: onMouseEvent,
onMouseDown: onMouseEvent,
onMouseUp: onMouseEvent,
onDoubleClick: onMouseEvent,
onContextMenu: onMouseEvent,
onMouseOut: onMouseEvent,
onMouseOver: onMouseEvent,
onTouchStart: onTouchStart,
onTouchMove: onTouchEvent,
onTouchEnd: onTouchEvent,
onTouchCancel: onTouchEvent
}, children);
}
// Base custom element for all canvas shapes. State lives in HTML attributes
// (read back via the typed accessors below); drawing is done by pushing
// pipeline stage functions onto `this.pipeline` and running drawPipeline.
// Lifecycle and attribute changes are forwarded to the nearest <canvas>
// ancestor as bubbling custom events so the Layer can schedule a repaint.
var AbstractShape = /*#__PURE__*/function (_HTMLElement) {
_inherits(AbstractShape, _HTMLElement);
var _super = _createSuper(AbstractShape);
function AbstractShape() {
var _this;
_classCallCheck(this, AbstractShape);
_this = _super.call(this);
// Default drawing offset inherited by children (note: no scale fields here;
// scale defaults are supplied by callers such as drawChildren).
_this.offset = {
x: 0,
y: 0,
rotation: 0,
opacity: 1
};
// Queue of (ctx, offset) => boolean stage functions; see drawPipeline.
_this.pipeline = [];
// Set on connect to the closest <canvas> ancestor; null when detached.
_this.canvasElement = null;
return _this;
}
_createClass(AbstractShape, [{
// Forwards any observed attribute change to the canvas as a bubbling
// 'attributeChange' event so the Layer repaints.
key: "attributeChangedCallback",
value: function attributeChangedCallback(name, oldValue, newValue) {
if (this.canvasElement) {
var customEvent = new CustomEvent('attributeChange', {
bubbles: true,
detail: {
name: name,
oldValue: oldValue,
newValue: newValue
}
});
this.canvasElement.dispatchEvent(customEvent);
}
}
}, {
// On insertion into the DOM: remember the owning canvas and announce
// ourselves with a bubbling 'connect' event.
key: "connectedCallback",
value: function connectedCallback() {
this.canvasElement = this.closest('canvas');
if (this.canvasElement) {
var customEvent = new CustomEvent('connect', {
bubbles: true
});
this.canvasElement.dispatchEvent(customEvent);
}
}
}, {
// On removal: announce a 'disconnect' and drop the canvas reference.
key: "disconnectedCallback",
value: function disconnectedCallback() {
if (this.canvasElement) {
var customEvent = new CustomEvent('disconnect', {
bubbles: true
});
this.canvasElement.dispatchEvent(customEvent);
this.canvasElement = null;
}
}
}, {
// Attribute helpers: typed reads with defaults, and a presence-based boolean.
key: "getTextualAttribute",
value: function getTextualAttribute(attributeName) {
var _this$getAttribute;
var defaultValue = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
return (_this$getAttribute = this.getAttribute(attributeName)) !== null && _this$getAttribute !== void 0 ? _this$getAttribute : defaultValue;
}
}, {
key: "getNumericAttribute",
value: function getNumericAttribute(attributeName) {
var _this$getAttribute2;
var defaultValue = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
return Number((_this$getAttribute2 = this.getAttribute(attributeName)) !== null && _this$getAttribute2 !== void 0 ? _this$getAttribute2 : defaultValue);
}
}, {
key: "getBooleanAttribute",
value: function getBooleanAttribute(attributeName) {
return this.hasAttribute(attributeName);
}
}, {
// Boolean attributes follow HTML convention: present (empty string) = true.
key: "setBooleanAttribute",
value: function setBooleanAttribute(attributeName, value) {
if (value) {
this.setAttribute(attributeName, '');
} else {
this.removeAttribute(attributeName);
}
}
}, {
// Attribute-backed geometry and style accessors. Numeric getters fall back
// to the defaults passed to getNumericAttribute (e.g. opacity/scale 1).
key: "x",
get: function get() {
return this.getNumericAttribute('x');
},
set: function set(value) {
this.setAttribute('x', value);
}
}, {
key: "y",
get: function get() {
return this.getNumericAttribute('y');
},
set: function set(value) {
this.setAttribute('y', value);
}
}, {
key: "backgroundColor",
get: function get() {
return this.getAttribute('backgroundColor');
},
set: function set(value) {
this.setAttribute('backgroundColor', value);
}
}, {
key: "backgroundImage",
get: function get() {
return this.getAttribute('backgroundImage');
},
set: function set(value) {
this.setAttribute('backgroundImage', value);
}
}, {
key: "borderColor",
get: function get() {
return this.getAttribute('borderColor');
},
set: function set(value) {
this.setAttribute('borderColor', value);
}
}, {
key: "borderWidth",
get: function get() {
return this.getNumericAttribute('borderWidth', 1);
},
set: function set(value) {
this.setAttribute('borderWidth', value);
}
}, {
key: "opacity",
get: function get() {
return this.getNumericAttribute('opacity', 1);
},
set: function set(value) {
this.setAttribute('opacity', value);
}
}, {
// Transform origin as a fraction of the shape's size (0.5 = centre).
key: "originX",
get: function get() {
return this.getNumericAttribute('originX', 0.5);
},
set: function set(value) {
this.setAttribute('originX', value);
}
}, {
key: "originY",
get: function get() {
return this.getNumericAttribute('originY', 0.5);
},
set: function set(value) {
this.setAttribute('originY', value);
}
}, {
key: "rotation",
get: function get() {
return this.getNumericAttribute('rotation');
},
set: function set(value) {
this.setAttribute('rotation', value);
}
}, {
key: "scaleX",
get: function get() {
return this.getNumericAttribute('scaleX', 1);
},
set: function set(value) {
this.setAttribute('scaleX', value);
}
}, {
key: "scaleY",
get: function get() {
return this.getNumericAttribute('scaleY', 1);
},
set: function set(value) {
this.setAttribute('scaleY', value);
}
}, {
key: "shadowColor",
get: function get() {
return this.getAttribute('shadowColor');
},
set: function set(value) {
this.setAttribute('shadowColor', value);
}
}, {
key: "shadowBlur",
get: function get() {
return this.getNumericAttribute('shadowBlur');
},
set: function set(value) {
this.setAttribute('shadowBlur', value);
}
}, {
key: "shadowOffsetX",
get: function get() {
return this.getNumericAttribute('shadowOffsetX');
},
set: function set(value) {
this.setAttribute('shadowOffsetX', value);
}
}, {
key: "shadowOffsetY",
get: function get() {
return this.getNumericAttribute('shadowOffsetY');
},
set: function set(value) {
this.setAttribute('shadowOffsetY', value);
}
}, {
// Stored as a comma-separated string attribute; parsed to number[] on read.
key: "borderDash",
get: function get() {
var _this$getAttribute$sp, _this$getAttribute3;
return (_this$getAttribute$sp = (_this$getAttribute3 = this.getAttribute('borderDash')) === null || _this$getAttribute3 === void 0 ? void 0 : _this$getAttribute3.split(',').map(function (item) {
return Number(item);
})) !== null && _this$getAttribute$sp !== void 0 ? _this$getAttribute$sp : [];
},
set: function set(value) {
this.setAttribute('borderDash', value);
}
}, {
key: "zIndex",
get: function get() {
return this.getNumericAttribute('zIndex');
},
set: function set(value) {
this.setAttribute('zIndex', value);
}
}, {
// Abstract contract — concrete shapes must implement these four methods.
key: "getBoundingBox",
value: function getBoundingBox() {
throw new Error('Method must be implemented in sub class');
}
}, {
key: "getTranslationCenter",
value: function getTranslationCenter(offset) {
throw new Error('Method must be implemented in sub class');
}
}, {
key: "drawHitArea",
value: function drawHitArea(ctx, offset, color) {
throw new Error('Method must be implemented in sub class');
}
}, {
key: "draw",
value: function draw(ctx, offset) {
throw new Error('Method must be implemented in sub class');
}
}, {
// Runs the queued stages in order inside a save/restore frame. A stage
// returning a falsy value halts the pipeline (e.g. an image still loading).
// Note: _this$pipeline$shift is usable in the while-condition before its
// `var` statement because of var hoisting.
key: "drawPipeline",
value: function drawPipeline(ctx, offset) {
ctx.save();
while ((_this$pipeline$shift = this.pipeline.shift()) !== null && _this$pipeline$shift !== void 0 && _this$pipeline$shift.call(this, ctx, offset)) {
var _this$pipeline$shift;
}
this.pipeline = [];
ctx.restore();
}
}], [{
// Observed attribute names are lower-case (HTML attribute names are
// case-insensitive); setters above write the camelCase spellings.
key: "observedAttributes",
get: function get() {
return ['x', 'y', 'backgroundcolor', 'backgroundimage', 'bordercolor', 'borderwidth', 'opacity', 'originx', 'originy', 'rotation', 'scalex', 'scaley', 'shadowcolor', 'shadowblur', 'shadowoffsetx', 'shadowoffsety', 'borderdash', 'zindex'];
}
}]);
return AbstractShape;
}( /*#__PURE__*/_wrapNativeSuper(HTMLElement));
var traceRectangle = function traceRectangle(rectangle) {
return function (ctx, offset) {
var _rectangle$getBoundin = rectangle.getBoundingBox(offset),
left = _rectangle$getBoundin.left,
top = _rectangle$getBoundin.top,
right = _rectangle$getBoundin.right,
bottom = _rectangle$getBoundin.bottom;
ctx.beginPath();
ctx.rect(left + rectangle.borderWidth / 2, top + rectangle.borderWidth / 2, right - left - rectangle.borderWidth, bottom - top - rectangle.borderWidth);
return true;
};
};
var traceRoundedRectangle = function traceRoundedRectangle(roundedRectangle) {
return function (ctx, offset) {
var _roundedRectangle$get = roundedRectangle.getBoundingBox(offset),
left = _roundedRectangle$get.left,
top = _roundedRectangle$get.top,
right = _roundedRectangle$get.right,
bottom = _roundedRectangle$get.bottom;
var radius = roundedRectangle.radius;
var x = left + roundedRectangle.borderWidth / 2;
var y = top + roundedRectangle.borderWidth / 2;
var width = right - left - roundedRectangle.borderWidth;
var height = bottom - top - roundedRectangle.borderWidth;
ctx.beginPath();
ctx.moveTo(x + radius, y);
ctx.arcTo(x + width, y, x + width, y + height, radius);
ctx.arcTo(x + width, y + height, x, y + height, radius);
ctx.arcTo(x, y + height, x, y, radius);
ctx.arcTo(x, y, x + width, y, radius);
ctx.closePath();
return true;
};
};
var rotateAndScale = function rotateAndScale(shape) {
return function (ctx, offset) {
var scaleX = shape.scaleX * offset.scaleX;
var scaleY = shape.scaleY * offset.scaleY;
var rotation = shape.rotation + offset.rotation;
if (scaleX !== 1 || scaleY !== 1 || rotation !== 0) {
var translate = shape.getTranslationCenter(offset);
ctx.translate(translate.x, translate.y);
ctx.rotate(rotation);
ctx.scale(scaleX, scaleY);
ctx.translate(-translate.x, -translate.y);
}
return true;
};
};
var shade = function shade(shape) {
return function (ctx, offset) {
var globalAlpha = shape.opacity * offset.opacity;
if (globalAlpha !== 1) {
ctx.globalAlpha = globalAlpha;
}
if (shape.shadowColor) {
ctx.shadowColor = shape.shadowColor;
}
if (shape.shadowBlur !== 0) {
ctx.shadowBlur = shape.shadowBlur;
}
if (shape.shadowOffsetX !== 0) {
ctx.shadowOffsetX = shape.shadowOffsetX;
}
if (shape.shadowOffsetY !== 0) {
ctx.shadowOffsetY = shape.shadowOffsetY;
}
return true;
};
};
var fillAndStroke = function fillAndStroke(shape) {
return function (ctx, offset) {
var _shape$borderDash;
if (shape.backgroundColor) {
ctx.fillStyle = shape.backgroundColor;
ctx.fill();
}
if ((_shape$borderDash = shape.borderDash) !== null && _shape$borderDash !== void 0 && _shape$borderDash.length) {
ctx.setLineDash(shape.borderDash);
}
if (shape.borderColor && shape.borderWidth) {
ctx.strokeStyle = shape.borderColor;
ctx.lineWidth = shape.borderWidth;
ctx.stroke();
}
return true;
};
};
// Module-level cache of Image objects keyed by src, so shapes that reuse the
// same URL share a single Image instance (populated by loadImage).
var imageCache = {};
var drawBackgroundImage = function drawBackgroundImage(shape) {
return function (ctx, offset) {
if (!shape.image) {
return true;
}
var _shape$getBoundingBox = shape.getBoundingBox(offset),
left = _shape$getBoundingBox.left,
right = _shape$getBoundingBox.right,
top = _shape$getBoundingBox.top,
bottom = _shape$getBoundingBox.bottom;
var width = right - left;
var height = bottom - top;
ctx.drawImage(shape.image, left, top, width, height);
return true;
};
};
var drawImage = function drawImage(image) {
return function (ctx, offset) {
var _image$getBoundingBox = image.getBoundingBox(offset),
left = _image$getBoundingBox.left,
top = _image$getBoundingBox.top;
ctx.drawImage(image.image, left + image.borderWidth, top + image.borderWidth, image.width - image.borderWidth * 2, image.height - image.borderWidth * 2);
return true;
};
};
var clipBackgroundImage = function clipBackgroundImage(shape) {
return function (ctx, offset) {
if (!shape.image) {
return true;
}
ctx.clip();
return true;
};
};
// Resolves `src` to an Image on `shape.image`, sharing instances through the
// module-level imageCache. Returns whether the image is ready to draw — a
// falsy return halts the shape's draw pipeline (see drawPipeline) until the
// image's 'load' event triggers a canvas repaint.
var loadImage = function loadImage(shape, ctx, src) {
// No source: clear any previously loaded image and let the pipeline continue.
if (!src) {
if (shape.image) {
delete shape.image;
}
return true;
}
shape.image = imageCache[src];
if (!shape.image) {
// First request for this src: start the download and notify the canvas
// (bubbling 'load' CustomEvent) once it finishes so the Layer repaints.
shape.image = new Image();
shape.image.onload = function () {
var customEvent = new CustomEvent('load', {
bubbles: true
});
shape.dispatchEvent(customEvent);
};
shape.image.src = src;
imageCache[src] = shape.image;
}
// NOTE(review): only the first shape requesting a src attaches onload; other
// shapes sharing it presumably rely on that one 'load' event bubbling to the
// same canvas to get redrawn — verify for shapes on a different canvas.
return shape.image.complete;
};
// Pipeline stage factory for canvas-image elements: resolves the `src`
// attribute through the shared loader; halts the pipeline until loaded.
var loadSrc = function loadSrc(shape) {
  return function (ctx, offset) {
    return loadImage(shape, ctx, shape.src);
  };
};
// Pipeline stage factory: resolves the `backgroundImage` attribute through
// the shared loader; halts the pipeline until loaded.
var loadBackgroundImage = function loadBackgroundImage(shape) {
  return function (ctx, offset) {
    return loadImage(shape, ctx, shape.backgroundImage);
  };
};
// Prop names stripped from the React wrapper's rest-props spread (see the
// `rectangle` forwardRef component below).
var _excluded$7 = ["children"];
// <canvas-rectangle>: an axis-aligned rectangle shape positioned by x/y with
// width/height, origin-relative like all shapes.
var CanvasRectangle = /*#__PURE__*/function (_AbstractShape) {
_inherits(CanvasRectangle, _AbstractShape);
var _super = _createSuper(CanvasRectangle);
function CanvasRectangle() {
_classCallCheck(this, CanvasRectangle);
return _super.apply(this, arguments);
}
_createClass(CanvasRectangle, [{
key: "width",
get: function get() {
return this.getNumericAttribute('width');
},
set: function set(value) {
this.setAttribute('width', value);
}
}, {
key: "height",
get: function get() {
return this.getNumericAttribute('height');
},
set: function set(value) {
this.setAttribute('height', value);
}
}, {
// Bounding box in parent coordinates: (x, y) shifted by the parent offset
// and by the origin fraction of the rectangle's own size.
key: "getBoundingBox",
value: function getBoundingBox(offset) {
var left = this.x + offset.x - this.width * this.originX;
var top = this.y + offset.y - this.height * this.originY;
var right = left + this.width;
var bottom = top + this.height;
return {
left: left,
right: right,
top: top,
bottom: bottom
};
}
}, {
// Pivot point for rotation/scaling: the origin point inside the box.
key: "getTranslationCenter",
value: function getTranslationCenter(offset) {
var _this$getBoundingBox = this.getBoundingBox(offset),
top = _this$getBoundingBox.top,
left = _this$getBoundingBox.left;
var x = left + this.width * this.originX;
var y = top + this.height * this.originY;
return {
x: x,
y: y
};
}
}, {
// Draws a flat silhouette in the assigned hit colour onto the hit canvas;
// fillAndStroke receives a duck-typed stand-in so filled/bordered regions
// become clickable exactly where they are visible.
key: "drawHitArea",
value: function drawHitArea(ctx, offset, color) {
var backgroundColor = this.backgroundColor,
backgroundImage = this.backgroundImage,
borderColor = this.borderColor,
borderWidth = this.borderWidth;
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceRectangle(this));
this.pipeline.push(fillAndStroke({
backgroundColor: backgroundColor || backgroundImage ? color : undefined,
borderColor: borderColor ? color : undefined,
borderWidth: borderWidth
}));
this.drawPipeline(ctx, offset);
}
}, {
// Visible rendering: load background image (halts until ready), transform,
// trace, shade, paint, then clip and draw the background image.
key: "draw",
value: function draw(ctx, offset) {
this.pipeline.push(loadBackgroundImage(this));
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceRectangle(this));
this.pipeline.push(shade(this));
this.pipeline.push(fillAndStroke(this));
this.pipeline.push(clipBackgroundImage(this));
this.pipeline.push(drawBackgroundImage(this));
this.drawPipeline(ctx, offset);
}
}], [{
key: "observedAttributes",
get: function get() {
return [].concat(_toConsumableArray(AbstractShape.observedAttributes), ['width', 'height']);
}
}]);
return CanvasRectangle;
}(AbstractShape);
registerCustomElement('canvas-rectangle', CanvasRectangle);
// React wrapper: forwards all props (except children) as attributes of the
// <canvas-rectangle> custom element; the ref is forwarded to the element.
var rectangle = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
var children = _ref.children,
props = _objectWithoutProperties(_ref, _excluded$7);
return /*#__PURE__*/React__default["default"].createElement("canvas-rectangle", _extends({}, props, {
ref: ref
}), children);
});
var tracePolygon = function tracePolygon(polygon) {
return function (ctx, offset) {
var _polygon$getBoundingB = polygon.getBoundingBox(offset),
left = _polygon$getBoundingB.left,
top = _polygon$getBoundingB.top;
var x = left + polygon.radius;
var y = top + polygon.radius;
ctx.beginPath();
ctx.moveTo(x + polygon.radius - polygon.borderWidth / 2, y);
for (var side = 0; side < polygon.sides; side++) {
ctx.lineTo(x + (polygon.radius - polygon.borderWidth / 2) * Math.cos(side * 2 * Math.PI / polygon.sides), y + (polygon.radius - polygon.borderWidth / 2) * Math.sin(side * 2 * Math.PI / polygon.sides));
}
ctx.closePath();
return true;
};
};
// Prop names stripped from the React wrapper's rest-props spread (see the
// `image` forwardRef component below).
var _excluded$6 = ["children"];
// <canvas-image>: a rectangle whose picture comes from a `src` attribute.
// Unlike CanvasRectangle's backgroundImage, the picture is drawn inset inside
// the border by drawImage and the path is not clipped first.
var CanvasImage = /*#__PURE__*/function (_CanvasRectangle) {
_inherits(CanvasImage, _CanvasRectangle);
var _super = _createSuper(CanvasImage);
function CanvasImage() {
_classCallCheck(this, CanvasImage);
return _super.call(this);
}
_createClass(CanvasImage, [{
key: "src",
get: function get() {
return this.getAttribute('src');
},
set: function set(value) {
this.setAttribute('src', value);
}
}, {
// Hit silhouette: an image counts as a filled area, so having a `src`
// makes the whole rectangle clickable.
key: "drawHitArea",
value: function drawHitArea(ctx, offset, color) {
var backgroundColor = this.backgroundColor,
src = this.src,
borderColor = this.borderColor,
borderWidth = this.borderWidth;
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceRectangle(this));
this.pipeline.push(fillAndStroke({
backgroundColor: backgroundColor || src ? color : undefined,
borderColor: borderColor ? color : undefined,
borderWidth: borderWidth
}));
this.drawPipeline(ctx, offset);
}
}, {
// Visible rendering: loadSrc halts the pipeline until the image is ready.
key: "draw",
value: function draw(ctx, offset) {
this.pipeline.push(loadSrc(this));
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceRectangle(this));
this.pipeline.push(shade(this));
this.pipeline.push(fillAndStroke(this));
this.pipeline.push(drawImage(this));
this.drawPipeline(ctx, offset);
}
}], [{
key: "observedAttributes",
get: function get() {
return [].concat(_toConsumableArray(CanvasRectangle.observedAttributes), ['src']);
}
}]);
return CanvasImage;
}(CanvasRectangle);
registerCustomElement('canvas-image', CanvasImage);
// React wrapper: forwards all props (except children) as attributes of the
// <canvas-image> custom element; the ref is forwarded to the element.
var image = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
var children = _ref.children,
props = _objectWithoutProperties(_ref, _excluded$6);
return /*#__PURE__*/React__default["default"].createElement("canvas-image", _extends({}, props, {
ref: ref
}), children);
});
var traceArc = function traceArc(arc) {
return function (ctx, offset) {
var _arc$startAngle, _arc$endAngle, _arc$counterclockwise;
var _arc$getBoundingBox = arc.getBoundingBox(offset),
left = _arc$getBoundingBox.left,
top = _arc$getBoundingBox.top;
ctx.beginPath();
ctx.arc(left + arc.radius, top + arc.radius, arc.radius - arc.borderWidth / 2, ((_arc$startAngle = arc.startAngle) !== null && _arc$startAngle !== void 0 ? _arc$startAngle : 0) - Math.PI / 2, ((_arc$endAngle = arc.endAngle) !== null && _arc$endAngle !== void 0 ? _arc$endAngle : Math.PI * 2) - Math.PI / 2, (_arc$counterclockwise = arc.counterclockwise) !== null && _arc$counterclockwise !== void 0 ? _arc$counterclockwise : false);
return true;
};
};
// Prop names stripped from the React wrapper's rest-props spread (see the
// `circle` forwardRef component below).
var _excluded$5 = ["children"];
// <canvas-circle>: a circle shape sized by a `radius` attribute; its bounding
// box is the enclosing square of side 2 * radius.
var CanvasCircle = /*#__PURE__*/function (_AbstractShape) {
_inherits(CanvasCircle, _AbstractShape);
var _super = _createSuper(CanvasCircle);
function CanvasCircle() {
_classCallCheck(this, CanvasCircle);
return _super.apply(this, arguments);
}
_createClass(CanvasCircle, [{
key: "radius",
get: function get() {
return this.getNumericAttribute('radius');
},
set: function set(value) {
this.setAttribute('radius', value);
}
}, {
// Bounding box of the enclosing square, origin-relative to the diameter.
key: "getBoundingBox",
value: function getBoundingBox(offset) {
var left = this.x + offset.x - this.radius * 2 * this.originX;
var top = this.y + offset.y - this.radius * 2 * this.originY;
var right = left + this.radius * 2;
var bottom = top + this.radius * 2;
return {
left: left,
right: right,
top: top,
bottom: bottom
};
}
}, {
// Pivot point for rotation/scaling: the origin point inside the square.
key: "getTranslationCenter",
value: function getTranslationCenter(offset) {
var _this$getBoundingBox = this.getBoundingBox(offset),
top = _this$getBoundingBox.top,
left = _this$getBoundingBox.left;
var x = left + this.radius * this.originX * 2;
var y = top + this.radius * this.originY * 2;
return {
x: x,
y: y
};
}
}, {
// Hit silhouette in the assigned hit colour (same scheme as CanvasRectangle).
key: "drawHitArea",
value: function drawHitArea(ctx, offset, color) {
var backgroundColor = this.backgroundColor,
backgroundImage = this.backgroundImage,
borderColor = this.borderColor,
borderWidth = this.borderWidth;
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceArc(this));
this.pipeline.push(fillAndStroke({
backgroundColor: backgroundColor || backgroundImage ? color : undefined,
borderColor: borderColor ? color : undefined,
borderWidth: borderWidth
}));
this.drawPipeline(ctx, offset);
}
}, {
// Visible rendering; same pipeline as CanvasRectangle but traced as an arc.
key: "draw",
value: function draw(ctx, offset) {
this.pipeline.push(loadBackgroundImage(this));
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceArc(this));
this.pipeline.push(shade(this));
this.pipeline.push(fillAndStroke(this));
this.pipeline.push(clipBackgroundImage(this));
this.pipeline.push(drawBackgroundImage(this));
this.drawPipeline(ctx, offset);
}
}], [{
key: "observedAttributes",
get: function get() {
return [].concat(_toConsumableArray(AbstractShape.observedAttributes), ['radius']);
}
}]);
return CanvasCircle;
}(AbstractShape);
registerCustomElement('canvas-circle', CanvasCircle);
// React wrapper: forwards all props (except children) as attributes of the
// <canvas-circle> custom element; the ref is forwarded to the element.
var circle = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
var children = _ref.children,
props = _objectWithoutProperties(_ref, _excluded$5);
return /*#__PURE__*/React__default["default"].createElement("canvas-circle", _extends({}, props, {
ref: ref
}), children);
});
// Prop names stripped from the React wrapper's rest-props spread (see the
// `arc` forwardRef component below).
var _excluded$4 = ["children"];
// <canvas-arc>: a circle drawn as a partial arc, adding startAngle/endAngle
// and a counterclockwise flag (consumed by traceArc via the inherited
// draw/drawHitArea pipelines).
var CanvasArc = /*#__PURE__*/function (_CanvasCircle) {
_inherits(CanvasArc, _CanvasCircle);
var _super = _createSuper(CanvasArc);
function CanvasArc() {
_classCallCheck(this, CanvasArc);
return _super.apply(this, arguments);
}
_createClass(CanvasArc, [{
key: "startAngle",
get: function get() {
return this.getNumericAttribute('startAngle');
},
set: function set(value) {
this.setAttribute('startAngle', value);
}
}, {
key: "endAngle",
get: function get() {
return this.getNumericAttribute('endAngle');
},
set: function set(value) {
this.setAttribute('endAngle', value);
}
}, {
key: "counterclockwise",
get: function get() {
return this.getBooleanAttribute('counterclockwise');
},
set: function set(value) {
this.setBooleanAttribute('counterclockwise', value);
}
}], [{
// Rebuilt from AbstractShape's list plus 'radius' (equivalent to reusing
// CanvasCircle.observedAttributes) plus the arc-specific attributes.
key: "observedAttributes",
get: function get() {
return [].concat(_toConsumableArray(AbstractShape.observedAttributes), ['radius', 'startangle', 'endangle', 'counterclockwise']);
}
}]);
return CanvasArc;
}(CanvasCircle);
registerCustomElement('canvas-arc', CanvasArc);
// React wrapper: forwards all props (except children) as attributes of the
// <canvas-arc> custom element; the ref is forwarded to the element.
var arc = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
var children = _ref.children,
props = _objectWithoutProperties(_ref, _excluded$4);
return /*#__PURE__*/React__default["default"].createElement("canvas-arc", _extends({}, props, {
ref: ref
}), children);
});
var fillAndStrokeText = function fillAndStrokeText(text) {
return function (ctx, offset) {
ctx.font = "".concat(text.fontStyle, " ").concat(text.fontWeight, " ").concat(text.fontSize, "px ").concat(text.fontFamily);
ctx.textBaseline = text.baseline;
ctx.textAlign = text.align;
var _text$cropAndMeasure = text.cropAndMeasure(),
textContent = _text$cropAndMeasure.textContent;
var x = text.x + offset.x;
var y = text.y + offset.y;
if (text.color) {
ctx.fillStyle = text.color;
ctx.fillText(textContent, x - text.borderWidth / 2, y - text.borderWidth / 2);
}
if (text.borderColor && text.borderWidth) {
ctx.strokeStyle = text.borderColor;
ctx.lineWidth = text.borderWidth;
ctx.strokeText(textContent, x - text.borderWidth / 2, y - text.borderWidth / 2);
}
};
};
var traceTextBox = function traceTextBox(text) {
return function (ctx, offset) {
var _text$getBoundingBox = text.getBoundingBox(offset),
left = _text$getBoundingBox.left,
top = _text$getBoundingBox.top,
right = _text$getBoundingBox.right,
bottom = _text$getBoundingBox.bottom;
ctx.beginPath();
ctx.rect(left - text.borderWidth / 2, top - text.borderWidth / 2, right - left, bottom - top);
return true;
};
};
function memoize(fn, cache) {
return function () {
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
var cacheKey = args.join(',');
var result = cache.read(cacheKey);
if (result === undefined) {
result = fn.apply(void 0, args);
cache.write(cacheKey, result);
}
return result;
};
}
// From https://medium.com/dsinjs/implementing-lru-cache-in-javascript-94ba6755cda9
// Doubly-linked-list node used by the Lru cache below; next/prev default null.
var Node = function Node(key, value) {
var next = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : null;
var prev = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : null;
_classCallCheck(this, Node);
this.key = key;
this.value = value;
this.next = next;
this.prev = prev;
};
// LRU cache backed by a doubly-linked list (head = most recently used,
// tail = least recently used) plus a key → Node map for O(1) lookup. The
// class IIFE receives Symbol.iterator as _Symbol$iterator so instances are
// iterable over their nodes.
var Lru = /*#__PURE__*/function (_Symbol$iterator) {
//set default limit of 10 if limit is not passed.
function Lru() {
var limit = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 10;
_classCallCheck(this, Lru);
this.size = 0;
this.limit = limit;
this.head = null;
this.tail = null;
this.cacheMap = {};
}
_createClass(Lru, [{
// Inserts (or moves) an entry at the head; evicts the tail when full.
key: "write",
value: function write(key, value) {
var existingNode = this.cacheMap[key];
if (existingNode) {
// Rewriting an existing key: unlink its node first; the cacheMap entry
// is overwritten below.
this.detach(existingNode);
this.size--;
} else if (this.size === this.limit) {
// At capacity: evict the least recently used entry.
delete this.cacheMap[this.tail.key];
this.detach(this.tail);
this.size--;
} // Write to head of LinkedList
if (!this.head) {
this.head = this.tail = new Node(key, value);
} else {
var node = new Node(key, value, this.head);
this.head.prev = node;
this.head = node;
} // update cacheMap with LinkedList key and Node reference
this.cacheMap[key] = this.head;
this.size++;
}
}, {
// Returns the cached value (undefined on miss) and promotes the entry to
// most recently used by re-writing it.
key: "read",
value: function read(key) {
var existingNode = this.cacheMap[key];
if (existingNode) {
var value = existingNode.value; // Make the node as new Head of LinkedList if not already
if (this.head !== existingNode) {
// write will automatically remove the node from it's position and make it a new head i.e most used
this.write(key, value);
}
return value;
}
}
}, {
// Unlinks a node from the list, fixing head/tail when at either end.
key: "detach",
value: function detach(node) {
if (node.prev !== null) {
node.prev.next = node.next;
} else {
this.head = node.next;
}
if (node.next !== null) {
node.next.prev = node.prev;
} else {
this.tail = node.prev;
}
}
}, {
key: "clear",
value: function clear() {
this.head = null;
this.tail = null;
this.size = 0;
this.cacheMap = {};
} // Invokes the callback function with every node of the chain and the index of the node.
}, {
key: "forEach",
value: function forEach(fn) {
var node = this.head;
var counter = 0;
while (node) {
fn(node, counter);
node = node.next;
counter++;
}
} // To iterate over LRU with a 'for...of' loop
}, {
// Generator (transpiled via regeneratorRuntime) yielding nodes from head
// to tail; installed under Symbol.iterator.
key: _Symbol$iterator,
value:
/*#__PURE__*/
regeneratorRuntime.mark(function value() {
var node;
return regeneratorRuntime.wrap(function value$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
node = this.head;
case 1:
if (!node) {
_context.next = 7;
break;
}
_context.next = 4;
return node;
case 4:
node = node.next;
_context.next = 1;
break;
case 7:
case "end":
return _context.stop();
}
}
}, value, this);
})
}]);
return Lru;
}(Symbol.iterator);
// Measures text metrics for a given font configuration. Memoized in a
// 50-entry LRU keyed by the comma-joined arguments; each cache miss creates a
// throwaway canvas used purely for measurement.
var measureText = memoize(function (style, weight, size, family, baseline, align, text) {
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
ctx.font = "".concat(style, " ").concat(weight, " ").concat(size, "px ").concat(family);
ctx.textBaseline = baseline;
ctx.textAlign = align;
return ctx.measureText(text);
}, new Lru(50));
// Shortens `text` by one character for ellipsis truncation: strips a trailing
// ellipsis if present, drops the last remaining character, and re-appends the
// ellipsis. Returns '' once nothing but the ellipsis (or nothing) is left.
// BUG FIX: the old implementation built `new RegExp(ellipses + '$')`, so a
// custom ellipsis containing regex metacharacters (e.g. '+', '.') threw or
// stripped the wrong text. The suffix is now matched literally.
var cropEnd = function cropEnd(text) {
  var ellipses = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '…';
  var main = text.endsWith(ellipses) ? text.slice(0, text.length - ellipses.length) : text;
  if (main === '') {
    return main;
  }
  return main.slice(0, -1) + ellipses;
};
// Prop names stripped from the canvas-label React wrapper's rest-props spread.
var _excluded$3 = ["children"];
var CanvasLabel = /*#__PURE__*/function (_AbstractShape) {
_inherits(CanvasLabel, _AbstractShape);
var _super = _createSuper(CanvasLabel);
function CanvasLabel() {
_classCallCheck(this, CanvasLabel);
return _super.apply(this, arguments);
}
_createClass(CanvasLabel, [{
key: "textContent",
get: function get() {
return this.getTextualAttribute('textContent', '');
},
set: function set(value) {
this.setAttribute('textContent', value);
}
}, {
key: "fontSize",
get: function get() {
return this.getNumericAttribute('fontSize', 10);
},
set: function set(value) {
this.setAttribute('fontSize', value);
}
}, {
key: "fontFamily",
get: function get() {
return this.getTextualAttribute('fontFamily', 'sans-serif');
},
set: function set(value) {
this.setAttribute('fontFamily', value);
}
}, {
key: "fontStyle",
get: function get() {
return this.getTextualAttribute('fontStyle', 'normal');
},
set: function set(value) {
this.setAttribute('fontStyle', value);
}
}, {
key: "fontWeight",
get: function get() {
return this.getTextualAttribute('fontWeight', 'normal');
},
set: function set(value) {
this.setAttribute('fontWeight', value);
}
}, {
key: "color",
get: function get() {
return this.getAttribute('color');
},
set: function set(value) {
this.setAttribute('color', value);
}
}, {
key: "baseline",
get: function get() {
return this.getTextualAttribute('baseline', 'alphabetic');
},
set: function set(value) {
this.setAttribute('baseline', value);
}
}, {
key: "align",
get: function get() {
return this.getTextualAttribute('align', 'start');
},
set: function set(value) {
this.setAttribute('align', value);
}
}, {
key: "maxWidth",
get: function get() {
return this.getNumericAttribute('maxWidth', Infinity);
},
set: function set(value) {
this.setAttribute('maxWidth', value);
}
}, {
key: "width",
get: function get() {
var _this$cropAndMeasure = this.cropAndMeasure(),
width = _this$cropAndMeasure.width;
return width;
}
}, {
key: "height",
get: function get() {
var _this$cropAndMeasure2 = this.cropAndMeasure(),
height = _this$cropAndMeasure2.height;
return height;
}
}, {
key: "getTextMetrics",
value: function getTextMetrics(text) {
return measureText(this.fontStyle, this.fontWeight, this.fontSize, this.fontFamily, this.baseline, this.align, text);
}
}, {
key: "cropAndMeasure",
value: function cropAndMeasure() {
var textContent = this.textContent;
var textMetrics = this.getTextMetrics(textContent);
var width = textMetrics.actualBoundingBoxLeft + textMetrics.actualBoundingBoxRight;
while (textContent !== '' && width > this.maxWidth) {
textContent = cropEnd(textContent);
textMetrics = this.getTextMetrics(textContent);
width = textMetrics.actualBoundingBoxLeft + textMetrics.actualBoundingBoxRight;
}
var height = textMetrics.actualBoundingBoxAscent + textMetrics.actualBoundingBoxDescent;
return {
textContent: textContent,
height: height,
width: width,
textMetrics: textMetrics
};
}
}, {
key: "getBoundingBox",
value: function getBoundingBox(offset) {
var _this$cropAndMeasure3 = this.cropAndMeasure(),
textMetrics = _this$cropAndMeasure3.textMetrics,
width = _this$cropAndMeasure3.width,
height = _this$cropAndMeasure3.height;
var left = this.x + offset.x - textMetrics.actualBoundingBoxLeft;
var right = left + width;
var top = this.y + offset.y - textMetrics.actualBoundingBoxAscent;
var bottom = top + height;
return {
left: left,
right: right,
top: top,
bottom: bottom
};
}
}, {
key: "getTranslationCenter",
value: function getTranslationCenter(offset) {
var x = this.x + offset.x;
var y = this.y + offset.y;
return {
x: x,
y: y
};
}
}, {
key: "drawHitArea",
value: function drawHitArea(ctx, offset, color) {
var backgroundColor = this.backgroundColor,
backgroundImage = this.backgroundImage,
borderColor = this.borderColor,
borderWidth = this.borderWidth;
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(traceTextBox(this));
this.pipeline.push(fillAndStroke({
backgroundColor: backgroundColor || backgroundImage ? color : undefined,
borderColor: borderColor ? color : undefined,
borderWidth: borderWidth
}));
this.drawPipeline(ctx, offset);
}
}, {
key: "draw",
value: function draw(ctx, offset) {
this.pipeline.push(rotateAndScale(this));
this.pipeline.push(shade(this));
this.pipeline.push(traceTextBox(this));
this.pipeline.push(fillAndStroke({
backgroundColor: this.backgroundColor,
borderColor: this.backgroundColor,
borderWidth: this.borderWidth
}));
this.pipeline.push(fillAndStrokeText(this));
this.drawPipeline(ctx, offset);
}
}], [{
key: "observedAttributes",
get: function get() {
return [].concat(_toConsumableArray(AbstractShape.observedAttributes), ['color', 'fontsize', 'fontfamily', 'fontstyle', 'fontweight', 'baseline', 'align', 'maxwidth', 'textcontent']);
}
}]);
return CanvasLabel;
}(AbstractShape);
registerCustomElement('canvas-label', CanvasLabel);
var label = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
var children = _ref.children,
props = _objectWithoutProperties(_ref, _excluded$3);
return /*#__PURE__*/React__default["default"].createElement("canvas-label", _extends({}, props, {
ref: ref
}), children);
});
// Prop names peeled off the React wrapper before forwarding to the element.
var _excluded$2 = ["children"];
/**
 * <canvas-rounded-rectangle>: a CanvasRectangle variant whose corners are
 * rounded by `radius`. Babel-transpiled ES5 output.
 */
var CanvasRoundedRectangle = /*#__PURE__*/function (_CanvasRectangle) {
  _inherits(CanvasRoundedRectangle, _CanvasRectangle);
  var _super = _createSuper(CanvasRoundedRectangle);
  function CanvasRoundedRectangle() {
    _classCallCheck(this, CanvasRoundedRectangle);
    return _super.apply(this, arguments);
  }
  _createClass(CanvasRoundedRectangle, [{
    // Corner radius; no default supplied here (getNumericAttribute decides).
    key: "radius",
    get: function get() {
      return this.getNumericAttribute('radius');
    },
    set: function set(value) {
      this.setAttribute('radius', value);
    }
  }, {
    // Draws the shape in the flat hit-test color for pixel-based hit testing.
    key: "drawHitArea",
    value: function drawHitArea(ctx, offset, color) {
      var backgroundColor = this.backgroundColor,
        backgroundImage = this.backgroundImage,
        borderColor = this.borderColor,
        borderWidth = this.borderWidth;
      this.pipeline.push(rotateAndScale(this));
      this.pipeline.push(traceRoundedRectangle(this));
      this.pipeline.push(fillAndStroke({
        backgroundColor: backgroundColor || backgroundImage ? color : undefined,
        borderColor: borderColor ? color : undefined,
        borderWidth: borderWidth
      }));
      this.drawPipeline(ctx, offset);
    }
  }, {
    // Full render: trace the rounded path, shade, fill/stroke, then clip and
    // paint the optional background image inside the path.
    key: "draw",
    value: function draw(ctx, offset) {
      this.pipeline.push(loadBackgroundImage(this));
      this.pipeline.push(rotateAndScale(this));
      this.pipeline.push(traceRoundedRectangle(this));
      this.pipeline.push(shade(this));
      this.pipeline.push(fillAndStroke(this));
      this.pipeline.push(clipBackgroundImage(this));
      this.pipeline.push(drawBackgroundImage(this));
      this.drawPipeline(ctx, offset);
    }
  }], [{
    key: "observedAttributes",
    get: function get() {
      return [].concat(_toConsumableArray(AbstractShape.observedAttributes), ['radius']);
    }
  }]);
  return CanvasRoundedRectangle;
}(CanvasRectangle);
registerCustomElement('canvas-rounded-rectangle', CanvasRoundedRectangle);
// React wrapper forwarding props (minus `children`) to the custom element.
var roundedRectangle = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
  var children = _ref.children,
    props = _objectWithoutProperties(_ref, _excluded$2);
  return /*#__PURE__*/React__default["default"].createElement("canvas-rounded-rectangle", _extends({}, props, {
    ref: ref
  }), children);
});
// Pipeline step factory: returns a drawing step that traces a pie-slice
// ("sector") path for the given shape. Angles default to a full circle, and
// both start and end angles are shifted by -PI/2 so that angle 0 points
// straight up. The verbose `_sector$…` temporaries are Babel's expansion of
// the ?? (nullish coalescing) operator.
var traceSector = function traceSector(sector) {
  return function (ctx, offset) {
    var _sector$startAngle, _sector$endAngle, _sector$counterclockw;
    var _sector$getBoundingBo = sector.getBoundingBox(offset),
      left = _sector$getBoundingBo.left,
      top = _sector$getBoundingBo.top;
    ctx.beginPath();
    // Start the path at the circle center so closePath() closes the wedge.
    ctx.moveTo(left + sector.radius, top + sector.radius);
    ctx.arc(left + sector.radius, top + sector.radius, sector.radius - sector.borderWidth / 2, ((_sector$startAngle = sector.startAngle) !== null && _sector$startAngle !== void 0 ? _sector$startAngle : 0) - Math.PI / 2, ((_sector$endAngle = sector.endAngle) !== null && _sector$endAngle !== void 0 ? _sector$endAngle : Math.PI * 2) - Math.PI / 2, (_sector$counterclockw = sector.counterclockwise) !== null && _sector$counterclockw !== void 0 ? _sector$counterclockw : false);
    ctx.closePath();
    // Returning true tells the pipeline that a path was traced.
    return true;
  };
};
// Prop names peeled off the React wrapper before forwarding to the element.
var _excluded$1 = ["children"];
/**
 * <canvas-sector>: a CanvasArc variant rendered as a closed pie slice
 * (traceSector) instead of an open arc. Babel-transpiled ES5 output.
 */
var CanvasSector = /*#__PURE__*/function (_CanvasArc) {
  _inherits(CanvasSector, _CanvasArc);
  var _super = _createSuper(CanvasSector);
  function CanvasSector() {
    _classCallCheck(this, CanvasSector);
    return _super.apply(this, arguments);
  }
  _createClass(CanvasSector, [{
    // Draws the sector in the flat hit-test color for pixel-based hit testing.
    key: "drawHitArea",
    value: function drawHitArea(ctx, offset, color) {
      var backgroundColor = this.backgroundColor,
        backgroundImage = this.backgroundImage,
        borderColor = this.borderColor,
        borderWidth = this.borderWidth;
      this.pipeline.push(rotateAndScale(this));
      this.pipeline.push(traceSector(this));
      this.pipeline.push(fillAndStroke({
        backgroundColor: backgroundColor || backgroundImage ? color : undefined,
        borderColor: borderColor ? color : undefined,
        borderWidth: borderWidth
      }));
      this.drawPipeline(ctx, offset);
    }
  }, {
    // Full render: trace the wedge, shade, fill/stroke, then clip and paint
    // the optional background image inside the wedge.
    key: "draw",
    value: function draw(ctx, offset) {
      this.pipeline.push(loadBackgroundImage(this));
      this.pipeline.push(rotateAndScale(this));
      this.pipeline.push(traceSector(this));
      this.pipeline.push(shade(this));
      this.pipeline.push(fillAndStroke(this));
      this.pipeline.push(clipBackgroundImage(this));
      this.pipeline.push(drawBackgroundImage(this));
      this.drawPipeline(ctx, offset);
    }
  }]);
  return CanvasSector;
}(CanvasArc);
registerCustomElement('canvas-sector', CanvasSector);
// React wrapper forwarding props (minus `children`) to the custom element.
var sector = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
  var children = _ref.children,
    props = _objectWithoutProperties(_ref, _excluded$1);
  return /*#__PURE__*/React__default["default"].createElement("canvas-sector", _extends({}, props, {
    ref: ref
  }), children);
});
// Prop names peeled off the React wrapper before forwarding to the element.
var _excluded = ["children"];
/**
 * <canvas-polygon>: a regular polygon with `sides` vertices, built on
 * CanvasCircle (the circumscribed circle supplies position and radius).
 * Babel-transpiled ES5 output.
 */
var CanvasPolygon = /*#__PURE__*/function (_CanvasCircle) {
  _inherits(CanvasPolygon, _CanvasCircle);
  var _super = _createSuper(CanvasPolygon);
  function CanvasPolygon() {
    _classCallCheck(this, CanvasPolygon);
    return _super.apply(this, arguments);
  }
  _createClass(CanvasPolygon, [{
    // Number of polygon sides; no default supplied here.
    key: "sides",
    get: function get() {
      return this.getNumericAttribute('sides');
    },
    set: function set(value) {
      this.setAttribute('sides', value);
    }
  }, {
    // Draws the polygon in the flat hit-test color for pixel-based hit testing.
    key: "drawHitArea",
    value: function drawHitArea(ctx, offset, color) {
      var backgroundColor = this.backgroundColor,
        backgroundImage = this.backgroundImage,
        borderColor = this.borderColor,
        borderWidth = this.borderWidth;
      this.pipeline.push(rotateAndScale(this));
      this.pipeline.push(tracePolygon(this));
      this.pipeline.push(fillAndStroke({
        backgroundColor: backgroundColor || backgroundImage ? color : undefined,
        borderColor: borderColor ? color : undefined,
        borderWidth: borderWidth
      }));
      this.drawPipeline(ctx, offset);
    }
  }, {
    // Full render: trace the polygon, shade, fill/stroke, then clip and paint
    // the optional background image inside the path.
    key: "draw",
    value: function draw(ctx, offset) {
      this.pipeline.push(loadBackgroundImage(this));
      this.pipeline.push(rotateAndScale(this));
      this.pipeline.push(tracePolygon(this));
      this.pipeline.push(shade(this));
      this.pipeline.push(fillAndStroke(this));
      this.pipeline.push(clipBackgroundImage(this));
      this.pipeline.push(drawBackgroundImage(this));
      this.drawPipeline(ctx, offset);
    }
  }], [{
    // Unlike the other shapes above, this extends CanvasCircle's attribute
    // list (it needs the circle's radius attributes plus 'sides').
    key: "observedAttributes",
    get: function get() {
      return [].concat(_toConsumableArray(CanvasCircle.observedAttributes), ['sides']);
    }
  }]);
  return CanvasPolygon;
}(CanvasCircle);
registerCustomElement('canvas-polygon', CanvasPolygon);
// React wrapper forwarding props (minus `children`) to the custom element.
var polygon = /*#__PURE__*/React__default["default"].forwardRef(function (_ref, ref) {
  var children = _ref.children,
    props = _objectWithoutProperties(_ref, _excluded);
  return /*#__PURE__*/React__default["default"].createElement("canvas-polygon", _extends({}, props, {
    ref: ref
  }), children);
});
// Public API of the bundle: the React wrapper components for each canvas
// shape, plus the Stage/Layer containers and the ScaleMode constant set.
exports.Arc = arc;
exports.Circle = circle;
exports.Image = image;
exports.Label = label;
exports.Layer = Layer;
exports.Polygon = polygon;
exports.Rectangle = rectangle;
exports.RoundedRectangle = roundedRectangle;
exports.ScaleMode = ScaleMode;
exports.Sector = sector;
exports.Stage = Stage;
|
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.config;
import com.hazelcast.config.Config;
import com.hazelcast.core.HazelcastException;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.net.URL;
/**
* A support class for the {@link XmlClientConfigBuilder} to locate the client
* xml configuration.
*/
public class XmlClientConfigLocator {
    private static final ILogger LOGGER = Logger.getLogger(XmlClientConfigLocator.class);

    /** Stream of the located XML configuration; never null after construction. */
    private InputStream in;

    /**
     * Locates the client XML configuration, trying in order:
     * <ol>
     *   <li>the {@code hazelcast.client.config} system property (plain file
     *       path, or a {@code classpath:} prefixed resource),</li>
     *   <li>{@code hazelcast-client.xml} in the working directory,</li>
     *   <li>{@code hazelcast-client.xml} on the classpath,</li>
     *   <li>the bundled {@code hazelcast-client-default.xml} fallback.</li>
     * </ol>
     *
     * @throws com.hazelcast.core.HazelcastException if the client XML config is not located.
     */
    public XmlClientConfigLocator() {
        try {
            if (loadFromSystemProperty()) {
                return;
            }
            if (loadFromWorkingDirectory()) {
                return;
            }
            if (loadClientHazelcastXmlFromClasspath()) {
                return;
            }
            loadDefaultConfigurationFromClasspath();
        } catch (final RuntimeException e) {
            throw new HazelcastException("Failed to load ClientConfig", e);
        }
    }

    /** Returns the stream of the located configuration. */
    public InputStream getIn() {
        return in;
    }

    /** Final fallback: the default config bundled with the Hazelcast jar. */
    private void loadDefaultConfigurationFromClasspath() {
        LOGGER.info("Loading 'hazelcast-client-default.xml' from classpath.");
        in = Config.class.getClassLoader().getResourceAsStream("hazelcast-client-default.xml");
        if (in == null) {
            throw new HazelcastException("Could not load 'hazelcast-client-default.xml' from classpath");
        }
    }

    /** Tries a user-supplied hazelcast-client.xml on the classpath; false if absent. */
    private boolean loadClientHazelcastXmlFromClasspath() {
        // Probe with getResource first so "absent" and "unreadable" log differently.
        URL url = Config.class.getClassLoader().getResource("hazelcast-client.xml");
        if (url == null) {
            LOGGER.finest("Could not find 'hazelcast-client.xml' in classpath.");
            return false;
        }
        LOGGER.info("Loading 'hazelcast-client.xml' from classpath.");
        in = Config.class.getClassLoader().getResourceAsStream("hazelcast-client.xml");
        if (in == null) {
            throw new HazelcastException("Could not load 'hazelcast-client.xml' from classpath");
        }
        return true;
    }

    /** Tries hazelcast-client.xml in the process working directory; false if absent. */
    private boolean loadFromWorkingDirectory() {
        File file = new File("hazelcast-client.xml");
        if (!file.exists()) {
            LOGGER.finest("Could not find 'hazelcast-client.xml' in working directory.");
            return false;
        }
        LOGGER.info("Loading 'hazelcast-client.xml' from working directory.");
        try {
            in = new FileInputStream(file);
        } catch (FileNotFoundException e) {
            // Possible despite the exists() check (race, permissions).
            throw new HazelcastException("Failed to open file: " + file.getAbsolutePath(), e);
        }
        return true;
    }

    /**
     * Tries the {@code hazelcast.client.config} system property; false if the
     * property is unset. A {@code classpath:} prefix selects a classpath
     * resource, anything else is treated as a file path.
     */
    private boolean loadFromSystemProperty() {
        String configSystemProperty = System.getProperty("hazelcast.client.config");
        if (configSystemProperty == null) {
            // Fix: message previously read "Could not 'hazelcast.client.config' ..."
            LOGGER.finest("Could not find 'hazelcast.client.config' System property");
            return false;
        }
        LOGGER.info("Loading configuration " + configSystemProperty + " from System property 'hazelcast.client.config'");
        if (configSystemProperty.startsWith("classpath:")) {
            loadSystemPropertyClassPathResource(configSystemProperty);
        } else {
            loadSystemPropertyFileResource(configSystemProperty);
        }
        return true;
    }

    /** Opens the explicitly configured file; throws if missing or unreadable. */
    private void loadSystemPropertyFileResource(String configSystemProperty) {
        //it is a file.
        File configurationFile = new File(configSystemProperty);
        LOGGER.info("Using configuration file at " + configurationFile.getAbsolutePath());
        if (!configurationFile.exists()) {
            String msg = "Config file at '" + configurationFile.getAbsolutePath() + "' doesn't exist.";
            throw new HazelcastException(msg);
        }
        try {
            in = new FileInputStream(configurationFile);
        } catch (FileNotFoundException e) {
            throw new HazelcastException("Failed to open file: " + configurationFile.getAbsolutePath(), e);
        }
    }

    /** Opens the explicitly configured classpath resource; throws if missing. */
    private void loadSystemPropertyClassPathResource(String configSystemProperty) {
        //it is a explicit configured classpath resource.
        String resource = configSystemProperty.substring("classpath:".length());
        LOGGER.info("Using classpath resource at " + resource);
        if (resource.isEmpty()) {
            throw new HazelcastException("classpath resource can't be empty");
        }
        in = Config.class.getClassLoader().getResourceAsStream(resource);
        if (in == null) {
            throw new HazelcastException("Could not load classpath resource: " + resource);
        }
    }
}
|
package com.goldencarp.lingqianbao.view.custom.visualkeyboard;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.goldencarp.lingqianbao.R;
import java.util.List;
/**
 * Adapter for the payment (PIN-pad style) keyboard.
 * Created by ayuhani on 2017/6/23.
 */
public class KeyboardAdapter extends RecyclerView.Adapter<KeyboardAdapter.KeyboardHolder> {
    private Context context;
    // Labels for the 12 keys, in grid order.
    private List<String> datas;
    private OnKeyboardClickListener listener;
    public KeyboardAdapter(Context context, List<String> datas) {
        this.context = context;
        this.datas = datas;
    }
    @Override
    public KeyboardHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(context).inflate(R.layout.item_key_board, parent, false);
        KeyboardHolder holder = new KeyboardHolder(view);
        // Listeners are wired once per holder; the position is resolved at
        // click time via getAdapterPosition(), so recycling stays correct.
        setListener(holder);
        return holder;
    }
    // Forwards key / delete taps to the registered listener, if any.
    private void setListener(final KeyboardHolder holder) {
        holder.tvKey.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (listener != null) {
                    listener.onKeyClick(view, holder, holder.getAdapterPosition());
                }
            }
        });
        holder.rlDel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (listener != null) {
                    listener.onDeleteClick(view, holder, holder.getAdapterPosition());
                }
            }
        });
    }
    @Override
    public void onBindViewHolder(KeyboardHolder holder, int position) {
        // NOTE(review): position 9 gets a special background (presumably the
        // bottom-left key), and position 11 is the delete key — these indices
        // assume a fixed 12-item layout; confirm against the supplied datas.
        if (position == 9) {
            holder.tvKey.setText(datas.get(position));
            holder.tvKey.setBackgroundResource(R.drawable.selector_item_del);
        } else if (position == 11) {
            holder.rlDel.setVisibility(View.VISIBLE);
            holder.tvKey.setVisibility(View.GONE);
        } else {
            holder.tvKey.setText(datas.get(position));
        }
    }
    @Override
    public int getItemCount() {
        return datas == null ? 0 : datas.size();
    }
    // Holder for one key cell: the label view plus the delete-key container.
    class KeyboardHolder extends RecyclerView.ViewHolder {
        public TextView tvKey;
        public RelativeLayout rlDel;
        private View convertView;
        public KeyboardHolder(View itemView) {
            super(itemView);
            convertView = itemView;
            tvKey = itemView.findViewById(R.id.tv_key);
            rlDel = itemView.findViewById(R.id.rl_del);
        }
        public View getconvertView() {
            return convertView;
        }
    }
    // Callback interface for key presses and the delete key.
    public interface OnKeyboardClickListener {
        void onKeyClick(View view, RecyclerView.ViewHolder holder, int position);
        void onDeleteClick(View view, RecyclerView.ViewHolder holder, int position);
    }
    public void setOnKeyboardClickListener(OnKeyboardClickListener listener) {
        this.listener = listener;
    }
}
|
<filename>C2CRIBuildDir/projects/C2C-RI/src/RICenterServices/src/org/fhwa/c2cri/infolayer/InformationLayerController.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.infolayer;
import org.fhwa.c2cri.messagemanager.Message;
/**
* The Interface InformationLayerController provides the methods that must be available
* to control a center from an Information Layer perspective. This layer does not deal with
* encoding or transport, but it still has dialog patterns.
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
public interface InformationLayerController {
    // NOTE(review): "EC"/"OC" below follow C2C RI naming — presumably the
    // external-center and owner-center sides of each dialog pattern; confirm
    // against the project documentation.
    /**
     * Performs the "get" dialog pattern from the EC side (issues the request).
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performGetEC(String dialog) throws Exception;
    /**
     * Performs the "get" dialog pattern from the OC side, replying with the
     * supplied response message.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param responseMessage the response message
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performGetOC(String dialog, Message responseMessage) throws Exception;
    /**
     * Performs the request/response dialog pattern from the EC side, sending
     * the supplied request message.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param requestMessage the request message
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performRequestResponseEC(String dialog, Message requestMessage) throws Exception;
    /**
     * Receives an incoming request on the OC side of the request/response
     * dialog pattern.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performRequestResponseOCReceive(String dialog) throws Exception;
    /**
     * Sends the OC-side response (normal or error) for the request/response
     * dialog pattern.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param responseMessage the response message
     * @param isErrorResponse the is error response
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performRequestResponseOCResponse(String dialog, Message responseMessage, boolean isErrorResponse) throws Exception;
    /**
     * Performs the subscription dialog pattern from the EC side, sending the
     * supplied subscription request.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param requestMessage the request message
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performSubscriptionEC(String dialog, Message requestMessage) throws Exception;
    /**
     * Receives an incoming subscription request on the OC side.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performSubscriptionOCReceive(String dialog) throws Exception;
    /**
     * Sends the OC-side response (normal or error) for the subscription
     * dialog pattern.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param responseMessage the response message
     * @param isErrorResponse the is error response
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performSubscriptionOCResponse(String dialog, Message responseMessage, boolean isErrorResponse) throws Exception;
    /**
     * Publishes a message from the OC side of the publication dialog pattern.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param requestMessage the request message
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performPublicationOC(String dialog, Message requestMessage) throws Exception;
    /**
     * Receives an incoming publication on the EC side.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performPublicationECReceive(String dialog) throws Exception;
    /**
     * Sends the EC-side response (normal or error) for the publication dialog
     * pattern.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param dialog the dialog
     * @param responseMessage the response message
     * @param isErrorResponse the is error response
     * @return the information layer operation results
     * @throws Exception the exception
     */
    public InformationLayerOperationResults performPublicationECResponse(String dialog, Message responseMessage, boolean isErrorResponse) throws Exception;
    /**
     * Sets the disable app layer encoding.
     *
     * @param disableAppLayerEncoding the new disable app layer encoding
     */
    public void setDisableAppLayerEncoding(boolean disableAppLayerEncoding);
    /**
     * Shuts the controller down, releasing any resources it holds.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     */
    public void shutdown();
}
|
//
// Created by <NAME> on 26/08/2017.
//
#include "chrono"
#include "Game.h"
#include "EventBatch.h"
#include "GameManager.h"
#include "UdpWorker.h"
#include "easylogging++.h"
#include "Datagram.h"
using namespace std::chrono;
// Runs one complete round: pumps the network until enough ready players have
// connected, then plays the game tick-by-tick at a fixed frame rate while
// continuing to serve datagrams, and finally resets per-round player state.
void GameManager::gameLoop()
{
    do {
        udpWorker->work(*this);
    } while (!canGameStart());
    game = Game(random, turningSpeed, maxx, maxy);
    game.addPlayers(connectedPlayers);
    game.start();
    // in case game is over already
    LOG(INFO) << "Enqueuing new game datagrams";
    broadcastNewDatagrams(game);
    while (game.isInProgress()) {
        // Deadline for this frame; network work fills the remaining time.
        milliseconds endOfFrame =
            duration_cast<milliseconds>(system_clock::now().time_since_epoch()) + milliseconds(frameDurationMs);
        game.tick();
        LOG(INFO) << "Enqueuing tick datagrams";
        broadcastNewDatagrams(game);
        udpWorker->workUntil(endOfFrame, *this);
    }
    resetPlayers();
}
// Parses one client datagram (with or without the trailing player name),
// updates the connection table, and enqueues any event batches the client
// has not yet acknowledged.
void GameManager::processDatagram(const ClientMessage::SelfPacked *buffer, ssize_t length, const sockaddr *socketAddr)
{
    ClientMessage message;
    try {
        if (length == sizeof(ClientMessage::SelfPacked)) {
            message = ClientMessage(*buffer);
        } else {
            // Shorter wire format: fixed header followed by the player name.
            message = ClientMessage(*reinterpret_cast<const ClientMessage::SelfPackedNoName *>(buffer));
            message.setPlayerName(reinterpret_cast<const char *>(buffer) + sizeof(ClientMessage::SelfPackedNoName),
                                  length - sizeof(ClientMessage::SelfPackedNoName));
        }
    } catch (ProtocolException &e) {
        LOG(WARNING) << "Invalid player name: " << e.what();
        // BUGFIX: previously fell through and processed the partially built
        // (default-constructed) message; a malformed datagram must be dropped.
        return;
    }
    updateConnectedPlayers(message, socketAddr);
    auto datagramBatches = getEventBatches(game, message.getNextExpectedEventNo());
    sockaddr_storage sockaddrStorage = Socket::copySockAddrToStorage(socketAddr);
    for (auto &&batch : datagramBatches) {
        udpWorker->enqueue(std::make_unique<Datagram>(batch, sockaddrStorage));
    }
}
// Reconciles an incoming message with the connection table, keyed by a hash
// of the source socket address. Three cases: unknown socket (connect), known
// socket + same session (refresh), known socket + newer session (replace).
// Messages with an older session id than the stored one are ignored.
void GameManager::updateConnectedPlayers(const ClientMessage &message, const sockaddr *socket)
{
    size_t hash = PlayerConnection::getHashFor(socket);
    const auto &entry = connectedPlayers.find(hash);
    if (entry == connectedPlayers.end()) {
        // new socket
        addPlayerConnection(hash, socket, message);
    } else {
        // existing socket
        auto &conn = entry->second;
        if (conn.getSessionId() == message.getSessionId()) {
            // same player, update fields
            LOG(INFO) << "Player connection update; "
                      << " new turn direction " << static_cast<int>(message.getTurnDirection())
                      << " new next expected " << message.getNextExpectedEventNo();
            conn.updateLastActive();
            conn.setTurnDirection(message.getTurnDirection());
            conn.setNextExpectedEvent(message.getNextExpectedEventNo());
        } else if (conn.getSessionId() < message.getSessionId()) {
            // same socket but different session; disconnect old and connect new
            LOG(INFO) << "Player connection replace: greater session id";
            connectedPlayers.erase(hash);
            addPlayerConnection(hash, socket, message);
        } else {
            LOG(INFO) << "Player connection ignore; smaller session id";
        }
    }
    // Opportunistically prune connections that have gone silent.
    activityCheck();
}
// Removes connections inactive for more than 2 seconds. The scan itself is
// rate-limited to at most once every 2 seconds via lastInactivePlayersCheck.
// NOTE(review): lastInactivePlayersCheck is never updated here — presumably
// it should be refreshed after a scan; confirm intended behavior.
void GameManager::activityCheck()
{
    if (lastInactivePlayersCheck + 2 < time(NULL)) {
        time_t t = time(NULL) - 2;
        // standard associative-container erase idiom
        for (auto it = connectedPlayers.cbegin(); it != connectedPlayers.cend() /* not hoisted */; /* no increment */) {
            if (it->second.getLastActive() < t) {
                LOG(WARNING) << "Removing inactive player";
                connectedPlayers.erase(it++);
            } else {
                ++it;
            }
        }
    }
}
// Registers a new connection unless the table is full (42 is the hard-coded
// connection cap) or the requested player name is already taken. A rejected
// connection is silently dropped.
void GameManager::addPlayerConnection(std::size_t hash, const sockaddr *socket, const ClientMessage &message)
{
    if (connectedPlayers.size() < 42 && !isPlayerNameTaken(message.getPlayerName())) {
        PlayerConnection pc(socket, message.getSessionId(), message.getTurnDirection(), message.getPlayerName());
        connectedPlayers.insert(std::make_pair(hash, pc));
        LOG(INFO) << "Player connection added";
    }
}
// Returns true when a non-empty name is already used by some connected
// player. Empty names denote observers, of which any number may coexist,
// so an empty name is never considered taken.
bool GameManager::isPlayerNameTaken(const std::string &name) const
{
    if (name.empty()) {
        return false;
    }
    return std::any_of(connectedPlayers.begin(), connectedPlayers.end(),
                       [&name](const auto &player) { return player.second.getName() == name; });
}
// A round may start once every connected client has signalled readiness and
// at least two named (non-observer) players are present.
bool GameManager::canGameStart()
{
    bool allReady = std::all_of(connectedPlayers.begin(), connectedPlayers.end(),
                                [](const auto &entry) { return entry.second.isReadyForGame(); });
    // Observers have empty names and do not count toward the minimum of two.
    long participatingPlayers = std::count_if(connectedPlayers.begin(), connectedPlayers.end(),
                                              [](const auto &entry) { return !entry.second.getName().empty(); });
    LOG(INFO) << "canGameStart: Players ready: " << allReady << "; participating players: " << participatingPlayers;
    return allReady && participatingPlayers > 1;
}
// Clears per-round state on every connection so the next round starts clean.
void GameManager::resetPlayers()
{
    for (auto &entry : connectedPlayers) {
        entry.second.resetAfterGame();
    }
}
// Packs the event history from startEventNumber onward into batches that
// each fit inside one 512-byte UDP datagram (a uint32_t header plus events).
// Returns an empty vector when there are no new events.
std::vector<EventBatch> GameManager::getEventBatches(const Game &game, uint32_t startEventNumber)
{
    const int MAX_DATAGRAM_SIZE = 512;
    const int SIZEOF_HEADER = sizeof(uint32_t);
    int length = SIZEOF_HEADER;
    std::vector<EventBatch> vector;
    for (uint32_t eventNumber = startEventNumber; eventNumber < game.getEventHistory().size(); ++eventNumber) {
        // BUGFIX: size the event under consideration — previously this read
        // at(startEventNumber) on every iteration, so all events in a batch
        // were assumed to be as long as the batch's first event, producing
        // over- or under-full datagrams.
        uint32_t eventSize = game.getEventHistory().at(eventNumber)->getLength();
        if (length + eventSize > MAX_DATAGRAM_SIZE) {
            // Current event does not fit: flush [startEventNumber, eventNumber)
            // and start a new batch containing this event.
            LOG(INFO) << "getEventBatches: producing a batch of length " << length;
            vector.emplace_back(EventBatch(game, startEventNumber, eventNumber));
            length = SIZEOF_HEADER + eventSize;
            startEventNumber = eventNumber;
        } else {
            length += eventSize;
        }
    }
    // only make an event batch if there any new events
    if (length > SIZEOF_HEADER) {
        LOG(INFO) << "getEventBatches: producing a batch of length " << length;
        uint32_t endEventNo = static_cast<uint32_t>(game.getEventHistory().size());
        vector.emplace_back(EventBatch(game, startEventNumber, endEventNo));
    } else {
        LOG(INFO) << "getEventBatches: no new events to enqueue";
    }
    return vector;
}
// Enqueues every batch to every connected player (observers included).
// NOTE(review): the vector is taken by value, copying all batches per call —
// a const reference would avoid the copy, but changing the signature would
// have to be mirrored in the class declaration.
void GameManager::broadcastDatagrams(std::vector<EventBatch> eventBatches)
{
    for (auto &&eventBatch : eventBatches) {
        for (auto &&player : connectedPlayers) {
            udpWorker->enqueue(std::make_unique<Datagram>(eventBatch, player.second.getSocketStorage()));
        }
    }
}
// Broadcasts only the events produced since the last broadcast, as reported
// by the game's first-new-event watermark.
void GameManager::broadcastNewDatagrams(const Game &game)
{
    LOG(INFO) << "broadcastNewDatagrams: calling getEventBatches";
    auto eventBatches = getEventBatches(game, game.getFirstNewEventNumber());
    broadcastDatagrams(eventBatches);
}
// Constructs the manager and an initial (not yet started) Game. The frame
// duration is derived from roundsPerSecond (integer division).
// NOTE(review): members are initialized in class-declaration order, not in
// the order written here — `game` must be declared after `random`,
// `turningSpeed`, `maxx` and `maxy` for this list to be safe; confirm in the
// header.
GameManager::GameManager(uint32_t maxx, uint32_t maxy, int roundsPerSecond, int turningSpeed,
                         long long seed, std::unique_ptr<IUdpWorker> udpWorker)
    : connectedPlayers(),
      maxx(maxx), maxy(maxy),
      frameDurationMs(1000 / roundsPerSecond),
      turningSpeed(turningSpeed),
      random(seed),
      udpWorker(std::move(udpWorker)),
      game(random, turningSpeed, maxx, maxy),
      lastInactivePlayersCheck(time(NULL))
{}
|
#!/bin/sh
# Check scripts in the I2P source for validity by running with "sh -n
# $SCRIPTNAME". Optionally checks for bashisms if "checkbashisms" is installed.
# Exits 0 if no errors, non-zero otherwise
cd `dirname $0`/../..
FAIL=0
# Only Bourne-compatible scripts should be in this list.
SCRIPTFILES="\
	./apps/desktopgui/bundle-messages.sh \
	./apps/i2psnark/java/bundle-messages.sh \
	./apps/i2psnark/launch-i2psnark \
	./apps/i2ptunnel/java/bundle-messages-proxy.sh \
	./apps/i2ptunnel/java/bundle-messages.sh \
	./apps/routerconsole/java/bundle-messages-news.sh \
	./apps/routerconsole/java/bundle-messages.sh \
	./apps/sam/c/examples/i2p-ping/pinger.sh \
	./apps/susidns/src/bundle-messages.sh \
	./apps/susimail/bundle-messages.sh \
	./core/c/build.sh \
	./core/c/jbigi/build-all.sh \
	./core/c/jbigi/build_jbigi.sh \
	./core/c/jbigi/build.sh \
	./core/c/jbigi/mbuild-all.sh \
	./core/c/jcpuid/build.sh \
	./core/c/mbuild.sh \
	./debian/i2p.config \
	./debian/i2p-doc.preinst \
	./debian/i2p.init \
	./debian/i2p.postinst \
	./debian/i2p.postrm \
	./debian/i2p.preinst \
	./debian/libjbigi-jni.preinst \
	./debian/repack.sh \
	./installer/resources/install_i2p_service_osx.command \
	./installer/resources/install_i2p_service_unix \
	./installer/resources/locale/bundle-messages.sh \
	./installer/resources/makegeoipv6.sh \
	./installer/resources/postinstall.sh \
	./installer/resources/runplain.sh \
	./installer/resources/uninstall_i2p_service_osx.command \
	./installer/resources/uninstall_i2p_service_unix \
	./Slackware/i2p/i2p.SlackBuild \
	./Slackware/i2p/doinst.sh \
	./Slackware/i2p/rc.i2p \
	./tests/scripts/checkcerts.sh \
	./tests/scripts/checkpo.sh \
	./tests/scripts/checkutf8.sh \
	./tests/scripts/checkxml.sh \
	./tests/scripts/testjbigi.sh \
"

for script in $SCRIPTFILES; do
    echo "Checking $script ..."
    # sh -n parses without executing, catching syntax errors only.
    if sh -n "$script" ; then : ; else
        echo "********* FAILED CHECK FOR $script *************"
        FAIL=1
    fi
    # Fix: use 'command -v' directly as the condition. The original
    # 'if $(which ... >/dev/null 2>&1)' relied on an obscure empty-expansion
    # rule for its exit status; this form is POSIX and unambiguous.
    # (Also restored the missing line-continuation after
    # uninstall_i2p_service_osx.command above, for list consistency.)
    if command -v checkbashisms > /dev/null 2>&1 ; then
        checkbashisms "$script"
    fi
done

if [ "$FAIL" != "0" ]
then
    echo "******** At least one file failed check *********"
else
    echo "All files passed"
fi
exit $FAIL
|
<reponame>bsella/brdf
// Doxygen-generated "annotated" class index: each entry is
// [ display name, HTML page, internal reference id ]. Machine-written;
// do not edit by hand.
var annotated_dup =
[
    [ "ChefDevr", "namespace_chef_devr.html", "namespace_chef_devr" ],
    [ "ParameterWindow", "class_parameter_window.html", "class_parameter_window" ],
    [ "RThread", "class_r_thread.html", "class_r_thread" ],
    [ "WaitingSpinnerWidget", "class_waiting_spinner_widget.html", "class_waiting_spinner_widget" ]
];
]; |
# Bootstraps a development machine (Ubuntu/apt assumed): installs Git and
# Ansible (via PPA), clones the dev-setup repository and runs its playbook.
# Fix: abort on the first failed step instead of blindly continuing — a
# failed clone previously still attempted to run the playbook.
set -e

REPO_PATH="$HOME/.dev-setup"
REPO_URL="git@github.com:mskrajnowski/dev-setup.git"

echo "Installing Git and apt-add-repository..."
sudo apt-get install -y git software-properties-common

echo "Adding Ansible PPA..."
sudo apt-add-repository -y ppa:ansible/ansible
sudo apt-get update

echo "Installing Ansible..."
sudo apt-get install -y ansible

echo "Cloning $REPO_URL..."
# Fix: expansions quoted so paths containing spaces cannot word-split.
git clone "$REPO_URL" "$REPO_PATH"

echo "Running Ansible playbook..."
ansible-playbook -K -i "$REPO_PATH/hosts.ini" "$REPO_PATH/setup.yml"

echo "Done!"
|
#!/bin/bash
# Usage: strip-quotes.sh INPUT OUTPUT
# Copies INPUT to OUTPUT, removing the single quotes around values in lines
# of the form KEY='value' (e.g. env/property files). Lines that do not match
# are copied through unchanged.
set -e
INPUT_FILE=$1
OUTPUT_FILE=$2
# -r: extended regex. \1 keeps "KEY=", \2 keeps the now-unquoted value.
sed -r "s/^([^=]+=)'(.*)'$/\1\2/g" "$INPUT_FILE" >"$OUTPUT_FILE"
|
#!/bin/sh
# ** AUTO GENERATED **
# 4.2.1.2 - Ensure logging is configured (Not Scored)
# Appends the CIS-recommended rsyslog rules in a single write. The quoted
# 'EOF' delimiter prevents any expansion, so each line lands verbatim —
# byte-identical to the former per-line echo appends.
cat >> /etc/rsyslog.d/CIS.conf <<'EOF'
*.emerg :omusrmsg:*
mail.* -/var/log/mail
mail.info -/var/log/mail.info
mail.warning -/var/log/mail.warn
mail.err /var/log/mail.err
news.crit -/var/log/news/news.crit
news.err -/var/log/news/news.err
news.notice -/var/log/news/news.notice
*.=warning;*.=err -/var/log/warn
*.crit /var/log/warn
*.*;mail.none;news.none -/var/log/messages
local0,local1.* -/var/log/localmessages
local2,local3.* -/var/log/localmessages
local4,local5.* -/var/log/localmessages
local6,local7.* -/var/log/localmessages
EOF
|
<filename>Exams/JavaScript-Software-University-master/JavaScript-Software-University-master/JavaScript-Applications/Old-Exams/Airline-Administration/scripts/handlers/user-handler.js
// Renders the registration page with the shared header/footer partials.
handlers.getRegister = function (ctx) {
    ctx.loadPartials({
        header: './templates/common/header.hbs',
        footer: './templates/common/footer.hbs'
    })
        // Plain function on purpose: `this` inside .then is the Sammy render
        // context (an arrow function would break this.partial).
        .then(function () {
            this.partial('./templates/register.hbs');
        })
        .catch(function (err) {
            notifications.handleError(err);
        });
};
// Renders the login page with the shared header/footer partials.
handlers.getLogin = function (ctx) {
    ctx.loadPartials({
        header: './templates/common/header.hbs',
        footer: './templates/common/footer.hbs'
    })
        // Plain function on purpose: `this` inside .then is the Sammy render
        // context (an arrow function would break this.partial).
        .then(function () {
            this.partial('./templates/login.hbs');
        })
        .catch(function (err) {
            notifications.handleError(err);
        });
};
// Validates the registration form, registers the user, stores the returned
// session and redirects home on success.
handlers.registerUser = function (ctx) {
    let username = ctx.params.username;
    // BUGFIX: these two reads were redacted placeholders (`<PASSWORD>`),
    // which is a syntax error. Field names inferred from the register form —
    // confirm they match the actual input names.
    let password = ctx.params.password;
    let repeatPassword = ctx.params.repeatPassword;
    if (username.length < 5) {
        notifications.showError('Username should be a string with at least 5 characters long.')
    } else if (password === '' || repeatPassword === '') {
        // Fix: corrected "characer" typo in the user-facing message.
        notifications.showError('Password and repeat password should be at least 1 character long.');
    } else if (password !== repeatPassword) {
        notifications.showError('Passwords must match.');
    } else {
        userService.register(username, password)
            .then((res) => {
                userService.saveSession(res);
                notifications.showInfo('User registration successful.');
                ctx.redirect('#/home');
            })
            .catch(function (err) {
                notifications.handleError(err);
            });
    }
};
// GET #/logout — ends the server session, clears local session data and
// returns to the login page.
handlers.logoutUser = function (ctx) {
    userService.logout()
        .then(() => {
            sessionStorage.clear();
            notifications.showInfo('Logout successful.');
            ctx.redirect('#/login');
        })
        // A rejected logout was previously unhandled (silent unhandled promise
        // rejection); route it through the shared error handler like every
        // other handler in this file.
        .catch(function (err) {
            notifications.handleError(err);
        });
};
// POST #/login — validates the credentials, authenticates via userService
// and redirects home on success.
handlers.loginUser = function (ctx) {
    let username = ctx.params.username;
    let password = ctx.params.pass;
    if (username.length < 5) {
        notifications.showError('Username should be a string with at least 5 characters long.')
    } else if (password === '') {
        notifications.showError('Password and repeat password should be at least 1 characer long.');
    } else {
        userService.login(username, password)
            .then((res) => {
                // Persist the auth session before navigating away.
                userService.saveSession(res);
                notifications.showInfo('Login successful.');
                ctx.redirect('#/home');
            })
            .catch(function (err) {
                notifications.handleError(err);
            });
    }
};
module Api
  module V3
    # Read-only endpoints for a scenario's graph/molecule nodes. Nodes are not
    # stored in a database table, so they are addressed by key rather than id.
    class NodesController < BaseController
      before_action :set_current_scenario
      before_action :find_node, :only => :show

      # GET /api/v3/nodes/:id
      #
      # Returns the node details in JSON format. If the scenario is missing
      # the action returns an empty hash and a 404 status code. Since the
      # nodes now aren't stored in a db table we use the key rather than
      # the id
      #
      def show
        render :json => @node
      end

      # POST /api/v3/nodes/:id/stats
      #
      # Returns a breakdown of some node statistics used by ETLoader to
      # calculate network loads in testing grounds.
      #
      # As node keys can be quite long, and the request may include tens of
      # node keys, the request is to be sent as POST with a JSON payload
      # with the following schema:
      #
      #   { "keys": {
      #     "key1": [ .. ],
      #     "key2": [ .. ],
      #     "...",
      #     "keyN": [ .. ]
      #   }
      def stats
        keys = permitted_params.to_h.fetch(:keys)
        gql = @scenario.gql(prepare: true)

        render json: { nodes: Hash[keys.map do |key, graph_attributes|
          [key, NodeStatsSerializer.new(key.to_sym, gql, graph_attributes)]
        end] }
      end

      # returns the node topology coordinates, using the old
      # node_positions table
      #
      def topology
        topology = TopologySerializer.new(@scenario)
        render :json => topology
      end

      private

      def find_node
        key = params[:id].presence&.to_sym
        gql = @scenario.gql

        present_node = node_from_interface(gql.present, key)

        # Bail out when the key matches no node. The original fell through
        # after rendering, built a serializer around nil and triggered a
        # second render (DoubleRenderError) via the rescue below.
        return render_not_found unless present_node

        @node = NodeSerializer.new(
          present_node,
          node_from_interface(gql.future, key)
        )
      rescue StandardError => e
        render_not_found(errors: [e.message])
      end

      def permitted_params
        params.permit(:scenario_id, keys: {})
      end

      def node_from_interface(interface, key)
        # A key may refer either to the energy graph or the molecules graph.
        interface.graph.node(key) || interface.molecules.node(key)
      end
    end
  end
end
|
import {DOMA, DTEL} from "./objects/";
import {ABAPGit} from "./formats/abapgit";
import {IFile} from "./_ifile";
import {IObject} from "./objects/_iobject";
import {ObjectType} from "./object_types";
import {Format} from "./formats";
import {HTML} from "./formats/html";
import {IOutput} from "./formats/_ioutput";
export class Registry {
private objects: IObject[];
constructor() {
this.objects = [];
}
// assumption: all files are always in the same format and
// all files needed are in the input
public add(files: IFile[]) {
for (const file of files) {
this.addFile(file);
}
}
public addFile(file: IFile): Registry {
// todo, make this stuff dynamic so it works for all formats
const abapgit = new ABAPGit();
const git = abapgit.check(file);
if (git) {
abapgit.getParser(git.type).parse(file, this.findOrCreateObject(git.type, git.name));
}
// todo, add the other input formats here?
if (git === undefined) {
throw new Error("No parser found for file: " + file.getFilename());
}
return this;
}
public remove(_filename: string) {
// todo
}
public findObject(type: ObjectType, name: string): IObject | undefined {
for (const objj of this.objects) {
if (objj.getType() === type && objj.getName() === name) {
return objj;
}
}
return undefined;
}
public findOrCreateObject(type: ObjectType, name: string): IObject {
let obj = this.findObject(type, name);
if (obj !== undefined) {
return obj;
}
// todo, change this to some dynamic stuff
switch (type) {
case ObjectType.DOMA:
obj = new DOMA(name);
break;
case ObjectType.DTEL:
obj = new DTEL(name);
break;
default:
throw new Error("Registry, unknown object type");
}
this.objects.push(obj);
return obj;
}
public output(format: Format): IFile[] {
let ret: IFile[] = [];
let output: IOutput;
switch (format) {
case Format.HTML:
output = new HTML();
break;
case Format.abapGit:
output = new ABAPGit();
break;
default:
throw new Error("Registry output: unknown format");
}
for (const obj of this.objects) {
ret = ret.concat(output.output(obj));
}
return ret;
}
} |
import { IndexedCollection, isIndexedCollection } from '@collectable/core';
/**
 * Checks whether a nested key path exists inside an indexed collection.
 *
 * The original never descended into child collections: `value` was never
 * reassigned, so every path segment was tested against the root, and a path
 * that outran the nesting depth incorrectly reported `true`.
 */
export function hasIn (path: any[], collection: IndexedCollection<any, any>): boolean {
  let i = 0;
  let value: any = collection;
  while(i < path.length && isIndexedCollection(value)) {
    if(!IndexedCollection.has(path[i], value)) return false;
    // Descend one level so the next segment is checked against the child.
    value = IndexedCollection.get(path[i++], value);
  }
  // Only a fully consumed path counts as present; stopping early means the
  // structure is shallower than the path.
  return i === path.length;
}
|
<reponame>SinghYuvi/AngularProject<gh_stars>1-10
export interface Iuser{
name : string;
emailId: string;
} |
<reponame>NickGraeff/launchkey-java
package com.iovation.launchkey.sdk.transport.apachehttp; /**
* Copyright 2017 iovation, Inc.
* <p>
* Licensed under the MIT License.
* You may not use this file except in compliance with the License.
* A copy of the License is located in the "LICENSE.txt" file accompanying
* this file. This file is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.iovation.launchkey.sdk.error.CommunicationErrorException;
import com.iovation.launchkey.sdk.error.InvalidResponseException;
import com.iovation.launchkey.sdk.transport.domain.PublicV3PingGetResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.junit.MockitoJUnitRunner;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
// Unit tests for the transport's public /public/v3/ping GET endpoint.
// The mocked collaborators (transport, httpClient, objectMapper, requestCaptor,
// baseUrl) are provided by ApacheHttpTransportTestBase.
@RunWith(MockitoJUnitRunner.Silent.class)
public class ApacheHttpTransportPublicPingGetTest extends ApacheHttpTransportTestBase {

    @Test
    public void publicPingGetCallsHttpClientWithProperHttpRequestMethod() throws Exception {
        transport.publicV3PingGet();
        // Capture the request handed to the HTTP client and verify the verb.
        ArgumentCaptor<HttpUriRequest> actual = ArgumentCaptor.forClass(HttpUriRequest.class);
        verify(httpClient).execute(actual.capture());
        assertEquals("GET", actual.getValue().getMethod());
    }

    @Test
    public void publicPingGetCallsHttpClientWithProperHttpRequestUri() throws Exception {
        // The ping endpoint must be resolved relative to the configured base URL.
        URI expected = URI.create(baseUrl.concat("/public/v3/ping"));
        transport.publicV3PingGet();
        verify(httpClient).execute(requestCaptor.capture());
        assertEquals(expected, requestCaptor.getValue().getURI());
    }

    @Test
    public void publicPingGetParsesResponseWithProperClass() throws Exception {
        // The response body must be deserialized into PublicV3PingGetResponse.
        transport.publicV3PingGet();
        verify(objectMapper).readValue(any(InputStream.class), eq(PublicV3PingGetResponse.class));
    }

    @Test
    public void publicPingGetReturnsParsedJson() throws Exception {
        // Whatever the mapper produces is returned unchanged to the caller.
        PublicV3PingGetResponse expected = new PublicV3PingGetResponse(new Date());
        when(objectMapper.readValue(any(InputStream.class), any(Class.class))).thenReturn(expected);
        PublicV3PingGetResponse actual = transport.publicV3PingGet();
        assertEquals(expected, actual);
    }

    @Test(expected = CommunicationErrorException.class)
    public void publicPingThrowsCommunicationErrorExceptionWhenHttpClientThrowsIOError() throws Exception {
        // Transport-level I/O failures surface as CommunicationErrorException.
        when(httpClient.execute(any(HttpUriRequest.class))).thenThrow(new IOException());
        transport.publicV3PingGet();
    }

    @Test(expected = InvalidResponseException.class)
    public void publicPingThrowsInvalidResponseExceptionWhenObjectParserThrowsJsonMappingException() throws Exception {
        when(objectMapper.readValue(any(InputStream.class), any(Class.class)))
                .thenThrow(mock(JsonMappingException.class));
        transport.publicV3PingGet();
    }

    @Test(expected = InvalidResponseException.class)
    public void publicPingThrowsInvalidResponseExceptionWhenObjectParserThrowsJsonParseException() throws Exception {
        when(objectMapper.readValue(any(InputStream.class), any(Class.class)))
                .thenThrow(mock(JsonParseException.class));
        transport.publicV3PingGet();
    }
}
<filename>base/base_dataset.py
from torch.utils.data import DataLoader, Dataset
import numpy as np
import torch
class BaseDataset(Dataset):
    r"""
    Base Dataset Object

    Validates that data and labels are the same length and provides
    ``__len__``; subclasses must implement ``__getitem__``.

    Arguments:
        data (np.ndarray): data
        targets (np.ndarray): labels

    Raises:
        ValueError: if ``data`` and ``targets`` differ in length.
    """
    # Declared for subclasses that cache the most recently requested sample.
    idx: int  # requested data index
    x: torch.Tensor
    y: torch.Tensor

    def __init__(self, data: np.ndarray, targets: np.ndarray):
        if len(data) != len(targets):
            raise ValueError(
                "data and targets must be the same length. "
                f"{len(data)} != {len(targets)}"
            )
        self.data = data
        self.targets = targets

    def __len__(self) -> int:
        # Dataset size equals the length of the data container.
        return len(self.data)

    def __getitem__(self, idx: int):
        # Subclasses are responsible for sample retrieval. The message is
        # static, so a plain string is used (the original's f-string had no
        # placeholders).
        raise NotImplementedError("Please verify whether dataset was properly initialized!")
|
#include "TapModel.h"
// Registers a new tap at time t and returns the updated BPM estimate.
// Returns 0.0 for the very first tap, since no interval exists yet.
double TapModel::pushTap (const juce::Time& t)
{
    if (tapPoints.size() == 0)
    {
        tapPoints.push_front (t);
        return 0.0;
    }

    // Maximum BPM difference between the newest interval and the averaged
    // estimate before older history is considered stale.
    constexpr auto bpmDriftTolerance = 50.0;

    const auto prevPoint = tapPoints.front();
    tapPoints.push_front (t);
    // Tempo implied by just the most recent pair of taps.
    const auto lastTapBpm = msToBPM ((tapPoints.front() - prevPoint).inMilliseconds());

    cleanup();
    lastDetectedBPM = estimateBPM();

    if (fabs (lastDetectedBPM - lastTapBpm) > bpmDriftTolerance)
    {
        // remove older points
        // NOTE(review): the bound (tapPoints.size() - 1) is re-evaluated as
        // elements are popped while i grows, so this discards roughly half
        // of the history rather than all but the newest taps — confirm
        // whether that is the intended decay behavior.
        for (auto i = 0; i < (tapPoints.size() - 1); i++)
            tapPoints.pop_back();

        lastDetectedBPM = estimateBPM();
    }

    return lastDetectedBPM;
}
// Averages the BPM of every consecutive tap interval currently stored.
// Returns 0.0 when fewer than two taps exist (no interval to measure).
double TapModel::estimateBPM()
{
    if (tapPoints.size() < 2)
        return 0.0;

    double sum = 0.0;
    // Taps are stored newest-first (push_front), so reverse iteration walks
    // from the oldest tap towards the newest.
    for (auto it = (tapPoints.rbegin() + 1); it != tapPoints.rend(); it++)
    {
        const auto& prev = *(it - 1); // the immediately older tap
        const auto delta = *it - prev;
        const auto curBpm = msToBPM (delta.inMilliseconds());
        sum += curBpm;
    }

    // n taps yield n-1 intervals.
    return sum / (tapPoints.size() - 1);
}
// Maps a tap time onto a pixel column in [0, areaWidth): the oldest stored
// tap is the left edge and the newest is the right edge.
int TapModel::getXforCurrentRange (const juce::Time& t, int areaWidth) const
{
    const auto start = tapPoints.back(); // oldest tap anchors the left edge
    const float range = (tapPoints.front() - start).inMilliseconds();
    const auto pos = t - start;
    // Guard the division: a zero range (fewer than two distinct tap times)
    // falls back to column 1.
    const auto x = juce::roundToInt (range == 0 ? 1 : (pos.inMilliseconds() / range) * (areaWidth - 1));
    jassert (x >= 0 && x < areaWidth);
    return x;
}
// Drops the oldest tap once the history exceeds its capacity. Removing at
// most one element per call is sufficient because taps arrive one at a time
// through pushTap().
void TapModel::cleanup()
{
    if (tapPoints.size() > maxPointsToKeep)
        tapPoints.pop_back();
}
// Forgets all recorded taps; the next pushTap() starts a fresh measurement.
void TapModel::clear()
{
    tapPoints.clear();
}
// Converts an interval between two taps, given in milliseconds, into the
// equivalent tempo in beats per minute.
double TapModel::msToBPM (std::int64_t ms)
{
    const double seconds = ms / 1000.0;
    return 60.0 / seconds;
}
|
// Registry of supported exchange-rate providers, keyed by provider id.
// 'timeout' is presumably the cache lifetime in seconds — TODO confirm
// against the code that consumes these entries.
export default {
  openexchangerates: {
    key: 'openexchangerates',
    title: 'OpenExchangeRates.org',
    icon: 'https://openexchangerates.org/favicon.ico',
    timeout: 3600,
  },
  currencylayer: {
    key: 'currencylayer',
    title: 'CurrencyLayer.com',
    icon: 'https://currencylayer.com/images/icons/currencylayer_shortcut_icon.ico',
    timeout: 3600,
  },
};
|
// Re-export the verify implementation as the package's public entry point.
module.exports = require('./src/verify.js')
|
# Evaluate the 13th checkpoint of the 512+512+512-SWS model on the
# WikiText-103 raw validation split, applying the sentence-shuffling
# augmentation and scoring only the penultimate sixth of each example.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SWS/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SWS/13-1024+0+512-SS-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_first_two_thirds_sixth --eval_function penultimate_sixth_eval
import { Table, Column, Model, HasMany, PrimaryKey } from 'sequelize-typescript';
import { Practice } from "./Practice";
/**
 * Sequelize model for a practice-type lookup row; each type owns the
 * practices that reference it.
 */
@Table
export class PracticeType extends Model<PracticeType> {
  // Primary key — presumably an externally assigned string code; confirm
  // against the seed data.
  @PrimaryKey
  @Column
  id: string;

  // Human-readable label for this practice type.
  @Column
  description: string;

  // One-to-many: practices belonging to this type.
  @HasMany(() => Practice)
  practices: Practice[];
}
|
# Package metadata for SRS (Simple Realtime Server), consumed by the
# build-system's 'package set' DSL.
package set summary "Simple Realtime Server"
package set webpage "https://ossrs.net"
package set git.url "https://github.com/ossrs/srs.git"
package set version "3.0-r3"
package set src.url "https://github.com/ossrs/srs/archive/v${PACKAGE_VERSION}.tar.gz"
package set src.sum "a28a3b152dd51e58ad96caefa71c4310d29adbb0d1623581c13a5521afe04724"
package set license "MIT"
package set bsystem "make"
package set sourced "trunk"
package set binsrcd 'yes'

# Configure and build SRS for the current target architecture.
build() {
    export SRS_EXTRA_FLAGS="$CFLAGS $CPPFLAGS"

    # Map the target architecture onto SRS's configure switches.
    # NOTE(review): i686 being mapped to --x86-64 looks suspicious — confirm
    # against SRS's configure options. Also note the quoted
    # "$CONFIGURE_EXTRA_FLAGS" passes an empty argument to configure when no
    # case matches, and '--randlib' (rather than '--ranlib') is presumably
    # SRS's own spelling — verify both.
    case $TARGET_OS_ARCH in
        armv7a) CONFIGURE_EXTRA_FLAGS=--arm ;;
        aarch64) CONFIGURE_EXTRA_FLAGS=--arm ;;
        i686) CONFIGURE_EXTRA_FLAGS=--x86-64 ;;
        x86_64) CONFIGURE_EXTRA_FLAGS=--x86-x64 ;;
    esac

    run ./configure \
        --prefix="$ABI_INSTALL_DIR" \
        --static \
        "$CONFIGURE_EXTRA_FLAGS" \
        --cc=$CC \
        --cxx=$CXX \
        --ar=$AR \
        --ld=$LD \
        --randlib=$RANLIB &&
    makew clean &&
    makew &&
    makew install
}
|
// Device header file.
#include "stm32h7xx.h"
// Standard library includes.
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
// Linker-script symbols: load/run addresses of the .data, .bss, ITCM and
// DTCM sections, plus the initial stack pointer.
extern uint32_t _sidata, _sdata, _edata, _sbss, _ebss, _siitcm, _sidtcm, _sitcm, _sdtcm, _eitcm, _edtcm, _estack;
// Core system clock speed.
// NOTE(review): presumably the post-reset HSI default — update if the clock
// tree is reconfigured.
uint32_t SystemCoreClock = 64000000;
// Global systick counter.
// volatile: written from the SysTick interrupt, read from main-line code.
volatile uint32_t systick = 0;
// Reset handler: set the stack pointer and branch to main().
// 'naked' suppresses the compiler prologue/epilogue so the hand-written
// assembly below is the entire function body.
__attribute__( ( naked ) ) void reset_handler( void ) {
  // Set the stack pointer to the 'end of stack' value.
  __asm__( "LDR r0, =_estack\n\t"
           "MOV sp, r0" );
  // Branch to main().
  __asm__( "B main" );
}
// SysTick interrupt handler: advances the global millisecond tick.
// NOTE(review): CMSIS startup code usually expects this to be named
// 'SysTick_Handler'; this name only fires if the project supplies a custom
// vector table — confirm against the vector table definition.
void SysTick_IRQn_handler( void ) {
  ++systick;
}
// Simple blocking millisecond delay method.
// Compares elapsed ticks rather than an absolute deadline, so the delay
// remains correct when the 32-bit systick counter wraps (~49.7 days): the
// original 'systick + ms' deadline exited immediately on overflow.
void delay_ms( uint32_t ms ) {
  uint32_t start = systick;
  // Unsigned subtraction yields the correct elapsed count across wrap-around.
  while ( ( uint32_t )( systick - start ) < ms ) { __WFI(); }
}
/**
 * Main program.
 *
 * Brings up the minimal runtime (copies .data, clears .bss, enables the FPU
 * and a 1 ms SysTick), configures three LED pins and blinks them forever.
 */
int main( void ) {
  // TODO: Enable SRAM1, SRAM2, SRAM3.
  // Enable GPIOC, GPIOD peripherals.
  RCC->AHB4ENR |= ( RCC_AHB4ENR_GPIOCEN |
                    RCC_AHB4ENR_GPIODEN );
  // Copy initialized data from .sidata (Flash) to .data (RAM)
  memcpy( &_sdata, &_sidata, ( ( void* )&_edata - ( void* )&_sdata ) );
  // Clear the .bss section in RAM.
  memset( &_sbss, 0x00, ( ( void* )&_ebss - ( void* )&_sbss ) );
  // Enable floating-point unit.
  // CPACR bits 20-23 grant full access to coprocessors CP10/CP11 (the FPU).
  SCB->CPACR |= ( 0xF << 20 );
  // Configure SysTick to trigger every ms.
  SysTick_Config( SystemCoreClock / 1000 );
  // Set LED pins (C6, C7, D12) to push-pull output mode.
  // Clear both MODER bits per pin, then set the low bit (01b = output).
  GPIOC->MODER &= ~( ( 3 << ( 6 * 2 ) ) | ( 3 << ( 7 * 2 ) ) );
  GPIOD->MODER &= ~( 3 << ( 12 * 2 ) );
  GPIOC->MODER |= ( ( 1 << ( 6 * 2 ) ) | ( 1 << ( 7 * 2 ) ) );
  GPIOD->MODER |= ( 1 << ( 12 * 2 ) );
  // Pull LED3 high, LED1 / LED2 low.
  GPIOC->ODR &= ~( 1 << 6 );
  GPIOC->ODR |= ( 1 << 7 );
  GPIOD->ODR &= ~( 1 << 12 );
  // Done; blink the LEDs every half-second.
  while( 1 ) {
    delay_ms( 500 );
    GPIOC->ODR ^= ( ( 1 << 6 ) | ( 1 << 7 ) );
    GPIOD->ODR ^= ( 1 << 12 );
  }
  return 0; // lol
}
|
<gh_stars>10-100
/*=============================================================================
Boost.Wave: A Standard compliant C++ preprocessor library
http://www.boost.org/
Copyright (c) 2001-2005 <NAME>. Distributed under the Boost
Software License, Version 1.0. (See accompanying file
LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
#if !defined(CPP_EXCEPTIONS_HPP_5190E447_A781_4521_A275_5134FF9917D7_INCLUDED)
#define CPP_EXCEPTIONS_HPP_5190E447_A781_4521_A275_5134FF9917D7_INCLUDED
#include <exception>
#include <string>
#include <boost/assert.hpp>
#include <boost/config.hpp>
#include <boost/wave/wave_config.hpp>
///////////////////////////////////////////////////////////////////////////////
// helper macro for throwing exceptions
//
// Both variants build a "<severity>: <error text>[: <msg>]" message and throw
// an exception of type 'cls' carrying the source position of 'act_pos'.
#if !defined(BOOST_WAVE_THROW)
// Fallback for standard libraries without <sstream>: uses the deprecated
// <strstream>, whose buffer must be unfrozen before destruction.
#ifdef BOOST_NO_STRINGSTREAM
#include <strstream>
#define BOOST_WAVE_THROW(cls, code, msg, act_pos) \
    { \
    using namespace boost::wave; \
    std::strstream stream; \
    stream << cls::severity_text(cls::code) << ": " \
        << cls::error_text(cls::code); \
    if ((msg)[0] != 0) stream << ": " << (msg); \
    stream << std::ends; \
    std::string throwmsg = stream.str(); stream.freeze(false); \
    throw cls(throwmsg.c_str(), cls::code, (act_pos).get_line(), \
        (act_pos).get_column(), (act_pos).get_file().c_str()); \
    } \
    /**/
#else
#include <sstream>
#define BOOST_WAVE_THROW(cls, code, msg, act_pos) \
    { \
    using namespace boost::wave; \
    std::stringstream stream; \
    stream << cls::severity_text(cls::code) << ": " \
        << cls::error_text(cls::code); \
    if ((msg)[0] != 0) stream << ": " << (msg); \
    stream << std::ends; \
    throw cls(stream.str().c_str(), cls::code, (act_pos).get_line(), \
        (act_pos).get_column(), (act_pos).get_file().c_str()); \
    } \
    /**/
#endif // BOOST_NO_STRINGSTREAM
#endif // BOOST_WAVE_THROW
///////////////////////////////////////////////////////////////////////////////
namespace boost {
namespace wave {
///////////////////////////////////////////////////////////////////////////////
// exception severity
namespace util {

    // Severity classification shared by all Wave diagnostics; ordered from
    // least to most severe so numeric comparison is meaningful.
    enum severity {
        severity_remark = 0,
        severity_warning,
        severity_error,
        severity_fatal,
        severity_commandline_error
    };

    // Maps a severity level to its human-readable name.
    inline char const *
    get_severity(severity level)
    {
        static char const *severity_text[] =
        {
            "remark",               // severity_remark
            "warning",              // severity_warning
            "error",                // severity_error
            "fatal error",          // severity_fatal
            "command line error"    // severity_commandline_error
        };
        BOOST_ASSERT(severity_remark <= level &&
            level <= severity_commandline_error);
        return severity_text[level];
    }
}
///////////////////////////////////////////////////////////////////////////////
// cpp_exception, the base class for all specific C preprocessor exceptions
// Base class for all specific C preprocessor exceptions. Stores the source
// position (line, column, file name) at which the error occurred.
class cpp_exception
:   public std::exception
{
public:
    cpp_exception(int line_, int column_, char const *filename_) throw()
    :   line(line_), column(column_)
    {
        unsigned int off = 0;
        // Copy at most sizeof(filename)-1 characters so the terminating NUL
        // below never writes past the buffer: the original bound of
        // sizeof(filename) allowed off to reach 512, making filename[off]
        // an out-of-bounds store for long file names.
        while (off < sizeof(filename) - 1 && *filename_)
            filename[off++] = *filename_++;
        filename[off] = 0;
    }
    ~cpp_exception() throw() {}

    virtual char const *what() const throw() = 0;        // to be overloaded
    virtual char const *description() const throw() = 0;

    int line_no() const throw() { return line; }
    int column_no() const throw() { return column; }
    char const *file_name() const throw() { return filename; }

protected:
    char filename[512];   // truncated copy of the offending file's name
    int line;
    int column;
};
///////////////////////////////////////////////////////////////////////////////
// preprocessor error
// Concrete preprocessor diagnostic: pairs an error_code with a formatted
// message, a severity level and the source position inherited from
// cpp_exception.
class preprocess_exception :
    public cpp_exception
{
public:
    enum error_code {
        unexpected_error = 0,
        macro_redefinition,
        macro_insertion_error,
        bad_include_file,
        bad_include_statement,
        ill_formed_directive,
        error_directive,
        warning_directive,
        ill_formed_expression,
        missing_matching_if,
        missing_matching_endif,
        ill_formed_operator,
        bad_define_statement,
        too_few_macroarguments,
        too_many_macroarguments,
        empty_macroarguments,
        improperly_terminated_macro,
        bad_line_statement,
        bad_undefine_statement,
        bad_macro_definition,
        illegal_redefinition,
        duplicate_parameter_name,
        invalid_concat,
        last_line_not_terminated,
        ill_formed_pragma_option,
        include_nesting_too_deep,
        misplaced_operator,
        alreadydefined_name,
        undefined_macroname,
        invalid_macroname,
        unexpected_qualified_name,
        division_by_zero,
        integer_overflow,
        illegal_operator_redefinition,
        ill_formed_integer_literal,
        ill_formed_character_literal,
        unbalanced_if_endif,
        character_literal_out_of_range
    };

    preprocess_exception(char const *what_, error_code code, int line_,
        int column_, char const *filename_) throw()
    :   cpp_exception(line_, column_, filename_), level(severity_level(code))
    {
        unsigned int off = 0;
        // Copy at most sizeof(buffer)-1 characters: the original bound of
        // sizeof(buffer) let 'off' reach 512, so the terminating store below
        // wrote one byte past the end of 'buffer' for long messages.
        while (off < sizeof(buffer) - 1 && *what_)
            buffer[off++] = *what_++;
        buffer[off] = 0;
    }
    ~preprocess_exception() throw() {}

    virtual char const *what() const throw()
    {
        return "boost::wave::preprocess_exception";
    }
    virtual char const *description() const throw()
    {
        return buffer;
    }
    util::severity get_severity()
    {
        return level;
    }

    static char const *error_text(int code)
    {
        // error texts in this array must appear in the same order as the items in
        // the error enum above
        static char const *preprocess_exception_errors[] = {
            "unexpected error (should not happen)",      // unexpected_error
            "illegal macro redefinition",                // macro_redefinition
            "macro definition failed (out of memory?)",  // macro_insertion_error
            "could not find include file",               // bad_include_file
            "ill formed #include directive",             // bad_include_statement
            "ill formed preprocessor directive",         // ill_formed_directive
            "encountered #error directive or #pragma wave stop()", // error_directive
            "encountered #warning directive",            // warning_directive
            "ill formed preprocessor expression",        // ill_formed_expression
            "the #if for this directive is missing",     // missing_matching_if
            "detected at least one missing #endif directive", // missing_matching_endif
            "ill formed preprocessing operator",         // ill_formed_operator
            "ill formed #define directive",              // bad_define_statement
            "too few macro arguments",                   // too_few_macroarguments
            "too many macro arguments",                  // too_many_macroarguments
            "empty macro arguments are not supported in pure C++ mode, "
            "use variadics mode to allow these",         // empty_macroarguments
            "improperly terminated macro invocation "
            "or replacement-list terminates in partial "
            "macro expansion (not supported yet)",       // improperly_terminated_macro
            "ill formed #line directive",                // bad_line_statement
            "#undef may not be used on this predefined name", // bad_undefine_statement
            "invalid macro definition",                  // bad_macro_definition
            "this predefined name may not be redefined", // illegal_redefinition
            "duplicate macro parameter name",            // duplicate_parameter_name
            "pasting the following two tokens does not "
            "give a valid preprocessing token",          // invalid_concat
            "last line of file ends without a newline",  // last_line_not_terminated
            "unknown or illformed pragma option",        // ill_formed_pragma_option
            "include files nested too deep",             // include_nesting_too_deep
            "misplaced operator defined()",              // misplaced_operator
            "the name is already used in this scope as "
            "a macro or scope name",                     // alreadydefined_name
            "undefined macro or scope name may not be imported", // undefined_macroname
            "ill formed macro name",                     // invalid_macroname
            "qualified names are supported in C++0x mode only", // unexpected_qualified_name
            "division by zero in preprocessor expression", // division_by_zero
            "integer overflow in preprocessor expression", // integer_overflow
            "this macro name cannot be used as a as it is an operator in C++", // illegal_operator_redefinition
            "ill formed integer literal or integer constant too large", // ill_formed_integer_literal
            "ill formed character literal",              // ill_formed_character_literal
            "unbalanced #if/#endif in include file",     // unbalanced_if_endif
            "character literal out of range"             // character_literal_out_of_range
        };
        BOOST_ASSERT(unexpected_error <= code &&
            code <= character_literal_out_of_range);
        return preprocess_exception_errors[code];
    }

    static util::severity severity_level(int code)
    {
        // severities in this array must appear in the same order as the items
        // in the error enum above
        static util::severity preprocess_exception_severity[] = {
            util::severity_fatal,               // unexpected_error
            util::severity_warning,             // macro_redefinition
            util::severity_fatal,               // macro_insertion_error
            util::severity_error,               // bad_include_file
            util::severity_error,               // bad_include_statement
            util::severity_error,               // ill_formed_directive
            util::severity_fatal,               // error_directive
            util::severity_warning,             // warning_directive
            util::severity_error,               // ill_formed_expression
            util::severity_error,               // missing_matching_if
            util::severity_error,               // missing_matching_endif
            util::severity_error,               // ill_formed_operator
            util::severity_error,               // bad_define_statement
            util::severity_warning,             // too_few_macroarguments
            util::severity_warning,             // too_many_macroarguments
            util::severity_warning,             // empty_macroarguments
            util::severity_error,               // improperly_terminated_macro
            util::severity_warning,             // bad_line_statement
            util::severity_warning,             // bad_undefine_statement
            util::severity_commandline_error,   // bad_macro_definition
            util::severity_warning,             // illegal_redefinition
            util::severity_error,               // duplicate_parameter_name
            util::severity_error,               // invalid_concat
            util::severity_warning,             // last_line_not_terminated
            util::severity_warning,             // ill_formed_pragma_option
            util::severity_fatal,               // include_nesting_too_deep
            util::severity_error,               // misplaced_operator
            util::severity_error,               // alreadydefined_name
            util::severity_error,               // undefined_macroname
            util::severity_error,               // invalid_macroname
            util::severity_error,               // unexpected_qualified_name
            util::severity_fatal,               // division_by_zero
            util::severity_error,               // integer_overflow
            util::severity_error,               // illegal_operator_redefinition
            util::severity_error,               // ill_formed_integer_literal
            util::severity_error,               // ill_formed_character_literal
            util::severity_warning,             // unbalanced_if_endif
            util::severity_warning              // character_literal_out_of_range
        };
        BOOST_ASSERT(unexpected_error <= code &&
            code <= character_literal_out_of_range);
        return preprocess_exception_severity[code];
    }
    static char const *severity_text(int code)
    {
        return util::get_severity(severity_level(code));
    }

private:
    char buffer[512];       // truncated copy of the formatted message
    util::severity level;
};
///////////////////////////////////////////////////////////////////////////////
} // namespace wave
} // namespace boost
#endif // !defined(CPP_EXCEPTIONS_HPP_5190E447_A781_4521_A275_5134FF9917D7_INCLUDED)
|
package com.example.wy.tickto.leftmenu;
import android.content.Intent;
import android.content.SharedPreferences;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.example.wy.tickto.R;
import com.example.wy.tickto.login.LoginActivity;
import com.example.wy.tickto.user_ins.user_instructions;
// Settings screen: links to the user instructions, switches back to the
// login screen, and clears the cached login data.
public class SetActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_set);

        // "User instructions" entry: opens the instructions screen.
        TextView user_use = (TextView)findViewById(R.id.user_use);
        user_use.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(SetActivity.this, user_instructions.class);
                startActivity(intent);
            }
        });

        // "Switch user" entry: return to the login screen.
        TextView alter_user = (TextView)findViewById(R.id.alter_user);
        alter_user.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent = new Intent(SetActivity.this, LoginActivity.class);
                // CLEAR_TOP finishes the activities between this screen and the
                // login screen on the back stack.
                intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                startActivity(intent);
                finish();
            }
        });

        // "Clear cache" entry: wipes the saved login preferences.
        TextView delete_cache = (TextView)findViewById(R.id.delete_cache);
        delete_cache.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                SharedPreferences preferences = getSharedPreferences("loginInfo", MODE_PRIVATE);
                SharedPreferences.Editor editor = preferences.edit();
                editor.clear();
                editor.commit();
                Log.d("SetActivity","缓存清理!!!!");
                Toast.makeText(SetActivity.this,"清除缓存成功!",Toast.LENGTH_SHORT).show();
            }
        });
    }
}
|
#!/bin/bash
##
## Starts the UDP socket server
## and waits for incoming connections.
##
CMD="java -jar"
# Resolve the jar relative to this script's own location; quoting keeps
# paths containing spaces intact.
BASE_DIR="$(dirname "$0")"
JAR="${BASE_DIR}/../../../target/socket-server-1.0-SNAPSHOT.jar"
# "$@" forwards each caller argument as a separate word; the previous
# unquoted $@ re-split arguments on whitespace.
${CMD} "${JAR}" "$@"
<reponame>DylanSalisbury/advent-of-code-2019<filename>02/util.py
"""Helper functions."""
def string_to_list_of_integers(s):
    """Convert a comma-separated string of numbers into a list of ints."""
    return list(map(int, s.split(',')))
def execute_intcode(l):
    """Run an Intcode program, mutating the list ``l`` in place.

    Opcode 1 adds, opcode 2 multiplies (operands and destination are given
    by position), and 99 halts.

    Raises:
        ValueError: on any other opcode.
    """
    i = 0
    while l[i] != 99:
        if l[i] == 1:
            l[l[i+3]] = l[l[i+1]] + l[l[i+2]]
        elif l[i] == 2:
            l[l[i+3]] = l[l[i+1]] * l[l[i+2]]
        else:
            # str() is required: concatenating str + int raised TypeError,
            # masking the intended ValueError for unknown opcodes.
            raise ValueError('Unknown opcode ' + str(l[i]))
        i += 4
    return
|
<reponame>azu/monorepo-sandbox-shipjs<gh_stars>0
'use strict';

// Pull in the package under test from the workspace root.
const monorepoSandboxX = require('..');

describe('monorepo-sandbox-shipjs-x', () => {
  // Pending spec: an it() without a callback marks the suite as "todo".
  it('needs tests');
});
|
#!/bin/bash
#set -e
# Print the PHP version so container logs show which runtime is active.
php -v
# Prepares $MARIADB_DATA_DIR as MariaDB's data directory: seeds it from
# /var/lib/mysql on first run, then symlinks /var/lib/mysql to it so data
# persists outside the container filesystem.
setup_mariadb_data_dir(){
    test ! -d "$MARIADB_DATA_DIR" && echo "INFO: $MARIADB_DATA_DIR not found. creating ..." && mkdir -p "$MARIADB_DATA_DIR"

    # check if 'mysql' database exists
    if [ ! -d "$MARIADB_DATA_DIR/mysql" ]; then
        echo "INFO: 'mysql' database doesn't exist under $MARIADB_DATA_DIR. So we think $MARIADB_DATA_DIR is empty."
        echo "Copying all data files from the original folder /var/lib/mysql to $MARIADB_DATA_DIR ..."
        cp -R /var/lib/mysql/. $MARIADB_DATA_DIR
    else
        echo "INFO: 'mysql' database already exists under $MARIADB_DATA_DIR."
    fi

    # Replace the default data dir with a symlink to the persistent one.
    rm -rf /var/lib/mysql
    ln -s $MARIADB_DATA_DIR /var/lib/mysql
    chown -R mysql:mysql $MARIADB_DATA_DIR

    # Runtime socket directory for mysqld.
    test ! -d /run/mysqld && echo "INFO: /run/mysqld not found. creating ..." && mkdir -p /run/mysqld
    chown -R mysql:mysql /run/mysqld
}
# Initializes (if needed) and launches mysqld in the background, waits for
# it to come up, and creates the default 'azurelocaldb' database.
start_mariadb(){
    if test ! -e /run/mysqld/mysqld.sock; then
        touch /run/mysqld/mysqld.sock
    fi
    chmod 777 /run/mysqld/mysqld.sock
    mysql_install_db --user=mysql --basedir=/usr --datadir=/var/lib/mysql
    /usr/bin/mysqld --user=mysql &

    # make sure mysql service is started...
    # Poll port 3306 and the process list, up to 10 tries x 10 s.
    # NOTE(review): if MariaDB never starts, this loop gives up silently after
    # ~100 s and the script continues — confirm that is acceptable.
    port=`netstat -nlt|grep 3306|wc -l`
    process=`ps -ef |grep mysql|grep -v grep |wc -l`
    try_count=1
    while [ $try_count -le 10 ]
    do
        if [ $port -eq 1 ] && [ $process -eq 1 ]; then
            echo "INFO: MariaDB is running... "
            break
        else
            echo "INFO: Haven't found MariaDB Service this time, Wait 10s, try again..."
            sleep 10s
            let try_count+=1
            port=`netstat -nlt|grep 3306|wc -l`
            process=`ps -ef |grep mysql|grep -v grep |wc -l`
        fi
    done

    # create default database 'azurelocaldb'
    mysql -u root -e "CREATE DATABASE IF NOT EXISTS azurelocaldb; FLUSH PRIVILEGES;"
}
#unzip phpmyadmin
# Extracts the bundled phpMyAdmin archive into $PHPMYADMIN_HOME, installs
# its config and wires its nginx location block into the default site.
setup_phpmyadmin(){
    test ! -d "$PHPMYADMIN_HOME" && echo "INFO: $PHPMYADMIN_HOME not found. creating..." && mkdir -p "$PHPMYADMIN_HOME"
    cd $PHPMYADMIN_SOURCE
    tar -xf phpMyAdmin.tar.gz -C $PHPMYADMIN_HOME --strip-components=1
    cp -R phpmyadmin-config.inc.php $PHPMYADMIN_HOME/config.inc.php
    # Inject the phpMyAdmin location block after the marker comment in the
    # nginx site config.
    sed -i "/# Add locations of phpmyadmin here./r $PHPMYADMIN_SOURCE/phpmyadmin-locations.txt" /etc/nginx/conf.d/default.conf
    cd /

    # Outside Azure App Service, the web server user must own the files.
    if [ ! $WEBSITES_ENABLE_APP_SERVICE_STORAGE ]; then
        echo "INFO: NOT in Azure, chown for "$PHPMYADMIN_HOME
        chown -R www-data:www-data $PHPMYADMIN_HOME
    fi
}
# Clones a fresh copy of Moodle into $MOODLE_HOME/moodle, moving any
# previous checkout aside, and checks out the requested branch.
setup_moodle(){
    # Move (not delete) stale checkouts aside; mv is faster than rm.
    while test -d "$MOODLE_HOME/moodle"
    do
        echo "INFO: $MOODLE_HOME/moodle is exist, clean it ..."
        # mv is faster than rm.
        mv $MOODLE_HOME/moodle /home/bak/moodle$(date +%s)
    done

    test ! -d "$MOODLE_HOME" && echo "INFO: $MOODLE_HOME not found. creating ..." && mkdir -p "$MOODLE_HOME"
    cd $MOODLE_HOME

    # Repo/branch are overridable via environment; defaults point at the
    # Azure App Service fork of Moodle.
    GIT_REPO=${GIT_REPO:-https://github.com/azureappserviceoss/moodle-linuxappservice-azure}
    GIT_BRANCH=${GIT_BRANCH:-master}
    echo "INFO: ++++++++++++++++++++++++++++++++++++++++++++++++++:"
    echo "REPO: "$GIT_REPO
    echo "BRANCH: "$GIT_BRANCH
    echo "INFO: ++++++++++++++++++++++++++++++++++++++++++++++++++:"

    echo "INFO: Clone from "$GIT_REPO
    git clone $GIT_REPO $MOODLE_HOME/moodle && cd $MOODLE_HOME/moodle

    if [ "$GIT_BRANCH" != "master" ];then
        echo "INFO: Checkout to "$GIT_BRANCH
        git fetch origin
        git branch --track $GIT_BRANCH origin/$GIT_BRANCH && git checkout $GIT_BRANCH
    fi

    # Overlay the image's patched installer library.
    cp -rf $MOODLE_SOURCE/installlib.php $MOODLE_HOME/moodle/lib/installlib.php
    chmod -R 777 $MOODLE_HOME
    if [ ! $WEBSITES_ENABLE_APP_SERVICE_STORAGE ]; then
        echo "INFO: NOT in Azure, chown for "$MOODLE_HOME
        chown -R www-data:www-data $MOODLE_HOME
    fi
}
# Fills in default database connection settings for any variable the caller
# did not provide, and exports them for child processes.
# NOTE(review): the hard-coded fallback password is baked into the image —
# confirm it is only ever used for the throwaway local database.
update_db_config(){
    DATABASE_HOST=${DATABASE_HOST:-127.0.0.1}
    DATABASE_NAME=${DATABASE_NAME:-azurelocaldb}
    DATABASE_USERNAME=${DATABASE_USERNAME:-phpmyadmin}
    DATABASE_PASSWORD=${DATABASE_PASSWORD:-MS173m_QN}
    export DATABASE_HOST DATABASE_NAME DATABASE_USERNAME DATABASE_PASSWORD
}
# ============================================================================
# Entrypoint tail: bring up openrc, optionally a local MariaDB + phpMyAdmin,
# install/configure Moodle on first run, wire nginx/php-fpm/ssh, then hand
# control to supervisord.
# ============================================================================
echo "Setup openrc ..." && openrc && touch /run/openrc/softlevel

# Normalize so the "local" comparison below is case-insensitive.
DATABASE_TYPE=$(echo ${DATABASE_TYPE}|tr '[A-Z]' '[a-z]')

if [ "${DATABASE_TYPE}" == "local" ]; then
    echo "Starting MariaDB and PHPMYADMIN..."
    # Point PHP's mysql/mysqli extensions at the local MariaDB unix socket.
    echo 'mysql.default_socket = /run/mysqld/mysqld.sock' >> $PHP_CONF_FILE
    echo 'mysqli.default_socket = /run/mysqld/mysqld.sock' >> $PHP_CONF_FILE
    #setup MariaDB
    echo "INFO: loading local MariaDB and phpMyAdmin ..."
    echo "Setting up MariaDB data dir ..."
    setup_mariadb_data_dir
    echo "Setting up MariaDB log dir ..."
    test ! -d "$MARIADB_LOG_DIR" && echo "INFO: $MARIADB_LOG_DIR not found. creating ..." && mkdir -p "$MARIADB_LOG_DIR"
    chown -R mysql:mysql $MARIADB_LOG_DIR
    echo "Starting local MariaDB ..."
    start_mariadb
    echo "Granting user for phpMyAdmin ..."
    # Set default values of username/password if they aren't set/null.
    update_db_config
    echo "INFO: ++++++++++++++++++++++++++++++++++++++++++++++++++:"
    echo "phpmyadmin username:" $DATABASE_USERNAME
    echo "phpmyadmin password:" $DATABASE_PASSWORD
    echo "INFO: ++++++++++++++++++++++++++++++++++++++++++++++++++:"
    mysql -u root -e "GRANT ALL ON *.* TO \`$DATABASE_USERNAME\`@'localhost' IDENTIFIED BY '$DATABASE_PASSWORD' WITH GRANT OPTION; FLUSH PRIVILEGES;"
    echo "Installing phpMyAdmin ..."
    setup_phpmyadmin
fi

# setup Moodle
# That config.php doesn't exist means moodle is not installed/configured yet.
if [ ! -e "$MOODLE_HOME/moodle/config.php" ]; then
    echo "INFO: $MOODLE_HOME/moodle/config.php not found."
    echo "Installing Moodle for the first time ..."
    setup_moodle
    if [ ${DATABASE_HOST} ]; then
        echo "INFO: DB Parameters are setted, Update config.php..."
        cd $MOODLE_HOME/moodle && cp $MOODLE_SOURCE/config.php .
        chmod 777 config.php
        if [ ! $WEBSITES_ENABLE_APP_SERVICE_STORAGE ]; then
            # Fix: message previously said "wp-config.php" (WordPress leftover)
            # while the command operates on Moodle's config.php.
            echo "INFO: NOT in Azure, chown for config.php"
            chown -R www-data:www-data config.php
        fi
        # Pin the DB driver in config.php: local MariaDB vs remote MySQL.
        if [ "${DATABASE_TYPE}" == "local" ]; then
            #$CFG->dbtype = 'mariadb';
            sed -i "s/getenv('DATABASE_TYPE')/'mariadb'/g" config.php
        else
            #$CFG->dbtype = 'mysqli';
            sed -i "s/getenv('DATABASE_TYPE')/'mysqli'/g" config.php
        fi
    else
        echo "INFO: DATABASE_HOST isn't exist, please fill parameters during installation!"
    fi
else
    echo "INFO: $MOODLE_HOME/moodle/config.php already exists."
    echo "INFO: You can modify it manually as need."
fi

# php-fpm unix socket consumed by nginx; recreate it with open permissions.
echo "INFO: creating /run/php/php-fpm.sock ..."
test -e /run/php/php-fpm.sock && rm -f /run/php/php-fpm.sock
mkdir -p /run/php
touch /run/php/php-fpm.sock
chown www-data:www-data /run/php/php-fpm.sock
chmod 777 /run/php/php-fpm.sock

# Set Cache path of moodle
mkdir -p $MOODLE_HOME/moodledata/filedir && mkdir -p /var/moodledata
ln -s $MOODLE_HOME/moodledata/filedir /var/moodledata/filedir
chmod -R 777 /var/moodledata && chmod -R 777 $MOODLE_HOME/moodledata/filedir

echo "Starting Redis ..."
redis-server &
echo "Starting Memcached ..."
memcached -u memcache &

if [ ! $WEBSITES_ENABLE_APP_SERVICE_STORAGE ]; then
    echo "NOT in AZURE, Start crond, log rotate..."
    crond
fi

# Supervisor / nginx logging locations and the nginx error page.
test ! -d "$SUPERVISOR_LOG_DIR" && echo "INFO: $SUPERVISOR_LOG_DIR not found. creating ..." && mkdir -p "$SUPERVISOR_LOG_DIR"
test ! -e "$SUPERVISOR_LOG_DIR/supervisord.log" && echo "INFO: $SUPERVISOR_LOG_DIR/supervisord.log not found. creating ..." && touch $SUPERVISOR_LOG_DIR/supervisord.log
chmod 777 $SUPERVISOR_LOG_DIR/supervisord.log
test ! -d "$NGINX_LOG_DIR" && echo "INFO: Log folder for nginx/php not found. creating..." && mkdir -p "$NGINX_LOG_DIR"
test ! -e /home/50x.html && echo "INFO: 50x file not found. createing..." && cp /usr/share/nginx/html/50x.html /home/50x.html

# Persist nginx config under /home: reuse an existing /home/etc/nginx,
# otherwise seed it from the image's /etc/nginx. Either way /etc/nginx
# ends up as a symlink into /home.
test -d "/home/etc/nginx" && mv /etc/nginx /etc/nginx-bak && ln -s /home/etc/nginx /etc/nginx
# Fix: the original tested the RELATIVE path "home/etc/nginx", which (almost)
# never exists, so this branch also ran right after the one above and moved
# the freshly created /etc/nginx symlink INTO /home/etc/nginx. Test the
# absolute path so the two branches are mutually exclusive.
test ! -d "/home/etc/nginx" && mkdir -p /home/etc && mv /etc/nginx /home/etc/nginx && ln -s /home/etc/nginx /etc/nginx

sed -i "s/SSH_PORT/$SSH_PORT/g" /etc/ssh/sshd_config
echo "Starting SSH ..."
echo "Starting php-fpm ..."
echo "Starting Nginx ..."
cd /usr/bin/
# supervisord runs sshd/php-fpm/nginx per /etc/supervisord.conf and becomes PID 1's child.
supervisord -c /etc/supervisord.conf
|
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/observable/interval';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/take';
import { RaceModel } from './models/race.model';
import { PonyWithPositionModel } from './models/pony.model';
import { HttpService } from './http.service';
import { WsService } from './ws.service';
@Injectable()
export class RaceService {
  /**
   * Race-related API access: REST calls go through HttpService,
   * live pony positions stream over WsService.
   */
  constructor(private http: HttpService, private ws: WsService) {}

  /** Lists the races whose status is still PENDING. */
  list(): Observable<Array<RaceModel>> {
    return this.http.get('/api/races?status=PENDING');
  }

  /** Fetches one race by its id. (Fix: param was an implicit `any`.) */
  get(raceId: number): Observable<RaceModel> {
    return this.http.get(`/api/races/${raceId}`);
  }

  /** Places (or replaces) the current user's bet on a pony in the race. */
  bet(raceId: number, ponyId: number): Observable<RaceModel> {
    return this.http.post(`/api/races/${raceId}/bets`, { ponyId });
  }

  /** Cancels the current user's bet on the given race. */
  cancelBet(raceId: number): Observable<any> {
    return this.http.delete(`/api/races/${raceId}/bets`);
  }

  /**
   * Streams the live pony positions for a race over the websocket.
   * Requires the patched `map` operator (`rxjs/add/operator/map`)
   * to be imported, which this file now does.
   */
  live(raceId: number): Observable<Array<PonyWithPositionModel>> {
    return this.ws.connect(`/race/${raceId}`).map(body => body.ponies);
  }
}
|
#!/bin/bash
# Cross-compile the server for linux/amd64 and drop the binary where the
# runtime image expects it (./runtime/bin/server).
GOOS=linux GOARCH=amd64 go build -o ./runtime/bin/server ./server/server.go
require 'spec_helper'
# Specs for HTTP::ResponseBody, the streaming wrapper around a raw response.
describe HTTP::ResponseBody do
# Payload the stubbed response yields before signalling end-of-stream.
let(:body) { 'Hello, world!' }
let(:response) { double(:response) }
subject { described_class.new(response) }
before do
# Stub the underlying response: readpartial first yields the body,
# then nil to signal end-of-stream.
# NOTE(review): two separate should_receive expectations on the same
# message — presumably intended as `.and_return(body, nil)`; confirm the
# second call actually returns nil under this RSpec version.
response.should_receive(:readpartial).and_return(body)
response.should_receive(:readpartial).and_return(nil)
end
it 'streams bodies from responses' do
expect(subject.to_s).to eq body
end
# An empty chunk should make the body report itself as empty.
context 'when body empty' do
let(:body) { '' }
it 'returns responds to empty? with true' do
expect(subject).to be_empty
end
end
end
|
# Evaluate the 512+512+512-NER model on the WikiText-103 raw validation split
# with a 1536-token tokenizer config, augmenting each input by keeping only
# nouns/verbs/adjectives in the first two thirds, and scoring with the
# last-element eval function (batch size 1, incomplete last batch dropped).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-NER/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-NER/1024+0+512-N-VB-ADJ-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_and_adjectives_first_two_thirds_full --eval_function last_element_eval
<reponame>dhruvsha270792/shoppersdealadmin<gh_stars>0
package com.nexusdevs.shoppersdeal.admin.controller;
/**
 * Controller intended to expose product endpoints for the shoppersdeal
 * admin application. Currently an empty placeholder — no handler methods
 * or framework annotations are defined yet.
 */
public class ProductController {
}
|
<reponame>LamiumAmplexicaule/RayTracing<gh_stars>0
package net.henbit.raytracing.nextweek;
import net.henbit.raytracing.Utils;
import net.henbit.raytracing.nextweek.chapter10.Chapter10;
import net.henbit.raytracing.nextweek.chapter2.Chapter2;
import net.henbit.raytracing.nextweek.chapter4.Chapter4;
import net.henbit.raytracing.nextweek.chapter5.Chapter5;
import net.henbit.raytracing.nextweek.chapter6.Chapter6;
import net.henbit.raytracing.nextweek.chapter7.Chapter7;
import net.henbit.raytracing.nextweek.chapter8.Chapter8;
import net.henbit.raytracing.nextweek.chapter9.Chapter9;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import static net.henbit.raytracing.nextweek.RTNextWeek.IMAGE_PATH;
/**
 * Renders every implemented "Ray Tracing: The Next Week" chapter scene
 * (chapters 2 and 4 through 10 — chapter 3 has no renderer), then converts
 * each produced PPM image to PNG.
 */
public class All
{
    public static void main(String[] args) throws IOException
    {
        System.out.println("Chapter2");
        new Chapter2().run();
        System.out.println("Chapter4");
        new Chapter4().run();
        System.out.println("Chapter5");
        new Chapter5().run();
        System.out.println("Chapter6");
        new Chapter6().run();
        System.out.println("Chapter7");
        new Chapter7().run();
        System.out.println("Chapter8");
        new Chapter8().run();
        System.out.println("Chapter9");
        new Chapter9().run();
        System.out.println("Chapter10");
        new Chapter10().run();
        ppm2png();
    }

    /** Converts each rendered chapter's PPM output into a PNG alongside it. */
    private static void ppm2png() throws IOException
    {
        // Same set the renders above produced: 2 and 4..10 (3 is skipped).
        for (final int chapter : new int[] { 2, 4, 5, 6, 7, 8, 9, 10 })
        {
            final BufferedImage image = Utils.loadPPM(IMAGE_PATH + "nextweek-chapter" + chapter + ".ppm");
            ImageIO.write(image, "png", new File(IMAGE_PATH + "nextweek-chapter" + chapter + ".png"));
        }
    }
}
|
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
# Training set: a handful of hand-labelled example emails.
spam_emails = ["here is my offer ...", "Click the link to get free money",
               "Make money easily!"]
non_spam_emails = ["Just a reminder ...", "Hi, how are you?",
                   "Reading your emails ..."]

# Full corpus and its label vector (1 = spam, 0 = non-spam).
x = [*spam_emails, *non_spam_emails]
y = np.concatenate((np.ones(len(spam_emails), dtype=int),
                    np.zeros(len(non_spam_emails), dtype=int)))

# Vectorize with TF-IDF and fit a multinomial Naive Bayes classifier.
vectorizer = TfidfVectorizer()
x_transformed = vectorizer.fit_transform(x)
clf = MultinomialNB().fit(x_transformed, y)
def classify_spam_email(email):
    """Classify a single email string as spam or non-spam.

    Prints the verdict (preserving the original behavior) and additionally
    returns it, so callers can use the result programmatically instead of
    parsing stdout.

    Parameters
    ----------
    email : str
        Raw text of the email to classify.

    Returns
    -------
    str
        Either "Spam email" or "Non-spam email".
    """
    # Reuse the fitted module-level vectorizer/classifier.
    x_test = vectorizer.transform([email])
    prediction = clf.predict(x_test)[0]
    # Label 0 = non-spam, anything else (here only 1) = spam.
    label = "Non-spam email" if prediction == 0 else "Spam email"
    print(label)
    return label
from selenium import webdriver
# Create a new Firefox session; wait up to 30 s for elements to appear,
# and maximize so layout-dependent elements are visible.
driver = webdriver.Firefox()
driver.implicitly_wait(30)
driver.maximize_window()
# navigate to the application home page
driver.get("https://www.example.com/")
# Locate the search box by its form-field name and reset any prefilled text.
# NOTE(review): find_element_by_name / find_elements_by_class_name are the
# Selenium 3 API, removed in Selenium 4 (use find_element(By.NAME, ...)) —
# confirm which Selenium version this project pins.
search_field = driver.find_element_by_name("q")
search_field.clear()
# enter search keyword and submit
search_field.send_keys("shoes")
search_field.submit()
# Collect every element with class "product" on the result page.
product_list = driver.find_elements_by_class_name("product")
# Print the visible text (product name) of each result.
for product in product_list:
print(product.text)
# close the browser window
driver.quit()
<gh_stars>1-10
package noleggioAuto;
/**
 * Feedback left by a user about a rented vehicle.
 * Plain data holder — no accessors or constructors are defined yet.
 */
public class Feedback {
// Free-text content of the feedback.
private String descrizione;
// User who authored the feedback.
private Utente utente;
// Vehicle the feedback refers to.
private Veicolo veicolo;
}
|
#include <stdio.h>
/*
 * One palette entry as stored in the binary input file.
 * Resembles a Windows PALETTEENTRY record but with int-sized colour
 * channels; sizeof(palEntry) (including any struct padding) must match
 * the writer's record layout — TODO confirm against the file producer.
 */
typedef struct {
int peRed;
int peGreen;
int peBlue;
short peFlags;
} palEntry;
/*
 * Read binary palEntry records from inputFileName and write a human-readable
 * description of each to outputFileName.
 *
 * A record whose peRed field is -1 is treated as an end-of-data marker.
 * peFlags is labelled with its symbolic name when it equals PC_NOCOLLAPSE or
 * PC_RESERVED (Windows palette flags — assumes those constants are visible
 * in this translation unit, e.g. via <windows.h>).
 */
void processPaletteEntries(const char* inputFileName, const char* outputFileName) {
    FILE* inputFile = fopen(inputFileName, "rb");
    FILE* outputFile = fopen(outputFileName, "w");

    if (inputFile == NULL || outputFile == NULL) {
        fprintf(stderr, "Error opening files\n");
        /* Bug fix: close whichever stream DID open instead of leaking it. */
        if (inputFile != NULL) {
            fclose(inputFile);
        }
        if (outputFile != NULL) {
            fclose(outputFile);
        }
        return;
    }

    palEntry entry;
    while (fread(&entry, sizeof(palEntry), 1, inputFile) == 1) {
        if (entry.peRed == -1) {
            break; /* End of file marker */
        }
        fprintf(outputFile, "Color: R=%d G=%d B=%d\n", entry.peRed, entry.peGreen, entry.peBlue);
        switch (entry.peFlags) {
            case PC_NOCOLLAPSE:
                fprintf(outputFile, "\tFlag Value: %d %s\n", (short)entry.peFlags, "PC_NOCOLLAPSE");
                break;
            case PC_RESERVED:
                fprintf(outputFile, "\tFlag Value: %d %s\n", (short)entry.peFlags, "PC_RESERVED");
                break;
            default:
                fprintf(outputFile, "\tFlag Value: %d Unknown\n", (short)entry.peFlags);
                break;
        }
    }

    fclose(inputFile);
    fclose(outputFile);
}
# Group names by their first letter into {initial: [names...]},
# preserving the original list order within each bucket.
names = ['John', 'Jane', 'Julia', 'James', 'Joseph','Jessica']
dictionary = {}
for name in names:
    # setdefault creates the bucket on first sight of an initial,
    # replacing the manual "if key not in dict" membership test.
    dictionary.setdefault(name[0], []).append(name)
print(dictionary)
<reponame>OSWeDev/oswedev<gh_stars>0
import AccessPolicyGroupVO from '../../../shared/modules/AccessPolicy/vos/AccessPolicyGroupVO';
import AccessPolicyVO from '../../../shared/modules/AccessPolicy/vos/AccessPolicyVO';
import PolicyDependencyVO from '../../../shared/modules/AccessPolicy/vos/PolicyDependencyVO';
import APIControllerWrapper from '../../../shared/modules/API/APIControllerWrapper';
import ContextFilterHandler from '../../../shared/modules/ContextFilter/ContextFilterHandler';
import ModuleContextFilter from '../../../shared/modules/ContextFilter/ModuleContextFilter';
import ContextFilterVO from '../../../shared/modules/ContextFilter/vos/ContextFilterVO';
import ContextQueryFieldVO from '../../../shared/modules/ContextFilter/vos/ContextQueryFieldVO';
import ContextQueryVO from '../../../shared/modules/ContextFilter/vos/ContextQueryVO';
import ManualTasksController from '../../../shared/modules/Cron/ManualTasksController';
import ModuleDAO from '../../../shared/modules/DAO/ModuleDAO';
import IRange from '../../../shared/modules/DataRender/interfaces/IRange';
import NumRange from '../../../shared/modules/DataRender/vos/NumRange';
import NumSegment from '../../../shared/modules/DataRender/vos/NumSegment';
import TimeSegment from '../../../shared/modules/DataRender/vos/TimeSegment';
import TSRange from '../../../shared/modules/DataRender/vos/TSRange';
import Dates from '../../../shared/modules/FormatDatesNombres/Dates/Dates';
import IDistantVOBase from '../../../shared/modules/IDistantVOBase';
import MatroidController from '../../../shared/modules/Matroid/MatroidController';
import ModuleTableField from '../../../shared/modules/ModuleTableField';
import ModuleParams from '../../../shared/modules/Params/ModuleParams';
import DefaultTranslationManager from '../../../shared/modules/Translation/DefaultTranslationManager';
import DefaultTranslation from '../../../shared/modules/Translation/vos/DefaultTranslation';
import ModuleTrigger from '../../../shared/modules/Trigger/ModuleTrigger';
import DAG from '../../../shared/modules/Var/graph/dagbase/DAG';
import VarDAGNode from '../../../shared/modules/Var/graph/VarDAGNode';
import ModuleVar from '../../../shared/modules/Var/ModuleVar';
import VarsController from '../../../shared/modules/Var/VarsController';
import SlowVarVO from '../../../shared/modules/Var/vos/SlowVarVO';
import VarCacheConfVO from '../../../shared/modules/Var/vos/VarCacheConfVO';
import VarConfIds from '../../../shared/modules/Var/vos/VarConfIds';
import VarConfVO from '../../../shared/modules/Var/vos/VarConfVO';
import VarDataBaseVO from '../../../shared/modules/Var/vos/VarDataBaseVO';
import VarDataValueResVO from '../../../shared/modules/Var/vos/VarDataValueResVO';
import VOsTypesManager from '../../../shared/modules/VOsTypesManager';
import ConsoleHandler from '../../../shared/tools/ConsoleHandler';
import ObjectHandler from '../../../shared/tools/ObjectHandler';
import RangeHandler from '../../../shared/tools/RangeHandler';
import ThreadHandler from '../../../shared/tools/ThreadHandler';
import ThrottleHelper from '../../../shared/tools/ThrottleHelper';
import StackContext from '../../StackContext';
import ModuleAccessPolicyServer from '../AccessPolicy/ModuleAccessPolicyServer';
import ModuleBGThreadServer from '../BGThread/ModuleBGThreadServer';
import ModuleContextFilterServer from '../ContextFilter/ModuleContextFilterServer';
import ModuleDAOServer from '../DAO/ModuleDAOServer';
import DAOPostCreateTriggerHook from '../DAO/triggers/DAOPostCreateTriggerHook';
import DAOPostDeleteTriggerHook from '../DAO/triggers/DAOPostDeleteTriggerHook';
import DAOPostUpdateTriggerHook from '../DAO/triggers/DAOPostUpdateTriggerHook';
import DAOPreCreateTriggerHook from '../DAO/triggers/DAOPreCreateTriggerHook';
import DAOPreUpdateTriggerHook from '../DAO/triggers/DAOPreUpdateTriggerHook';
import DAOUpdateVOHolder from '../DAO/vos/DAOUpdateVOHolder';
import ForkedTasksController from '../Fork/ForkedTasksController';
import ModuleServerBase from '../ModuleServerBase';
import ModuleServiceBase from '../ModuleServiceBase';
import ModulesManagerServer from '../ModulesManagerServer';
import PerfMonAdminTasksController from '../PerfMon/PerfMonAdminTasksController';
import PerfMonConfController from '../PerfMon/PerfMonConfController';
import PushDataServerController from '../PushData/PushDataServerController';
import VarsdatasComputerBGThread from './bgthreads/VarsdatasComputerBGThread';
import DataSourceControllerBase from './datasource/DataSourceControllerBase';
import NotifVardatasParam from './notifs/NotifVardatasParam';
import VarCronWorkersHandler from './VarCronWorkersHandler';
import VarsComputeController from './VarsComputeController';
import VarsDatasProxy from './VarsDatasProxy';
import VarsDatasVoUpdateHandler from './VarsDatasVoUpdateHandler';
import VarServerControllerBase from './VarServerControllerBase';
import VarsPerfMonServerController from './VarsPerfMonServerController';
import VarsServerCallBackSubsController from './VarsServerCallBackSubsController';
import VarsServerController from './VarsServerController';
import VarsTabsSubsController from './VarsTabsSubsController';
export default class ModuleVarServer extends ModuleServerBase {
public static TASK_NAME_getSimpleVarDataCachedValueFromParam = 'Var.getSimpleVarDataCachedValueFromParam';
public static TASK_NAME_delete_varconf_from_cache = 'Var.delete_varconf_from_cache';
public static TASK_NAME_update_varconf_from_cache = 'Var.update_varconf_from_cache';
public static TASK_NAME_delete_varcacheconf_from_cache = 'Var.delete_varcacheconf_from_cache';
public static TASK_NAME_update_varcacheconf_from_cache = 'Var.update_varcacheconf_from_cache';
public static TASK_NAME_exec_in_computation_hole = 'Var.exec_in_computation_hole';
public static TASK_NAME_wait_for_computation_hole = 'Var.wait_for_computation_hole';
public static TASK_NAME_invalidate_cache_exact_and_parents = 'VarsDatasProxy.invalidate_cache_exact_and_parents';
public static TASK_NAME_invalidate_cache_intersection_and_parents = 'VarsDatasProxy.invalidate_cache_intersection_and_parents';
public static TASK_NAME_invalidate_imports_for_u = 'VarsDatasProxy.invalidate_imports_for_u';
public static TASK_NAME_invalidate_imports_for_c = 'VarsDatasProxy.invalidate_imports_for_c';
public static PARAM_NAME_limit_nb_ts_ranges_on_param_by_context_filter = 'Var.limit_nb_ts_ranges_on_param_by_context_filter';
// Lazily creates and returns the process-wide singleton instance.
public static getInstance() {
if (!ModuleVarServer.instance) {
ModuleVarServer.instance = new ModuleVarServer();
}
return ModuleVarServer.instance;
}
private static instance: ModuleVarServer = null;
public update_varcacheconf_from_cache = ThrottleHelper.getInstance().declare_throttle_with_stackable_args(
this.update_varcacheconf_from_cache_throttled.bind(this), 200, { leading: true, trailing: true });
public update_varconf_from_cache = ThrottleHelper.getInstance().declare_throttle_with_stackable_args(
this.update_varconf_from_cache_throttled.bind(this), 200, { leading: true, trailing: true });
// Private: instances are only created through getInstance(). Registers
// this server module under the shared ModuleVar module name.
private constructor() {
super(ModuleVar.getInstance().name);
}
public async configure() {
let PML__VarsdatasComputerBGThread__do_calculation_run = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsdatasComputerBGThread__do_calculation_run);
let PML__VarServerControllerBase__computeValue = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarServerControllerBase__computeValue);
let PML__VarsDatasProxy__handle_buffer = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__handle_buffer);
let PML__VarsDatasProxy__get_exact_param_from_buffer_or_bdd = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__get_exact_param_from_buffer_or_bdd);
let PML__VarsDatasProxy__prepend_var_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__prepend_var_datas);
let PML__VarsDatasProxy__get_var_datas_or_ask_to_bgthread = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__get_var_datas_or_ask_to_bgthread);
let PML__VarsDatasProxy__append_var_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__append_var_datas);
let PML__VarsDatasProxy__get_exact_params_from_buffer_or_bdd = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__get_exact_params_from_buffer_or_bdd);
let PML__VarsDatasProxy__get_vars_to_compute_from_buffer_or_bdd = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__get_vars_to_compute_from_buffer_or_bdd);
let PML__VarsDatasProxy__update_existing_buffered_older_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__update_existing_buffered_older_datas);
let PML__VarsDatasProxy__get_vars_to_compute_from_bdd = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__get_vars_to_compute_from_bdd);
let PML__VarsDatasProxy__filter_var_datas_by_indexes = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasProxy__filter_var_datas_by_indexes);
let PML__VarsComputeController__compute = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__compute);
let PML__VarsComputeController__cache_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__cache_datas);
let PML__VarsComputeController__deploy_deps = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__deploy_deps);
let PML__VarsComputeController__load_nodes_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__load_nodes_datas);
let PML__VarsComputeController__compute_node = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__compute_node);
let PML__VarsComputeController__create_tree = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__create_tree);
let PML__VarsComputeController__handle_deploy_deps = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__handle_deploy_deps);
let PML__VarsComputeController__try_load_cache_complet = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__try_load_cache_complet);
let PML__VarsComputeController__try_load_cache_partiel = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__try_load_cache_partiel);
let PML__VarsComputeController__get_node_deps = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsComputeController__get_node_deps);
let PML__DataSourcesController__load_node_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__DataSourcesController__load_node_datas);
let PML__DataSourceControllerBase__load_node_data = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__DataSourceControllerBase__load_node_data);
let PML__VarsPerfsController__update_perfs_in_bdd = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsPerfsController__update_perfs_in_bdd);
let PML__VarsDatasVoUpdateHandler__handle_buffer = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasVoUpdateHandler__handle_buffer);
let PML__VarsDatasVoUpdateHandler__invalidate_datas_and_parents = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasVoUpdateHandler__invalidate_datas_and_parents);
let PML__VarsDatasVoUpdateHandler__update_param = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasVoUpdateHandler__update_param);
let PML__VarsDatasVoUpdateHandler__find_invalid_datas_and_push_for_update = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsDatasVoUpdateHandler__find_invalid_datas_and_push_for_update);
let PML__VarsCacheController__partially_clean_bdd_cache = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsCacheController__partially_clean_bdd_cache);
let PML__VarsImportsHandler__load_imports_and_split_nodes = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsImportsHandler__load_imports_and_split_nodes);
let PML__VarsImportsHandler__split_nodes = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsImportsHandler__split_nodes);
let PML__VarsImportsHandler__aggregate_imports_and_remaining_datas = await PerfMonConfController.getInstance().registerPerformanceType(VarsPerfMonServerController.PML__VarsImportsHandler__aggregate_imports_and_remaining_datas);
PerfMonAdminTasksController.getInstance().register_perfmon_pack("VARs", [
PML__VarsdatasComputerBGThread__do_calculation_run,
PML__VarServerControllerBase__computeValue,
PML__VarsDatasProxy__handle_buffer,
PML__VarsDatasProxy__get_exact_param_from_buffer_or_bdd,
PML__VarsDatasProxy__prepend_var_datas,
PML__VarsDatasProxy__get_var_datas_or_ask_to_bgthread,
PML__VarsDatasProxy__append_var_datas,
PML__VarsDatasProxy__get_exact_params_from_buffer_or_bdd,
PML__VarsDatasProxy__get_vars_to_compute_from_buffer_or_bdd,
PML__VarsDatasProxy__update_existing_buffered_older_datas,
PML__VarsDatasProxy__get_vars_to_compute_from_bdd,
PML__VarsDatasProxy__filter_var_datas_by_indexes,
PML__VarsComputeController__compute,
PML__VarsComputeController__cache_datas,
PML__VarsComputeController__deploy_deps,
PML__VarsComputeController__load_nodes_datas,
PML__VarsComputeController__compute_node,
PML__VarsComputeController__create_tree,
PML__VarsComputeController__handle_deploy_deps,
PML__VarsComputeController__try_load_cache_complet,
PML__VarsComputeController__try_load_cache_partiel,
PML__VarsComputeController__get_node_deps,
PML__DataSourcesController__load_node_datas,
PML__DataSourceControllerBase__load_node_data,
PML__VarsPerfsController__update_perfs_in_bdd,
PML__VarsDatasVoUpdateHandler__handle_buffer,
PML__VarsDatasVoUpdateHandler__invalidate_datas_and_parents,
PML__VarsDatasVoUpdateHandler__update_param,
PML__VarsDatasVoUpdateHandler__find_invalid_datas_and_push_for_update,
PML__VarsCacheController__partially_clean_bdd_cache,
PML__VarsImportsHandler__load_imports_and_split_nodes,
PML__VarsImportsHandler__split_nodes,
PML__VarsImportsHandler__aggregate_imports_and_remaining_datas
]);
VarsTabsSubsController.getInstance();
VarsServerCallBackSubsController.getInstance();
ModuleBGThreadServer.getInstance().registerBGThread(VarsdatasComputerBGThread.getInstance());
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({ 'fr-fr': 'Valeur' }, 'var.desc_mode.var_data.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({ 'fr-fr': 'Description' }, 'var.desc_mode.var_description.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({ 'fr-fr': 'Paramètres' }, 'var.desc_mode.var_params.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({ 'fr-fr': 'Dépendances' }, 'var.desc_mode.var_deps.___LABEL___'));
let postCTrigger: DAOPostCreateTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPostCreateTriggerHook.DAO_POST_CREATE_TRIGGER);
let postUTrigger: DAOPostUpdateTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPostUpdateTriggerHook.DAO_POST_UPDATE_TRIGGER);
let postDTrigger: DAOPostDeleteTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPostDeleteTriggerHook.DAO_POST_DELETE_TRIGGER);
let preCTrigger: DAOPreCreateTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPreCreateTriggerHook.DAO_PRE_CREATE_TRIGGER);
let preUTrigger: DAOPreUpdateTriggerHook = ModuleTrigger.getInstance().getTriggerHook(DAOPreUpdateTriggerHook.DAO_PRE_UPDATE_TRIGGER);
// Trigger sur les varcacheconfs pour mettre à jour les confs en cache en même temps qu'on les modifie dans l'outil
postCTrigger.registerHandler(VarCacheConfVO.API_TYPE_ID, this.onCVarCacheConf);
postUTrigger.registerHandler(VarCacheConfVO.API_TYPE_ID, this.onUVarCacheConf);
postDTrigger.registerHandler(VarCacheConfVO.API_TYPE_ID, this.onPostDVarCacheConf);
postCTrigger.registerHandler(VarConfVO.API_TYPE_ID, this.onCVarConf);
postUTrigger.registerHandler(VarConfVO.API_TYPE_ID, this.onUVarConf);
postDTrigger.registerHandler(VarConfVO.API_TYPE_ID, this.onPostDVarConf);
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Calculée'
}, 'var_data.value_type.computed'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Importée'
}, 'var_data.value_type.import'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'TOUT supprimer ? Même les imports ?'
}, 'vars_datas_explorer_actions.delete_cache_and_import_intersection.body.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'ATTENTION'
}, 'vars_datas_explorer_actions.delete_cache_and_import_intersection.title.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Suppression en cours...'
}, 'vars_datas_explorer_actions.delete_cache_and_import_intersection.start.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Suppression terminée'
}, 'vars_datas_explorer_actions.delete_cache_and_import_intersection.ok.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'A tester'
}, 'slow_var.type.needs_test'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Refusée'
}, 'slow_var.type.denied'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'En cours de test'
}, 'slow_var.type.tesing'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Datasources'
}, 'var.desc_mode.var_datasources.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Vider l\'arbre'
}, 'var_desc.clearDag.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Actualiser la HeatMap des deps'
}, 'var_desc.refreshDependenciesHeatmap.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': '<b>Variable</b><hr><ul>'
}, 'VarDataRefComponent.var_data_value_tooltip_prefix.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': '<li>Dernière mise à jour le <b>{formatted_date}</b><br><i>{value}</i></li>'
}, 'VarDataRefComponent.var_data_value_tooltip.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': '<li>Import ou saisie le <b>{formatted_date}</b><br><i>{value}</i></li>'
}, 'VarDataRefComponent.var_data_value_import_tooltip.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Index + Entrée'
}, 'vars_datas_explorer_visualization.param_from_index.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': '</ul>'
}, 'VarDataRefComponent.var_data_value_tooltip_suffix.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'SlowVars'
}, 'menu.menuelements.admin.SlowVarVO.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'En cours de test'
}, 'slow_var.type.testing'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Matroids calculés'
}, 'var.desc_mode.computed_datas_matroids.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Matroids chargés'
}, 'var.desc_mode.loaded_datas_matroids.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Valeur totale des matroids chargés'
}, 'var.desc_mode.loaded_datas_matroids_sum_value.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Variable invalidée, calcul en cours...'
}, 'var.desc_mode.update_var_data.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Invalidation impossible sur un import'
}, 'var.desc_mode.update_var_data.not_allowed_on_imports.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Indicateurs - Objectif'
}, 'fields.labels.ref.module_psa_primes_indicateur.___LABEL____var_objectif_id'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Indicateurs - Réalisé'
}, 'fields.labels.ref.module_psa_primes_indicateur.___LABEL____var_realise_id'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Performance'
}, 'fields.labels.ref.module_var_var_perf.___LABEL____var_id'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Variable lente'
}, 'fields.labels.ref.module_var_slow_var.___LABEL____var_id'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Var conf cache'
}, 'menu.menuelements.admin.VarCacheConfVO.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Nombre de deps'
}, 'var.desc_mode.dependencies_number.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Deps en % de l\'arbre'
}, 'var.desc_mode.dependencies_tree_prct.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': '% de vars enregistrées'
}, 'var_desc_registrations.vardag_registered_prct.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': '% de vars enregistrées / var_id'
}, 'var_desc_registrations.vardag_registered_prct_by_var_id.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Taille de l\'arbre'
}, 'var_desc_registrations.vardag_size.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Données importées/aggrégées'
}, 'var_desc.aggregated_var_datas.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Cache des modifications de VO vidé. Prêt pour le redémarrage'
}, 'force_empty_vars_datas_vo_update_cache.done'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Import ?'
}, 'var_desc.var_data_is_import.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Date màj : {last_update}'
}, 'var_desc.var_data_last_update.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Explication du calcul'
}, 'var_desc.explaination.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Markers'
}, 'var.desc_mode.var_markers.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Actualiser le graph'
}, 'var_desc.create_graph.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'StepByStep'
}, 'var_desc.pause.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Variable'
}, 'var_desc.var_controller_label.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Valeur non formatée'
}, 'var_desc.var_data_label.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Source de données'
}, 'var_desc.var_ds_label.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Supprimer le cache par intersection'
}, 'vars_datas_explorer_actions.delete_cache_intersection.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Afficher les données exactes'
}, 'vars_datas_explorer_actions.get_exact.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Calculer ce paramètre'
}, 'vars_datas_explorer_actions.show_exact.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Afficher les données incluses'
}, 'vars_datas_explorer_actions.get_included.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Afficher les données intersectées'
}, 'vars_datas_explorer_actions.get_intersection.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Invalider le cache par intersection'
}, 'vars_datas_explorer_actions.invalidate_cache_intersection.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Invalider l\'arbre en cache par intersection'
}, 'vars_datas_explorer_actions.invalidate_cache_intersection_and_depstree.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Supprimer le cache et les imports par intersection'
}, 'vars_datas_explorer_actions.delete_cache_and_import_intersection.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Variables'
}, 'vars_datas_explorer_filters.vars_confs.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Performances'
}, 'menu.menuelements.admin.VarPerfVO.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Afficher'
}, 'var_desc_explain.show_help_tooltip.visible.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Masquer'
}, 'var_desc_explain.show_help_tooltip.hidden.___LABEL___'));
DefaultTranslationManager.getInstance().registerDefaultTranslation(new DefaultTranslation({
'fr-fr': 'Explication synthétique publique'
}, 'var_desc.public.___LABEL___'));
// ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_getSimpleVarDataCachedValueFromParam, this.getSimpleVarDataCachedValueFromParam.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_wait_for_computation_hole, this.wait_for_computation_hole.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_delete_varconf_from_cache, this.delete_varconf_from_cache.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_update_varconf_from_cache, this.update_varconf_from_cache.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_delete_varcacheconf_from_cache, this.delete_varcacheconf_from_cache.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_update_varcacheconf_from_cache, this.update_varcacheconf_from_cache.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_exec_in_computation_hole, this.exec_in_computation_hole.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_invalidate_cache_exact_and_parents, this.invalidate_cache_exact_and_parents.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_invalidate_cache_intersection_and_parents, this.invalidate_cache_intersection_and_parents.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_invalidate_imports_for_u, this.invalidate_imports_for_u.bind(this));
ForkedTasksController.getInstance().register_task(ModuleVarServer.TASK_NAME_invalidate_imports_for_c, this.invalidate_imports_for_c.bind(this));
ModuleServiceBase.getInstance().post_modules_installation_hooks.push(() => {
/**
* Ajout des triggers d'invalidation des données de cache en BDD
* - on part de la liste des vars qui ont un cache et des datasources
* api_type_id => les vos des datasources
*/
for (let api_type_id in VarsServerController.getInstance().registered_vars_controller_by_api_type_id) {
postCTrigger.registerHandler(api_type_id, this.invalidate_var_cache_from_vo_cd);
postUTrigger.registerHandler(api_type_id, this.invalidate_var_cache_from_vo_u);
postDTrigger.registerHandler(api_type_id, this.invalidate_var_cache_from_vo_cd);
}
/**
* On ajoute les trigger preC et preU pour mettre à jour l'index bdd avant insert
* api_type_id => les vos des vars datas
*/
for (let api_type_id in VarsServerController.getInstance().varcacheconf_by_api_type_ids) {
preCTrigger.registerHandler(api_type_id, this.prepare_bdd_index_for_c);
preUTrigger.registerHandler(api_type_id, this.prepare_bdd_index_for_u);
// On invalide l'arbre par intersection si on passe un type en import, ou si on change la valeur d'un import, ou si on passe de import à calculé
postCTrigger.registerHandler(api_type_id, this.invalidate_imports_for_c);
postUTrigger.registerHandler(api_type_id, this.invalidate_imports_for_u);
}
VarsServerController.getInstance().init_varcontrollers_dag();
});
ManualTasksController.getInstance().registered_manual_tasks_by_name[ModuleVar.MANUAL_TASK_NAME_force_empty_vars_datas_vo_update_cache] =
VarsDatasVoUpdateHandler.getInstance().force_empty_vars_datas_vo_update_cache;
ManualTasksController.getInstance().registered_manual_tasks_by_name[ModuleVar.MANUAL_TASK_NAME_switch_add_computation_time_to_learning_base] =
VarsdatasComputerBGThread.getInstance().switch_add_computation_time_to_learning_base;
ManualTasksController.getInstance().registered_manual_tasks_by_name[ModuleVar.MANUAL_TASK_NAME_switch_force_1_by_1_computation] =
VarsdatasComputerBGThread.getInstance().switch_force_1_by_1_computation;
await ModuleVarServer.getInstance().load_slowvars();
}
/**
 * Post create/delete trigger: pushes the vo into the invalidation buffer so the impacted
 * vars get recomputed. Buffering is deliberate — a bulk import of, say, 100 invoices on
 * the same day must invalidate the day's revenue var once, not 100 times.
 * Errors are logged and swallowed so a failing invalidation never breaks the DAO trigger chain.
 * @param vo the created or deleted vo
 */
public async invalidate_var_cache_from_vo_cd(vo: IDistantVOBase): Promise<void> {
    try {
        VarsDatasVoUpdateHandler.getInstance().register_vo_cud([vo]);
    } catch (error) {
        const details = `invalidate_var_cache_from_vo:type:${vo._type}:id:${vo.id}:${vo}:${error}`;
        ConsoleHandler.getInstance().error(details);
    }
}
/**
 * Post update trigger: pushes the pre/post update holder into the invalidation buffer so
 * the impacted vars get recomputed. Buffering is deliberate — repeated updates touching the
 * same var must result in a single invalidation, not one per update.
 * Errors are logged and swallowed so a failing invalidation never breaks the DAO trigger chain.
 * @param vo_update_handler holder carrying both pre- and post-update versions of the vo
 */
public async invalidate_var_cache_from_vo_u(vo_update_handler: DAOUpdateVOHolder<IDistantVOBase>): Promise<void> {
    try {
        VarsDatasVoUpdateHandler.getInstance().register_vo_cud([vo_update_handler]);
    } catch (error) {
        const post_vo = vo_update_handler.post_update_vo;
        const details = `invalidate_var_cache_from_vo:type:${post_vo._type}:id:${post_vo.id}:${post_vo}:${error}`;
        ConsoleHandler.getInstance().error(details);
    }
}
/**
 * Post-create trigger on var data types: reacts to a var data being CREATED as an import.
 * Self-forwards to the vars computation bgthread (TASK_NAME_invalidate_imports_for_c) when
 * not already running there; in that case resolve/reject are wired to the remote result.
 * NOTE(review): declared Promise<void> but hand-rolls the Promise executor because
 * resolve/reject must be handed to ForkedTasksController — kept as-is on purpose.
 * @param vo the freshly created var data
 */
public async invalidate_imports_for_c(vo: VarDataBaseVO): Promise<void> {
    return new Promise(async (resolve, reject) => {
        // Returns false when the work was forwarded to the bgthread: nothing more to do locally.
        if (!await ForkedTasksController.getInstance().exec_self_on_bgthread_and_return_value(
            reject,
            VarsdatasComputerBGThread.getInstance().name,
            ModuleVarServer.TASK_NAME_invalidate_imports_for_c,
            resolve,
            vo)) {
            return;
        }
        // If the data is created as an import we must force a recompute; a computed creation has no impact
        if (vo.value_type == VarDataBaseVO.VALUE_TYPE_IMPORT) {
            // When an import is received/updated, everyone must also be informed by notification
            await VarsTabsSubsController.getInstance().notify_vardatas([new NotifVardatasParam([vo])]);
            await VarsServerCallBackSubsController.getInstance().notify_vardatas([vo]);
            // and update the version potentially sitting in the cache right now
            await VarsDatasProxy.getInstance().update_existing_buffered_older_datas([vo]);
            await ModuleVar.getInstance().invalidate_cache_intersection_and_parents([vo]);
        }
        resolve();
    });
}
/**
 * Post-update trigger on var data types: reacts to an import value change or a value_type
 * switch (import <-> computed), either of which invalidates the dependency tree.
 * Self-forwards to the vars computation bgthread (TASK_NAME_invalidate_imports_for_u) when
 * not already running there; in that case resolve/reject are wired to the remote result.
 * @param vo_update_handler holder carrying both pre- and post-update versions of the var data
 */
public async invalidate_imports_for_u(vo_update_handler: DAOUpdateVOHolder<VarDataBaseVO>): Promise<void> {
    return new Promise(async (resolve, reject) => {
        // Returns false when the work was forwarded to the bgthread: nothing more to do locally.
        if (!await ForkedTasksController.getInstance().exec_self_on_bgthread_and_return_value(
            reject,
            VarsdatasComputerBGThread.getInstance().name,
            ModuleVarServer.TASK_NAME_invalidate_imports_for_u,
            resolve,
            vo_update_handler)) {
            return;
        }
        // If an import's value is modified, or the value type changes, the tree must be invalidated
        if ((vo_update_handler.post_update_vo.value_type != vo_update_handler.pre_update_vo.value_type) ||
            ((vo_update_handler.post_update_vo.value_type == VarDataBaseVO.VALUE_TYPE_IMPORT) && (vo_update_handler.post_update_vo.value != vo_update_handler.pre_update_vo.value))) {
            // When an import is received/updated, everyone must also be informed by notification
            await VarsTabsSubsController.getInstance().notify_vardatas([new NotifVardatasParam([vo_update_handler.post_update_vo])]);
            await VarsServerCallBackSubsController.getInstance().notify_vardatas([vo_update_handler.post_update_vo]);
            // and update the version potentially sitting in the cache right now
            await VarsDatasProxy.getInstance().update_existing_buffered_older_datas([vo_update_handler.post_update_vo]);
            await ModuleVar.getInstance().invalidate_cache_intersection_and_parents([vo_update_handler.post_update_vo]);
        }
        resolve();
    });
}
/**
 * Pre-create trigger on var data types: an imported or denied value must carry a timestamp,
 * so default value_ts to "now" when it is missing before the row hits the BDD.
 * @param vo the var data about to be inserted (mutated in place)
 * @returns always true — never blocks the insert
 */
public async prepare_bdd_index_for_c(vo: VarDataBaseVO) {
    const is_import_or_denied =
        (vo.value_type == VarDataBaseVO.VALUE_TYPE_IMPORT) ||
        (vo.value_type == VarDataBaseVO.VALUE_TYPE_DENIED);
    if (is_import_or_denied && !vo.value_ts) {
        vo.value_ts = Dates.now();
    }
    return true;
}
/**
 * Pre-update trigger on var data types: an imported or denied value must carry a timestamp,
 * so default value_ts to "now" on the post-update vo when it is missing before the BDD write.
 * @param vo_update_handler holder whose post_update_vo is mutated in place
 * @returns always true — never blocks the update
 */
public async prepare_bdd_index_for_u(vo_update_handler: DAOUpdateVOHolder<VarDataBaseVO>) {
    const updated_vo = vo_update_handler.post_update_vo;
    const is_import_or_denied =
        (updated_vo.value_type == VarDataBaseVO.VALUE_TYPE_IMPORT) ||
        (updated_vo.value_type == VarDataBaseVO.VALUE_TYPE_DENIED);
    if (is_import_or_denied && !updated_vo.value_ts) {
        updated_vo.value_ts = Dates.now();
    }
    return true;
}
/**
 * MANUAL invalidation request: reloads the exactly-matching var datas from BDD, clears their
 * value timestamp (forcing recompute) and re-queues them. Imports are never invalidated;
 * DENIED entries are re-opened as COMPUTED and their SlowVar blacklist entry is deleted.
 * @param vos the exact matroids to invalidate (invalid matroids are logged and skipped)
 */
public async invalidate_cache_exact(vos: VarDataBaseVO[]) {
    if ((!vos) || (!vos.length)) {
        return;
    }
    // Drop matroids whose fields do not match their declared type.
    vos = vos.filter((vo) => {
        if (!vo.check_param_is_valid(vo._type)) {
            ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
            return false;
        }
        return true;
    });
    // Group by api_type_id so each type is fetched from BDD in one request.
    let vos_by_type_id: { [api_type_id: string]: VarDataBaseVO[] } = {};
    for (let i in vos) {
        let vo = vos[i];
        if (!vos_by_type_id[vo._type]) {
            vos_by_type_id[vo._type] = [];
        }
        vos_by_type_id[vo._type].push(vo);
    }
    for (let api_type_id in vos_by_type_id) {
        let vos_type = vos_by_type_id[api_type_id];
        let bdd_vos: VarDataBaseVO[] = await ModuleDAO.getInstance().getVosByExactMatroids(api_type_id, vos_type, null);
        // Impossible to invalidate an import, but a manual request may re-open a denied entry for recompute
        bdd_vos = bdd_vos.filter((bdd_vo) => (bdd_vo.value_type !== VarDataBaseVO.VALUE_TYPE_IMPORT));
        if (bdd_vos && bdd_vos.length) {
            for (let j in bdd_vos) {
                let bdd_vo = bdd_vos[j];
                // Clearing value_ts marks the data as stale / to recompute.
                bdd_vo.value_ts = null;
                if (bdd_vo.value_type == VarDataBaseVO.VALUE_TYPE_DENIED) {
                    bdd_vo.value_type = VarDataBaseVO.VALUE_TYPE_COMPUTED;
                    // A denied entry has a matching SlowVar blacklist row — remove it so the var can run again.
                    let slowvar: SlowVarVO = await ModuleDAO.getInstance().getNamedVoByName<SlowVarVO>(SlowVarVO.API_TYPE_ID, bdd_vo.index);
                    if (slowvar) {
                        await ModuleDAO.getInstance().deleteVOs([slowvar]);
                    }
                }
            }
            // Persist the invalidated state, then push the datas back into the computation proxy.
            await ModuleDAO.getInstance().insertOrUpdateVOs(bdd_vos);
            await VarsDatasProxy.getInstance().append_var_datas(bdd_vos);
        }
    }
}
/**
 * Invalidates the exactly-matching var datas found in BDD AND their parents in the deps tree.
 * Self-forwards to the vars computation bgthread (TASK_NAME_invalidate_cache_exact_and_parents)
 * when not already running there; in that case resolve/reject are wired to the remote result.
 * @param vos the exact matroids to invalidate (invalid matroids are logged and skipped)
 * @returns true once the invalidation has been handed to VarsDatasVoUpdateHandler
 */
public async invalidate_cache_exact_and_parents(vos: VarDataBaseVO[]): Promise<boolean> {
    return new Promise(async (resolve, reject) => {
        // Returns false when the work was forwarded to the bgthread: nothing more to do locally.
        if (!await ForkedTasksController.getInstance().exec_self_on_bgthread_and_return_value(
            reject,
            VarsdatasComputerBGThread.getInstance().name,
            ModuleVarServer.TASK_NAME_invalidate_cache_exact_and_parents,
            resolve,
            vos)) {
            return;
        }
        if ((!vos) || (!vos.length)) {
            resolve(true);
            return;
        }
        // Drop matroids whose fields do not match their declared type.
        vos = vos.filter((vo) => {
            if (!vo.check_param_is_valid(vo._type)) {
                ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
                return false;
            }
            return true;
        });
        // Group by api_type_id so each type is fetched from BDD in one request.
        let vos_by_type_id: { [api_type_id: string]: VarDataBaseVO[] } = {};
        for (let i in vos) {
            let vo = vos[i];
            if (!vos_by_type_id[vo._type]) {
                vos_by_type_id[vo._type] = [];
            }
            vos_by_type_id[vo._type].push(vo);
        }
        // Re-index the BDD matches by their var index before handing them off.
        let vos_by_index: { [index: string]: VarDataBaseVO } = {};
        for (let api_type_id in vos_by_type_id) {
            let vos_type = vos_by_type_id[api_type_id];
            let bdd_vos: VarDataBaseVO[] = await ModuleDAO.getInstance().getVosByExactMatroids(api_type_id, vos_type, null);
            if (bdd_vos && bdd_vos.length) {
                for (let j in bdd_vos) {
                    let bdd_vo = bdd_vos[j];
                    vos_by_index[bdd_vo.index] = bdd_vo;
                }
            }
        }
        await VarsDatasVoUpdateHandler.getInstance().invalidate_datas_and_parents(vos_by_index);
        resolve(true);
    });
}
/**
 * Invalidates every var data intersecting the given matroids AND their parents in the deps
 * tree. Unlike the "exact" variant, no BDD lookup is done here: the matroids themselves are
 * handed to VarsDatasVoUpdateHandler, which resolves intersections.
 * Self-forwards to the vars computation bgthread
 * (TASK_NAME_invalidate_cache_intersection_and_parents) when not already running there;
 * in that case resolve/reject are wired to the remote result.
 * @param vos the matroids whose intersections must be invalidated (invalid ones are logged and skipped)
 * @returns true once the invalidation has been handed to VarsDatasVoUpdateHandler
 */
public async invalidate_cache_intersection_and_parents(vos: VarDataBaseVO[]): Promise<boolean> {
    return new Promise(async (resolve, reject) => {
        // Returns false when the work was forwarded to the bgthread: nothing more to do locally.
        if (!await ForkedTasksController.getInstance().exec_self_on_bgthread_and_return_value(
            reject,
            VarsdatasComputerBGThread.getInstance().name,
            ModuleVarServer.TASK_NAME_invalidate_cache_intersection_and_parents,
            resolve,
            vos)) {
            return;
        }
        if ((!vos) || (!vos.length)) {
            resolve(true);
            return;
        }
        // Drop matroids whose fields do not match their declared type.
        vos = vos.filter((vo) => {
            if (!vo.check_param_is_valid(vo._type)) {
                ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
                return false;
            }
            return true;
        });
        let vos_by_index: { [index: string]: VarDataBaseVO } = {};
        for (let i in vos) {
            let vo = vos[i];
            vos_by_index[vo.index] = vo;
        }
        // invalidate intersected && parents
        await VarsDatasVoUpdateHandler.getInstance().invalidate_datas_and_parents(vos_by_index);
        resolve(true);
    });
}
/**
 * Empties the whole vars cache in BDD: deletes every COMPUTED row from each cached var data
 * table. Rows with any other value_type (imports, denied) are left untouched.
 */
public async delete_all_cache() {
    const varcacheconfs = VarsServerController.getInstance().varcacheconf_by_api_type_ids;
    for (const api_type_id of Object.keys(varcacheconfs)) {
        const moduletable = VOsTypesManager.getInstance().moduleTables_by_voType[api_type_id];
        const request = 'DELETE from ' + moduletable.full_name + ' where value_type = ' + VarDataBaseVO.VALUE_TYPE_COMPUTED + ';';
        await ModuleDAOServer.getInstance().query(request);
    }
}
/**
 * Deletes from BDD every CACHED (value_type COMPUTED) var data intersecting the given
 * matroids. Imports and denied entries survive — see delete_cache_and_imports_intersection
 * for the variant that removes everything.
 * Handles segmented tables by running the delete on each segment.
 * @param vos the matroids defining the intersection (invalid ones are logged and skipped)
 */
public async delete_cache_intersection(vos: VarDataBaseVO[]) {
    if ((!vos) || (!vos.length)) {
        return;
    }
    for (let i in vos) {
        let vo = vos[i];
        if (!vo.check_param_is_valid(vo._type)) {
            ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
            continue;
        }
        let moduletable_vardata = VOsTypesManager.getInstance().moduleTables_by_voType[vo._type];
        // WHERE clause implementing the matroid intersection for this vo's type.
        let query: string = ModuleDAOServer.getInstance().getWhereClauseForFilterByMatroidIntersection(vo._type, vo, null);
        if (moduletable_vardata.is_segmented) {
            // Segmented table: replay the delete on every known segment.
            let ranges: NumRange[] = ModuleDAOServer.getInstance().get_all_ranges_from_segmented_table(moduletable_vardata);
            await RangeHandler.getInstance().foreach_ranges(ranges, async (segment: number) => {
                let request: string = 'delete from ' + moduletable_vardata.get_segmented_full_name(segment) + ' t where ' +
                    query + ' and value_type=' + VarDataBaseVO.VALUE_TYPE_COMPUTED + ';';
                await ModuleServiceBase.getInstance().db.query(request);
            }, moduletable_vardata.table_segmented_field_segment_type);
        } else {
            let request: string = 'delete from ' + moduletable_vardata.full_name + ' t where ' +
                query + ' and value_type=' + VarDataBaseVO.VALUE_TYPE_COMPUTED + ';';
            await ModuleServiceBase.getInstance().db.query(request);
        }
    }
}
/**
 * Deletes from BDD EVERY var data intersecting the given matroids — cached values AND
 * imports alike (no value_type filter, unlike delete_cache_intersection).
 * Handles segmented tables by running the delete on each segment.
 * @param vos the matroids defining the intersection (invalid ones are logged and skipped)
 */
public async delete_cache_and_imports_intersection(vos: VarDataBaseVO[]) {
    if ((!vos) || (!vos.length)) {
        return;
    }
    for (let i in vos) {
        let vo = vos[i];
        if (!vo.check_param_is_valid(vo._type)) {
            ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
            continue;
        }
        let moduletable_vardata = VOsTypesManager.getInstance().moduleTables_by_voType[vo._type];
        // WHERE clause implementing the matroid intersection for this vo's type.
        let query: string = ModuleDAOServer.getInstance().getWhereClauseForFilterByMatroidIntersection(vo._type, vo, null);
        if (moduletable_vardata.is_segmented) {
            // Segmented table: replay the delete on every known segment.
            let ranges: NumRange[] = ModuleDAOServer.getInstance().get_all_ranges_from_segmented_table(moduletable_vardata);
            await RangeHandler.getInstance().foreach_ranges(ranges, async (segment: number) => {
                let request: string = 'delete from ' + moduletable_vardata.get_segmented_full_name(segment) + ' t where ' +
                    query + ';';
                await ModuleServiceBase.getInstance().db.query(request);
            }, moduletable_vardata.table_segmented_field_segment_type);
        } else {
            let request: string = 'delete from ' + moduletable_vardata.full_name + ' t where ' +
                query + ';';
            await ModuleServiceBase.getInstance().db.query(request);
        }
    }
}
/**
 * Wires this module's public API endpoints to their server-side implementations.
 * Every handler is bound to this instance so 'this' survives the indirection.
 */
public registerServerApiHandlers() {
    const api_wrapper = APIControllerWrapper.getInstance();

    // Historical endpoints, currently disabled:
    // api_wrapper.registerServerApiHandler(ModuleVar.APINAME_INVALIDATE_MATROID, this.invalidate_matroid.bind(this));
    // api_wrapper.registerServerApiHandler(ModuleVar.APINAME_register_matroid_for_precalc, this.register_matroid_for_precalc.bind(this));
    // api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getSimpleVarDataValueSumFilterByMatroids, this.getSimpleVarDataValueSumFilterByMatroids.bind(this));
    // api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getSimpleVarDataCachedValueFromParam, this.getSimpleVarDataCachedValueFromParam.bind(this));
    // api_wrapper.registerServerApiHandler(ModuleVar.APINAME_configureVarCache, this.configureVarCache.bind(this));

    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_register_params, this.register_params.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_unregister_params, this.unregister_params.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_get_var_id_by_names, this.get_var_id_by_names.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getVarControllerVarsDeps, this.getVarControllerVarsDeps.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getVarControllerDSDeps, this.getVarControllerDSDeps.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getParamDependencies, this.getParamDependencies.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getVarParamDatas, this.getVarParamDatas.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getVarParamFromContextFilters, this.getVarParamFromContextFilters.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_getAggregatedVarDatas, this.getAggregatedVarDatas.bind(this));
    // api_wrapper.registerServerApiHandler(ModuleVar.APINAME_invalidate_cache_intersection, this.invalidate_cache_intersection.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_delete_cache_intersection, this.delete_cache_intersection.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_delete_cache_and_imports_intersection, this.delete_cache_and_imports_intersection.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_invalidate_cache_exact, this.invalidate_cache_exact.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_invalidate_cache_exact_and_parents, this.invalidate_cache_exact_and_parents.bind(this));
    api_wrapper.registerServerApiHandler(ModuleVar.APINAME_invalidate_cache_intersection_and_parents, this.invalidate_cache_intersection_and_parents.bind(this));
}
/**
 * Registers this module's cron workers: instantiating the VarCronWorkersHandler singleton
 * is what performs the registration.
 */
public registerCrons(): void {
    VarCronWorkersHandler.getInstance();
}
/**
* On définit les droits d'accès du module
*/
public async registerAccessPolicies(): Promise<void> {
let group: AccessPolicyGroupVO = new AccessPolicyGroupVO();
group.translatable_name = ModuleVar.POLICY_GROUP;
group = await ModuleAccessPolicyServer.getInstance().registerPolicyGroup(group, new DefaultTranslation({
'fr-fr': 'Variables'
}));
let POLICY_FO_ACCESS: AccessPolicyVO = new AccessPolicyVO();
POLICY_FO_ACCESS.group_id = group.id;
POLICY_FO_ACCESS.default_behaviour = AccessPolicyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED_TO_ALL_BUT_ADMIN;
POLICY_FO_ACCESS.translatable_name = ModuleVar.POLICY_FO_ACCESS;
POLICY_FO_ACCESS = await ModuleAccessPolicyServer.getInstance().registerPolicy(POLICY_FO_ACCESS, new DefaultTranslation({
'fr-fr': 'Accès aux Variables sur le front'
}), await ModulesManagerServer.getInstance().getModuleVOByName(this.name));
let desc_mode_access: AccessPolicyVO = new AccessPolicyVO();
desc_mode_access.group_id = group.id;
desc_mode_access.default_behaviour = AccessPolicyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED_TO_ALL_BUT_ADMIN;
desc_mode_access.translatable_name = ModuleVar.POLICY_DESC_MODE_ACCESS;
desc_mode_access = await ModuleAccessPolicyServer.getInstance().registerPolicy(desc_mode_access, new DefaultTranslation({
'fr-fr': 'Accès au "Mode description"'
}), await ModulesManagerServer.getInstance().getModuleVOByName(this.name));
let bo_access: AccessPolicyVO = new AccessPolicyVO();
bo_access.group_id = group.id;
bo_access.default_behaviour = AccessPolicyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED_TO_ALL_BUT_ADMIN;
bo_access.translatable_name = ModuleVar.POLICY_BO_ACCESS;
bo_access = await ModuleAccessPolicyServer.getInstance().registerPolicy(bo_access, new DefaultTranslation({
'fr-fr': 'Administration des vars'
}), await ModulesManagerServer.getInstance().getModuleVOByName(this.name));
let bo_varconf_access: AccessPolicyVO = new AccessPolicyVO();
bo_varconf_access.group_id = group.id;
bo_varconf_access.default_behaviour = AccessPolicyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED_TO_ALL_BUT_ADMIN;
bo_varconf_access.translatable_name = ModuleVar.POLICY_BO_VARCONF_ACCESS;
bo_varconf_access = await ModuleAccessPolicyServer.getInstance().registerPolicy(bo_varconf_access, new DefaultTranslation({
'fr-fr': 'Configuration des types de vars'
}), await ModulesManagerServer.getInstance().getModuleVOByName(this.name));
let access_dependency: PolicyDependencyVO = new PolicyDependencyVO();
access_dependency.default_behaviour = PolicyDependencyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED;
access_dependency.src_pol_id = bo_varconf_access.id;
access_dependency.depends_on_pol_id = bo_access.id;
access_dependency = await ModuleAccessPolicyServer.getInstance().registerPolicyDependency(access_dependency);
let bo_imported_access: AccessPolicyVO = new AccessPolicyVO();
bo_imported_access.group_id = group.id;
bo_imported_access.default_behaviour = AccessPolicyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED_TO_ALL_BUT_ADMIN;
bo_imported_access.translatable_name = ModuleVar.POLICY_BO_IMPORTED_ACCESS;
bo_imported_access = await ModuleAccessPolicyServer.getInstance().registerPolicy(bo_imported_access, new DefaultTranslation({
'fr-fr': 'Configuration des données importées'
}), await ModulesManagerServer.getInstance().getModuleVOByName(this.name));
access_dependency = new PolicyDependencyVO();
access_dependency.default_behaviour = PolicyDependencyVO.DEFAULT_BEHAVIOUR_ACCESS_DENIED;
access_dependency.src_pol_id = bo_imported_access.id;
access_dependency.depends_on_pol_id = bo_access.id;
access_dependency = await ModuleAccessPolicyServer.getInstance().registerPolicyDependency(access_dependency);
}
/**
 * Waits for a "hole" in the vars computation pipeline (no pending work).
 * Intended to be called on the vars computation thread.
 * Resolves true once both the VO create/update/delete buffer and the
 * to-compute buffer/BDD are empty.
 */
public async wait_for_computation_hole(): Promise<boolean> {
return new Promise(async (resolve, reject) => {
// Re-route to the main process when needed — presumably this returns true
// when we are already on the target process and false once the call has
// been forwarded (resolve/reject are then driven remotely). TODO confirm.
if (!await ForkedTasksController.getInstance().exec_self_on_main_process_and_return_value(
reject, VarsServerCallBackSubsController.TASK_NAME_get_vars_datas, resolve)) {
return;
}
let start_time = Dates.now();
let real_start_time = start_time;
// Poll until there is neither a pending VO change nor any var left to compute.
while (
ObjectHandler.getInstance().hasAtLeastOneAttribute(VarsDatasVoUpdateHandler.getInstance().ordered_vos_cud)
||
ObjectHandler.getInstance().hasAtLeastOneAttribute(await VarsDatasProxy.getInstance().get_vars_to_compute_from_buffer_or_bdd(1, 1, 1, 1))
) {
// 10s between polls.
await ThreadHandler.getInstance().sleep(10000);
let actual_time = Dates.now();
// Times are in seconds: warn every 60s that we may be stuck in a loop.
if (actual_time > (start_time + 60)) {
start_time = actual_time;
ConsoleHandler.getInstance().warn('ModuleVarServer:wait_for_computation_hole:Risque de boucle infinie:' + real_start_time + ':' + actual_time);
}
}
resolve(true);
});
}
/**
 * Goal: run a behaviour inside a forced hole in vars execution — wait until
 * the computation pipeline is idle, take the bg-thread semaphore, run cb,
 * then release the semaphore.
 * Intended to run on the vars computation bg-thread; re-routes itself there
 * when invoked from another thread.
 * FIXME (original note): why not also `await ForkedTasksController.getInstance().exec_self_on_main_process_and_return_value(reject, VarsServerCallBackSubsController.TASK_NAME_get_vars_datas, resolve)` ?
 * @param cb behaviour to execute once the pipeline is idle
 * @param interval_sleep_ms polling interval while waiting for the hole
 * @param timeout_ms delay between "possible infinite loop" warnings
 */
public async exec_in_computation_hole(cb: () => {}, interval_sleep_ms: number = 10000, timeout_ms: number = 60000): Promise<boolean> {
return new Promise(async (resolve, reject) => {
// Re-route to the vars computer bg-thread when we are not already on it.
if (!ForkedTasksController.getInstance().exec_self_on_bgthread_and_return_value(
reject,
VarsdatasComputerBGThread.getInstance().name,
ModuleVarServer.TASK_NAME_exec_in_computation_hole,
resolve,
cb, interval_sleep_ms, timeout_ms)) {
return;
}
// FIXME (original note): unclear why this would be needed
// if (!await ForkedTasksController.getInstance().exec_self_on_main_process_and_return_value(
// reject, VarsServerCallBackSubsController.TASK_NAME_get_vars_datas, resolve)) {
// return;
// }
let start_time = Dates.now();
let real_start_time = start_time;
// Busy-wait (with sleeps) until the bg-thread semaphore is free AND no VO
// change nor var computation is pending.
while (
VarsdatasComputerBGThread.getInstance().semaphore ||
ObjectHandler.getInstance().hasAtLeastOneAttribute(VarsDatasVoUpdateHandler.getInstance().ordered_vos_cud)
||
ObjectHandler.getInstance().hasAtLeastOneAttribute(await VarsDatasProxy.getInstance().get_vars_to_compute_from_buffer_or_bdd(1, 1, 1, 1))
) {
await ThreadHandler.getInstance().sleep(interval_sleep_ms);
let actual_time = Dates.now();
// Warn every timeout_ms (converted to seconds) about a possible infinite loop.
if (actual_time > (start_time + (timeout_ms / 1000))) {
start_time = actual_time;
ConsoleHandler.getInstance().warn('ModuleVarServer:exec_in_computation_hole:Risque de boucle infinie:' + real_start_time + ':' + actual_time);
}
}
// Hold the semaphore while cb runs so the computer does not restart under us.
VarsdatasComputerBGThread.getInstance().semaphore = true;
try {
await cb();
} catch (err) {
ConsoleHandler.getInstance().error("ModuleVarServer:exec_in_computation_hole:cb:" + err);
}
VarsdatasComputerBGThread.getInstance().semaphore = false;
resolve(true);
});
}
/**
 * Post-create hook for VarCacheConfVO: broadcasts the new conf to every
 * process so each local registry picks it up.
 */
private async onCVarCacheConf(vcc: VarCacheConfVO) {
    if (vcc) {
        await ForkedTasksController.getInstance().broadexec(ModuleVarServer.TASK_NAME_update_varcacheconf_from_cache, vcc);
    }
}
/**
 * Post-update hook for VarCacheConfVO: broadcasts the updated conf to every
 * process.
 */
private async onUVarCacheConf(vo_update_handler: DAOUpdateVOHolder<VarCacheConfVO>) {
    const updated_conf = vo_update_handler.post_update_vo;
    await ForkedTasksController.getInstance().broadexec(ModuleVarServer.TASK_NAME_update_varcacheconf_from_cache, updated_conf);
}
/**
 * Post-create hook for VarConfVO: broadcasts the new conf to every process.
 */
private async onCVarConf(vcc: VarConfVO) {
    if (vcc) {
        await ForkedTasksController.getInstance().broadexec(ModuleVarServer.TASK_NAME_update_varconf_from_cache, vcc);
    }
}
/**
 * Post-update hook for VarConfVO: broadcasts the updated conf to every
 * process.
 */
private async onUVarConf(vo_update_handler: DAOUpdateVOHolder<VarConfVO>) {
    const updated_conf = vo_update_handler.post_update_vo;
    await ForkedTasksController.getInstance().broadexec(ModuleVarServer.TASK_NAME_update_varconf_from_cache, updated_conf);
}
/**
 * Task handler: refreshes the locally registered var cache confs from a
 * broadcast batch.
 * @param vccs confs accumulated by the throttler
 */
private update_varcacheconf_from_cache_throttled(vccs: VarCacheConfVO[]) {
    // for..of instead of for..in: vccs is an array, and for..in iterates
    // string keys plus any enumerable prototype additions. `|| []` keeps the
    // original no-op behaviour on a null/undefined batch.
    for (const vcc of (vccs || [])) {
        VarsServerController.getInstance().update_registered_varcacheconf(vcc.id, vcc);
    }
}
/**
 * Task handler: drops a var cache conf from this process' registry.
 */
private delete_varcacheconf_from_cache(vcc: VarCacheConfVO) {
    const conf_id = vcc.id;
    VarsServerController.getInstance().delete_registered_varcacheconf(conf_id);
}
/**
 * Task handler: refreshes the locally registered var confs from a broadcast
 * batch.
 * @param vcs confs accumulated by the throttler
 */
private update_varconf_from_cache_throttled(vcs: VarConfVO[]) {
    // for..of instead of for..in: vcs is an array, and for..in iterates
    // string keys plus any enumerable prototype additions. `|| []` keeps the
    // original no-op behaviour on a null/undefined batch.
    for (const vc of (vcs || [])) {
        VarsServerController.getInstance().update_registered_varconf(vc.id, vc);
    }
}
/**
 * Task handler: drops a var conf from this process' registry.
 */
private delete_varconf_from_cache(vc: VarConfVO) {
    const conf_id = vc.id;
    VarsServerController.getInstance().delete_registered_varconf(conf_id);
}
/**
 * Post-delete hook for VarConfVO: tells every process to drop the conf from
 * its registry.
 */
private async onPostDVarConf(vc: VarConfVO) {
    if (vc) {
        await ForkedTasksController.getInstance().broadexec(ModuleVarServer.TASK_NAME_delete_varconf_from_cache, vc);
    }
}
/**
 * Post-delete hook for VarCacheConfVO: tells every process to drop the conf
 * from its registry.
 */
private async onPostDVarCacheConf(vcc: VarCacheConfVO) {
    if (vcc) {
        await ForkedTasksController.getInstance().broadexec(ModuleVarServer.TASK_NAME_delete_varcacheconf_from_cache, vcc);
    }
}
private async get_var_id_by_names(): Promise<VarConfIds> {
let res: VarConfIds = new VarConfIds();
let var_confs: VarConfVO[] = await ModuleDAO.getInstance().getVos<VarConfVO>(VarConfVO.API_TYPE_ID);
res.var_id_by_names = {};
for (let i in var_confs) {
let var_conf = var_confs[i];
res.var_id_by_names[var_conf.name] = var_conf.id;
}
return res;
}
/**
 * Subscribes the requesting socket (resolved through the stack context) to
 * updates on the vardatas matching the given params. If a valid value is
 * already available it is pushed to the client immediately.
 * @param params
 */
private async register_params(params: VarDataBaseVO[]): Promise<void> {
if (!params) {
return;
}
/**
 * First, refuse malformed params (null fields).
 */
params = this.filter_null_fields_params(params);
let uid = StackContext.getInstance().get('UID');
let client_tab_id = StackContext.getInstance().get('CLIENT_TAB_ID');
VarsTabsSubsController.getInstance().register_sub(uid, client_tab_id, params ? params.map((param) => param.index) : []);
/**
 * If valid datas already exist in base, send them right away; otherwise the
 * bgthread is asked to compute them.
 */
let notifyable_vars: VarDataBaseVO[] = [];
let needs_computation: VarDataBaseVO[] = [];
await VarsDatasProxy.getInstance().get_var_datas_or_ask_to_bgthread(params, notifyable_vars, needs_computation);
if (notifyable_vars && notifyable_vars.length) {
let vars_to_notif: VarDataValueResVO[] = [];
notifyable_vars.forEach((notifyable_var) => vars_to_notif.push(new VarDataValueResVO().set_from_vardata(notifyable_var)));
await PushDataServerController.getInstance().notifyVarsDatas(uid, client_tab_id, vars_to_notif);
}
// Legacy per-param implementation kept for reference:
// let promises = [];
// let vars_to_notif: VarDataValueResVO[] = [];
// let needs_var_computation: boolean = false;
// for (let i in params) {
// let param = params[i];
// if (!param.check_param_is_valid(param._type)) {
// ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
// continue;
// }
// // TODO FIXME promises.length
// if (promises.length >= 10) {
// await Promise.all(promises);
// promises = [];
// }
// promises.push((async () => {
// let in_db_data: VarDataBaseVO = await ModuleVarServer.getInstance().get_var_data_or_ask_to_bgthread(param);
// if (!in_db_data) {
// needs_var_computation = true;
// return;
// }
// vars_to_notif.push(new VarDataValueResVO().set_from_vardata(in_db_data));
// })());
// }
// await Promise.all(promises);
// if (vars_to_notif && vars_to_notif.length) {
// await PushDataServerController.getInstance().notifyVarsDatas(uid, client_tab_id, vars_to_notif);
// }
}
/**
 * Drops malformed matroid params: any param with a missing or empty matroid
 * field, or a field containing a null range, is logged and excluded from the
 * returned list.
 */
private filter_null_fields_params(params: VarDataBaseVO[]): VarDataBaseVO[] {
    const res: VarDataBaseVO[] = [];
    for (const i in params) {
        const param = params[i];
        if (!param) {
            continue;
        }
        const matroid_fields = MatroidController.getInstance().getMatroidFields(param._type);
        if (!matroid_fields) {
            continue;
        }
        const has_invalid_field = matroid_fields.some((matroid_field) => {
            const ranges = param[matroid_field.field_id] as IRange[];
            return (!ranges) || (!ranges.length) || (ranges.indexOf(null) >= 0);
        });
        if (has_invalid_field) {
            ConsoleHandler.getInstance().error("Registered wrong Matroid:" + JSON.stringify(param) + ':refused');
            continue;
        }
        res.push(param);
    }
    return res;
}
/**
* Fonction qui demande la suppression de l'abonnement d'un socket (celui par lequel arrive la demande) sur la mise à jour des
* valeurs des vardatas correspondants aux params. Donc on les supprime de l'abonnement et c'est tout
* @param api_param
*/
private async unregister_params(params: VarDataBaseVO[]): Promise<void> {
if (!params) {
return;
}
let uid = StackContext.getInstance().get('UID');
let client_tab_id = StackContext.getInstance().get('CLIENT_TAB_ID');
VarsTabsSubsController.getInstance().unregister_sub(uid, client_tab_id, params.map((param) => param.check_param_is_valid(param._type) ? param.index : null));
}
/**
 * Lists the names of the datasources the var controller registered under
 * this var name depends on.
 * Returns null when the name is unknown or no controller is registered.
 */
private async getVarControllerDSDeps(text: string): Promise<string[]> {
    if ((!text) || (!VarsController.getInstance().var_conf_by_name[text])) {
        return null;
    }
    let var_controller = VarsServerController.getInstance().registered_vars_controller_[text];
    // FIX: guard against a conf without a registered controller — the sibling
    // getParamDependencies already does; previously this threw on null access.
    if (!var_controller) {
        return null;
    }
    let res: string[] = [];
    let deps: DataSourceControllerBase[] = var_controller.getDataSourcesDependencies();
    for (let i in deps) {
        res.push(deps[i].name);
    }
    return res;
}
/**
 * Maps each var-dependency slot name of the controller registered under this
 * var name to the name of the var it depends on.
 * Returns null when the name is unknown or no controller is registered.
 */
private async getVarControllerVarsDeps(text: string): Promise<{ [dep_name: string]: string }> {
    if ((!text) || (!VarsController.getInstance().var_conf_by_name[text])) {
        return null;
    }
    let var_controller = VarsServerController.getInstance().registered_vars_controller_[text];
    // FIX: guard against a conf without a registered controller — the sibling
    // getParamDependencies already does; previously this threw on null access.
    if (!var_controller) {
        return null;
    }
    let res: { [dep_name: string]: string } = {};
    let deps: { [dep_name: string]: VarServerControllerBase<any> } = var_controller.getVarControllerDependencies();
    for (let i in deps) {
        res[i] = deps[i].varConf.name;
    }
    return res;
}
/**
 * Computes the param dependencies of a var data: builds a throw-away DAG
 * node for the param, loads the controller's pre-dependency datasources on
 * it, then asks the controller for its dependencies.
 * Returns null on invalid param or unknown controller.
 */
private async getParamDependencies(param: VarDataBaseVO): Promise<{ [dep_id: string]: VarDataBaseVO }> {
if (!param) {
return null;
}
if (!param.check_param_is_valid(param._type)) {
ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
return null;
}
let var_controller = VarsServerController.getInstance().registered_vars_controller_[VarsController.getInstance().var_conf_by_id[param.var_id].name];
if (!var_controller) {
return null;
}
// Throw-away DAG: getParamDependencies below needs a node, not just the param.
let dag: DAG<VarDAGNode> = new DAG();
let varDAGNode: VarDAGNode = VarDAGNode.getInstance(dag, param);
// Load the "predeps" datasources the controller needs before it can list
// its dependencies.
let predeps = var_controller.getDataSourcesPredepsDependencies();
if (predeps) {
for (let i in predeps) {
let predep = predeps[i];
// Fresh cache per predep — get_data populates it, load_node_data reads it.
let cache = {};
await predep.get_data(param, cache);
await predep.load_node_data(varDAGNode, cache);
}
}
return var_controller.getParamDependencies(varDAGNode);
}
private async getAggregatedVarDatas(param: VarDataBaseVO): Promise<{ [var_data_index: string]: VarDataBaseVO }> {
let var_dag: DAG<VarDAGNode> = new DAG();
let deployed_vars_datas: { [index: string]: boolean } = {};
let vars_datas: { [index: string]: VarDataBaseVO } = {
[param.index]: param
};
let ds_cache: { [ds_name: string]: { [ds_data_index: string]: any } } = {};
let node = VarDAGNode.getInstance(var_dag, param);
await VarsComputeController.getInstance().deploy_deps(node, deployed_vars_datas, vars_datas, ds_cache);
return node.aggregated_datas;
}
/**
 * Loads every datasource value the var's controller depends on for this
 * param and returns them JSON-encoded, keyed by datasource name.
 * Returns null on invalid param or unknown controller.
 */
private async getVarParamDatas(param: VarDataBaseVO): Promise<{ [ds_name: string]: string }> {
if (!param) {
return null;
}
/**
 * Cap each datasource at 10k characters; beyond that we substitute a
 * '[... >10k ...]' marker so the payload stays a valid, bounded JSON.
 */
let value_size_limit: number = 10000;
if (!param.check_param_is_valid(param._type)) {
ConsoleHandler.getInstance().error('Les champs du matroid ne correspondent pas à son typage');
return null;
}
let var_controller = VarsServerController.getInstance().registered_vars_controller_[VarsController.getInstance().var_conf_by_id[param.var_id].name];
if (!var_controller) {
return null;
}
let datasources_values: { [ds_name: string]: any; } = {};
let datasources_deps: DataSourceControllerBase[] = var_controller.getDataSourcesDependencies();
// WARNING: we rely on a fake node since datasource loading is node-based.
let dag: DAG<VarDAGNode> = new DAG();
let varDAGNode: VarDAGNode = VarDAGNode.getInstance(dag, param);
for (let i in datasources_deps) {
let datasource_dep = datasources_deps[i];
// Fresh cache per datasource — get_data populates it, load_node_data reads it.
let cache = {};
await datasource_dep.get_data(param, cache);
await datasource_dep.load_node_data(varDAGNode, cache);
let data = varDAGNode.datasources[datasource_dep.name];
let data_jsoned: string = null;
try {
data_jsoned = JSON.stringify(data);
} catch (error) {
ConsoleHandler.getInstance().error('getVarParamDatas:failed JSON:' + error);
}
// Unserializable or empty datasources are simply skipped.
if ((!data_jsoned) || (!data_jsoned.length)) {
continue;
}
if (data_jsoned.length > value_size_limit) {
datasources_values[datasource_dep.name] = "[... >10ko ...]";
} else {
datasources_values[datasource_dep.name] = data_jsoned;
}
}
return datasources_values;
}
/**
 * Builds a var param of the given var's matroid type from the active context
 * filters: relation-backed num ranges are resolved through the context
 * filter module, ts ranges come from the custom filters (capped by the
 * configured limit), everything else gets the max range.
 * Returns null when the var is unknown or a custom ts filter exceeds the cap.
 */
private async getVarParamFromContextFilters(
    var_name: string,
    get_active_field_filters: { [api_type_id: string]: { [field_id: string]: ContextFilterVO } },
    custom_filters: { [var_param_field_name: string]: ContextFilterVO },
    active_api_type_ids: string[]
): Promise<VarDataBaseVO> {
    if (!var_name) {
        return null;
    }
    let var_conf = VarsController.getInstance().var_conf_by_name[var_name];
    if (!var_conf) {
        return null;
    }
    let var_param: VarDataBaseVO = VarDataBaseVO.createNew(var_name);
    let matroid_fields = MatroidController.getInstance().getMatroidFields(var_conf.var_data_vo_type);
    let field_promises: Array<Promise<any>> = [];
    let cleaned_active_field_filters = ContextFilterHandler.getInstance().clean_context_filters_for_request(get_active_field_filters);
    let refuse_param: boolean = false;
    for (let i in matroid_fields) {
        let matroid_field_ = matroid_fields[i];
        // One async resolution per field, run in parallel below.
        field_promises.push((async (matroid_field) => {
            // TODO FIXME tsranges currently default to max_range — rethink how date filtering should really work here
            switch (matroid_field.field_type) {
                case ModuleTableField.FIELD_TYPE_numrange_array:
                    if (matroid_field.has_relation) {
                        // Resolve the related ids through the context filters.
                        let context_query: ContextQueryVO = new ContextQueryVO();
                        context_query.base_api_type_id = matroid_field.manyToOne_target_moduletable.vo_type;
                        context_query.active_api_type_ids = active_api_type_ids;
                        context_query.filters = ContextFilterHandler.getInstance().get_filters_from_active_field_filters(cleaned_active_field_filters);
                        context_query.fields = [
                            new ContextQueryFieldVO(matroid_field.manyToOne_target_moduletable.vo_type, matroid_field.target_field, 'id')
                        ];
                        let ids_db: Array<{ id: number }> = await ModuleContextFilterServer.getInstance().select_vos(context_query);
                        if (!ids_db) {
                            var_param[matroid_field.field_id] = [RangeHandler.getInstance().getMaxNumRange()];
                            break;
                        }
                        let ids: number[] = [];
                        ids_db.forEach((id_db) => ids.push(parseInt(id_db.toString())));
                        var_param[matroid_field.field_id] = RangeHandler.getInstance().get_ids_ranges_from_list(ids);
                    } else {
                        var_param[matroid_field.field_id] = [RangeHandler.getInstance().getMaxNumRange()];
                    }
                    break;
                case ModuleTableField.FIELD_TYPE_hourrange_array:
                    var_param[matroid_field.field_id] = [RangeHandler.getInstance().getMaxHourRange()];
                    break;
                case ModuleTableField.FIELD_TYPE_tstzrange_array:
                    if (!!custom_filters[matroid_field.field_id]) {
                        // We must keep the number of generated possibilities low:
                        // the cap counts ranges (not cardinal) and comes from the
                        // application parameter below.
                        let limit_nb_range = await ModuleParams.getInstance().getParamValueAsInt(ModuleVarServer.PARAM_NAME_limit_nb_ts_ranges_on_param_by_context_filter, 100);
                        var_param[matroid_field.field_id] = this.get_ts_ranges_from_custom_filter(custom_filters[matroid_field.field_id], limit_nb_range);
                        if (!var_param[matroid_field.field_id]) {
                            refuse_param = true;
                            return;
                        }
                        // FIX: the original fell through to the max-range
                        // assignment below, silently discarding the
                        // custom-filter-derived ranges.
                        break;
                    }
                    var_param[matroid_field.field_id] = [RangeHandler.getInstance().getMaxTSRange()];
                    break;
            }
        })(matroid_field_));
    }
    await Promise.all(field_promises);
    return refuse_param ? null : var_param;
}
/**
 * Builds the TSRanges matching a custom date filter.
 * Filters are applied from most to least discriminating to stay under
 * limit_nb_range: e.g. for 2019,2020 | jan,feb,mar | mon,thu — starting from
 * max_range with mon/thu would yield an "infinite" number of ranges, while
 * starting with the years yields 1 range, then a few per month, then ~60
 * once the weekdays are applied.
 * Returns null when the cap would be exceeded, and the max range when no
 * usable (year) filter is present.
 */
private get_ts_ranges_from_custom_filter(custom_filter: ContextFilterVO, limit_nb_range): TSRange[] {
    // NOTE: the original declared an unused `res` accumulator here — removed.
    if (!custom_filter) {
        return [RangeHandler.getInstance().getMaxTSRange()];
    }
    /**
     * Without a year filter we cannot restrict anything.
     */
    let year = ContextFilterHandler.getInstance().find_context_filter_by_type(custom_filter, ContextFilterVO.TYPE_DATE_YEAR);
    if (!year) {
        return [RangeHandler.getInstance().getMaxTSRange()];
    }
    let tsranges = this.get_ts_ranges_from_custom_filter_year(year, limit_nb_range);
    if (!tsranges) {
        return null;
    }
    let month = ContextFilterHandler.getInstance().find_context_filter_by_type(custom_filter, ContextFilterVO.TYPE_DATE_MONTH);
    if (!!month) {
        tsranges = this.get_ts_ranges_from_custom_filter_month(tsranges, month, limit_nb_range);
    }
    let week = ContextFilterHandler.getInstance().find_context_filter_by_type(custom_filter, ContextFilterVO.TYPE_DATE_WEEK);
    if (!!week) {
        throw new Error('Not implemented');
        // tsranges = this.get_ts_ranges_from_custom_filter_week(tsranges, week, limit_nb_range);
    }
    let dow = ContextFilterHandler.getInstance().find_context_filter_by_type(custom_filter, ContextFilterVO.TYPE_DATE_DOW);
    if (!!dow) {
        tsranges = this.get_ts_ranges_from_custom_filter_dow(tsranges, dow, limit_nb_range);
    }
    let dom = ContextFilterHandler.getInstance().find_context_filter_by_type(custom_filter, ContextFilterVO.TYPE_DATE_DOM);
    if (!!dom) {
        tsranges = this.get_ts_ranges_from_custom_filter_dom(tsranges, dom, limit_nb_range);
    }
    return tsranges;
}
/**
 * Restricts day-granularity tsranges to the requested days of month.
 * Returns the input unchanged when the filter carries no numeric data, and
 * null when the expansion would exceed limit_nb_range.
 */
private get_ts_ranges_from_custom_filter_dom(tsranges: TSRange[], custom_filter: ContextFilterVO, limit_nb_range): TSRange[] {
    const dom_ranges: NumRange[] = (custom_filter.param_numeric != null)
        ? [RangeHandler.getInstance().create_single_elt_NumRange(custom_filter.param_numeric, NumSegment.TYPE_INT)]
        : custom_filter.param_numranges;
    if ((!dom_ranges) || (!dom_ranges.length)) {
        return tsranges;
    }
    if ((RangeHandler.getInstance().getCardinalFromArray(tsranges) * dom_ranges.length) > limit_nb_range) {
        return null;
    }
    let matching_days: TSRange[] = [];
    RangeHandler.getInstance().foreach_ranges_sync(tsranges, (day: number) => {
        RangeHandler.getInstance().foreach_ranges_sync(dom_ranges, (dom: number) => {
            if (dom == Dates.date(day)) {
                matching_days.push(RangeHandler.getInstance().create_single_elt_TSRange(day, TimeSegment.TYPE_DAY));
            }
        });
    }, TimeSegment.TYPE_DAY);
    if (matching_days && matching_days.length) {
        matching_days = RangeHandler.getInstance().getRangesUnion(matching_days);
    }
    return matching_days;
}
/**
 * Restricts day-granularity tsranges to the requested ISO weekdays.
 * Returns the input unchanged when the filter carries no numeric data, and
 * null when the expansion would exceed limit_nb_range.
 */
private get_ts_ranges_from_custom_filter_dow(tsranges: TSRange[], custom_filter: ContextFilterVO, limit_nb_range): TSRange[] {
    const dow_ranges: NumRange[] = (custom_filter.param_numeric != null)
        ? [RangeHandler.getInstance().create_single_elt_NumRange(custom_filter.param_numeric, NumSegment.TYPE_INT)]
        : custom_filter.param_numranges;
    if ((!dow_ranges) || (!dow_ranges.length)) {
        return tsranges;
    }
    if ((RangeHandler.getInstance().getCardinalFromArray(tsranges) * dow_ranges.length) > limit_nb_range) {
        return null;
    }
    let matching_days: TSRange[] = [];
    RangeHandler.getInstance().foreach_ranges_sync(tsranges, (day: number) => {
        RangeHandler.getInstance().foreach_ranges_sync(dow_ranges, (dow: number) => {
            if (dow == Dates.isoWeekday(day)) {
                matching_days.push(RangeHandler.getInstance().create_single_elt_TSRange(day, TimeSegment.TYPE_DAY));
            }
        });
    }, TimeSegment.TYPE_DAY);
    if (matching_days && matching_days.length) {
        matching_days = RangeHandler.getInstance().getRangesUnion(matching_days);
    }
    return matching_days;
}
/**
 * Expands year-level tsranges into month ranges per the month filter: each
 * requested month index is added (as a month offset) to each year start.
 * Returns the input unchanged when the filter carries no numeric data, and
 * null when the expansion would exceed limit_nb_range.
 * NOTE(review): unlike the dow/dom siblings, the outer foreach passes no
 * segment type, so iteration presumably uses the ranges' own segmentation —
 * confirm. Month indices are presumably 0-based offsets (Dates.add with
 * TYPE_MONTH) — confirm against the filter producer.
 */
private get_ts_ranges_from_custom_filter_month(tsranges: TSRange[], custom_filter: ContextFilterVO, limit_nb_range): TSRange[] {
let numranges: NumRange[] = null;
if (custom_filter.param_numeric != null) {
numranges = [RangeHandler.getInstance().create_single_elt_NumRange(custom_filter.param_numeric, NumSegment.TYPE_INT)];
}
numranges = numranges ? numranges : custom_filter.param_numranges;
if ((!numranges) || (!numranges.length)) {
return tsranges;
}
if ((RangeHandler.getInstance().getCardinalFromArray(tsranges) * numranges.length) > limit_nb_range) {
return null;
}
let res: TSRange[] = [];
RangeHandler.getInstance().foreach_ranges_sync(tsranges, (year: number) => {
RangeHandler.getInstance().foreach_ranges_sync(numranges, (month_i: number) => {
res.push(RangeHandler.getInstance().create_single_elt_TSRange(Dates.add(year, month_i, TimeSegment.TYPE_MONTH), TimeSegment.TYPE_MONTH));
});
});
if (res && res.length) {
res = RangeHandler.getInstance().getRangesUnion(res);
}
return res;
}
/**
 * Seeds the ranges from the year filter: a single numeric value, or the
 * filter's numranges (capped at limit_nb_range ranges, else null).
 * NOTE(review): declared to return TSRange[] but builds/returns NumRanges —
 * downstream (month expansion) treats the values numerically as years, so
 * the loose typing appears deliberate; confirm before tightening.
 */
private get_ts_ranges_from_custom_filter_year(custom_filter: ContextFilterVO, limit_nb_range): TSRange[] {
if (custom_filter.param_numeric != null) {
return [RangeHandler.getInstance().create_single_elt_NumRange(custom_filter.param_numeric, NumSegment.TYPE_INT)];
}
if (custom_filter.param_numranges && (custom_filter.param_numranges.length > limit_nb_range)) {
return null;
}
return custom_filter.param_numranges;
}
/**
 * Loads every SlowVarVO flagged TYPE_DENIED and rebuilds the in-memory
 * denied_slowvars index (keyed by var name) on VarsDatasProxy.
 */
private async load_slowvars() {
    const type_filter = new ContextFilterVO();
    type_filter.field_id = 'type';
    type_filter.vo_type = SlowVarVO.API_TYPE_ID;
    type_filter.filter_type = ContextFilterVO.TYPE_NUMERIC_EQUALS;
    type_filter.param_numeric = SlowVarVO.TYPE_DENIED;
    const query: ContextQueryVO = new ContextQueryVO();
    query.base_api_type_id = SlowVarVO.API_TYPE_ID;
    query.active_api_type_ids = [SlowVarVO.API_TYPE_ID];
    query.filters = [type_filter];
    query.limit = 0;
    query.offset = 0;
    const denied_items: SlowVarVO[] = await ModuleContextFilter.getInstance().select_vos<SlowVarVO>(query);
    const denied_by_name: { [name: string]: SlowVarVO } = {};
    for (const item of (denied_items || [])) {
        denied_by_name[item.name] = item;
    }
    VarsDatasProxy.getInstance().denied_slowvars = denied_by_name;
}
} |
# Evaluate the 1024+0+512-N/13 model on WikiText-103 validation data with the
# 1536-token config, augmenting each example (shuffle, keep nouns of the
# first/third/sixth segments) and scoring only the penultimate sixth.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-N/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-N/13-512+512+512-shuffled-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_third_sixth --eval_function penultimate_sixth_eval
<gh_stars>1-10
package redis
import (
"errors"
"github.com/NYTimes/video-transcoding-api/db"
"github.com/NYTimes/video-transcoding-api/db/redis/storage"
"github.com/go-redis/redis"
)
// localPresetsSetKey is the redis set that indexes the names of all stored local presets.
const localPresetsSetKey = "localpresets"
// CreateLocalPreset stores a new local preset, refusing to overwrite an
// existing preset with the same name.
func (r *redisRepository) CreateLocalPreset(localPreset *db.LocalPreset) error {
	_, err := r.GetLocalPreset(localPreset.Name)
	if err == nil {
		return db.ErrLocalPresetAlreadyExists
	}
	return r.saveLocalPreset(localPreset)
}
// UpdateLocalPreset overwrites an existing local preset; it fails with
// db.ErrLocalPresetNotFound when the preset does not exist yet.
func (r *redisRepository) UpdateLocalPreset(localPreset *db.LocalPreset) error {
	_, err := r.GetLocalPreset(localPreset.Name)
	if err == db.ErrLocalPresetNotFound {
		return err
	}
	return r.saveLocalPreset(localPreset)
}
// saveLocalPreset persists the preset's field map under its key and registers
// the preset name in the preset set, inside a WATCH transaction on the key.
func (r *redisRepository) saveLocalPreset(localPreset *db.LocalPreset) error {
	// Validate the name before doing any marshalling work (the original only
	// checked it after calling FieldMap).
	if localPreset.Name == "" {
		return errors.New("preset name missing")
	}
	fields, err := r.storage.FieldMap(localPreset)
	if err != nil {
		return err
	}
	localPresetKey := r.localPresetKey(localPreset.Name)
	return r.storage.RedisClient().Watch(func(tx *redis.Tx) error {
		if err := tx.HMSet(localPresetKey, fields).Err(); err != nil {
			return err
		}
		return tx.SAdd(localPresetsSetKey, localPreset.Name).Err()
	}, localPresetKey)
}
// DeleteLocalPreset removes the preset hash and then its name from the preset
// set. Returns db.ErrLocalPresetNotFound when the preset does not exist.
func (r *redisRepository) DeleteLocalPreset(localPreset *db.LocalPreset) error {
	err := r.storage.Delete(r.localPresetKey(localPreset.Name))
	if err != nil {
		if err == storage.ErrNotFound {
			return db.ErrLocalPresetNotFound
		}
		return err
	}
	// FIX: the original discarded this result, silently leaving a dangling
	// name in the set whenever SRem failed.
	return r.storage.RedisClient().SRem(localPresetsSetKey, localPreset.Name).Err()
}
// GetLocalPreset loads the preset stored under the given name, mapping a
// storage miss to db.ErrLocalPresetNotFound.
func (r *redisRepository) GetLocalPreset(name string) (*db.LocalPreset, error) {
	preset := db.LocalPreset{Name: name, Preset: db.Preset{}}
	if err := r.storage.Load(r.localPresetKey(name), &preset); err != nil {
		if err == storage.ErrNotFound {
			return nil, db.ErrLocalPresetNotFound
		}
		return &preset, err
	}
	return &preset, nil
}
// localPresetKey builds the redis key under which a preset's hash is stored.
func (r *redisRepository) localPresetKey(name string) string {
return "localpreset:" + name
}
|
def quick_sort(arr):
    """Return a new list with arr's elements sorted in descending order.

    The pivot partition puts elements greater than the pivot first, so the
    recursion yields largest-to-smallest order (duplicates preserved).
    """
    if len(arr) < 2:
        return arr
    pivot = arr[0]
    rest = arr[1:]
    above = [x for x in rest if x > pivot]   # strictly greater than pivot
    below = [x for x in rest if x <= pivot]  # <= pivot, keeps duplicates
    return quick_sort(above) + [pivot] + quick_sort(below)
# FIX: the original referenced `arr` before ever defining it (NameError).
# Define sample data so the demo actually runs.
arr = [10, 7, 8, 9, 1, 5]
# Using quicksort to sort array (quick_sort returns descending order)
arr = quick_sort(arr)
# reversing the array to get ascending order
arr.reverse()
print(arr)
<reponame>thinblock/workers-microservice
import { createQueue, Job } from 'kue';
import * as moment from 'moment';
import { logger } from '../utils/logger';
import { saveLastRun } from '../utils/jobs_service';
import { publishMessage, triggerNotification } from '../utils/helpers';
import { config } from './env';
import { oneLine } from 'common-tags';
// Kue queue backed by the configured redis instance; jobs are enqueued via
// enqueueJob() below and consumed by the worker registered here.
const queue = createQueue({ redis: config.db });
// Worker for the periodical cron trigger: evaluates the job's trigger
// conditions and acks (done) on success, nacks with the error otherwise.
queue.process('cron-periodical-worker', function (job, done) {
const jobData = job.data;
evaluateConditions(jobData)
.then(() => {
logger.info(oneLine`
[i] Job (${jobData._id}) with QueueId: ${job.id} finished successfully
`);
done();
})
.catch((e) => {
logger.error(oneLine`
[Err] Job (${jobData._id}) with QueueId: ${job.id} errored
`, e);
done(e);
});
});
/**
 * Evaluates a periodical cron job's trigger conditions and, when they all
 * hold, persists the run date, fires a notification and publishes the job's
 * action events.
 * Resolves true; rethrows any error raised while publishing.
 */
async function evaluateConditions(jobData: any) {
    const conditions: ITriggerCondition[] = jobData.trigger.conditions;
    const actions: any[] = jobData.actions;
    const lastRunDate = jobData.last_run && jobData.last_run.date;
    let errored = null;
    // Best-effort persistence of the run date: failures are logged, not fatal.
    const saveSuccessRunDate = async () => {
        try {
            await saveLastRun(jobData._id, jobData.timestamp);
        } catch (e) {
            logger.info(oneLine`
                [Err] Error occurred while saving last_run info of Job: ${jobData._id}
            `, e);
        }
    };
    try {
        // If there is no last run date, then this is the first time they're run:
        // save the last run date and return.
        if (!lastRunDate) {
            logger.info(oneLine`
                [i] Didn't find any last_run_date for Job: ${jobData._id}, Saving current date!
            `);
            await saveSuccessRunDate();
            return true;
        }
        // If the last run is FAR older than the configured period (in minutes),
        // resync the run date instead of firing a late trigger.
        const lastDiffFromNowMins = Math.round((moment().diff(moment(lastRunDate)) / 1000) / 60);
        const conditionArg = Math.round(
            parseFloat(conditions[0] ? <string> conditions[0].argument.value : '0')
        );
        if (lastDiffFromNowMins > (conditionArg + 1)) {
            logger.info(oneLine`
                [i] Found last_run_date FAR Greater than current time for Job: ${jobData._id},
                Saving current date!
            `);
            await saveSuccessRunDate();
            return true;
        }
        // Otherwise evaluate the conditions against the elapsed minutes.
        const timeDiff = moment(jobData.timestamp).diff(moment(lastRunDate));
        const lastRunDiffInMins = Math.round((timeDiff / 1000) / 60); // ms -> mins
        const evaluatedConditions = conditions.map((condition) => {
            // Cron periodical jobs carry exactly one condition:
            //   when: 'last_run_date_diff', operation: $eq,
            //   argument: { type: 'number', value: X /* minutes */ }
            // FIX: the original logged the literal text `jobData.timestamp`
            // (template with no interpolation) — now logs the actual value.
            logger.info(`jobData.timestamp: ${jobData.timestamp}`);
            return lastRunDiffInMins >= condition.argument.value;
        });
        // If all conditions are true, publish the actions' events.
        if (evaluatedConditions.every((cond) => cond)) {
            logger.info(`[i] All Conditions true for Job: ${jobData._id}`);
            logger.info(`[i] Publishing events for ${actions.length} actions with Job: ${jobData._id}`);
            try {
                triggerNotification({
                    jobId: jobData._id,
                    event: `every_${conditionArg}_mins`,
                    data: {
                        current_run: jobData.timestamp,
                        last_run: lastRunDate
                    }
                });
            } catch (e) {
                logger.error(oneLine`[Error] Error while notifying event for Job: ${jobData._id}`);
            }
            await saveSuccessRunDate();
            await publishActions(actions);
            logger.info(oneLine`
                [i] Published events for ${actions.length} actions was successfull
                with Job: ${jobData._id}
            `);
        }
    } catch (e) {
        logger.info(oneLine`
            [Err] Error occurred while publishing events for the actions of Job: ${jobData._id}
        `, e);
        errored = e;
    }
    if (errored) {
        // TODO: if error, add it to failed jobs queue
        throw errored;
    }
    return true;
}
/**
 * Publishes one SNS message per action (id, params, params_schema), all in
 * parallel.
 */
async function publishActions(actions: any[]) {
    const publishes = actions.map((obj: any) => {
        const payload = JSON.stringify({
            id: obj.action._id, params: obj.params,
            params_schema: obj.action.params_schema
        });
        return publishMessage(obj.action.sns_topic_arn, payload);
    });
    await Promise.all(publishes);
}
/**
 * Creates and saves a 'cron-periodical-worker' job on the queue, resolving
 * with the Job once it has been persisted.
 */
export function enqueueJob(data: any) {
    return new Promise<Job>((resolve, reject) => {
        const job = queue.create('cron-periodical-worker', data);
        job.save((err: Error) => err ? reject(err) : resolve(job));
    });
}
// Shape of a single trigger condition attached to a job.
interface ITriggerCondition {
// comparison key, e.g. 'last_run_date_diff'
when: string;
_id: string;
// right-hand operand of the comparison
argument: {
type: string;
value: string|number|boolean
};
// comparison operator, e.g. '$eq'
operation: string;
}
export default queue; |
<reponame>wuximing/dsshop<filename>admin/vue2/element-admin-v3/node_modules/@antv/component/lib/util/event.js<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.propagationDelegate = void 0;
var graph_event_1 = require("@antv/g-base/lib/event/graph-event");
/**
 * Fires the delegated event on the group, then bubbles it up through every
 * ancestor, recording the propagation path along the way.
 * @param group       group the event originates from
 * @param eventName   event name
 * @param eventObject event payload
 */
function propagationDelegate(group, eventName, eventObject) {
var event = new graph_event_1.default(eventName, eventObject);
event.target = group;
event.propagationPath.push(group); // delegation starts at the current group
group.emitDelegation(eventName, event);
var parent = group.getParent();
// bubble up the ancestor chain
while (parent) {
// the delegated handler fires before the path is extended
parent.emitDelegation(eventName, event);
event.propagationPath.push(parent);
parent = parent.getParent();
}
}
exports.propagationDelegate = propagationDelegate;
//# sourceMappingURL=event.js.map |
const Tesseract = require('tesseract.js');
const config = require("../../../config.json");
const Viola = require("../../structures/utils.js");
module.exports = {
config: {
name: 'ocr',
aliases: ['readimage'],
cooldown: 25,
category: 'Util',
usage: 'ocr [image url]',
description: 'Read text in image',
userPerms: [],
clientPerms: []
},
run: async (client, message, args) => {
let ocr = args[0] || message.attachments.first().url
if (!ocr) return;
if (!ocr.endsWith('.png')) return message.channel.send('**~ The image is not a png!**')
message.channel.startTyping();
Tesseract.recognize(
`${ocr}`,
'eng'
).then(({ data: { text } }, err) => {
message.channel.send(text);
message.channel.stopTyping();
})
},
};
|
# Environment Variables
PATH="$PATH:/usr/local/bin"
APP_NAME="Petclinic"
CFN_KEYPAIR="call-ansible.key"
CFN_TEMPLATE="./infrastructure/dev-docker-swarm-infrastructure-cfn-template.yml"
AWS_REGION="us-east-1"
export ANSIBLE_PRIVATE_KEY_FILE="${WORKSPACE}/${CFN_KEYPAIR}"
export ANSIBLE_HOST_KEY_CHECKING=False
# NOTE: the original also set APP_STACK_NAME to "...-dev-${BUILD_NUMBER}.key"
# above, which was immediately overwritten here — dead assignment removed.
export APP_STACK_NAME="Call-$APP_NAME-App-${BUILD_NUMBER}"
# Create key pair for `Ansible` using AWS CLI
aws ec2 create-key-pair --region ${AWS_REGION} --key-name ${CFN_KEYPAIR} --query "KeyMaterial" --output text > ${CFN_KEYPAIR}
chmod 400 ${CFN_KEYPAIR}
# Create Docker Swarm infrastructure with AWS Cloudformation using AWS CLI
aws cloudformation create-stack --region ${AWS_REGION} --stack-name ${APP_STACK_NAME} --capabilities CAPABILITY_IAM --template-body file://${CFN_TEMPLATE} --parameters ParameterKey=KeyPairName,ParameterValue=${CFN_KEYPAIR}
# Setup a Docker Swarm environment
# Update dynamic environment
sed -i "s/APP_STACK_NAME/$APP_STACK_NAME/" ./ansible/inventory/dev_stack_dynamic_inventory_aws_ec2.yaml
# Swarm Setup for all nodes (instances)
ansible-playbook -i ./ansible/inventory/dev_stack_dynamic_inventory_aws_ec2.yaml -b ./ansible/playbooks/pb_setup_for_all_docker_swarm_instances.yaml
# Swarm Setup for Grand Master node
ansible-playbook -i ./ansible/inventory/dev_stack_dynamic_inventory_aws_ec2.yaml -b ./ansible/playbooks/pb_initialize_docker_swarm.yaml
# Swarm Setup for Other Managers nodes
ansible-playbook -i ./ansible/inventory/dev_stack_dynamic_inventory_aws_ec2.yaml -b ./ansible/playbooks/pb_join_docker_swarm_managers.yaml
# Swarm Setup for Workers nodes
ansible-playbook -i ./ansible/inventory/dev_stack_dynamic_inventory_aws_ec2.yaml -b ./ansible/playbooks/pb_join_docker_swarm_workers.yaml
# Tear down the Docker Swarm infrastructure using AWS CLI
# FIX: the original referenced the undefined ${AWS_STACK_NAME}, so the
# delete-stack call ran with an empty stack name and never tore anything down.
aws cloudformation delete-stack --region ${AWS_REGION} --stack-name ${APP_STACK_NAME}
# Delete existing key pair using AWS CLI
aws ec2 delete-key-pair --region ${AWS_REGION} --key-name ${CFN_KEYPAIR}
rm -rf ${CFN_KEYPAIR}
#!/bin/bash --
# Resolve the directory this script lives in so relative paths work no matter
# where it is invoked from. $() replaces legacy backticks; quotes guard paths
# containing spaces.
MY_DIR=$(dirname "$0")
# Pull in the shared pipeline environment (defines $PL, among others).
source "$MY_DIR/../SetEnv"
mkdir -p ../data/tmp
# Submit a 6-element LSF job array (at most 10 concurrent) running the loader;
# stdout/stderr go under $PL/data/tmp.
bsub -q production-rh6 -o "$PL/data/tmp" -e "$PL/data/tmp" -J "spombe[1-6]%10" "sh Load_Chromosome.sh"
|
#!/usr/bin/env python3
from .pages.arduino import ArduinoAvrdudePage, ArduinoResetPage, ArduinoFindSerialPage
from .base import Wizard
class HexUploaderWizardMixin:
    """Mixin that prepends the Arduino hex-upload pages to a wizard."""

    def setupPages(self):
        # Page order: choose/locate avrdude, reset the board, then find its serial port.
        self.addPage(ArduinoAvrdudePage(self, mainWindow=self.mainWindow()))
        self.addPage(ArduinoResetPage(self, mainWindow=self.mainWindow()))
        self.addPage(ArduinoFindSerialPage(self, mainWindow=self.mainWindow()))
        # Let the base wizard append its own pages after ours.
        super().setupPages()
class HexUploaderWizard(HexUploaderWizardMixin, Wizard):
    """Concrete wizard: the Arduino hex-upload pages plus the base Wizard."""
    pass
|
def num_common_letters(string1, string2):
    """
    Finds the number of letters that are common to two given strings.

    Counts every matching pair (c1, c2) with c1 from string1 and c2 from
    string2, so repeated characters contribute once per matching pair.

    Parameters:
        string1 (string): first string
        string2 (string): second string

    Returns:
        comm_letter_count (int): Number of letters that are common to two strings
    """
    # For each character of the first string, add how many times it occurs in
    # the second; this equals the nested pairwise comparison.
    return sum(string2.count(ch) for ch in string1)
if __name__ == '__main__':
    # Demo run: prints the pairwise common-letter count for two sample strings.
    string1 = 'Hello World'
    string2 = 'Goodbye World'
    print(num_common_letters(string1, string2))
package com.github.barteks2x.wogmodmanager;
import android.os.AsyncTask;
import android.util.Log;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.GridView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.goofans.gootool.addins.Addin;
import com.goofans.gootool.addins.AddinFactory;
import com.goofans.gootool.addins.AddinFormatException;
import com.goofans.gootool.addins.AddinInstaller;
import com.goofans.gootool.model.Configuration;
import com.goofans.gootool.util.ProgressListener;
import com.goofans.gootool.wog.ConfigurationWriterTask;
import com.goofans.gootool.wog.WorldOfGoo;
import com.goofans.gootool.wog.WorldOfGooAndroid;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Click handler that applies the goomod selection: it snapshots the enabled
 * add-ins from the grid adapter into the World of Goo configuration, then runs
 * the configuration writer on a background thread while reporting progress to
 * the UI.
 */
public class GoomodInstaller implements View.OnClickListener {
    private WogMmActivity wogMmActivity;   // host activity; buttons are locked while installing
    private ProgressBar pb;                // progress bar shown during the write
    private TextView text;                 // label showing the writer's current task
    private GridView modsGrid;             // grid listing goomods with their enabled state

    public GoomodInstaller(WogMmActivity wogMmActivity, ProgressBar pb, TextView text, GridView modsGrid) {
        this.wogMmActivity = wogMmActivity;
        this.pb = pb;
        this.text = text;
        this.modsGrid = modsGrid;
    }

    @Override
    public void onClick(View v) {
        new AsyncTask<Void, ProgressData, Void>() {
            private Configuration cfg;

            @Override
            protected void onPreExecute() {
                // UI thread: lock the UI, read the current configuration, and
                // replace its enabled-addin list with the grid's selection.
                wogMmActivity.disableButtons();
                pb.setVisibility(View.VISIBLE);
                text.setVisibility(View.VISIBLE);
                try {
                    cfg = WorldOfGoo.getTheInstance().readConfiguration();
                } catch (IOException e) {
                    // NOTE(review): a config read failure crashes the task here;
                    // presumably acceptable because nothing can proceed without it.
                    throw new RuntimeException(e);
                }
                cfg.setWatermark("Modded using GooMan");
                ModListDynamicGridViewAdapter a = (ModListDynamicGridViewAdapter) modsGrid.getAdapter();
                List<String> enabled = cfg.getEnabledAddins();
                enabled.clear();
                for(int i = 0; i < a.getCount(); i++) {
                    ModListDynamicGridViewAdapter.GoomodEntry entry = (ModListDynamicGridViewAdapter.GoomodEntry) a.getItem(i);
                    if(entry.isEnabled())
                        enabled.add(entry.getId());
                }
            }

            @Override
            protected void onPostExecute(Void nothing) {
                // UI thread: restore the UI once the background write finishes.
                wogMmActivity.enableButtons();
                pb.setVisibility(View.INVISIBLE);
                text.setText("");
            }

            @Override
            protected Void doInBackground(Void... params) {
                // Worker thread: write the configuration prepared in onPreExecute,
                // forwarding writer progress to onProgressUpdate.
                ConfigurationWriterTask cwt = new ConfigurationWriterTask(cfg);
                cwt.addListener(new ProgressListener() {
                    String task = "";
                    @Override
                    public void beginStep(String taskDescription, boolean progressAvailable) {
                        // 0.5 acts as an indeterminate placeholder when no progress is reported.
                        publishProgress(new ProgressData(task = taskDescription, progressAvailable ? 0 : 0.5f));
                    }
                    @Override
                    public void progressStep(float percent) {
                        publishProgress(new ProgressData(task, percent));
                    }
                });
                try {
                    cwt.run();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                return null;
            }

            @Override
            protected void onProgressUpdate(ProgressData... i) {
                // Only the newest progress snapshot matters.
                ProgressData pd = i[i.length-1];
                pb.setProgress((int) (pd.progress * 100));
                text.setText(pd.name);
            }
        }.execute();
    }
}
|
<gh_stars>1-10
/*
Copyright 2020, Verizon Media
Licensed under the terms of the MIT license. See the LICENSE file in the project root for license terms.
*/
import {PLATFORM} from 'aurelia-pal';
/**
 * View-model for the pill component's documentation page: column layouts for
 * the reference tables, the pill property reference data, and demo table data.
 */
export class PillProperties {
  /** Column definitions for the property reference table. */
  public pillCols = [
    {
      _class: 'monospaced',
      colClass: 't150',
      colHeadName: 'name',
      colHeadValue: 'Name',
    },
    {
      _class: 'monospaced',
      colHeadName: 'value',
      colHeadValue: 'Value',
    },
    {
      colHeadName: 'description',
      colHeadValue: 'Description',
    },
    {
      _class: 'monospaced',
      colClass: 't175',
      colHeadName: 'default',
      colHeadValue: 'Default',
    },
  ];

  /** Rows of the property reference table: one entry per pill attribute. */
  public pillProperties = [
    {
      default: 'var(--c_subOneMain)',
      description: 'Set the color',
      name: 'color',
      value: 'CSS Color',
    },
    {
      description: 'Optional: Set the URL the pill should link to.',
      name: 'href',
      value: 'URL',
    },
    {
      description: 'Set the icon to be in the pill.',
      name: 'icon',
      value: 'Any icon',
    },
    {
      description: 'Sets which direction there the spacing shows up.',
      name: 'spacing',
      value: 'spacingRight | spacingLeft',
    },
  ];

  /** Columns for the demo table that renders pills inside cells via c-td-pill. */
  public pillTableCols = [
    {
      colHeadName: 'name',
      colHeadValue: 'Name',
      view: PLATFORM.moduleName('resources/components/tables/td-contents/c-td-pill/c-td-pill.html'),
      viewModel: PLATFORM.moduleName('resources/components/tables/td-contents/c-td-pill/c-td-pill'),
    },
    {
      colHeadName: 'description',
      colHeadValue: 'Description',
      view: PLATFORM.moduleName('resources/components/tables/td-contents/c-td-pill/c-td-pill.html'),
      viewModel: PLATFORM.moduleName('resources/components/tables/td-contents/c-td-pill/c-td-pill'),
    },
  ];

  /** Sample row data for the demo table; *Pill fields feed the pill cells. */
  public pillTableData = [
    {
      description: 'more text here',
      descriptionPill: 'Desc Pill',
      name: 'stuff here',
      namePill: 'Name Pill',
    },
  ];

  /** Demo click handler used by the documentation examples. */
  public testFunction() {
    // eslint-disable-next-line no-alert
    window.alert('Clicked');
  }
}
|
package core.framework.log.message;
import core.framework.api.json.Property;
/**
* @author neo
*/
public class PerformanceStatMessage {
    // Total elapsed time accumulated for this operation (units not shown here;
    // presumably nanoseconds or milliseconds — confirm against the producer).
    @Property(name = "total_elapsed")
    public Long totalElapsed;

    // Number of invocations aggregated into this stat.
    @Property(name = "count")
    public Integer count;

    // Entries read across those invocations.
    @Property(name = "read_entries")
    public Integer readEntries;

    // Entries written across those invocations.
    @Property(name = "write_entries")
    public Integer writeEntries;
}
|
from django.urls import path
from .views import HomePageView
# URL routes for this app: the site root renders the home page view.
urlpatterns = [
    path("", HomePageView.as_view(), name="home"),
]
# in views.py
from django.shortcuts import render
from django.views.generic import TemplateView
class HomePageView(TemplateView):
    """Render the static home page with a greeting heading in its context."""

    template_name = "home.html"

    def get_context_data(self, **kwargs):
        # Extend the base template context with the page heading.
        context = super().get_context_data(**kwargs)
        context.update(heading="Welcome!")
        return context
# in home.html
<h1>{{ heading }}</h1> |
package extension
import (
"bytes"
_ "embed"
"errors"
"fmt"
"io"
"io/fs"
"io/ioutil"
"net/http"
"os"
"os/exec"
"path"
"path/filepath"
"runtime"
"strings"
"sync"
"github.com/cli/cli/v2/api"
"github.com/cli/cli/v2/git"
"github.com/cli/cli/v2/internal/config"
"github.com/cli/cli/v2/internal/ghrepo"
"github.com/cli/cli/v2/pkg/extensions"
"github.com/cli/cli/v2/pkg/findsh"
"github.com/cli/cli/v2/pkg/iostreams"
"github.com/cli/safeexec"
"gopkg.in/yaml.v3"
)
// Manager installs, lists, upgrades, removes, and dispatches gh CLI
// extensions. The function-typed fields exist so tests can stub filesystem,
// PATH lookup, process creation, and platform detection.
type Manager struct {
	dataDir    func() string
	lookPath   func(string) (string, error)
	findSh     func() (string, error)
	newCommand func(string, ...string) *exec.Cmd
	platform   func() (string, string)
	client     *http.Client
	config     config.Config
	io         *iostreams.IOStreams
}
// NewManager builds a Manager with the default data directory, PATH lookup,
// sh locator, and command factory. The HTTP client and config are injected
// later via SetClient/SetConfig.
func NewManager(io *iostreams.IOStreams) *Manager {
	return &Manager{
		dataDir:    config.DataDir,
		lookPath:   safeexec.LookPath,
		findSh:     findsh.Find,
		newCommand: exec.Command,
		platform: func() (string, string) {
			// Release assets are suffixed GOOS-GOARCH, plus ".exe" on Windows.
			ext := ""
			if runtime.GOOS == "windows" {
				ext = ".exe"
			}
			return fmt.Sprintf("%s-%s", runtime.GOOS, runtime.GOARCH), ext
		},
		io: io,
	}
}
// SetConfig injects the gh configuration used for host/protocol lookups.
func (m *Manager) SetConfig(cfg config.Config) {
	m.config = cfg
}
// SetClient injects the HTTP client used for API calls and release downloads.
func (m *Manager) SetClient(client *http.Client) {
	m.client = client
}
// Dispatch looks up the installed extension named by args[0] and executes it,
// forwarding the remaining args and the provided stdio streams. It returns
// (false, nil) when no extension matches so the caller can fall back to
// normal command handling.
func (m *Manager) Dispatch(args []string, stdin io.Reader, stdout, stderr io.Writer) (bool, error) {
	if len(args) == 0 {
		return false, errors.New("too few arguments in list")
	}
	var exe string
	extName := args[0]
	forwardArgs := args[1:]
	// Metadata (latest versions) is not needed to dispatch, hence list(false).
	exts, _ := m.list(false)
	var ext Extension
	for _, e := range exts {
		if e.Name() == extName {
			ext = e
			exe = ext.Path()
			break
		}
	}
	if exe == "" {
		return false, nil
	}
	var externalCmd *exec.Cmd
	if ext.IsBinary() || runtime.GOOS != "windows" {
		externalCmd = m.newCommand(exe, forwardArgs...)
	} else if runtime.GOOS == "windows" {
		// Dispatch all extension calls through the `sh` interpreter to support executable files with a
		// shebang line on Windows.
		shExe, err := m.findSh()
		if err != nil {
			if errors.Is(err, exec.ErrNotFound) {
				return true, errors.New("the `sh.exe` interpreter is required. Please install Git for Windows and try again")
			}
			return true, err
		}
		forwardArgs = append([]string{"-c", `command "$@"`, "--", exe}, forwardArgs...)
		externalCmd = m.newCommand(shExe, forwardArgs...)
	}
	externalCmd.Stdin = stdin
	externalCmd.Stdout = stdout
	externalCmd.Stderr = stderr
	return true, externalCmd.Run()
}
// List returns the installed extensions as the public interface type.
// includeMetadata additionally resolves latest versions over the network.
// Listing errors are deliberately dropped; an empty slice is returned instead.
func (m *Manager) List(includeMetadata bool) []extensions.Extension {
	exts, _ := m.list(includeMetadata)
	r := make([]extensions.Extension, len(exts))
	for i, v := range exts {
		// Copy the loop variable so each slot points at a distinct value.
		val := v
		r[i] = &val
	}
	return r
}
// list scans the install directory for entries prefixed "gh-" and parses each
// as either a directory-based (git/binary) or file-based (local symlink)
// extension. With includeMetadata, latest versions are fetched concurrently.
func (m *Manager) list(includeMetadata bool) ([]Extension, error) {
	dir := m.installDir()
	entries, err := ioutil.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var results []Extension
	for _, f := range entries {
		// Only entries following the gh-<name> convention are extensions.
		if !strings.HasPrefix(f.Name(), "gh-") {
			continue
		}
		var ext Extension
		var err error
		if f.IsDir() {
			ext, err = m.parseExtensionDir(f)
			if err != nil {
				return nil, err
			}
			results = append(results, ext)
		} else {
			ext, err = m.parseExtensionFile(f)
			if err != nil {
				return nil, err
			}
			results = append(results, ext)
		}
	}
	if includeMetadata {
		m.populateLatestVersions(results)
	}
	return results, nil
}
// parseExtensionFile interprets a non-directory entry as a locally-installed
// extension: either a symlink to the extension directory or (on Windows) a
// plain file containing the path to it.
func (m *Manager) parseExtensionFile(fi fs.FileInfo) (Extension, error) {
	ext := Extension{isLocal: true}
	id := m.installDir()
	// NOTE(review): for a symlink this joins the name twice, i.e. it assumes
	// the link points at a directory containing an executable of the same
	// name — confirm against makeSymlink's contract.
	exePath := filepath.Join(id, fi.Name(), fi.Name())
	if !isSymlink(fi.Mode()) {
		// if this is a regular file, its contents is the local directory of the extension
		p, err := readPathFromFile(filepath.Join(id, fi.Name()))
		if err != nil {
			return ext, err
		}
		exePath = filepath.Join(p, fi.Name())
	}
	ext.path = exePath
	return ext, nil
}
// parseExtensionDir classifies an extension directory: the presence of a
// manifest file marks a precompiled binary extension; otherwise it is treated
// as a git-cloned extension.
func (m *Manager) parseExtensionDir(fi fs.FileInfo) (Extension, error) {
	id := m.installDir()
	if _, err := os.Stat(filepath.Join(id, fi.Name(), manifestName)); err == nil {
		return m.parseBinaryExtensionDir(fi)
	}
	return m.parseGitExtensionDir(fi)
}
// parseBinaryExtensionDir reads the YAML manifest of a precompiled extension
// and reconstructs its repo URL and installed tag from it.
func (m *Manager) parseBinaryExtensionDir(fi fs.FileInfo) (Extension, error) {
	id := m.installDir()
	exePath := filepath.Join(id, fi.Name(), fi.Name())
	ext := Extension{path: exePath, kind: BinaryKind}
	manifestPath := filepath.Join(id, fi.Name(), manifestName)
	manifest, err := os.ReadFile(manifestPath)
	if err != nil {
		return ext, fmt.Errorf("could not open %s for reading: %w", manifestPath, err)
	}
	var bm binManifest
	err = yaml.Unmarshal(manifest, &bm)
	if err != nil {
		return ext, fmt.Errorf("could not parse %s: %w", manifestPath, err)
	}
	repo := ghrepo.NewWithHost(bm.Owner, bm.Name, bm.Host)
	remoteURL := ghrepo.GenerateRepoURL(repo, "")
	ext.url = remoteURL
	ext.currentVersion = bm.Tag
	return ext, nil
}
// parseGitExtensionDir builds an Extension for a git-cloned directory,
// resolving its origin URL and current commit from the clone's .git dir.
func (m *Manager) parseGitExtensionDir(fi fs.FileInfo) (Extension, error) {
	id := m.installDir()
	exePath := filepath.Join(id, fi.Name(), fi.Name())
	remoteUrl := m.getRemoteUrl(fi.Name())
	currentVersion := m.getCurrentVersion(fi.Name())
	return Extension{
		path:           exePath,
		url:            remoteUrl,
		isLocal:        false,
		currentVersion: currentVersion,
		kind:           GitKind,
	}, nil
}
// getCurrentVersion determines the current version for non-local git
// extensions by resolving HEAD in the clone's .git directory. Any failure
// (no git, no clone) degrades to an empty version rather than an error.
func (m *Manager) getCurrentVersion(extension string) string {
	gitExe, err := m.lookPath("git")
	if err != nil {
		return ""
	}
	dir := m.installDir()
	gitDir := "--git-dir=" + filepath.Join(dir, extension, ".git")
	cmd := m.newCommand(gitExe, gitDir, "rev-parse", "HEAD")
	localSha, err := cmd.Output()
	if err != nil {
		return ""
	}
	return string(bytes.TrimSpace(localSha))
}
// getRemoteUrl determines the remote URL for non-local git extensions by
// reading remote.origin.url from the clone's config. Failures degrade to "".
func (m *Manager) getRemoteUrl(extension string) string {
	gitExe, err := m.lookPath("git")
	if err != nil {
		return ""
	}
	dir := m.installDir()
	gitDir := "--git-dir=" + filepath.Join(dir, extension, ".git")
	cmd := m.newCommand(gitExe, gitDir, "config", "remote.origin.url")
	url, err := cmd.Output()
	if err != nil {
		return ""
	}
	return strings.TrimSpace(string(url))
}
// populateLatestVersions resolves the latest version of every extension
// concurrently (one goroutine each) and writes the results back into the
// slice. Lookup errors leave the corresponding latestVersion empty.
func (m *Manager) populateLatestVersions(exts []Extension) {
	size := len(exts)
	type result struct {
		index   int
		version string
	}
	// Buffered so workers never block on send after the receiver loop starts.
	ch := make(chan result, size)
	var wg sync.WaitGroup
	wg.Add(size)
	for idx, ext := range exts {
		go func(i int, e Extension) {
			defer wg.Done()
			version, _ := m.getLatestVersion(e)
			ch <- result{index: i, version: version}
		}(idx, ext)
	}
	wg.Wait()
	close(ch)
	for r := range ch {
		ext := &exts[r.index]
		ext.latestVersion = r.version
	}
}
// getLatestVersion returns the newest available version for an extension:
// the latest release tag for binary extensions, or the remote HEAD commit
// SHA for git extensions. Local extensions cannot be versioned this way.
func (m *Manager) getLatestVersion(ext Extension) (string, error) {
	if ext.isLocal {
		return "", localExtensionUpgradeError
	}
	if ext.IsBinary() {
		repo, err := ghrepo.FromFullName(ext.url)
		if err != nil {
			return "", err
		}
		r, err := fetchLatestRelease(m.client, repo)
		if err != nil {
			return "", err
		}
		return r.Tag, nil
	} else {
		gitExe, err := m.lookPath("git")
		if err != nil {
			return "", err
		}
		extDir := filepath.Dir(ext.path)
		gitDir := "--git-dir=" + filepath.Join(extDir, ".git")
		cmd := m.newCommand(gitExe, gitDir, "ls-remote", "origin", "HEAD")
		lsRemote, err := cmd.Output()
		if err != nil {
			return "", err
		}
		// ls-remote output is "<sha>\t<ref>"; keep only the SHA.
		remoteSha := bytes.SplitN(lsRemote, []byte("\t"), 2)[0]
		return string(remoteSha), nil
	}
}
// InstallLocal registers a local directory as an extension by creating a
// symlink (or, on Windows, a path file) in the install directory.
func (m *Manager) InstallLocal(dir string) error {
	name := filepath.Base(dir)
	targetLink := filepath.Join(m.installDir(), name)
	if err := os.MkdirAll(filepath.Dir(targetLink), 0755); err != nil {
		return err
	}
	return makeSymlink(dir, targetLink)
}
// binManifest is the YAML manifest written next to a precompiled extension
// binary, recording where it came from and which release tag is installed.
type binManifest struct {
	Owner string
	Name  string
	Host  string
	Tag   string
	// TODO I may end up not using this; just thinking ahead to local installs
	Path string
}
// Install installs an extension from a repository: as a precompiled binary
// when its latest release carries platform assets, otherwise as a git clone —
// but only if the repo contains the expected executable script.
func (m *Manager) Install(repo ghrepo.Interface) error {
	isBin, err := isBinExtension(m.client, repo)
	if err != nil {
		return fmt.Errorf("could not check for binary extension: %w", err)
	}
	if isBin {
		return m.installBin(repo)
	}
	hs, err := hasScript(m.client, repo)
	if err != nil {
		return err
	}
	if !hs {
		return errors.New("extension is uninstallable: missing executable")
	}
	// Clone over the protocol the user configured for this host (https/ssh).
	protocol, _ := m.config.Get(repo.RepoHost(), "git_protocol")
	return m.installGit(ghrepo.FormatRemoteURL(repo, protocol), m.io.Out, m.io.ErrOut)
}
// installBin downloads the latest release asset matching the current
// platform into the install directory and writes a manifest recording the
// source repo and tag.
func (m *Manager) installBin(repo ghrepo.Interface) error {
	var r *release
	r, err := fetchLatestRelease(m.client, repo)
	if err != nil {
		return err
	}
	platform, ext := m.platform()
	var asset *releaseAsset
	for _, a := range r.Assets {
		if strings.HasSuffix(a.Name, platform+ext) {
			// Taking &a is safe here only because of the immediate break.
			asset = &a
			break
		}
	}
	if asset == nil {
		return fmt.Errorf(
			"%[1]s unsupported for %[2]s. Open an issue: `gh issue create -R %[3]s/%[1]s -t'Support %[2]s'`",
			repo.RepoName(), platform, repo.RepoOwner())
	}
	name := repo.RepoName()
	targetDir := filepath.Join(m.installDir(), name)
	// TODO clean this up if function errs?
	err = os.MkdirAll(targetDir, 0755)
	if err != nil {
		return fmt.Errorf("failed to create installation directory: %w", err)
	}
	binPath := filepath.Join(targetDir, name)
	binPath += ext
	err = downloadAsset(m.client, *asset, binPath)
	if err != nil {
		return fmt.Errorf("failed to download asset %s: %w", asset.Name, err)
	}
	manifest := binManifest{
		Name:  name,
		Owner: repo.RepoOwner(),
		Host:  repo.RepoHost(),
		Path:  binPath,
		Tag:   r.Tag,
	}
	bs, err := yaml.Marshal(manifest)
	if err != nil {
		return fmt.Errorf("failed to serialize manifest: %w", err)
	}
	manifestPath := filepath.Join(targetDir, manifestName)
	f, err := os.OpenFile(manifestPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
	if err != nil {
		return fmt.Errorf("failed to open manifest for writing: %w", err)
	}
	defer f.Close()
	_, err = f.Write(bs)
	if err != nil {
		return fmt.Errorf("failed write manifest file: %w", err)
	}
	return nil
}
// installGit clones the extension repository into the install directory,
// naming the target after the repo (minus any ".git" suffix).
func (m *Manager) installGit(cloneURL string, stdout, stderr io.Writer) error {
	exe, err := m.lookPath("git")
	if err != nil {
		return err
	}
	name := strings.TrimSuffix(path.Base(cloneURL), ".git")
	targetDir := filepath.Join(m.installDir(), name)
	externalCmd := m.newCommand(exe, "clone", cloneURL, targetDir)
	externalCmd.Stdout = stdout
	externalCmd.Stderr = stderr
	return externalCmd.Run()
}
// Sentinel errors returned by the upgrade paths; callers and
// upgradeExtensions compare against them with errors.Is.
var localExtensionUpgradeError = errors.New("local extensions can not be upgraded")
var upToDateError = errors.New("already up to date")
var noExtensionsInstalledError = errors.New("no extensions installed")
// Upgrade upgrades the named extension, or all installed extensions when
// name is empty. force applies to git extensions (hard reset to origin).
func (m *Manager) Upgrade(name string, force bool) error {
	// Fetch metadata during list only when upgrading all extensions.
	// This is a performance improvement so that we don't make a
	// bunch of unecessary network requests when trying to upgrade a single extension.
	fetchMetadata := name == ""
	exts, _ := m.list(fetchMetadata)
	if len(exts) == 0 {
		return noExtensionsInstalledError
	}
	if name == "" {
		return m.upgradeExtensions(exts, force)
	}
	for _, f := range exts {
		if f.Name() != name {
			continue
		}
		var err error
		// For single extensions manually retrieve latest version since we forgo
		// doing it during list.
		f.latestVersion, err = m.getLatestVersion(f)
		if err != nil {
			return err
		}
		return m.upgradeExtension(f, force)
	}
	return fmt.Errorf("no extension matched %q", name)
}
// upgradeExtensions upgrades each extension in turn, printing a per-extension
// status line. "local" and "up to date" outcomes are reported but do not
// count as failures.
func (m *Manager) upgradeExtensions(exts []Extension, force bool) error {
	var failed bool
	for _, f := range exts {
		fmt.Fprintf(m.io.Out, "[%s]: ", f.Name())
		err := m.upgradeExtension(f, force)
		if err != nil {
			if !errors.Is(err, localExtensionUpgradeError) &&
				!errors.Is(err, upToDateError) {
				failed = true
			}
			fmt.Fprintf(m.io.Out, "%s\n", err)
			continue
		}
		fmt.Fprintf(m.io.Out, "upgrade complete\n")
	}
	if failed {
		return errors.New("some extensions failed to upgrade")
	}
	return nil
}
// upgradeExtension upgrades a single extension, also handling the migration
// of a git extension whose upstream has since switched to precompiled
// binaries (remove the clone, reinstall as binary).
func (m *Manager) upgradeExtension(ext Extension, force bool) error {
	if ext.isLocal {
		return localExtensionUpgradeError
	}
	if !ext.UpdateAvailable() {
		return upToDateError
	}
	var err error
	if ext.IsBinary() {
		err = m.upgradeBinExtension(ext)
	} else {
		// Check if git extension has changed to a binary extension
		var isBin bool
		repo, repoErr := repoFromPath(filepath.Join(ext.Path(), ".."))
		if repoErr == nil {
			isBin, _ = isBinExtension(m.client, repo)
		}
		if isBin {
			err = m.Remove(ext.Name())
			if err != nil {
				return fmt.Errorf("failed to migrate to new precompiled extension format: %w", err)
			}
			return m.installBin(repo)
		}
		err = m.upgradeGitExtension(ext, force)
	}
	return err
}
// upgradeGitExtension updates a git-cloned extension: fast-forward pull by
// default, or fetch + hard reset to origin/HEAD when force is set (discarding
// local changes in the clone).
func (m *Manager) upgradeGitExtension(ext Extension, force bool) error {
	exe, err := m.lookPath("git")
	if err != nil {
		return err
	}
	dir := filepath.Dir(ext.path)
	if force {
		if err := m.newCommand(exe, "-C", dir, "fetch", "origin", "HEAD").Run(); err != nil {
			return err
		}
		return m.newCommand(exe, "-C", dir, "reset", "--hard", "origin/HEAD").Run()
	}
	return m.newCommand(exe, "-C", dir, "pull", "--ff-only").Run()
}
// upgradeBinExtension upgrades a precompiled extension by reinstalling the
// latest release from its source repository.
func (m *Manager) upgradeBinExtension(ext Extension) error {
	repo, err := ghrepo.FromFullName(ext.url)
	if err != nil {
		return fmt.Errorf("failed to parse URL %s: %w", ext.url, err)
	}
	return m.installBin(repo)
}
// Remove deletes the installed extension directory for gh-<name>.
// Lstat (not Stat) is used so a dangling local symlink is still removable.
func (m *Manager) Remove(name string) error {
	targetDir := filepath.Join(m.installDir(), "gh-"+name)
	if _, err := os.Lstat(targetDir); os.IsNotExist(err) {
		return fmt.Errorf("no extension found: %q", targetDir)
	}
	return os.RemoveAll(targetDir)
}
// installDir returns the directory under the gh data dir where extensions live.
func (m *Manager) installDir() string {
	return filepath.Join(m.dataDir(), "extensions")
}
// Template assets compiled into the binary and used by Create to scaffold
// new extensions. Each //go:embed directive must stay immediately above its
// var declaration.

//go:embed ext_tmpls/goBinMain.go.txt
var mainGoTmpl string

//go:embed ext_tmpls/goBinWorkflow.yml
var goBinWorkflow []byte

//go:embed ext_tmpls/otherBinWorkflow.yml
var otherBinWorkflow []byte

//go:embed ext_tmpls/script.sh
var scriptTmpl string

//go:embed ext_tmpls/buildScript.sh
var buildScript []byte
// Create scaffolds a new extension repository of the requested template type:
// a Go binary project, an "other binary" project, or (default) a single
// executable shell script.
func (m *Manager) Create(name string, tmplType extensions.ExtTemplateType) error {
	exe, err := m.lookPath("git")
	if err != nil {
		return err
	}
	if err := m.newCommand(exe, "init", "--quiet", name).Run(); err != nil {
		return err
	}
	if tmplType == extensions.GoBinTemplateType {
		return m.goBinScaffolding(exe, name)
	} else if tmplType == extensions.OtherBinTemplateType {
		return m.otherBinScaffolding(exe, name)
	}
	script := fmt.Sprintf(scriptTmpl, name)
	if err := writeFile(filepath.Join(name, name), []byte(script), 0755); err != nil {
		return err
	}
	// --chmod=+x records the executable bit in the index even on filesystems
	// that don't track it.
	return m.newCommand(exe, "-C", name, "add", name, "--chmod=+x").Run()
}
// otherBinScaffolding writes the release workflow and build script for a
// non-Go binary extension and stages everything in git.
func (m *Manager) otherBinScaffolding(gitExe, name string) error {
	if err := writeFile(filepath.Join(name, ".github", "workflows", "release.yml"), otherBinWorkflow, 0644); err != nil {
		return err
	}
	buildScriptPath := filepath.Join("script", "build.sh")
	if err := writeFile(filepath.Join(name, buildScriptPath), buildScript, 0755); err != nil {
		return err
	}
	if err := m.newCommand(gitExe, "-C", name, "add", buildScriptPath, "--chmod=+x").Run(); err != nil {
		return err
	}
	return m.newCommand(gitExe, "-C", name, "add", ".").Run()
}
// goBinScaffolding scaffolds a Go-based binary extension: release workflow,
// main.go from the template, a module named after the current user's repo,
// a .gitignore for the built binaries, and an initial build.
func (m *Manager) goBinScaffolding(gitExe, name string) error {
	goExe, err := m.lookPath("go")
	if err != nil {
		return fmt.Errorf("go is required for creating Go extensions: %w", err)
	}
	if err := writeFile(filepath.Join(name, ".github", "workflows", "release.yml"), goBinWorkflow, 0644); err != nil {
		return err
	}
	mainGo := fmt.Sprintf(mainGoTmpl, name)
	if err := writeFile(filepath.Join(name, "main.go"), []byte(mainGo), 0644); err != nil {
		return err
	}
	host, err := m.config.DefaultHost()
	if err != nil {
		return err
	}
	// The module path is derived from the authenticated user: host/user/name.
	currentUser, err := api.CurrentLoginName(api.NewClientFromHTTP(m.client), host)
	if err != nil {
		return err
	}
	goCmds := [][]string{
		{"mod", "init", fmt.Sprintf("%s/%s/%s", host, currentUser, name)},
		{"mod", "tidy"},
		{"build"},
	}
	ignore := fmt.Sprintf("/%[1]s\n/%[1]s.exe\n", name)
	if err := writeFile(filepath.Join(name, ".gitignore"), []byte(ignore), 0644); err != nil {
		return err
	}
	for _, args := range goCmds {
		goCmd := m.newCommand(goExe, args...)
		goCmd.Dir = name
		if err := goCmd.Run(); err != nil {
			return fmt.Errorf("failed to set up go module: %w", err)
		}
	}
	return m.newCommand(gitExe, "-C", name, "add", ".").Run()
}
func isSymlink(m os.FileMode) bool {
return m&os.ModeSymlink != 0
}
// writeFile writes contents to p with the given mode, creating any missing
// parent directories first.
func writeFile(p string, contents []byte, mode os.FileMode) error {
	if dir := filepath.Dir(p); dir != "." {
		if err := os.MkdirAll(dir, 0755); err != nil {
			return err
		}
	}
	return os.WriteFile(p, contents, mode)
}
// reads the product of makeSymlink on Windows: a small file whose contents is
// the path of the local extension directory. Only the first 1KB is read,
// which bounds the supported path length.
func readPathFromFile(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()
	b := make([]byte, 1024)
	n, err := f.Read(b)
	return strings.TrimSpace(string(b[:n])), err
}
// isBinExtension reports whether the repo's latest release carries a
// platform-named asset (optionally with ".exe"), i.e. whether the extension
// is distributed as precompiled binaries. A missing release (404) means
// "not binary", not an error.
func isBinExtension(client *http.Client, repo ghrepo.Interface) (isBin bool, err error) {
	var r *release
	r, err = fetchLatestRelease(client, repo)
	if err != nil {
		httpErr, ok := err.(api.HTTPError)
		if ok && httpErr.StatusCode == 404 {
			err = nil
			return
		}
		return
	}
	for _, a := range r.Assets {
		dists := possibleDists()
		for _, d := range dists {
			suffix := d
			if strings.HasPrefix(d, "windows") {
				suffix += ".exe"
			}
			if strings.HasSuffix(a.Name, suffix) {
				isBin = true
				// Only breaks the inner loop; remaining assets are still
				// scanned, but isBin stays true.
				break
			}
		}
	}
	return
}
// repoFromPath resolves the GitHub repository backing a local git checkout,
// preferring the "origin" remote and falling back to the first remote.
func repoFromPath(path string) (ghrepo.Interface, error) {
	remotes, err := git.RemotesForPath(path)
	if err != nil {
		return nil, err
	}
	if len(remotes) == 0 {
		return nil, fmt.Errorf("no remotes configured for %s", path)
	}
	var remote *git.Remote
	for _, r := range remotes {
		if r.Name == "origin" {
			remote = r
			break
		}
	}
	if remote == nil {
		remote = remotes[0]
	}
	return ghrepo.FromURL(remote.FetchURL)
}
// possibleDists lists the GOOS-GOARCH pairs recognized as release-asset
// suffixes when deciding whether an extension ships precompiled binaries
// (mirrors `go tool dist list`).
func possibleDists() []string {
	return []string{
		"aix-ppc64",
		"android-386",
		"android-amd64",
		"android-arm",
		"android-arm64",
		"darwin-amd64",
		"darwin-arm64",
		"dragonfly-amd64",
		"freebsd-386",
		"freebsd-amd64",
		"freebsd-arm",
		"freebsd-arm64",
		"illumos-amd64",
		"ios-amd64",
		"ios-arm64",
		"js-wasm",
		"linux-386",
		"linux-amd64",
		"linux-arm",
		"linux-arm64",
		"linux-mips",
		"linux-mips64",
		"linux-mips64le",
		"linux-mipsle",
		"linux-ppc64",
		"linux-ppc64le",
		"linux-riscv64",
		"linux-s390x",
		"netbsd-386",
		"netbsd-amd64",
		"netbsd-arm",
		"netbsd-arm64",
		"openbsd-386",
		"openbsd-amd64",
		"openbsd-arm",
		"openbsd-arm64",
		"openbsd-mips64",
		"plan9-386",
		"plan9-amd64",
		"plan9-arm",
		"solaris-amd64",
		"windows-386",
		"windows-amd64",
		"windows-arm",
	}
}
|
<filename>spec/models/monthly_service_metrics_spec.rb<gh_stars>1-10
require 'rails_helper'
# Model specs for MonthlyServiceMetrics: month-range scoping and dates derived
# from a metrics month. NOTE(review): FactoryGirl is the pre-rename
# factory_bot API — presumably pinned by the Gemfile.
RSpec.describe MonthlyServiceMetrics, type: :model do
  describe '.between' do
    it 'returns the metrics between the start and end months, inclusive' do
      service = FactoryGirl.create(:service)
      # April and July records sit just outside the queried range.
      FactoryGirl.create(:monthly_service_metrics, service: service, month: YearMonth.new(2017, 4))
      metrics1 = FactoryGirl.create(:monthly_service_metrics, service: service, month: YearMonth.new(2017, 5))
      metrics2 = FactoryGirl.create(:monthly_service_metrics, service: service, month: YearMonth.new(2017, 6))
      FactoryGirl.create(:monthly_service_metrics, service: service, month: YearMonth.new(2017, 7))
      may = YearMonth.new(2017, 5)
      june = YearMonth.new(2017, 6)
      expect(MonthlyServiceMetrics.between(may, june)).to match_array([metrics1, metrics2])
    end
  end

  describe '#publish_date' do
    it "returns nil if there's no month" do
      metrics = FactoryGirl.build(:monthly_service_metrics, month: nil)
      expect(metrics.publish_date).to be_nil
    end

    it 'returns the first of the month, 2 months from the given month' do
      metrics = FactoryGirl.build(:monthly_service_metrics, month: YearMonth.new(2017, 11))
      expect(metrics.publish_date).to eq(Date.new(2018, 1, 1))
    end
  end

  describe '#next_metrics_due_date' do
    it "returns nil if there's no month" do
      metrics = FactoryGirl.build(:monthly_service_metrics, month: nil)
      expect(metrics.next_metrics_due_date).to be_nil
    end

    # NOTE(review): this example's description and expectation mirror
    # #publish_date exactly — confirm the due date really shares that formula
    # rather than being a copy-paste of the example above.
    it 'returns the first of the month, 2 months from the given month' do
      metrics = FactoryGirl.build(:monthly_service_metrics, month: YearMonth.new(2017, 11))
      expect(metrics.next_metrics_due_date).to eq(Date.new(2018, 1, 1))
    end
  end
end
|
<reponame>altcatalin/canvas-td
### import ###
from __future__ import division
from math import *
import ctypes
### variables ###
pi2 = pi * 2  # full circle in radians (tau)
### methods ###
def inRadius(target, obj, rad):
    """Return True when obj lies strictly inside the circle of radius rad
    centered on target. Compares squared distances to avoid a sqrt."""
    dx = obj.x - target.x
    dy = obj.y - target.y
    return dx * dx + dy * dy < rad * rad
def move(obj, target, speed):
    """Step obj toward target by `speed` along the straight line between them.

    Mutates obj.x/obj.y in place. Returns True when the Manhattan distance
    measured BEFORE the step was under 2 (i.e. the object had arrived).
    """
    dx = target.x - obj.x
    dy = target.y - obj.y
    heading = atan2(dy, dx)
    obj.x += speed * cos(heading)
    obj.y += speed * sin(heading)
    return abs(dx) + abs(dy) < 2
# xorshift128 PRNG state: four 32-bit words with fixed seeds, so the stream
# is deterministic across runs.
_randx = ctypes.c_uint32(10240)
_randy = ctypes.c_uint32(12345)
_randz = ctypes.c_uint32(67890)
_randw = ctypes.c_uint32(32768)
_randd = 2 ** 32  # normalization divisor: 2**32 maps a word into [0, 1)


def rand(n):
    """Return a pseudo-random integer in [0, n] from the xorshift128 stream.

    Bug fix: the original divided the ctypes wrapper object `_randw` itself
    by `_randd`, which raises TypeError (ctypes integers do not support
    arithmetic); the raw integer lives in `_randw.value`.
    """
    global _randx, _randy, _randz, _randw, _randd
    # Advance the xorshift state; c_uint32 truncates to 32 bits.
    t = ctypes.c_uint32(_randx.value ^ (_randx.value << 15))
    _randx = _randy
    _randy = _randz
    _randz = _randw
    _randw = ctypes.c_uint32((_randw.value ^ (_randw.value >> 21)) ^ (t.value ^ (t.value >> 4)))
    return floor((_randw.value / _randd) * (n + 1))
<reponame>ndinakar/Phase4-SCSB-Gateway
package org.recap.controller;
import java.util.Date;
import org.recap.PropertyKeyConstants;
import org.recap.ScsbCommonConstants;
import org.recap.model.ScheduleJobRequest;
import org.recap.model.ScheduleJobResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* Created by rajeshbabuk on 5/4/17.
*/
@RestController
@RequestMapping("/scheduleService")
public class ScheduleJobsController extends AbstractController {
    private static final Logger logger = LoggerFactory.getLogger(ScheduleJobsController.class);

    // Base URL of the SCSB batch-scheduler microservice, from configuration.
    @Value("${" + PropertyKeyConstants.SCSB_BATCH_SCHEDULE_URL + "}")
    private String scsbScheduleUrl;

    /**
     * Gets scsb schedule url.
     *
     * @return the scsb schedule url
     */
    public String getScsbScheduleUrl() {
        return scsbScheduleUrl;
    }

    /**
     * This method is exposed as scheduler service for other microservices to schedule or reschedule or unschedule a job.
     * Proxies the request to the batch-scheduler service; on failure the
     * exception message is returned in the response instead of propagating.
     *
     * @param scheduleJobRequest the schedule job request
     * @return the schedule job response
     */
    @PostMapping(value="/scheduleJob")
    public ScheduleJobResponse scheduleJob(@RequestBody ScheduleJobRequest scheduleJobRequest) {
        ScheduleJobResponse scheduleJobResponse = new ScheduleJobResponse();
        try {
            HttpEntity<ScheduleJobRequest> httpEntity = new HttpEntity<>(scheduleJobRequest, getRestHeaderService().getHttpHeaders());
            ResponseEntity<ScheduleJobResponse> responseEntity = restTemplate.exchange(getScsbScheduleUrl() + ScsbCommonConstants.URL_SCHEDULE_JOBS, HttpMethod.POST, httpEntity, ScheduleJobResponse.class);
            scheduleJobResponse = responseEntity.getBody();
        } catch (Exception e) {
            logger.error(ScsbCommonConstants.LOG_ERROR,e);
            scheduleJobResponse.setMessage(e.getMessage());
        }
        return scheduleJobResponse;
    }

    /**
     * Diagnostic endpoint that emits a log line with a canned response, used
     * to verify custom logger configuration.
     */
    @GetMapping(value="/logger-test")
    public ScheduleJobResponse customLoggerTest() {
        ScheduleJobResponse scheduleJobResponse = new ScheduleJobResponse();
        scheduleJobResponse.setMessage("Scheduler job response");
        scheduleJobResponse.setNextRunTime(new Date());
        try {
            logger.info("Inside the customLoggerTest method - ScheduleJobResponse : {}", scheduleJobResponse);
        } catch (Exception e) {
            logger.error(ScsbCommonConstants.LOG_ERROR,e);
        }
        return scheduleJobResponse;
    }
}
|
/**
* @fileoverview Provide the HurlerTough class.
* @author <EMAIL> (<NAME>)
*/
/**
 * Constructor for the HurlerTough class, a tougher variant of the Hurler
 * baddie who walks around and hurls rocks.
 * @constructor
 * @extends {ace.Hurler}
 */
ace.HurlerTough = function(game, room) {
  ace.base(this, game, room);
  this.name = 'HurlerTough';
};
ace.inherits(ace.HurlerTough, ace.Hurler);
/**
 * What happens when this guy is spawned: tougher stats than the base Hurler
 * (2 hit points, randomized walk speed of 2-3).
 * @param {lengine.Runner} runner The runner object.
 */
ace.HurlerTough.prototype.onSpawn = function(runner) {
  this.hitPoints = 2;
  this.walkSpeed = 2 + ace.randomInt(2);
}
#!/bin/bash
# Fetch a single OSM element (node/way/relation) by its openstreetmap.org URL
# and load it into the test database as an osm2pgsql change file.
set -e

# Print an error and abort.
function die {
    echo -e "ERROR: $@" 1>&2
    exit 1
}

# Fail fast if any required tool is missing.
for prog in awk curl xsltproc osm2pgsql; do
    which "$prog" >/dev/null 2>&1 || \
        die "Unable to find program $prog - please install it and make sure" \
            "it is present in your \$PATH."
done

url=$1
if [[ -z $url ]]; then
    die "Usage: test-data-update-osm.sh <URL to element> [database name]\n For example:" \
        "test-data-update-osm.sh http://www.openstreetmap.org/node/3958246944"
fi

# Database name: second argument, falling back to $MZ_DATABASE.
db=$2
if [[ -z $db ]]; then
    db=$MZ_DATABASE
fi
if [[ -z $db ]]; then
    die "Usage: test-data-update-osm.sh <URL to element> [database name]\n Unable to" \
        "figure out database name. Please either provide it as the second" \
        "parameter or as the environment variable \$MZ_DATABASE."
fi

if [[ ! -e test-data-osm-template.xsl ]]; then
    die "Could not find file 'test-data-osm-template.xsl', make sure you are running from" \
        "the vector-datasource root dir."
fi
if [[ ! -e osm2pgsql.style ]]; then
    die "Could not find file 'osm2pgsql.style', make sure you are running" \
        "from the vector-datasource root dir."
fi

# Parse the element type and numeric ID out of the URL path.
typ=$(echo "$url" | awk -F / '{print $4;}')
if [[ $typ != 'node' && $typ != 'way' && $typ != 'relation' ]]; then
    die "Could not understand URL as an OSM element type. URLs should look" \
        "like this: http://www.openstreetmap.org/node/3958246944"
fi
id=$(echo "$url" | awk -F / '{print $5;}')
# (Cleanup: a stray debug `echo $id` that polluted the output was removed.)
if [[ ! $id =~ ^[0-9]+$ ]]; then
    die "Could not understand URL as an OSM element ID. URLs should look" \
        "like this: http://www.openstreetmap.org/node/3958246944"
fi

# Ways and relations need /full so their member nodes come along.
api_url="https://www.openstreetmap.org/api/0.6/$typ/$id"
if [[ $typ == 'way' || $typ == 'relation' ]]; then
    api_url="$api_url/full"
fi

curl -o update.osm "$api_url"
# Convert the plain .osm download into an osmChange file, then apply it.
xsltproc test-data-osm-template.xsl update.osm > update.osc
osm2pgsql -s -C 1024 -S osm2pgsql.style --hstore-all -d "$db" -a update.osc
rm -f update.osc update.osm
echo "Done!"
|
import joinCapitalized from './joinCapitalized'
// Coerce a value to a floating-point number (NaN when unparsable).
// Number.parseFloat is the same function as the global parseFloat.
function toFloat (x) {
  return Number.parseFloat(x)
}
/**
 * Normalize a randomuser-style record into {id, name, coordinates}.
 * The name is joined/capitalized and the coordinates become a
 * [longitude, latitude] pair of floats.
 */
export default function (user) {
  const {first, last} = user.name
  const {latitude, longitude} = user.location.coordinates
  return {
    id: user.id,
    name: joinCapitalized([first, last]),
    coordinates: [longitude, latitude].map(toFloat)
  }
}
|
// Exercise the project's logger wrapper once at each supported level.
const logger = require('./libs/loggerLibs')
logger.info('node.js info')
logger.error('node.js error')
logger.debug('node.js debug')
|
<reponame>dphochman/presearch
/**
* presearch-git-input.ts - prepare git for presearch.
*/
|
#!/bin/bash
# create multiresolution windows icon

# Build a multi-resolution .ico from a source .png by generating
# temporary 16/32/48 px renditions and combining them.  Factored into
# a function because the mainnet and testnet sequences were identical
# apart from the source/destination paths.
make_ico() {
    local src=$1 dst=$2
    convert "$src" -resize 16x16 spoomy-16.png
    convert "$src" -resize 32x32 spoomy-32.png
    convert "$src" -resize 48x48 spoomy-48.png
    convert spoomy-16.png spoomy-32.png spoomy-48.png "$dst"
}

#mainnet
make_ico ../../src/qt/res/icons/spoomy.png ../../src/qt/res/icons/spoomy.ico
#testnet
make_ico ../../src/qt/res/icons/zumy_testnet.png ../../src/qt/res/icons/zumy_testnet.ico

# Clean up the shared temporary renditions.
rm spoomy-16.png spoomy-32.png spoomy-48.png
|
def get_largest_palindrome_in_range(min, max):
    """Return the largest integer in [min, max] whose decimal string is a
    palindrome, or 0 if the range contains none.

    Note: the parameter names shadow the builtins ``min``/``max``; they are
    kept for backward compatibility with keyword callers.
    """
    # Scan from the top of the range downwards so the first palindrome
    # found is necessarily the largest -- this lets us return early
    # instead of always checking every number in the range.
    for x in range(max, min - 1, -1):
        s = str(x)
        if s == s[::-1]:
            return x
    # No palindrome in range (also covers empty ranges); original
    # implementation returned 0 in this case.
    return 0
#include "OverlayMount.hpp"
#include <iostream>
using namespace std::string_literals;
int main() {
    // Paths used to exercise OverlayMount: two read-only lower layers
    // and one writable upper layer, merged at 'target'.
    boost::filesystem::path target = "/root/overlay_test/merged"s;
    boost::filesystem::path upper = "/root/overlay_test/upper"s;
    std::vector<boost::filesystem::path> lower_vec;
    lower_vec.push_back(boost::filesystem::path("/root/overlay_test/lower1"s));
    lower_vec.push_back(boost::filesystem::path("/root/overlay_test/lower2"s));

    // Take each path by const reference: 'auto p' copied every
    // boost::filesystem::path just to print it.
    for (const auto& p : lower_vec) {
        std::cout << "lower: " << p << std::endl;
    }
    std::cout << "upper: " << upper << std::endl;
    std::cout << "target: " << target << std::endl;

    // Perform the mount (declared in OverlayMount.hpp).
    OverlayMount(target, lower_vec, upper);
    return 0;
}
// Wallace-Sidhree---Futures-Past---Progressive-Rock.jpg
// Wallace-Sidhree---Transformation---Piano-Solo.jpg
// Wallace-Sidhree---Homecoming---Piano-Solo.jpg
// Wallace-Sidhree---Remembrance---Piano-Solo.jpg
// Wallace-Sidhree---Discovery---Piano-Solo.jpg
// Wallace-Sidhree---Resolution---Piano-Solo.jpg
// Wallace-Sidhree---Golden-Days.jpg
// Wallace-Sidhree---Organsm---Hard-Jazz.jpg
import { article, listenToBadges, relatedArtists } from './index';
export const getSongPageDetails = release => {
let songPageDetails = '';
if (release === 'transformation') {
songPageDetails = {
homeUrl: "https://sidhree.com/piano-solo/transformation",
articleTitle: "Transformation, a piano-solo by <NAME>",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Transformation---Piano-Solo---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
} else if (release === 'homecoming') {
songPageDetails = {
homeUrl: "https://sidhree.com/piano-solo/homecoming",
articleTitle: "Homecoming, a piano-solo by <NAME>",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Homecoming---Piano-Solo---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
} else if (release === 'remembrance') {
songPageDetails = {
homeUrl: "https://sidhree.com/piano-solo/remembrance",
articleTitle: "Remembrance, a piano-solo by <NAME>",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Remembrance---Piano-Solo---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
} else if (release === 'discovery') {
songPageDetails = {
homeUrl: "https://sidhree.com/piano-solo/discovery",
articleTitle: "Discovery, a piano-solo by <NAME>",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Discovery---Piano-Solo---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
} else if (release === 'resolution') {
songPageDetails = {
homeUrl: "https://sidhree.com/piano-solo/resolution",
articleTitle: "Resolution, a piano-solo by <NAME>",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Resolution---Piano-Solo---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
} else if (release === 'golden-days') {
songPageDetails = {
homeUrl: "https://sidhree.com/piano-solo/golden-days",
articleTitle: "Golden Days, a piano-solo EP by <NAME>",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Golden-Days---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
} else if (release === 'organsm') {
songPageDetails = {
homeUrl: "https://sidhree.com/hard-jazz/organsm",
articleTitle: "Organsm",
description: "",
image: "/static/img/music/release/Wallace-Sidhree---Organsm---Hard-Jazz---1200.jpg",
imageType: "image/png",
imageWidth: "1200",
imageHeight: "1200",
};
}
return songPageDetails;
};
export const getReleaseDetails = release => {
let releaseDetails = '';
if (release === 'transformation') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '04:54',
image: "/static/img/music/release/Wallace-Sidhree---Transformation---Piano-Solo---640.jpg",
isAlbum: false,
isrc: 'NOW6M1901010',
songName: 'Transformation',
submissionDate: 'July 25, 2019',
releaseDate: 'August 16, 2019',
spotifyAlbum: '2edx3iNIHWxj52aazCU6SR',
spotifyTrack: '2P7f4mz5sdeXjtSWfNiYay',
upc: '0631060268767',
}
} else if (release === 'homecoming') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '03:39',
image: "/static/img/music/release/Wallace-Sidhree---Homecoming---Piano-Solo---640.jpg",
isAlbum: false,
isrc: 'NOW6M1901020',
songName: 'Homecoming',
submissionDate: 'August 17, 2019',
releaseDate: 'September 13, 2019',
spotifyAlbum: '6OngcHCkwHDGCsRCxxPYzG',
spotifyTrack: '58LvenhIyyNiSuDgMpE1Sl',
upc: '0631060500225',
};
} else if (release === 'remembrance') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '04:12',
image: "/static/img/music/release/Wallace-Sidhree---Remembrance---Piano-Solo---640.jpg",
isAlbum: false,
isrc: 'NOW6M1901030',
songName: 'Remembrance',
submissionDate: 'August 27, 2019',
releaseDate: 'October 14, 2019',
spotifyAlbum: '6eqXspnsxeypNAyXKB4tEG',
spotifyTrack: '2CgIfbApZQRcEWcLJwEGiO',
upc: '0631060342764',
};
} else if (release === 'discovery') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '03:08',
image: "/static/img/music/release/Wallace-Sidhree---Discovery---Piano-Solo---640.jpg",
isAlbum: false,
isrc: 'NOW6M1901040',
songName: 'Discovery',
submissionDate: 'October 06, 2019',
releaseDate: 'November 15, 2019',
spotifyAlbum: '7J3NPKHOZPvyLn44ONEUct',
spotifyTrack: '2h3rBOiGpaCog96q1MnDvv',
upc: '0631060572352',
};
} else if (release === 'resolution') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '02:30',
image: "/static/img/music/release/Wallace-Sidhree---Resolution---Piano-Solo---640.jpg",
isAlbum: false,
isrc: 'NOW6M1901050',
songName: 'Resolution',
submissionDate: 'November 15, 2019',
releaseDate: 'December 13, 2019',
spotifyAlbum: '68nRAVTaKPDzKl2Hy6f3WX',
spotifyTrack: '1EWai0RFqbxiN9fEEOjTPz',
upc: '0713929058228',
};
} else if (release === 'golden-days') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '18:23',
image: "/static/img/music/release/Wallace-Sidhree---Golden-Days---640.jpg",
isAlbum: true,
isrc: '',
songName: 'Golden Days',
submissionDate: 'November 15, 2019',
releaseDate: 'December 20, 2019',
spotifyAlbum: '7t0AGGzMjrgDpvgNQUYER4',
spotifyTrack: '',
spotifyAlbumHeight: '235',
upc: '191061543961',
};
} else if (release === 'organsm') {
releaseDetails = {
byMastered: '<NAME>',
byMixed: '<NAME>',
duration: '04:24',
image: "/static/img/music/release/Wallace-Sidhree---Organsm---Hard-Jazz---640.jpg",
isAlbum: true,
isrc: '',
songName: 'Organsm',
submissionDate: 'April 06, 2020',
releaseDate: 'April 17, 2020',
spotifyAlbum: '45B5EHRo8X7RYuRuWUge5z',
spotifyTrack: '',
spotifyAlbumHeight: '173',
upc: '195266724132',
};
}
return releaseDetails;
};
/**
 * Related-artist links for a release.  The six piano-solo releases all
 * share one list of eight artists (previously duplicated per branch);
 * 'organsm' has its own hard-jazz list; anything else falls back to
 * the 'Futures Past' list imported from './index'.  Lists are built
 * via a factory so every call returns fresh objects.
 */
export const getReleaseRelatedArtists = release => {
  const artist = (artistName, artistSpotifyUrl) => ({ artistName, artistSpotifyUrl });
  const pianoSoloArtists = () => [
    artist('<NAME>', 'https://open.spotify.com/artist/2uFUBdaVGtyMqckSeCl0Qj'),
    artist('<NAME>', 'https://open.spotify.com/artist/3LtlJprzuq0Ii8p8YFZXai'),
    artist('<NAME>', 'https://open.spotify.com/artist/00sazWvoTLOqg5MFwC68Um'),
    artist('<NAME>', 'https://open.spotify.com/artist/3duTXsC49HoPt4f4EySDKf'),
    artist('<NAME>', 'https://open.spotify.com/artist/0mWi0Jr9Ir8GPGVBpQqI1W'),
    artist('<NAME>', 'https://open.spotify.com/artist/359LKbZWLk6u3IpyNE2CQG'),
    artist('<NAME>', 'https://open.spotify.com/artist/2VKfXEWzhUi9siHBDTI02Y'),
    artist('<NAME>', 'https://open.spotify.com/artist/2VZNmg4vCnew4Pavo8zDdW'),
  ];
  const pianoSoloReleases = [
    'transformation', 'homecoming', 'remembrance',
    'discovery', 'resolution', 'golden-days',
  ];
  if (pianoSoloReleases.includes(release)) {
    return pianoSoloArtists();
  }
  if (release === 'organsm') {
    return [
      artist('Elephant9', 'https://open.spotify.com/artist/7fX6TkG03KYZv7jAaZKC5v'),
      artist('Krokofant', 'https://open.spotify.com/artist/23A1NMMpoNpJkSlq4GwJUy'),
      artist('<NAME>', 'https://open.spotify.com/artist/5anYQcPEvvSc4zFfO2ZoOC'),
      artist('Niacin', 'https://open.spotify.com/artist/7hQwy8CWLxLlh0pw2tO4YW'),
      artist('<NAME>', 'https://open.spotify.com/artist/0GXvIHZC6fUFoclPbf7Lbm'),
      artist('<NAME>', 'https://open.spotify.com/artist/23ElmuKLosPuHchIPXuq5G'),
      artist('JÜ', 'https://open.spotify.com/artist/0hjeGYbdwj5BaxROxu1OxD'),
      artist('Møster', 'https://open.spotify.com/artist/3zvJ3MlnXfDnmw4aIWtBZf'),
      artist('<NAME>', 'https://open.spotify.com/artist/2W0JpUXpt6nY3k1gblaUNc'),
      artist('<NAME>', 'https://open.spotify.com/artist/33Ynrn4NZsANMBJsFFqB7Y'),
      artist('<NAME>', 'https://open.spotify.com/artist/68HFSFMCZzyRjkkm9bv5Vt'),
    ];
  }
  // Fallback to 'Futures Past'
  return relatedArtists;
};
/**
 * Streaming/purchase links for a release.  Every release is listed on
 * the same eight services in the same order, so the per-release data
 * reduces to an ordered URL list that gets zipped with the shared
 * service/badge table.  Unknown releases fall back to the
 * 'Futures Past' badges imported from './index'.
 */
export const getListenToDetails = release => {
  // Shared [service label, badge id] pairs, in display order.
  const services = [
    ['Apple Music', 'itunes'],
    ['Google Play Music', 'google-play-music'],
    ['Spotify', 'spotify'],
    ['Bandcamp', 'bandcamp'],
    ['Tidal', 'tidal'],
    ['Youtube Music', 'youtube-music'],
    ['Deezer', 'deezer'],
    ['Amazon', 'amazon'],
  ];
  // One URL per entry of `services`, in the same order.
  const urlsByRelease = {
    'transformation': [
      'https://music.apple.com/no/album/transformation-single/1476617288',
      'https://play.google.com/music/preview/B4bktfsrp24jy7nzdkoihfwltcq',
      'https://open.spotify.com/track/2P7f4mz5sdeXjtSWfNiYay',
      'https://sidhree.bandcamp.com/track/transformation-piano-solo',
      'https://tidal.com/browse/track/114511282',
      'https://music.youtube.com/watch?v=tH-h5FWJ9sA',
      'https://www.deezer.com/us/album/105891422',
      'https://www.amazon.com/Transformation-Wallace-Sidhr%C3%A9e/dp/B07VXRVCP7/',
    ],
    'homecoming': [
      'https://music.apple.com/no/album/homecoming-single/1478447196',
      'https://play.google.com/music/preview/Bzjsecyz3cwq2wpcw4gsxiblvze',
      'https://open.spotify.com/track/58LvenhIyyNiSuDgMpE1Sl',
      'https://sidhree.bandcamp.com/track/homecoming',
      'https://tidal.com/browse/track/116128713',
      'https://music.youtube.com/watch?v=y1rSLsPysQw',
      'https://www.deezer.com/us/album/108445422',
      'https://www.amazon.com/Homecoming-Wallace-Sidhr%C3%A9e/dp/B07WP6NKPB/',
    ],
    'remembrance': [
      'https://music.apple.com/no/album/remembrance-single/1479919853',
      'https://play.google.com/music/preview/Bngbi2kvrqnx3munysou5hv7nha',
      'https://open.spotify.com/track/2CgIfbApZQRcEWcLJwEGiO',
      'https://sidhree.bandcamp.com/track/remembrance',
      'https://tidal.com/browse/track/117117525',
      'https://music.youtube.com/watch?v=Z-cinnTOwTY',
      'https://www.deezer.com/us/album/109896102',
      'https://www.amazon.com/Remembrance-Wallace-Sidhr%C3%A9e/dp/B07XF6SQ3C/',
    ],
    'discovery': [
      'https://music.apple.com/no/album/discovery-single/1484066259',
      'https://play.google.com/music/preview/Bkoaxi2aybkt5z7zkivsmmludai',
      'https://open.spotify.com/track/2h3rBOiGpaCog96q1MnDvv',
      'https://sidhree.bandcamp.com/track/discovery',
      'https://tidal.com/browse/track/120527889',
      'https://music.youtube.com/watch?v=B0wFqOx-Zs8',
      'https://www.deezer.com/us/album/115457792',
      'https://www.amazon.com/Discovery-Wallace-Sidhr%C3%A9e/dp/B07Z9JJRF7/',
    ],
    'resolution': [
      'https://music.apple.com/no/album/resolution-single/1489483812',
      'https://play.google.com/music/preview/Bhf7wrmoymolxchxmakgr2u526u',
      'https://open.spotify.com/track/1EWai0RFqbxiN9fEEOjTPz',
      'https://sidhree.bandcamp.com/track/resolution',
      'https://tidal.com/browse/track/123840236',
      'https://music.youtube.com/watch?v=yGq1DcqGLUo',
      'https://www.deezer.com/us/album/120807712',
      'https://www.amazon.com/Resolution-Wallace-Sidhr%C3%A9e/dp/B08232NT5F/',
    ],
    'golden-days': [
      'https://music.apple.com/no/album/golden-days-ep/1489691489',
      'https://play.google.com/music/preview/B2s2sky4opwbmdpjw3yvcxikbfy',
      'https://open.spotify.com/album/7t0AGGzMjrgDpvgNQUYER4',
      'https://sidhree.bandcamp.com/album/golden-days',
      'https://tidal.com/browse/album/124016292',
      'https://music.youtube.com/playlist?list=OLAK5uy_nKK1WamMHIbwCr4Ir-aLOBYvJWxC6N6KM',
      'https://www.deezer.com/us/album/120988732',
      'https://www.amazon.com/Golden-Days-Wallace-Sidhr%C3%A9e/dp/B0824VRKZK/',
    ],
    'organsm': [
      'https://music.apple.com/no/album/organsm-single/1506627443',
      'https://play.google.com/music/preview/B4f5arv4whdvgkm7fjv3o4u2dh4',
      'https://open.spotify.com/album/45B5EHRo8X7RYuRuWUge5z',
      'https://sidhree.bandcamp.com/album/organsm',
      'https://tidal.com/browse/album/136589334',
      'https://music.youtube.com/playlist?list=OLAK5uy_lkYVw-HXAZy3Qp05sc7Sjutgh4kYOxpvM',
      'https://www.deezer.com/us/album/140356412',
      'https://www.amazon.com/Organsm-Wallace-Sidhr%C3%A9e/dp/B086TWD8J5/',
    ],
  };
  // hasOwnProperty guards against keys inherited from Object.prototype.
  if (!Object.prototype.hasOwnProperty.call(urlsByRelease, release)) {
    // Fallback to 'Futures Past'
    return listenToBadges;
  }
  return urlsByRelease[release].map((url, index) => ({
    url,
    service: services[index][0],
    badge: services[index][1],
  }));
};
/**
 * Article copy (date, headings, HTML body) for a release page.
 * Every branch below is currently a placeholder except for the signed
 * sign-off paragraph; unknown releases fall back to the 'Futures Past'
 * article imported from './index'.  The template-literal bodies are
 * raw HTML fragments rendered as-is.
 */
export const getArticleDetails = release => {
  let articleDetails = {};
  if (release === 'transformation') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— Wallace Sidhrée</p>
      `
    };
  } else if (release === 'homecoming') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— Wallace Sidhrée</p>
      `
    };
  } else if (release === 'remembrance') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— <NAME></p>
      `
    };
  } else if (release === 'discovery') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— <NAME></p>
      `
    };
  } else if (release === 'resolution') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— <NAME></p>
      `
    };
  } else if (release === 'golden-days') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— <NAME></p>
      `
    };
  } else if (release === 'organsm') {
    articleDetails = {
      articleDate: '',
      articleHeading: ``,
      articleSubheading: '',
      articleBody: `
        <p class="gutter-bottom"></p>
        <p class="open-sans-light-italic">— <NAME></p>
      `
    };
  } else {
    // Fallback to 'Futures Past'
    articleDetails = article;
  }
  return articleDetails;
};
|
// Mem.cpp
// <NAME>, 10th February 1997.
#include <MFHeader.h>
//---------------------------------------------------------------
// One-time memory subsystem initialisation.  Currently a no-op
// placeholder in this malloc/free-backed implementation; always
// reports success.
BOOL SetupMemory(void)
{
	return TRUE;
}
//---------------------------------------------------------------
// Reset the memory subsystem between runs.  Nothing to do here:
// individual blocks are released through MemFree.
void ResetMemory(void)
{
}
//---------------------------------------------------------------
// Allocate a heap block, rounding the requested size up to the next
// multiple of 4 bytes.
void *MemAlloc(ULONG size)
{
	// Round up with ~(ULONG)3 instead of the literal mask 0xfffffffc:
	// the old 32-bit mask would zero the high bits (and so shrink the
	// allocation) on platforms where ULONG is wider than 32 bits.
	size = (size + 3) & ~(ULONG)3;
	// malloc returns void*, so no cast is needed.
	return malloc(size);
}
//---------------------------------------------------------------
// Release a block previously returned by MemAlloc.
// Passing NULL is safe: free(NULL) is defined as a no-op.
void MemFree(void *mem_ptr)
{
	free(mem_ptr);
}
//---------------------------------------------------------------
|
def find_curly_braces_content(input_string: str) -> str:
    """Return the text between the first '{' and its matching '}'.

    Nested braces inside the outermost pair are preserved.  Returns ""
    when no balanced outermost pair exists (no '{' at all, or the
    opener is never closed).
    """
    start_index = -1
    depth = 0
    found_start = False
    for index, ch in enumerate(input_string):
        if ch == '{':
            if not found_start:
                found_start = True
                # Content begins just after the first opener.
                start_index = index + 1
            depth += 1
        elif ch == '}' and found_start:
            # Bug fix: only count closers after an opener has been seen.
            # Previously a stray '}' before the first '{' pushed the
            # depth negative, so the real closer never brought it back
            # to zero and valid content was reported as missing.
            depth -= 1
            if depth == 0:
                return input_string[start_index:index]
    return ""
# import required packages
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import LabelEncoder

# Read the data set.
# NOTE(review): `file_name` must be defined by the surrounding context
# before this snippet runs -- it is not set here.
data = pd.read_csv(file_name)

# Label-encode the target variable.  Bracket access is used instead of
# attribute assignment (`data.target = ...`): attribute assignment only
# updates an existing column and can otherwise silently create an
# instance attribute instead of a DataFrame column.
le = LabelEncoder()
data["target"] = le.fit_transform(data["target"])

# Separate features from labels.
X = data.drop(columns="target")
y = data["target"]

# Fit a 5-nearest-neighbours classifier on the full data set.
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(X, y)
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2017.4.1 (64-bit)
#
# Filename : dds_compiler_0.sh
# Simulator : Cadence Incisive Enterprise Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Mon Oct 07 18:11:05 -0400 2019
# SW Build 2117270 on Tue Jan 30 15:32:00 MST 2018
#
# Copyright 1986-2017 Xilinx, Inc. All Rights Reserved.
#
# usage: dds_compiler_0.sh [-help]
# usage: dds_compiler_0.sh [-lib_map_path]
# usage: dds_compiler_0.sh [-noclean_files]
# usage: dds_compiler_0.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'dds_compiler_0.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Directory path for design sources and include directories (if any) wrt this path
ref_dir="."
# Override directory with 'export_sim_ref_dir' env path value if set in the shell
if [[ (! -z "$export_sim_ref_dir") && ($export_sim_ref_dir != "") ]]; then
  ref_dir="$export_sim_ref_dir"
fi
# Set the compiled library directory
# (default '.'; normally replaced via the -lib_map_path switch)
ref_lib_dir="."
# Command line options passed to every irun invocation
irun_opts="-64bit -v93 -relax -access +rwc -namemap_mixgen"
# Design libraries; create_lib_dir makes one sub-directory per entry
design_libs=(xil_defaultlib xpm xbip_utils_v3_0_8 axi_utils_v2_0_4 xbip_pipe_v3_0_4 xbip_bram18k_v3_0_4 mult_gen_v12_0_13 xbip_dsp48_wrapper_v3_0_4 xbip_dsp48_addsub_v3_0_4 xbip_dsp48_multadd_v3_0_4 dds_compiler_v6_0_15)
# Simulation root library directory
sim_lib_dir="ies_lib"
# Script info
echo -e "dds_compiler_0.sh - Script generated by export_simulation (Vivado v2017.4.1 (64-bit)-id)\n"
# Main steps
# Top-level driver: validate the command line, apply any switch, then
# compile/elaborate/simulate in one pass.
run()
{
  check_args $# $1
  setup $1 $2
  execute
}
# RUN_STEP: <execute>
# Single irun invocation that compiles, elaborates and simulates.
# The -reflib switches map the pre-compiled Xilinx libraries under
# $ref_lib_dir; 'glbl' supplies the global nets needed by the Verilog
# portion of the design.
execute()
{
  irun $irun_opts \
       -reflib "$ref_lib_dir/unisim:unisim" \
       -reflib "$ref_lib_dir/unisims_ver:unisims_ver" \
       -reflib "$ref_lib_dir/secureip:secureip" \
       -reflib "$ref_lib_dir/unimacro:unimacro" \
       -reflib "$ref_lib_dir/unimacro_ver:unimacro_ver" \
       -top xil_defaultlib.dds_compiler_0 \
       -f run.f \
       -top glbl \
       glbl.v
}
# STEP: setup
# Apply the (optional) command-line switch, then (re)create the design
# library directory tree.  -reset_run and -help exit the script here.
setup()
{
  case $1 in
    "-lib_map_path" )
      # The switch requires a path to the compiled simulation libraries.
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./dds_compiler_0.sh -help\" for more information)\n"
        exit 1
      else
        ref_lib_dir=$2
      fi
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
  esac
  create_lib_dir
  # Add any setup/initialization commands here:-
  # <user specific commands>
}
# Create design library directory paths
create_lib_dir()
{
  # Start from a clean slate: drop any previous library tree, then make
  # one sub-directory per design library.  rm -rf and mkdir -p are both
  # no-ops when their target is absent/present, so no existence checks
  # are needed.
  rm -rf "$sim_lib_dir"
  for lib in "${design_libs[@]}"; do
    mkdir -p "$sim_lib_dir/$lib"
  done
}
# Delete generated data from the previous run
reset_run()
{
  # Simulator artifacts left behind by a prior irun session.
  artifacts=(ncsim.key irun.key irun.log waves.shm irun.history .simvision INCA_libs)
  for artifact in "${artifacts[@]}"; do
    rm -rf "$artifact"
  done
  create_lib_dir
}
# Check command line arguments
# $1 = number of script arguments, $2 = the first argument (switch).
# Rejects an unknown single switch; -help/-h prints usage and exits.
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./dds_compiler_0.sh -help\" for more information)\n"
    exit 1
  fi
  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}
# Script usage
# Prints the help text and exits with status 1 (generated-script
# convention: usage is also shown on errors).
usage()
{
  msg="Usage: dds_compiler_0.sh [-help]\n\
Usage: dds_compiler_0.sh [-lib_map_path]\n\
Usage: dds_compiler_0.sh [-reset_run]\n\
Usage: dds_compiler_0.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}
# Launch script
# NOTE(review): $1/$2 are deliberately left unquoted here — check_args()
# receives the argument count, and quoting would turn absent arguments into
# empty strings, changing that count. Confirm before "fixing" the quoting.
run $1 $2
|
#!/usr/bin/env bash
#set -x
set -o pipefail
#set -e

# Integration test for bfs_client: deploys a fresh cluster, exercises the
# basic file-system operations, then tests fail-over behaviour.
# Optional argument: a server list file (defaults to servers_none).
if [ $# -ne 1 ]; then
  serverfile=servers_none
else
  # BUG FIX: the original never assigned $1 to serverfile, tested an empty $1
  # when no argument was given, and exited with status 0 on error.
  serverfile=$1
  if [ ! -f "$serverfile" ]; then
    echo "$serverfile does not exist"
    exit 1
  fi
fi
# NOTE(review): serverfile is not referenced below — confirm whether the
# deploy scripts are expected to consume it.

sh clear-all.sh
sh deploy-all.sh
sh start-all.sh
sleep 10
# Test ls
./bfs_client ls /
# File put
./bfs_client put ./bfs_client /bfs_client
# File put rewrite
./bfs_client put ./bfs_client /bfs_client
# Test mkdir
./bfs_client mkdir /bin
# Test move
./bfs_client mv /bfs_client /bin/bfs_client
echo Test atomic rename
./bfs_client put ./bfs_client /bfs_client
./bfs_client mv /bfs_client /bin/bfs_client
# Test get: the round-tripped binary must match the original
./bfs_client get /bin/bfs_client ./binary
diff ./bfs_client ./binary > /dev/null
rm -rf ./binary
# More test for base operations
./bfs_client ls /
./bfs_client mkdir /home/user
./bfs_client touchz /home/user/flag
./bfs_client ls /home/user
# Test rmr
./bfs_client rmr /home/user
./bfs_client ls /home
# Now we can list a nonexistent item
#./bfs_client ls /home/user
# Put & get empty file
touch empty_file1
./bfs_client put ./empty_file1 /ef
./bfs_client get /ef ./empty_file2
diff ./empty_file1 ./empty_file2 > /dev/null
rm -rf empty_file*
# Put more files (one level of directory nesting)
for i in `ls ../src/`;
do
  if [ -d ../src/$i ]
  then
    for j in `ls ../src/$i`;
    do
      ./bfs_client put ../src/$i/$j /home/src/$i/$j
    done;
  else
    ./bfs_client put ../src/$i /home/src/$i
  fi
done;
# Kill chunkservers and test client retry against the remaining replicas
sh stop-server.sh cs0
sh stop-server.sh cs1
sleep 10
./bfs_client get /bin/bfs_client ./binary
rm -rf ./binary
# Nameserver restart: data must survive a nameserver bounce
sh stop-nameserver.sh
sh start-nameserver.sh
sleep 10
./bfs_client get /bin/bfs_client ./binary
rm -rf ./binary
echo "Test done!"
|
import { faker } from '@faker-js/faker';
import { Prisma } from '@prisma/client';
import { prisma } from '../../../src/prisma';
const { name, internet } = faker;
/**
 * Seeds the users table: one administrator plus ten ordinary members.
 *
 * @param instanceRoles - instance roles keyed by name; `administrator` and
 *   `member` must be present.
 * @returns tuple of the admin's id and the ids of the seeded members.
 */
const createUsers = async (
  instanceRoles: Record<string, { name: string; id: number }>,
): Promise<[number, number[]]> => {
  // TODO: add seeding admin
  // The administrator gets a fixed email; names are randomized.
  const admin = await prisma.users.create({
    data: {
      email: '<EMAIL>',
      first_name: name.firstName(),
      last_name: name.lastName(),
      instance_role: { connect: { id: instanceRoles.administrator.id } },
    },
  });
  // Create ten members in parallel with fully randomized contact details.
  const members = await Promise.all(
    Array.from({ length: 10 }, () =>
      prisma.users.create({
        data: {
          email: internet.email(),
          first_name: name.firstName(),
          last_name: name.lastName(),
          instance_role: { connect: { id: instanceRoles.member.id } },
        } satisfies Prisma.usersCreateInput,
      }),
    ),
  );
  return [admin.id, members.map((member) => member.id)];
};
export default createUsers;
|
-- Titles of all articles mentioning "Apple" anywhere in the title
-- (case sensitivity of LIKE depends on the database collation).
SELECT title FROM Articles WHERE title LIKE '%Apple%';
package yimei.jss.helper;
import yimei.jss.FJSSMain;
import yimei.jss.jobshop.FlexibleStaticInstance;
import yimei.jss.jobshop.Objective;
import yimei.jss.jobshop.SchedulingSet;
import yimei.jss.rule.AbstractRule;
import yimei.jss.rule.RuleType;
import yimei.jss.rule.workcenter.basic.*;
import yimei.jss.simulation.Simulation;
import yimei.jss.simulation.StaticSimulation;
import java.io.*;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* Have results from the jobs run on the grid. There are 30 result files per
* instance file, and we only need the best fitness from each result file. This
* program should output a csv file containing the best fitness from each of the
* 30 result files for each instance.
*
* We have 30 output files for each instance, and 50 generations per output file,
* plus a 'best individual of run' output.
*
* For each generation/best, we have a fitness and the rule itself.
* We SHOULD be able to derive makespan from the fitness, by calculating the benchmark makespan
* that would have veeb used. We could also parse the rule(s) and re-calculate the makespan
* from this. Once we have results back from the grid, can verify this, but calculating from
* benchmark is definitely simpler.
*
* When looking through grid results, two main scenarios.
* Either we only care about the best makespan for each file, or we also care about the
* best makespan for each generation.
*
* May as well combine the two and store the best makespan from each generation,
* plus the best makespan of any rule. Should store each of these on the same row,
* and have 30 rows.
*
* Created by dyska on 8/07/17.
*/
public class GridResultCleaner {
    private static final char DEFAULT_SEPARATOR = ',';
    private static final String GRID_PATH = "/Users/dyska/Desktop/Uni/COMP489/GPJSS/grid_results/";
    // Directory containing the raw grid output (.stat files).
    private String dataPath;
    // Directory the cleaned .csv files are written to.
    private String outPath;
    // Benchmark (lower-bound) makespan per instance file; static runs only.
    private HashMap<String, Integer> benchmarkMakespans;
    // NOTE(review): stored but never read in this class — confirm it is still needed.
    private boolean doIncludeGenerations;
    // Number of fitness values reported per generation in the .stat files.
    private int numPops;
    // True when cleaning results of static simulations (fitnesses become makespans).
    private boolean isStatic;
    // Optional routing rule; null when the 4-arg constructor is used.
    private AbstractRule routingRule;

    public GridResultCleaner(String simulationType, String dirName, int numPops, boolean doIncludeGenerations) {
        this.dataPath = GRID_PATH + simulationType + "/raw/" + dirName;
        this.outPath = GRID_PATH + simulationType + "/cleaned/" + dirName;
        this.numPops = numPops;
        this.doIncludeGenerations = doIncludeGenerations;
        initStaticState(simulationType);
    }

    public GridResultCleaner(String simulationType, String dirName, AbstractRule routingRule, int numPops,
                             boolean doIncludeGenerations) {
        this.dataPath = GRID_PATH + simulationType + "/raw/" + dirName;
        this.outPath = GRID_PATH + simulationType + "/cleaned/" + dirName + "/" + routingRule.getName();
        this.numPops = numPops;
        this.routingRule = routingRule;
        this.doIncludeGenerations = doIncludeGenerations;
        initStaticState(simulationType);
    }

    /**
     * Shared constructor tail.
     * BUG FIX: the original compared {@code simulationType.toLowerCase() == "static"},
     * which tests reference identity, not content. It only worked by accident when
     * an all-lowercase string literal was passed; use equalsIgnoreCase instead.
     * Must run after routingRule is assigned, since InitBenchmarkMakespans reads it.
     */
    private void initStaticState(String simulationType) {
        isStatic = "static".equalsIgnoreCase(simulationType);
        if (isStatic) {
            benchmarkMakespans = InitBenchmarkMakespans();
        }
    }

    /**
     * Computes the benchmark (lower bound) makespan for every .fjs instance file,
     * keyed by the file's path relative to the FJSS data directory.
     */
    private HashMap<String, Integer> InitBenchmarkMakespans() {
        String homePath = "/Users/dyska/Desktop/Uni/COMP489/GPJSS/";
        // Renamed from 'dataPath': the original local shadowed the field of the same name.
        String fjssDataPath = homePath + "data/FJSS/";
        List<Objective> objectives = new ArrayList<>();
        objectives.add(Objective.MAKESPAN);
        List<Integer> replications = new ArrayList<>();
        replications.add(1); // autoboxing instead of deprecated new Integer(1)
        List<String> fileNames = FJSSMain.getFileNames(new ArrayList(), Paths.get(fjssDataPath), ".fjs");
        HashMap<String, Integer> makeSpans = new HashMap<>();
        for (String fileName : fileNames) {
            List<Simulation> simulations = new ArrayList<>();
            FlexibleStaticInstance instance = FlexibleStaticInstance.readFromAbsPath(fileName);
            simulations.add(new StaticSimulation(null, routingRule, instance));
            SchedulingSet schedulingSet = new SchedulingSet(simulations, replications, objectives);
            int benchmarkMakespan = roundMakespan(schedulingSet.getObjectiveLowerBoundMtx().getData()[0][0]);
            // Key by the path relative to the data directory.
            makeSpans.put(fileName.substring(fjssDataPath.length()), benchmarkMakespan);
        }
        return makeSpans;
    }

    /**
     * Walks every result directory under dataPath and writes one cleaned csv per
     * directory that actually contains .stat files.
     */
    public void cleanResults() {
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(Paths.get(dataPath))) {
            for (Path path : stream) {
                if (!path.toFile().isDirectory()) {
                    continue;
                }
                // don't want .DS_Store files / hidden entries
                if (path.toString().startsWith(dataPath + "/.")) {
                    continue;
                }
                // When a routing rule is set, only clean that rule's directories.
                if (routingRule != null && !path.toString().endsWith(routingRule.getName())) {
                    continue;
                }
                HashMap<Integer, Double[]> makespans = parseMakespans(path.toString());
                if (makespans != null) {
                    System.out.println("Creating results file for: "
                            + path.toString().substring(dataPath.length() + 1));
                    createResultFile(path.toString(), makespans);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Collects, per seed, the per-generation results found in a directory's .stat
     * files. For static runs fitnesses are converted back to makespans using the
     * benchmark; otherwise the raw fitnesses are stored.
     *
     * @return map from seed number to results, or null if the directory holds no .stat files
     */
    public HashMap<Integer, Double[]> parseMakespans(String directoryPath) {
        List<String> fileNames = FJSSMain.getFileNames(new ArrayList(), Paths.get(directoryPath), ".stat");
        if (fileNames.isEmpty()) {
            // must not be a directory for this file
            return null;
        }
        HashMap<Integer, Double[]> makespans = new HashMap<>();
        // fitness = makespan / benchmark makespan, so makespan = benchmark * fitness
        int benchmarkMakespan = 0;
        if (isStatic) {
            benchmarkMakespan = roundMakespan(getBenchmarkMakeSpan(directoryPath));
        }
        // iterating through the output from each different seed value
        for (String fileName : fileNames) {
            Double[] fitnesses = GetFitnesses(fileName);
            // File names look like ".../job.<seed>.out.stat" — extract <seed>.
            String fileNumber = fileName.substring(fileName.indexOf("job") + "job.".length());
            int fileNum = Integer.parseInt(fileNumber.substring(0, fileNumber.indexOf(".out.stat")));
            if (isStatic) {
                Double[] fileMakespans = new Double[fitnesses.length];
                for (int i = 0; i < fitnesses.length; ++i) {
                    fileMakespans[i] = (double) roundMakespan(benchmarkMakespan * fitnesses[i]);
                }
                makespans.put(fileNum, fileMakespans);
            } else {
                // just going to record fitnesses
                makespans.put(fileNum, fitnesses);
            }
        }
        return makespans;
    }

    /**
     * Rounds a makespan (benchmark * fitness) to the nearest int, warning and
     * returning -1 when the value is not within 1e-7 of an integer.
     */
    public static int roundMakespan(double makespan) {
        int makespanInt = (int) Math.round(makespan);
        if (Math.abs(makespanInt - makespan) > 0.0000001) {
            // arbitrary value, but should be very very close
            System.out.println("Why is the value not an integer?");
            return -1;
        }
        return makespanInt;
    }

    /**
     * Recovers the instance file name from a result directory name
     * ("data-FJSS-a-b" → "a/b.fjs") and looks up its benchmark makespan.
     *
     * @return the benchmark makespan, or -1 when the instance is unknown
     */
    public double getBenchmarkMakeSpan(String directoryPath) {
        String fileName = directoryPath.substring(directoryPath.indexOf("data-FJSS-") + "data-FJSS-".length());
        fileName = fileName.replace('-', '/');
        if (routingRule != null) {
            // Strip the trailing "/<ruleName>" segment.
            fileName = fileName.substring(0, fileName.length() - routingRule.getName().length() - 1);
        }
        fileName = fileName + ".fjs";
        return benchmarkMakespans.getOrDefault(fileName, -1);
    }

    /**
     * Reads one .stat file and returns the best fitness of each generation
     * (the lower of the per-population fitnesses when numPops > 1).
     */
    public Double[] GetFitnesses(String fileName) {
        List<Double> bestFitnesses = new ArrayList<>();
        // BUG FIX: the original never closed the reader; try-with-resources
        // guarantees it is released even on error.
        try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
            String sCurrentLine;
            // may be multiple fitnesses per generation if numpops > 1
            Double[] fitnesses = new Double[numPops]; // reset every generation
            int numFound = 0;
            while ((sCurrentLine = br.readLine()) != null) {
                if (sCurrentLine.startsWith("Fitness")) {
                    // line should be in format "Fitness: [0.8386540120793787]"
                    sCurrentLine = sCurrentLine.substring(sCurrentLine.indexOf("[") + 1, sCurrentLine.length() - 1);
                    fitnesses[numFound] = Double.parseDouble(sCurrentLine);
                    numFound++;
                }
                if (numFound == numPops) {
                    // keep only the lower (better) fitness of the generation
                    Double best = fitnesses[0];
                    if (fitnesses.length == 2) {
                        if (fitnesses[1] < best) {
                            best = fitnesses[1];
                        }
                    }
                    bestFitnesses.add(best);
                    // reset for the next generation
                    fitnesses = new Double[numPops];
                    numFound = 0;
                }
            }
        } catch (IOException e) {
            // FileNotFoundException is an IOException; one handler covers both.
            e.printStackTrace();
        }
        return bestFitnesses.toArray(new Double[0]);
    }

    /**
     * Writes one csv per result directory: a header row (Gen0..GenN-1, Best)
     * followed by one comma-joined row of makespans/fitnesses per seed.
     */
    public void createResultFile(String directoryPath, HashMap<Integer, Double[]> makespanMap) {
        String outputFileName;
        if (isStatic) {
            outputFileName = directoryPath.substring(dataPath.length() + 1 + "data-".length()) + ".csv";
        } else {
            outputFileName = directoryPath.substring(directoryPath.lastIndexOf("/") + 1) + ".csv";
        }
        String csvFile = outPath + "/" + outputFileName;
        try (FileWriter writer = new FileWriter(csvFile)) {
            // Header: expecting the same number of generations for all seeds,
            // so any entry determines the column count.
            List<String> headers = new ArrayList<>();
            Double[] entry = makespanMap.values().iterator().next();
            for (int i = 0; i < entry.length - 1; ++i) {
                headers.add("Gen" + i);
            }
            headers.add("Best");
            writeLine(writer, headers);
            for (Integer seed : makespanMap.keySet()) {
                // Join with commas (StringBuilder instead of repeated String +=).
                StringBuilder row = new StringBuilder();
                for (Double makespan : makespanMap.get(seed)) {
                    if (row.length() > 0) {
                        row.append(',');
                    }
                    row.append(makespan);
                }
                List<String> makespanCSV = new ArrayList<>();
                makespanCSV.add(row.toString());
                writeLine(writer, makespanCSV);
            }
            // try-with-resources flushes and closes the writer; the original's
            // explicit flush()/close() calls were redundant.
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
     * All the code below this line is not mine, taken from:
     * https://www.mkyong.com/java/how-to-export-data-to-csv-file-java/
     */
    public static void writeLine(Writer w, List<String> values, char separators, char customQuote) throws IOException {
        boolean first = true;
        // default customQuote is empty
        if (separators == ' ') {
            separators = DEFAULT_SEPARATOR;
        }
        StringBuilder sb = new StringBuilder();
        for (String value : values) {
            if (!first) {
                sb.append(separators);
            }
            if (customQuote == ' ') {
                sb.append(followCSVformat(value));
            } else {
                sb.append(customQuote).append(followCSVformat(value)).append(customQuote);
            }
            first = false;
        }
        sb.append("\n");
        w.append(sb.toString());
    }

    // Escapes embedded double quotes per the CSV convention ("" for ").
    private static String followCSVformat(String value) {
        String result = value;
        if (result.contains("\"")) {
            result = result.replace("\"", "\"\"");
        }
        return result;
    }

    // Convenience overload: default separator, no quoting.
    public static void writeLine(Writer w, List<String> values) throws IOException {
        writeLine(w, values, DEFAULT_SEPARATOR, ' ');
    }

    public static void main(String[] args) {
        AbstractRule routingRule = new SBT(RuleType.ROUTING);
        GridResultCleaner grc = new GridResultCleaner("static", "simple_routing_rule_tests", routingRule,
                1, true);
        grc.cleanResults();
    }
}
|
require 'yaml'
require_relative 'service_generator'
module Kontena::Cli::Stacks
  # Extends the base generator with compose-v2-only service keys.
  class ServiceGeneratorV2 < ServiceGenerator

    # Builds the service data hash for a v2 service definition.
    # Adds network mode, logging configuration and depends_on-derived links
    # on top of what the base generator produces.
    def parse_data(options)
      data = super(options)
      mode = options['network_mode']
      data['net'] = mode if mode
      # These keys are always assigned; they are nil when no logging config exists.
      data['log_driver'] = options.dig('logging', 'driver')
      data['log_opts'] = options.dig('logging', 'options')
      depends = options['depends_on']
      if depends
        data['links'] = ((data['links'] || []) + parse_links(depends)).uniq
      end
      data
    end

    # Normalizes the compose 'build' section to hash form ('context' key)
    # and resolves any build args before returning it.
    def parse_build_options(options)
      build = options['build']
      build = { 'context' => build } unless build.is_a?(Hash)
      options['build'] = build
      build['args'] = parse_build_args(build['args']) if build['args']
      build
    end
  end
end
|
#!/bin/bash
# Container entry point: remaps the 'coder' user/group to the caller-supplied
# PUID/PGID, generates an ssh key for that user, then runs sshd in the
# foreground.
echo "[start.sh] executed"
# If either PUID or PGID is missing, replace this process with an interactive
# shell instead of starting sshd.
if [ -z "${PUID}" -o -z "${PGID}" ]; then
exec /bin/bash
else
# Refuse to remap the account onto root (uid/gid 0).
if [ "${PUID}" -eq 0 -o "${PGID}" -eq 0 ]; then
echo "[start.sh] Nothing to do here." ; exit 0
fi
fi
# NOTE(review): these defaults appear unreachable — if either variable were
# unset the script would already have exec'd /bin/bash above. Confirm intent.
PGID=${PGID:-5555}
PUID=${PUID:-5555}
echo "PUID=${PUID}"
echo "PGID=${PGID}"
# Remap the existing 'coder' account to the requested ids (-o allows
# non-unique ids) and fix home directory ownership accordingly.
groupmod -o -g "$PGID" coder
usermod -o -u "$PUID" coder
chown -R ${PUID}:${PGID} /home/coder
# Generate a password-less RSA key pair for the coder user.
su - coder -c "ssh-keygen -q -t rsa -b 4096 -f ~/.ssh/id_rsa -C coder[${PUID}-${PGID}]@$(hostname) -N ''"
echo "[start.sh] ssh-key generated."
#-------------------------------------------------------------------------------
# Run sshd in the foreground as the container's main process.
exec /usr/sbin/sshd -D
|
<reponame>sula92/layeredpos-mvn-hn-spring
package com.sula.dao.custom.impl;
import com.sula.dao.CrudDAOImpl;
import com.sula.dao.custom.CustomerDAO;
import com.sula.entity.Customer;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;
import java.sql.*;
@Repository
@Qualifier("customerdaoimpl")
public class CustomerDAOImpl extends CrudDAOImpl<Customer, String> implements CustomerDAO {

    public CustomerDAOImpl() {
    }

    /**
     * Returns the id of the "last" customer row, or null when the table is empty.
     * <p>
     * NOTE(review): ids are strings, so {@code ORDER BY id DESC} sorts
     * lexicographically — e.g. "C9" sorts after "C10". Confirm the id format
     * (e.g. zero-padded) keeps this correct.
     *
     * @throws SQLException declared by the DAO contract
     */
    @Override
    public String getLastCustomerId() throws SQLException {
        return (String) session.createNativeQuery("SELECT id FROM Customer ORDER BY id DESC LIMIT 1").uniqueResult();
    }
}
|
package ca.uhn.fhir.jpa.model.interceptor.api;
/*-
* #%L
* HAPI FHIR Model
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Value for {@link Hook#value()}
*/
public enum Pointcut {

    /**
     * This pointcut will be called once when a given interceptor is registered
     */
    REGISTERED,

    /**
     * Invoked whenever a persisted resource has been modified and is being submitted to the
     * subscription processing pipeline. This method is called before the resource is placed
     * on any queues for processing and executes synchronously during the resource modification
     * operation itself, so it should return quickly.
     * <p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage - Hooks may modify this parameter. This will affect the checking process.</li>
     * </ul>
     * </p>
     * <p>
     * Hooks may return <code>void</code> or may return a <code>boolean</code>. If the method returns
     * <code>void</code> or <code>true</code>, processing will continue normally. If the method
     * returns <code>false</code>, subscription processing will not proceed for the given resource;
     * </p>
     */
    SUBSCRIPTION_RESOURCE_MODIFIED("ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),

    /**
     * Invoked any time that a resource is matched by an individual subscription, and
     * is about to be queued for delivery.
     * <p>
     * Hooks may make changes to the delivery payload, or make changes to the
     * canonical subscription such as adding headers, modifying the channel
     * endpoint, etc.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription</li>
     * <li>ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage</li>
     * <li>ca.uhn.fhir.jpa.subscription.module.matcher.SubscriptionMatchResult</li>
     * </ul>
     * <p>
     * Hooks may return <code>void</code> or may return a <code>boolean</code>. If the method returns
     * <code>void</code> or <code>true</code>, processing will continue normally. If the method
     * returns <code>false</code>, delivery will be aborted.
     * </p>
     */
    SUBSCRIPTION_RESOURCE_MATCHED("ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage", "ca.uhn.fhir.jpa.subscription.module.matcher.SubscriptionMatchResult"),

    /**
     * Invoked whenever a persisted resource was checked against all active subscriptions, and did not
     * match any.
     * <p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage - Hooks should not modify this parameter as changes will not have any effect.</li>
     * </ul>
     * </p>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    SUBSCRIPTION_RESOURCE_DID_NOT_MATCH_ANY_SUBSCRIPTIONS("ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),

    /**
     * Invoked immediately before the delivery of a subscription, and right before any channel-specific
     * hooks are invoked (e.g. {@link #SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY}).
     * <p>
     * Hooks may make changes to the delivery payload, or make changes to the
     * canonical subscription such as adding headers, modifying the channel
     * endpoint, etc.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription</li>
     * <li>ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage</li>
     * </ul>
     * <p>
     * Hooks may return <code>void</code> or may return a <code>boolean</code>. If the method returns
     * <code>void</code> or <code>true</code>, processing will continue normally. If the method
     * returns <code>false</code>, processing will be aborted.
     * </p>
     */
    SUBSCRIPTION_BEFORE_DELIVERY("ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),

    /**
     * Invoked immediately after the delivery of a subscription, and right before any channel-specific
     * hooks are invoked (e.g. {@link #SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY}).
     * <p>
     * Hooks may accept the following parameters:
     * </p>
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription</li>
     * <li>ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    SUBSCRIPTION_AFTER_DELIVERY("ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),

    /**
     * Invoked immediately after the attempted delivery of a subscription, if the delivery
     * failed.
     * <p>
     * Hooks may accept the following parameters (listed in the order the pointcut
     * declares them; the original doc listed java.lang.Exception twice and in the
     * wrong position):
     * </p>
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage - the message that triggered the exception</li>
     * <li>java.lang.Exception - The exception that caused the failure. Note this could be an exception thrown by a SUBSCRIPTION_BEFORE_DELIVERY or SUBSCRIPTION_AFTER_DELIVERY interceptor</li>
     * </ul>
     * <p>
     * Hooks may return <code>void</code> or may return a <code>boolean</code>. If the method returns
     * <code>void</code> or <code>true</code>, processing will continue normally, meaning that
     * an exception will be thrown by the delivery mechanism. This typically means that the
     * message will be returned to the processing queue. If the method
     * returns <code>false</code>, processing will be aborted and no further action will be
     * taken for the delivery.
     * </p>
     */
    SUBSCRIPTION_AFTER_DELIVERY_FAILED("ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage", "java.lang.Exception"),

    /**
     * Invoked immediately after the delivery of a REST HOOK subscription.
     * <p>
     * When this hook is called, all processing is complete so this hook should not
     * make any changes to the parameters.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription</li>
     * <li>ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    SUBSCRIPTION_AFTER_REST_HOOK_DELIVERY("ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),

    /**
     * Invoked immediately before the delivery of a REST HOOK subscription.
     * <p>
     * Hooks may make changes to the delivery payload, or make changes to the
     * canonical subscription such as adding headers, modifying the channel
     * endpoint, etc.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription</li>
     * <li>ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage</li>
     * </ul>
     * <p>
     * Hooks may return <code>void</code> or may return a <code>boolean</code>. If the method returns
     * <code>void</code> or <code>true</code>, processing will continue normally. If the method
     * returns <code>false</code>, processing will be aborted.
     * </p>
     */
    SUBSCRIPTION_BEFORE_REST_HOOK_DELIVERY("ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription", "ca.uhn.fhir.jpa.subscription.module.subscriber.ResourceDeliveryMessage"),

    /**
     * Invoked whenever a persisted resource (a resource that has just been stored in the
     * database via a create/update/patch/etc.) is about to be checked for whether any subscriptions
     * were triggered as a result of the operation.
     * <p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage - Hooks may modify this parameter. This will affect the checking process.</li>
     * </ul>
     * </p>
     * <p>
     * Hooks may return <code>void</code> or may return a <code>boolean</code>. If the method returns
     * <code>void</code> or <code>true</code>, processing will continue normally. If the method
     * returns <code>false</code>, processing will be aborted.
     * </p>
     */
    SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED("ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),

    /**
     * Invoked whenever a persisted resource (a resource that has just been stored in the
     * database via a create/update/patch/etc.) has been checked for whether any subscriptions
     * were triggered as a result of the operation.
     * <p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage - This parameter should not be modified as processing is complete when this hook is invoked.</li>
     * </ul>
     * </p>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    SUBSCRIPTION_AFTER_PERSISTED_RESOURCE_CHECKED("ca.uhn.fhir.jpa.subscription.module.ResourceModifiedMessage"),

    /**
     * Invoked immediately after an active subscription is "registered". In HAPI FHIR,
     * this happens when a subscription is activated and added to the processing
     * pipeline (the original sentence here was left unfinished).
     * <p>
     * Hooks may make changes to the canonicalized subscription and this will have an effect
     * on processing across this server. Note however that timing issues may occur, since the
     * subscription is already technically live by the time this hook is called.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    SUBSCRIPTION_AFTER_ACTIVE_SUBSCRIPTION_REGISTERED("ca.uhn.fhir.jpa.subscription.module.CanonicalSubscription"),

    /**
     * Invoked before a resource will be created, immediately before the resource
     * is persisted to the database.
     * <p>
     * Hooks will have access to the contents of the resource being created
     * and may choose to make modifications to it. These changes will be
     * reflected in permanent storage.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    OP_PRESTORAGE_RESOURCE_CREATED("org.hl7.fhir.instance.model.api.IBaseResource"),

    /**
     * Invoked before a resource will be created, immediately before the transaction
     * is committed (after all validation and other business rules have successfully
     * completed, and any other database activity is complete.
     * <p>
     * Hooks will have access to the contents of the resource being created
     * but should generally not make any
     * changes as storage has already occurred. Changes will not be reflected
     * in storage, but may be reflected in the HTTP response.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    OP_PRECOMMIT_RESOURCE_CREATED("org.hl7.fhir.instance.model.api.IBaseResource"),

    /**
     * Invoked before a resource will be deleted, immediately before the transaction
     * is committed (the original doc said "created" — a copy-paste error).
     * <p>
     * Hooks will have access to the contents of the resource being deleted
     * but should not make any changes as storage has already occurred
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    OP_PRECOMMIT_RESOURCE_DELETED("org.hl7.fhir.instance.model.api.IBaseResource"),

    /**
     * Invoked before a resource will be updated, immediately before the transaction
     * is committed (after all validation and other business rules have successfully
     * completed, and any other database activity is complete.
     * <p>
     * Hooks will have access to the contents of the resource being updated
     * (both the previous and new contents) but should generally not make any
     * changes as storage has already occurred. Changes will not be reflected
     * in storage, but may be reflected in the HTTP response.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource (previous contents)</li>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource (new contents)</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    OP_PRECOMMIT_RESOURCE_UPDATED("org.hl7.fhir.instance.model.api.IBaseResource", "org.hl7.fhir.instance.model.api.IBaseResource"),

    /**
     * Invoked before a resource will be updated, immediately before the resource
     * is persisted to the database.
     * <p>
     * Hooks will have access to the contents of the resource being updated
     * (both the previous and new contents) and may choose to make modifications
     * to the new contents of the resource. These changes will be reflected in
     * permanent storage.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource (previous contents)</li>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource (new contents)</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    OP_PRESTORAGE_RESOURCE_UPDATED("org.hl7.fhir.instance.model.api.IBaseResource", "org.hl7.fhir.instance.model.api.IBaseResource"),

    /**
     * Invoked when a resource may be returned to the user, whether as a part of a READ,
     * a SEARCH, or even as the response to a CREATE/UPDATE, etc.
     * <p>
     * This hook is invoked when a resource has been loaded by the storage engine and
     * is being returned to the HTTP stack for response. This is not a guarantee that the
     * client will ultimately see it, since filters/headers/etc may affect what
     * is returned but if a resource is loaded it is likely to be used.
     * Note also that caching may affect whether this pointcut is invoked.
     * </p>
     * <p>
     * Hooks will have access to the contents of the resource being returned
     * and may choose to make modifications. These changes will be reflected in
     * returned resource but have no effect on storage.
     * </p>
     * Hooks may accept the following parameters:
     * <ul>
     * <li>org.hl7.fhir.instance.model.api.IBaseResource (the resource being returned)</li>
     * </ul>
     * <p>
     * Hooks should return <code>void</code>.
     * </p>
     */
    RESOURCE_MAY_BE_RETURNED("org.hl7.fhir.instance.model.api.IBaseResource");

    // Fully-qualified class names of the parameters each hook method may accept,
    // in declaration order. Immutable after construction.
    private final List<String> myParameterTypes;

    Pointcut(String... theParameterTypes) {
        myParameterTypes = Collections.unmodifiableList(Arrays.asList(theParameterTypes));
    }

    // Returns the (unmodifiable) parameter type list for this pointcut.
    public List<String> getParameterTypes() {
        return myParameterTypes;
    }
}
|
<reponame>uwmisl/purpledrop-driver<filename>jsclient/src/pdsocket.js<gh_stars>0
import {protobuf} from 'protobuf';
// Create a websocket client which will reconnect
/**
 * Creates a websocket client for PurpleDrop events that automatically
 * reconnects until explicitly closed.
 *
 * @param callback invoked with each decoded protobuf.PurpleDropEvent message
 * @returns a handle with a close() method that stops the reconnect loop
 */
export default function PdSocket(callback) {
  let activeSocket = null;
  let isClosed = false;

  // Handle returned to the caller; closing it suppresses future reconnects.
  const handle = {
    close() {
      console.log("Disconnecting event socket");
      isClosed = true;
      activeSocket.close();
    },
  };

  // (Re)establish the websocket connection and wire up its event handlers.
  const connect = (uri) => {
    activeSocket = new WebSocket(uri);

    activeSocket.onclose = () => {
      console.log('WebSocket closed. Will attempt reconnect.');
      // Retry after 5s unless close() was called in the meantime.
      setTimeout(() => {
        if (!isClosed) {
          console.log("Creating new event socket");
          connect(uri);
        }
      }, 5000);
    };

    activeSocket.onerror = (error) => {
      console.log('Websocket error: ', error);
    };

    activeSocket.onmessage = (event) => {
      // Binary frames carry protobuf-encoded PurpleDropEvent messages.
      event.data.arrayBuffer().then((buf) => {
        const bytes = new Uint8Array(buf);
        const msg = protobuf.PurpleDropEvent.decode(bytes);
        callback(msg);
      });
    };
  };

  connect(`ws://${location.hostname}:7001`);
  return handle;
}
<gh_stars>0
import React from "react";
const SpecialButton = () => {
return (
<>
{/* Display a button element rendering the data being passed down from the parent container on props */}
</>
);
};
|
package br.com.digidev.messenger4j.test.integration.receive;
import static org.hamcrest.Matchers.emptyCollectionOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import br.com.digidev.messenger4j.exceptions.MessengerVerificationException;
import br.com.digidev.messenger4j.MessengerPlatform;
import br.com.digidev.messenger4j.receive.handlers.AccountLinkingEventHandler;
import br.com.digidev.messenger4j.receive.handlers.AttachmentMessageEventHandler;
import br.com.digidev.messenger4j.receive.handlers.TextMessageEventHandler;
import br.com.digidev.messenger4j.receive.MessengerReceiveClient;
import br.com.digidev.messenger4j.receive.MessengerReceiveClientBuilder;
import br.com.digidev.messenger4j.receive.events.AccountLinkingEvent;
import br.com.digidev.messenger4j.receive.events.AttachmentMessageEvent;
import br.com.digidev.messenger4j.receive.events.EchoMessageEvent;
import br.com.digidev.messenger4j.receive.events.FallbackEvent;
import br.com.digidev.messenger4j.receive.events.MessageDeliveredEvent;
import br.com.digidev.messenger4j.receive.events.MessageReadEvent;
import br.com.digidev.messenger4j.receive.events.OptInEvent;
import br.com.digidev.messenger4j.receive.events.PostbackEvent;
import br.com.digidev.messenger4j.receive.events.QuickReplyMessageEvent;
import br.com.digidev.messenger4j.receive.events.TextMessageEvent;
import br.com.digidev.messenger4j.receive.handlers.EchoMessageEventHandler;
import br.com.digidev.messenger4j.receive.handlers.FallbackEventHandler;
import br.com.digidev.messenger4j.receive.handlers.MessageDeliveredEventHandler;
import br.com.digidev.messenger4j.receive.handlers.MessageReadEventHandler;
import br.com.digidev.messenger4j.receive.handlers.OptInEventHandler;
import br.com.digidev.messenger4j.receive.handlers.PostbackEventHandler;
import br.com.digidev.messenger4j.receive.handlers.QuickReplyMessageEventHandler;
import java.util.Date;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
/**
* @author Messenger4J - http://github.com/messenger4j
*/
public class MessengerReceiveClientTest {
    // Builder under test; re-created with all handlers registered in beforeEach().
    private MessengerReceiveClientBuilder builder;

    // One Mockito mock per supported event type. Each test asserts that only
    // the handler matching the incoming payload is invoked, and that all the
    // others see zero interactions.
    private AttachmentMessageEventHandler mockAttachmentMessageEventHandler = mock(AttachmentMessageEventHandler.class);
    private OptInEventHandler mockOptInEventHandler = mock(OptInEventHandler.class);
    private EchoMessageEventHandler mockEchoMessageEventHandler = mock(EchoMessageEventHandler.class);
    private QuickReplyMessageEventHandler mockQuickReplyMessageEventHandler = mock(QuickReplyMessageEventHandler.class);
    private TextMessageEventHandler mockTextMessageEventHandler = mock(TextMessageEventHandler.class);
    private PostbackEventHandler mockPostbackEventHandler = mock(PostbackEventHandler.class);
    private AccountLinkingEventHandler mockAccountLinkingEventHandler = mock(AccountLinkingEventHandler.class);
    private MessageReadEventHandler mockMessageReadEventHandler = mock(MessageReadEventHandler.class);
    private MessageDeliveredEventHandler mockMessageDeliveredEventHandler = mock(MessageDeliveredEventHandler.class);
    // Invoked when no concrete handler is registered for an event.
    private FallbackEventHandler mockFallbackEventHandler = mock(FallbackEventHandler.class);
    // Builds a fresh receive-client builder before every test, wiring each
    // event type to its dedicated mock handler.
    @Before
    public void beforeEach() {
        builder = MessengerPlatform.newReceiveClientBuilder("60efff025951cddde78c8d03de52cc90", "CUSTOM_VERIFY_TOKEN")
                .onAttachmentMessageEvent(mockAttachmentMessageEventHandler)
                .onOptInEvent(mockOptInEventHandler)
                .onEchoMessageEvent(mockEchoMessageEventHandler)
                .onQuickReplyMessageEvent(mockQuickReplyMessageEventHandler)
                .onTextMessageEvent(mockTextMessageEventHandler)
                .onPostbackEvent(mockPostbackEventHandler)
                .onAccountLinkingEvent(mockAccountLinkingEventHandler)
                .onMessageReadEvent(mockMessageReadEventHandler)
                .onMessageDeliveredEvent(mockMessageDeliveredEventHandler)
                .fallbackEventHandler(mockFallbackEventHandler);
    }
    // A callback whose top-level "object" field is not "page" must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionIfObjectTypeIsNotPage() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"testValue\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"message\": {\n" +
                "        \"mid\": \"mid.1457764197618:41d102a3e1ae206a38\",\n" +
                "        \"text\": \"hello, world!\",\n" +
                "        \"quick_reply\": {\n" +
                "          \"payload\": \"DEVELOPER_DEFINED_PAYLOAD\"\n" +
                "        }\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then - throw exception
    }
    // An image + location attachment message must be routed to the attachment
    // handler only, with both payload variants decoded.
    @Test
    public void shouldHandleAttachmentMessageEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"message\": {\n" +
                "        \"mid\": \"mid.1458696618141:b4ef9d19ec21086067\",\n" +
                "        \"attachments\": [{\n" +
                "          \"type\": \"image\",\n" +
                "          \"payload\": {\n" +
                "            \"url\": \"IMAGE_URL\"\n" +
                "          }\n" +
                "        }, {\n" +
                "          \"type\": \"location\",\n" +
                "          \"payload\": {\n" +
                "            \"coordinates\": {\n" +
                "              \"lat\": 52.3765533,\n" +
                "              \"long\": 9.7389123\n" +
                "            }\n" +
                "          }\n" +
                "        }]\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<AttachmentMessageEvent> argument = ArgumentCaptor.forClass(AttachmentMessageEvent.class);
        verify(mockAttachmentMessageEventHandler).handle(argument.capture());
        final AttachmentMessageEvent attachmentMessageEvent = argument.getValue();

        assertThat(attachmentMessageEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(attachmentMessageEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(attachmentMessageEvent.getTimestamp(), equalTo(new Date(1458692752478L)));
        assertThat(attachmentMessageEvent.getMid(), equalTo("mid.1458696618141:b4ef9d19ec21086067"));
        assertThat(attachmentMessageEvent.getAttachments(), hasSize(2));

        final AttachmentMessageEvent.Attachment firstAttachment = attachmentMessageEvent.getAttachments().get(0);
        assertThat(firstAttachment.getType(), equalTo(AttachmentMessageEvent.AttachmentType.IMAGE));
        assertThat(firstAttachment.getPayload().asBinaryPayload().getUrl(), equalTo("IMAGE_URL"));

        final AttachmentMessageEvent.Attachment secondAttachment = attachmentMessageEvent.getAttachments().get(1);
        assertThat(secondAttachment.getType(), equalTo(AttachmentMessageEvent.AttachmentType.LOCATION));
        assertThat(secondAttachment.getPayload().asLocationPayload().getCoordinates().getLatitude(),
                equalTo(52.3765533));
        assertThat(secondAttachment.getPayload().asLocationPayload().getCoordinates().getLongitude(),
                equalTo(9.7389123));

        // No other handler may have been touched.
        verifyZeroInteractions(mockOptInEventHandler, mockEchoMessageEventHandler,
                mockQuickReplyMessageEventHandler, mockTextMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // An attachment whose payload shape is unknown must still parse, with the
    // payload flagged as unsupported rather than failing the whole callback.
    @Test
    public void shouldHandleUnsupportedPayloadAttachmentMessageEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"message\": {\n" +
                "        \"mid\": \"mid.1458696618141:b4ef9d19ec21086067\",\n" +
                "        \"attachments\": [{\n" +
                "          \"type\": \"image\",\n" +
                "          \"payload\": {\n" +
                "            \"UNSUPPORTED_PAYLOAD_TYPE\": \"SOME_DATA\"\n" +
                "          }\n" +
                "        }, {\n" +
                "          \"type\": \"location\",\n" +
                "          \"payload\": {\n" +
                "            \"coordinates\": {\n" +
                "              \"lat\": 52.3765533,\n" +
                "              \"long\": 9.7389123\n" +
                "            }\n" +
                "          }\n" +
                "        }]\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<AttachmentMessageEvent> argument = ArgumentCaptor.forClass(AttachmentMessageEvent.class);
        verify(mockAttachmentMessageEventHandler).handle(argument.capture());
        final AttachmentMessageEvent attachmentMessageEvent = argument.getValue();

        assertThat(attachmentMessageEvent.getAttachments(), hasSize(2));
        final AttachmentMessageEvent.Attachment firstAttachment = attachmentMessageEvent.getAttachments().get(0);
        assertThat(firstAttachment.getPayload().isUnsupportedPayload(), is(true));

        verifyZeroInteractions(mockOptInEventHandler, mockEchoMessageEventHandler,
                mockQuickReplyMessageEventHandler, mockTextMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // An "optin" callback must be routed to the opt-in handler with the
    // pass-through ref preserved.
    @Test
    public void shouldHandleOptInEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1234567890,\n" +
                "      \"optin\": {\n" +
                "        \"ref\": \"PASS_THROUGH_PARAM\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<OptInEvent> argument = ArgumentCaptor.forClass(OptInEvent.class);
        verify(mockOptInEventHandler).handle(argument.capture());
        final OptInEvent optInEvent = argument.getValue();

        assertThat(optInEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(optInEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(optInEvent.getTimestamp(), equalTo(new Date(1234567890L)));
        assertThat(optInEvent.getRef(), equalTo("PASS_THROUGH_PARAM"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockEchoMessageEventHandler,
                mockQuickReplyMessageEventHandler, mockTextMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // A text message marked "is_echo" (sent by the page itself) must be routed
    // to the echo handler, not the text-message handler.
    @Test
    public void shouldHandleTextEchoMessageEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1480114700424,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1480114700296,\n" +
                "      \"message\": {\n" +
                "        \"is_echo\": true,\n" +
                "        \"app_id\": 1517776481860111,\n" +
                "        \"metadata\": \"DEVELOPER_DEFINED_METADATA_STRING\",\n" +
                "        \"mid\": \"mid.1457764197618:41d102a3e1ae206a38\",\n" +
                "        \"seq\": 282,\n" +
                "        \"text\": \"hello, text message world!\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<EchoMessageEvent> argument = ArgumentCaptor.forClass(EchoMessageEvent.class);
        verify(mockEchoMessageEventHandler).handle(argument.capture());
        final EchoMessageEvent echoMessageEvent = argument.getValue();

        assertThat(echoMessageEvent.getSender().getId(), equalTo("PAGE_ID"));
        assertThat(echoMessageEvent.getRecipient().getId(), equalTo("USER_ID"));
        assertThat(echoMessageEvent.getTimestamp(), equalTo(new Date(1480114700296L)));
        assertThat(echoMessageEvent.getAppId(), equalTo("1517776481860111"));
        assertThat(echoMessageEvent.getMetadata(), equalTo("DEVELOPER_DEFINED_METADATA_STRING"));
        assertThat(echoMessageEvent.getMid(), equalTo("mid.1457764197618:41d102a3e1ae206a38"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockQuickReplyMessageEventHandler, mockTextMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // An echo of a template (receipt) message must also reach the echo
    // handler, with metadata absent (null) since the payload carries none.
    // NOTE(review): the payload contains "<NAME>" — looks like anonymization
    // residue from data scraping; the parser does not assert on it, so the
    // test is unaffected.
    @Test
    public void shouldHandleTemplateEchoMessageEvent() throws Exception {
        //given
        final String payload = "{\"object\":\"page\",\"entry\":[{\"id\":\"171999997131834678\",\"time\":1480120722215," +
                "\"messaging\":[{\"sender\":{\"id\":\"17175299999834678\"},\"recipient\":{\"id\":\"1256299999730577\"}," +
                "\"timestamp\":1480120402725,\"message\":{\"is_echo\":true,\"app_id\":1559999994822905," +
                "\"mid\":\"mid.1480199999925:83392d9f65\",\"seq\":294,\"attachments\":[{\"title\":\"Samsung Gear VR, " +
                "Oculus Rift\",\"url\":null,\"type\":\"template\",\"payload\":{\"template_type\":\"receipt\"," +
                "\"recipient_name\":\"<NAME>\",\"order_number\":\"order-505.0\",\"currency\":\"USD\"," +
                "\"timestamp\":1428444852,\"payment_method\":\"Visa 1234\",\"summary\":{\"total_cost\":626.66," +
                "\"total_tax\":57.67,\"subtotal\":698.99,\"shipping_cost\":20}," +
                "\"address\":{\"city\":\"Menlo Park\",\"country\":\"US\",\"postal_code\":\"94025\",\"state\":\"CA\"," +
                "\"street_1\":\"1 Hacker Way\",\"street_2\":\"\"},\"elements\":[{\"title\":\"Samsung Gear VR\"," +
                "\"quantity\":1,\"image_url\":" +
                "\"https:\\/\\/raw.githubusercontent.com\\/fbsamples\\/messenger-platform-samples\\/master\\/node\\" +
                "/public\\/assets\\/gearvrsq.png\",\"price\":99.99,\"subtitle\":\"Frost White\"},{\"title\":" +
                "\"Oculus Rift\",\"quantity\":1,\"image_url\":\"https:\\/\\/raw.githubusercontent.com\\/fbsamples\\" +
                "/messenger-platform-samples\\/master\\/node\\/public\\/assets\\/riftsq.png\",\"price\":599," +
                "\"subtitle\":\"Includes: headset, sensor, remote\"}],\"adjustments\":[{\"name\":\"New Customer Discount\"," +
                "\"amount\":-50},{\"name\":\"$100 Off Coupon\",\"amount\":-100}]}}]}}]}]}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<EchoMessageEvent> argument = ArgumentCaptor.forClass(EchoMessageEvent.class);
        verify(mockEchoMessageEventHandler).handle(argument.capture());
        final EchoMessageEvent echoMessageEvent = argument.getValue();

        assertThat(echoMessageEvent.getSender().getId(), equalTo("17175299999834678"));
        assertThat(echoMessageEvent.getRecipient().getId(), equalTo("1256299999730577"));
        assertThat(echoMessageEvent.getTimestamp(), equalTo(new Date(1480120402725L)));
        assertThat(echoMessageEvent.getAppId(), equalTo("1559999994822905"));
        assertThat(echoMessageEvent.getMetadata(), is(nullValue()));
        assertThat(echoMessageEvent.getMid(), equalTo("mid.1480199999925:83392d9f65"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockQuickReplyMessageEventHandler, mockTextMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // A text message carrying a "quick_reply" object must go to the
    // quick-reply handler, not the plain text handler.
    @Test
    public void shouldHandleQuickReplyMessageEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"message\": {\n" +
                "        \"mid\": \"mid.1457764197618:41d102a3e1ae206a38\",\n" +
                "        \"text\": \"hello, world!\",\n" +
                "        \"quick_reply\": {\n" +
                "          \"payload\": \"DEVELOPER_DEFINED_PAYLOAD\"\n" +
                "        }\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<QuickReplyMessageEvent> argument = ArgumentCaptor.forClass(QuickReplyMessageEvent.class);
        verify(mockQuickReplyMessageEventHandler).handle(argument.capture());
        final QuickReplyMessageEvent quickReplyMessageEvent = argument.getValue();

        assertThat(quickReplyMessageEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(quickReplyMessageEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(quickReplyMessageEvent.getTimestamp(), equalTo(new Date(1458692752478L)));
        assertThat(quickReplyMessageEvent.getMid(), equalTo("mid.1457764197618:41d102a3e1ae206a38"));
        assertThat(quickReplyMessageEvent.getText(), equalTo("hello, world!"));
        assertThat(quickReplyMessageEvent.getQuickReply().getPayload(), equalTo("DEVELOPER_DEFINED_PAYLOAD"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockTextMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // A plain text message (no echo flag, no quick reply) must be routed to
    // the text-message handler.
    @Test
    public void shouldHandleTextMessageEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"message\": {\n" +
                "        \"mid\": \"mid.1457764197618:41d102a3e1ae206a38\",\n" +
                "        \"text\": \"hello, text message world!\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<TextMessageEvent> argument = ArgumentCaptor.forClass(TextMessageEvent.class);
        verify(mockTextMessageEventHandler).handle(argument.capture());
        final TextMessageEvent textMessageEvent = argument.getValue();

        assertThat(textMessageEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(textMessageEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(textMessageEvent.getTimestamp(), equalTo(new Date(1458692752478L)));
        assertThat(textMessageEvent.getMid(), equalTo("mid.1457764197618:41d102a3e1ae206a38"));
        assertThat(textMessageEvent.getText(), equalTo("hello, text message world!"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // A "postback" callback must be routed to the postback handler with its
    // developer-defined payload intact.
    @Test
    public void shouldHandlePostbackEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"postback\": {\n" +
                "        \"payload\": \"USER_DEFINED_PAYLOAD\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<PostbackEvent> argument = ArgumentCaptor.forClass(PostbackEvent.class);
        verify(mockPostbackEventHandler).handle(argument.capture());
        final PostbackEvent postbackEvent = argument.getValue();

        assertThat(postbackEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(postbackEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(postbackEvent.getTimestamp(), equalTo(new Date(1458692752478L)));
        assertThat(postbackEvent.getPayload(), equalTo("USER_DEFINED_PAYLOAD"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockTextMessageEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // An account_linking callback with status "linked" must expose the LINKED
    // status and the pass-through authorization code.
    @Test
    public void shouldHandleAccountLinkingEventWithStatusLinked() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1234567890,\n" +
                "      \"account_linking\": {\n" +
                "        \"status\": \"linked\",\n" +
                "        \"authorization_code\": \"PASS_THROUGH_AUTHORIZATION_CODE\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<AccountLinkingEvent> argument = ArgumentCaptor.forClass(AccountLinkingEvent.class);
        verify(mockAccountLinkingEventHandler).handle(argument.capture());
        final AccountLinkingEvent accountLinkingEvent = argument.getValue();

        assertThat(accountLinkingEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(accountLinkingEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(accountLinkingEvent.getTimestamp(), equalTo(new Date(1234567890L)));
        assertThat(accountLinkingEvent.getStatus(), equalTo(AccountLinkingEvent.AccountLinkingStatus.LINKED));
        assertThat(accountLinkingEvent.getAuthorizationCode(), equalTo("PASS_THROUGH_AUTHORIZATION_CODE"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockTextMessageEventHandler,
                mockPostbackEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // An account_linking callback with status "unlinked" must expose UNLINKED
    // and a null authorization code (none is sent for unlink).
    @Test
    public void shouldHandleAccountLinkingEventWithStatusUnlinked() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1234567890,\n" +
                "      \"account_linking\": {\n" +
                "        \"status\": \"unlinked\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<AccountLinkingEvent> argument = ArgumentCaptor.forClass(AccountLinkingEvent.class);
        verify(mockAccountLinkingEventHandler).handle(argument.capture());
        final AccountLinkingEvent accountLinkingEvent = argument.getValue();

        assertThat(accountLinkingEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(accountLinkingEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(accountLinkingEvent.getTimestamp(), equalTo(new Date(1234567890L)));
        assertThat(accountLinkingEvent.getStatus(), equalTo(AccountLinkingEvent.AccountLinkingStatus.UNLINKED));
        assertThat(accountLinkingEvent.getAuthorizationCode(), nullValue());

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockTextMessageEventHandler,
                mockPostbackEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // A "read" callback must be routed to the message-read handler with the
    // watermark converted to a Date.
    @Test
    public void shouldHandleMessageReadEvent() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458668856463,\n" +
                "      \"read\": {\n" +
                "        \"watermark\": 1458668856253\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<MessageReadEvent> argument = ArgumentCaptor.forClass(MessageReadEvent.class);
        verify(mockMessageReadEventHandler).handle(argument.capture());
        final MessageReadEvent messageReadEvent = argument.getValue();

        assertThat(messageReadEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(messageReadEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(messageReadEvent.getTimestamp(), equalTo(new Date(1458668856463L)));
        assertThat(messageReadEvent.getWatermark(), equalTo(new Date(1458668856253L)));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockTextMessageEventHandler,
                mockPostbackEventHandler, mockAccountLinkingEventHandler, mockMessageDeliveredEventHandler,
                mockFallbackEventHandler);
    }
    // A "delivery" callback that lists message ids must expose those mids on
    // the delivered event.
    @Test
    public void shouldHandleMessageDeliveredEventWithMids() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"delivery\": {\n" +
                "        \"mids\": [\n" +
                "          \"mid.1458668856218:ed81099e15d3f4f233\"\n" +
                "        ],\n" +
                "        \"watermark\": 1458668856253\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<MessageDeliveredEvent> argument = ArgumentCaptor.forClass(MessageDeliveredEvent.class);
        verify(mockMessageDeliveredEventHandler).handle(argument.capture());
        final MessageDeliveredEvent messageDeliveredEvent = argument.getValue();

        assertThat(messageDeliveredEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(messageDeliveredEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(messageDeliveredEvent.getWatermark(), equalTo(new Date(1458668856253L)));
        assertThat(messageDeliveredEvent.getMids(), hasSize(1));
        assertThat(messageDeliveredEvent.getMids().get(0), equalTo("mid.1458668856218:ed81099e15d3f4f233"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockTextMessageEventHandler,
                mockPostbackEventHandler, mockAccountLinkingEventHandler, mockMessageReadEventHandler,
                mockFallbackEventHandler);
    }
    // A "delivery" callback without a "mids" array must yield an empty (not
    // null) mids collection.
    @Test
    public void shouldHandleMessageDeliveredEventWithoutMids() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"delivery\": {\n" +
                "        \"watermark\": 1458668856253\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<MessageDeliveredEvent> argument = ArgumentCaptor.forClass(MessageDeliveredEvent.class);
        verify(mockMessageDeliveredEventHandler).handle(argument.capture());
        final MessageDeliveredEvent messageDeliveredEvent = argument.getValue();

        assertThat(messageDeliveredEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(messageDeliveredEvent.getRecipient().getId(), equalTo("PAGE_ID"));
        assertThat(messageDeliveredEvent.getWatermark(), equalTo(new Date(1458668856253L)));
        assertThat(messageDeliveredEvent.getMids(), is(emptyCollectionOf(String.class)));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockTextMessageEventHandler,
                mockPostbackEventHandler, mockAccountLinkingEventHandler, mockMessageReadEventHandler,
                mockFallbackEventHandler);
    }
    // If the concrete handler is unregistered (set to null), the fallback
    // handler must receive the event instead.
    @Test
    public void shouldCallFallbackEventHandlerIfHandlerForConcreteEventIsNotRegistered() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"message\": {\n" +
                "        \"mid\": \"mid.1457764197618:41d102a3e1ae206a38\",\n" +
                "        \"text\": \"hello, text message world!\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        // Unregister the text handler so the text event has no concrete target.
        final MessengerReceiveClient messengerReceiveClient = builder
                .onTextMessageEvent(null)
                .disableSignatureVerification()
                .build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<FallbackEvent> argument = ArgumentCaptor.forClass(FallbackEvent.class);
        verify(mockFallbackEventHandler).handle(argument.capture());
        final FallbackEvent fallbackEvent = argument.getValue();

        assertThat(fallbackEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(fallbackEvent.getRecipient().getId(), equalTo("PAGE_ID"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockTextMessageEventHandler);
    }
    // An unrecognized messaging-event key must fall through to the fallback
    // handler rather than throwing.
    @Test
    public void shouldCallFallbackEventHandlerIfMessagingEventTypeIsUnsupported() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458692752478,\n" +
                "      \"EVENT_TYPE_THAT_IS_UNSUPPORTED\": {\n" +
                "        \"mid\": \"mid.1457764197618:41d102a3e1ae206a38\"\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        final MessengerReceiveClient messengerReceiveClient = builder
                .disableSignatureVerification()
                .build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then
        final ArgumentCaptor<FallbackEvent> argument = ArgumentCaptor.forClass(FallbackEvent.class);
        verify(mockFallbackEventHandler).handle(argument.capture());
        final FallbackEvent fallbackEvent = argument.getValue();

        assertThat(fallbackEvent.getSender().getId(), equalTo("USER_ID"));
        assertThat(fallbackEvent.getRecipient().getId(), equalTo("PAGE_ID"));

        verifyZeroInteractions(mockAttachmentMessageEventHandler, mockOptInEventHandler,
                mockEchoMessageEventHandler, mockQuickReplyMessageEventHandler, mockPostbackEventHandler,
                mockAccountLinkingEventHandler, mockMessageReadEventHandler, mockMessageDeliveredEventHandler,
                mockTextMessageEventHandler);
    }
    // A null payload must be rejected with IllegalArgumentException.
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionIfNoPayloadProvided() throws Exception {
        //given
        final String payload = null;

        final MessengerReceiveClient messengerReceiveClient = builder.disableSignatureVerification().build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then - throw exception
    }
    // With signature verification enabled (the default), processing a payload
    // without a signature must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionIfNoSignatureProvidedAndVerificationNotDisabled() throws Exception {
        //given
        final String payload = "{\n" +
                "  \"object\": \"page\",\n" +
                "  \"entry\": [{\n" +
                "    \"id\": \"PAGE_ID\",\n" +
                "    \"time\": 1458692752478,\n" +
                "    \"messaging\": [{\n" +
                "      \"sender\": {\n" +
                "        \"id\": \"USER_ID\"\n" +
                "      },\n" +
                "      \"recipient\": {\n" +
                "        \"id\": \"PAGE_ID\"\n" +
                "      },\n" +
                "      \"timestamp\": 1458668856463,\n" +
                "      \"read\": {\n" +
                "        \"watermark\": 1458668856253,\n" +
                "        \"seq\": 38\n" +
                "      }\n" +
                "    }]\n" +
                "  }]\n" +
                "}";

        // Note: verification is NOT disabled here.
        final MessengerReceiveClient messengerReceiveClient = builder.build();

        //when
        messengerReceiveClient.processCallbackPayload(payload);

        //then - throw exception
    }
    // A payload accompanied by its correct SHA1 HMAC signature (computed with
    // the app secret from beforeEach) must be accepted and dispatched.
    @Test
    public void shouldVerifyTheGivenSignature() throws Exception {
        //given
        final String payload = "{\"object\":\"page\",\"entry\":[{\"id\":\"1717527131834678\",\"time\":1475942721780," +
                "\"messaging\":[{\"sender\":{\"id\":\"1256217357730577\"},\"recipient\":{\"id\":\"1717527131834678\"}," +
                "\"timestamp\":1475942721741,\"message\":{\"mid\":\"mid.1475942721728:3b9e3646712f9bed52\"," +
                "\"seq\":123,\"text\":\"34wrr3wr\"}}]}]}";
        final String signature = "sha1=3daa41999293ff66c3eb313e04bcf77861bb0276";

        final MessengerReceiveClient messengerReceiveClient = builder.build();

        //when
        messengerReceiveClient.processCallbackPayload(payload, signature);

        //then
        final ArgumentCaptor<TextMessageEvent> argument = ArgumentCaptor.forClass(TextMessageEvent.class);
        verify(mockTextMessageEventHandler).handle(argument.capture());
        final TextMessageEvent textMessageEvent = argument.getValue();
        assertThat(textMessageEvent.getText(), is(equalTo("34wrr3wr")));
    }
@Test(expected = MessengerVerificationException.class)
public void shouldThrowExceptionIfSignatureIsInvalid() throws Exception {
    // given: a payload whose text was changed AFTER the signature was computed,
    // so the SHA1 signature no longer matches
    final String payload = "{\"object\":\"page\",\"entry\":[{\"id\":\"1717527131834678\",\"time\":1475942721780," +
            "\"messaging\":[{\"sender\":{\"id\":\"1256217357730577\"},\"recipient\":{\"id\":\"1717527131834678\"}," +
            "\"timestamp\":1475942721741,\"message\":{\"mid\":\"mid.1475942721728:3b9e3646712f9bed52\"," +
            "\"seq\":123,\"text\":\"CHANGED_TEXT_SO_SIGNATURE_IS_INVALID\"}}]}]}";
    final String signature = "sha1=3daa41999293ff66c3eb313e04bcf77861bb0276";
    final MessengerReceiveClient client = builder.build();

    // when: processing the tampered payload with the stale signature
    client.processCallbackPayload(payload, signature);

    // then: MessengerVerificationException expected (declared on @Test)
}
@Test
public void shouldVerifyTheWebhook() throws Exception {
    // given: the standard Facebook webhook-verification handshake parameters
    final String mode = "subscribe";
    final String verifyToken = "CUSTOM_VERIFY_TOKEN";
    final String challenge = "CUSTOM_CHALLENGE";
    final MessengerReceiveClient client = builder.build();

    // when: the webhook is verified
    final String returnedChallenge = client.verifyWebhook(mode, verifyToken, challenge);

    // then: the challenge must be echoed back unchanged
    assertThat(returnedChallenge, is(equalTo(challenge)));
}
@Test(expected = MessengerVerificationException.class)
public void shouldThrowExceptionIfVerifyModeIsInvalid() throws Exception {
    // given: a verification request with an unsupported mode
    final String mode = "INVALID_MODE";
    final String verifyToken = "CUSTOM_VERIFY_TOKEN";
    final String challenge = "CUSTOM_CHALLENGE";
    final MessengerReceiveClient client = builder.build();

    // when: verifying the webhook
    client.verifyWebhook(mode, verifyToken, challenge);

    // then: MessengerVerificationException expected (declared on @Test)
}
@Test(expected = MessengerVerificationException.class)
public void shouldThrowExceptionIfVerifyTokenIsInvalid() throws Exception {
    // given: the correct 'subscribe' mode but a token that does not match
    final String mode = "subscribe";
    final String verifyToken = "INVALID_VERIFY_TOKEN";
    final String challenge = "CUSTOM_CHALLENGE";
    final MessengerReceiveClient client = builder.build();

    // when: verifying the webhook
    client.verifyWebhook(mode, verifyToken, challenge);

    // then: MessengerVerificationException expected (declared on @Test)
}
} |
#Autogenerated by ReportLab guiedit do not edit
from reportlab.graphics.shapes import _DrawingEditorMixin, Drawing, Group, Rect, Line, String
from reportlab.lib.colors import Color, CMYKColor, PCMYKColor
class ExplodedDrawing_Drawing(_DrawingEditorMixin, Drawing):
    # Autogenerated by the ReportLab guiedit tool -- hand-built horizontal bar
    # chart drawn from absolute primitives (Rects, Lines, Strings) rather than
    # a chart widget. Do not edit the geometry by hand; regenerate instead.
    def __init__(self, width=400, height=200, *args, **kw):
        Drawing.__init__(self, width, height, *args, **kw)
        self.transform = (1, 0, 0, 1, 0, 0)
        # Outer plot frame.
        self.add(Rect(50,20,300,155,rx=0,ry=0,fillColor=None,fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        # Horizontal bars: two groups of four (lower group y=20..66, upper y=66..112),
        # alternating red/green/blue fills.
        self.add(Rect(50,20,50,10,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,66,300,10,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,32,100,10,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,78,250,10,rx=0,ry=0,fillColor=Color(0,.501961,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,44,150,10,rx=0,ry=0,fillColor=Color(0,0,1,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,90,200,10,rx=0,ry=0,fillColor=Color(0,0,1,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,56,200,10,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(50,102,150,10,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        # Y axis line and three tick marks (bottom, middle, top).
        self.add(Line(49,20,49,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(49,20,44,20,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(49,97.5,44,97.5,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(49,175,44,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        # Y-axis category labels ('Ying'/'Yang'), each wrapped in a translated Group.
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 44, 58.75)
        v0.add(String(-20,-4,'Ying',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 44, 136.25)
        v0.add(String(-21.66,-4,'Yang',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        # X axis line with ticks every 75 units (values 0..60 step 15).
        self.add(Line(50,20,350,20,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,20,50,15,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(125,20,125,15,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(200,20,200,15,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(275,20,275,15,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(350,20,350,15,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        # X-axis numeric labels under each tick.
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 50, 15)
        v0.add(String(-2.5,-10,'0',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 125, 15)
        v0.add(String(-5,-10,'15',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 200, 15)
        v0.add(String(-5,-10,'30',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 275, 15)
        v0.add(String(-5,-10,'45',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0 = self._nn(Group())
        v0.transform = (1, 0, 0, 1, 350, 15)
        v0.add(String(-5,-10,'60',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))

if __name__=="__main__": #NORUNTESTS
    ExplodedDrawing_Drawing().save(formats=['pdf'],outDir='.',fnRoot=None)
|
package models.Occupation;

import models.SkillContainer.ActiveSkillContianer;
import models.SkillContainer.BasicSkillContainer;
import models.SkillContainer.SneakSkillContainer;

/**
 * The "Sneak" occupation (character class).
 *
 * Construction sets the display name, the four occupation stat modifiers,
 * the skill containers, and the initial skill points, in that order.
 */
public class Sneak extends Occupation {

    /**
     * Builds a Sneak with its fixed name and modifiers
     * (1.50, 1.75, 1.25, 1.25 -- NOTE(review): which stat each factor maps to
     * is defined in Occupation.setOccupationModifier; confirm there).
     */
    public Sneak () {
        setName("Sneak");
        setOccupationModifier(1.50, 1.75, 1.25, 1.25);
        setSkillContainer();
        initSkillPoints();
    }

    /**
     * Wires up the skill containers: common skills, Sneak-specific skills,
     * and the active-skill container combining both.
     */
    protected void setSkillContainer() {
        basic_skills = new BasicSkillContainer();
        specific_skills = new SneakSkillContainer();
        activeSkills = new ActiveSkillContianer(basic_skills, specific_skills);
    }
}
|
#!/usr/bin/env bash
# Generate the network-checkpointer Kubernetes manifest into
# ${LOCAL_MANIFESTS_DIR}. Requires LOCAL_MANIFESTS_DIR, KUBE_PUBLIC_SERVICE_IP
# and KUBE_PORT in the environment (set -u makes a missing one a hard error).
set -eu
export LC_ALL=C

KUBE_TEMPLATE=${LOCAL_MANIFESTS_DIR}/network-checkpointer.yaml

# Unquoted heredoc delimiter on purpose: ${KUBE_PUBLIC_SERVICE_IP}/${KUBE_PORT}
# are expanded into the embedded kubeconfig.
cat << EOF > "$KUBE_TEMPLATE"
---
# FIXME(yuanying): Fix to apply correct ClusterRole
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: remora:network-checkpointer
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: cluster-admin
subjects:
- kind: ServiceAccount
  name: network-checkpointer
  namespace: kube-system
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: network-checkpointer
  namespace: kube-system
---
kind: ConfigMap
apiVersion: v1
metadata:
  name: network-checkpointer
  namespace: kube-system
  labels:
    tier: control-plane
    k8s-app: network-checkpointer
data:
  kubeconfig: |
    apiVersion: v1
    kind: Config
    clusters:
    - cluster:
        certificate-authority: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt
        server: https://${KUBE_PUBLIC_SERVICE_IP}:${KUBE_PORT}
      name: default
    contexts:
    - context:
        cluster: default
        namespace: default
        user: default
      name: default
    current-context: default
    users:
    - name: default
      user:
        tokenFile: /var/run/secrets/kubernetes.io/serviceaccount/token
---
apiVersion: "apps/v1"
kind: DaemonSet
metadata:
  name: kube-etcd-network-checkpointer
  namespace: kube-system
  labels:
    tier: control-plane
    k8s-app: kube-etcd-network-checkpointer
spec:
  # apps/v1 requires an explicit selector; it must match the pod template labels.
  selector:
    matchLabels:
      k8s-app: kube-etcd-network-checkpointer
  template:
    metadata:
      labels:
        tier: control-plane
        k8s-app: kube-etcd-network-checkpointer
      annotations:
        checkpointer.alpha.coreos.com/checkpoint: "true"
    spec:
      containers:
      - image: quay.io/coreos/kenc:0.0.2
        name: kube-etcd-network-checkpointer
        securityContext:
          privileged: true
        volumeMounts:
        - mountPath: /etc/kubernetes/selfhosted-etcd
          name: checkpoint-dir
          readOnly: false
        - mountPath: /var/etcd
          name: etcd-dir
          readOnly: false
        - mountPath: /var/lock
          name: var-lock
          readOnly: false
        command:
        # flock serializes kenc runs on the node via /var/lock/kenc.lock.
        - /usr/bin/flock
        - /var/lock/kenc.lock
        - -c
        - "kenc -r -m iptables && kenc -m iptables"
      hostNetwork: true
      nodeSelector:
        node-role.kubernetes.io/master: ""
      serviceAccountName: network-checkpointer
      tolerations:
      - key: node-role.kubernetes.io/master
        operator: Exists
        effect: NoSchedule
      volumes:
      - name: checkpoint-dir
        hostPath:
          path: /etc/kubernetes/checkpoint-iptables
      - name: etcd-dir
        hostPath:
          path: /var/etcd
      - name: var-lock
        hostPath:
          path: /var/lock
  updateStrategy:
    rollingUpdate:
      maxUnavailable: 1
    type: RollingUpdate
EOF
|
#!/bin/bash
# author: Liang Gong
# Demo: start the vulnerable 'server-static' package and exploit its
# directory-traversal issue via attack.js.

# Pick the Node.js binary name per platform. Fall back to 'node' on
# anything else (the original left NODE unset there, making the later
# "$NODE ..." invocations silently run the wrong command line).
if [ "$(uname)" == "Darwin" ]; then
    # under Mac OS X platform
    NODE='node'
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
    # under GNU/Linux platform
    NODE='nodejs'
else
    NODE='node'
fi

# Abort if the server directory is missing; otherwise the server would be
# started from -- and the attack aimed at -- the wrong directory.
cd directory-traversal/server-static || exit 1

RED='\033[0;31m'
BLUE='\033[0;34m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# start the server
echo -e "\t[${GREEN}start vulnerable server${NC}]: ${BLUE}server-static${NC}"
$NODE ../../node_modules/server-static/lib/index.js --dir=. -p=8887 >/dev/null 2>&1 &
vulnpid=$!

# wait for the server to get started
sleep 1.5s
echo -e "\t[${GREEN}server root directory${NC}]: `pwd`"

# utilize directory traversal to get files outside the working directory
# trigger directory traversal issues: send a request to retrieve the confidential file outside the working directory
$NODE attack.js

# kill the vulnerable npm package's process
kill -9 $vulnpid
|
#!/bin/bash
# Generate per-(node,index) job scripts from the Origin_Nexus.sh template.
# Each output script is a copy of the template with the literal tokens
# "$1" and "$2" replaced by the node id and the index respectively.

OUTDIR=./scripts_Nexus
# The original script assumed this directory already existed; cp/sed failed
# (144 + 1064 times) when it did not.
mkdir -p "$OUTDIR"

# make_script NODE INDEX -- copy the template and substitute placeholders.
make_script() {
    local node=$1 idx=$2
    local out="$OUTDIR/C_Nexus${node}_${idx}.sh"
    cp Origin_Nexus.sh "$out"
    # '\$1' keeps the '$' literal for sed, so the *text* "$1" is replaced.
    sed -i "s/\$1/${node}/g" "$out"
    sed -i "s/\$2/${idx}/g" "$out"
}

# Nodes "80".."811" (literal '8' prefix), indices 0..13.
for ((j = 0; j <= 13; j++)); do
    for ((f = 0; f <= 11; f++)); do
        make_script "8$f" "$j"
    done
done

# Nodes 0..75, indices 0..13.
for ((j = 0; j <= 13; j++)); do
    for ((f = 0; f <= 75; f++)); do
        make_script "$f" "$j"
    done
done
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed so results are reproducible.
#   -d 10         : run test number 10 (see `dieharder -l` for the test list -- confirm)
#   -g 17         : use generator number 17 (see `dieharder -g -1` for generator ids -- confirm)
#   -S 3426806721 : seed the generator explicitly
dieharder -d 10 -g 17 -S 3426806721
|
var http = require('http');
var concat = require('concat-stream');
var log = require('npmlog');
var zSchema = require('z-schema');
var querystring = require('querystring');
var config = require('./config');
var controller = require('./controller');

// All error/status responses are sent as plain text.
var rawContentType = {'Content-Type': 'text/plain'};

// JSON Schema for the form fields Slack sends with a slash-command request.
// Every field is required; extra fields are tolerated.
var slackSchema = {
    "title": "Slack request schema",
    "type": "object",
    "properties": {
        "token": { "type": "string" },
        "team_id": { "type": "string" },
        "team_domain": { "type": "string" },
        "channel_id": { "type": "string" },
        "channel_name": { "type": "string" },
        "user_id": { "type": "string" },
        "user_name": { "type": "string" },
        "command": { "type": "string" },
        "text": { "type": "string" }
    },
    "required": ["token", "team_id", "team_domain", "channel_id", "channel_name", "user_id", "user_name", "command", "text"]
};

// Single shared validator instance; validation itself is synchronous.
var schemaValidator = new zSchema();
// Returns true when 'obj' has the shape of a Slack slash-command request
// (all required string fields present; see slackSchema above).
function validateSchema(obj) {
    var matchesSlackShape = schemaValidator.validate(obj, slackSchema);
    return matchesSlackShape;
}
// Returns true when the request's verification token matches one of the
// tokens configured for this deployment (config.tokens).
function validateToken(obj) {
    // indexOf uses strict equality, identical to the original manual loop,
    // and keeps ES5 compatibility (this file predates Array.prototype.includes).
    return config.tokens.indexOf(obj.token) !== -1;
}
// HTTP entry point: accepts Slack slash-command POSTs (form-encoded body),
// validates shape and token, then hands the request off to the controller.
function handler(req, res) {
    // Guard clause: anything but POST is rejected outright.
    if (req.method != 'POST') {
        log.warn("request", "[405] Unsupported request '%s' to '%s'", req.method, req.url);
        res.writeHead(405, "Method not supported", rawContentType);
        res.end("Method not supported");
        return;
    }
    req.on('error', function(err) {
        log.error("Error in request. Error details: %j", err);
        process.exit(1);
    });
    // Buffer the whole body, then parse it as application/x-www-form-urlencoded.
    req.pipe(concat(function(buffer) {
        log.verbose("request", "buffer received");
        var requestData = querystring.parse(buffer.toString());
        log.info("request", "received data: %j", requestData);
        if (!validateSchema(requestData)) {
            res.writeHead(400, "Bad Request. Malformed JSON", rawContentType);
            res.end("Malformed JSON");
            return;
        }
        if (!validateToken(requestData)) {
            log.warn("request", "[403] Invalid token '%s'", requestData.token);
            res.writeHead(403, "Invalid token", rawContentType);
            res.end("Invalid token");
            return;
        }
        controller.handle(requestData)
            .then(function(result) {
                res.writeHead(200, "OK", rawContentType);
                res.end("Event successfully added. Url: " + result.url);
            })
            .catch(function(error) {
                res.writeHead(500, "Error", rawContentType);
                res.end("Error " + error.toString());
            })
            .done();
    }));
}
// Start the HTTP server on the configured port.
http.createServer(handler).listen(config.port);
log.info("general", "server is running");
|
// Barrel file: re-export the public clover API from a single entry point.
export * from './clover/clover';
|
package com.trivago.kangaroo;
import com.trivago.fastutilconcurrentwrapper.ConcurrentLongLongMapBuilder;
import com.trivago.fastutilconcurrentwrapper.LongLongMap;
import java.util.concurrent.ThreadLocalRandom;
/**
 * Shared helper for LongLongMap micro-benchmarks: builds the map under test,
 * preloads it with random entries, and exposes one method per measured
 * operation (get / put / mixed).
 */
public abstract class AbstractBenchHelper extends AbstractCommonBenchHelper {

    /** Number of random entries preloaded into the map before measuring. */
    protected static final int NUM_VALUES = 1_000_000;

    /** The map under test; populated by {@link #initAndLoadData}. */
    protected LongLongMap map;

    /**
     * Builds the map and preloads it with {@code NUM_VALUES} random entries.
     * The builder configuration is identical for every mode; only BUSY_WAITING
     * additionally selects that map mode (previously the whole chain was
     * duplicated in both branches).
     *
     * @param mode locking strategy; {@code BUSY_WAITING} selects the
     *             busy-waiting variant, anything else uses the builder default
     */
    public void initAndLoadData(ConcurrentLongLongMapBuilder.MapMode mode) {
        ConcurrentLongLongMapBuilder builder = ConcurrentLongLongMapBuilder.newBuilder()
                .withBuckets(16)
                .withInitialCapacity(NUM_VALUES)
                .withLoadFactor(0.8f);
        // '==' is the idiomatic, null-safe way to compare enum constants
        // (the original 'mode.equals(...)' would NPE on a null mode).
        if (mode == ConcurrentLongLongMapBuilder.MapMode.BUSY_WAITING) {
            builder = builder.withMode(ConcurrentLongLongMapBuilder.MapMode.BUSY_WAITING);
        }
        map = builder.build();
        for (int i = 0; i < NUM_VALUES; i++) {
            long key = ThreadLocalRandom.current().nextLong();
            long value = ThreadLocalRandom.current().nextLong();
            map.put(key, value);
        }
    }

    /** One lookup with a random (almost certainly absent) key. */
    public void testGet() {
        long key = ThreadLocalRandom.current().nextLong();
        map.get(key);
    }

    /** One insert of a random key/value pair. */
    public void testPut() {
        long key = ThreadLocalRandom.current().nextLong();
        long value = ThreadLocalRandom.current().nextLong();
        map.put(key, value);
    }

    /** One randomly chosen operation: put, remove, or get (uniform over 3). */
    public void testAllOps() {
        int op = ThreadLocalRandom.current().nextInt(3);
        long key = ThreadLocalRandom.current().nextLong();
        switch (op) {
            case 1:
                long value = ThreadLocalRandom.current().nextLong();
                map.put(key, value);
                break;
            case 2:
                map.remove(key);
                break;
            default:
                map.get(key);
                break;
        }
    }
}
|
#!/usr/bin/env bash
# Test: XML parser tests and JSON translation
# @see https://www.w3.org/TR/2008/REC-xml-20081126
# https://www.w3.org/TR/2009/REC-xml-names-20091208
# Uncomment to run the parser utility under valgrind instead:
#PROG="valgrind --leak-check=full --show-leak-kinds=all ../util/clixon_util_xml"
# Magic line must be first in script (see README.md)
s="$_" ; . ./lib.sh || if [ "$s" = $0 ]; then exit 0; else return 0; fi
# Parser utility under test; may be overridden from the environment.
: ${clixon_util_xml:="clixon_util_xml"}
# --- Basic parsing: well-formed XML, JSON translation, NCName rules ---
new "xml parse"
expecteof "$clixon_util_xml -o" 0 "<a><b/></a>" "^<a><b/></a>$"
new "xml parse to json"
expecteof "$clixon_util_xml -oj" 0 "<a><b/></a>" '{"a":{"b":null}}'
new "xml parse strange names"
expecteof "$clixon_util_xml -o" 0 "<_-><b0.><c-.-._/></b0.></_->" "<_-><b0.><c-.-._/></b0.></_->"
# Names may not start with '-', a digit, or contain '%': expect exit 255.
new "xml parse name errors"
expecteof "$clixon_util_xml -o" 255 "<-a/>" ""
new "xml parse name errors"
expecteof "$clixon_util_xml -o" 255 "<9/>" ""
new "xml parse name errors"
expecteof "$clixon_util_xml -o" 255 "<a%/>" ""
# LF holds a literal newline, used to build and check multi-line content.
LF='
'
new "xml parse content with CR LF -> LF, CR->LF (see https://www.w3.org/TR/REC-xml/#sec-line-ends)"
ret=$(echo "<x>a
b${LF}c
${LF}d</x>" | $clixon_util_xml -o)
if [ "$ret" != "<x>a${LF}b${LF}c${LF}d</x>" ]; then
err '<x>a$LFb$LFc</x>' "$ret"
fi
# --- CDATA sections, including escaped CEND (']]>') sequences ---
new "xml simple CDATA"
expecteofx "$clixon_util_xml -o" 0 '<a><![CDATA[a text]]></a>' '<a><![CDATA[a text]]></a>'
new "xml simple CDATA to json"
expecteofx "$clixon_util_xml -o -j" 0 '<a><![CDATA[a text]]></a>' '{"a":"a text"}'
new "xml complex CDATA"
XML=$(cat <<EOF
<a><description>An example of escaped CENDs</description>
<sometext><![CDATA[ They're saying "x < y" & that "z > y" so I guess that means that z > x ]]></sometext>
<!-- This text contains a CEND ]]> -->
<!-- In this first case we put the ]] at the end of the first CDATA block
and the > in the second CDATA block -->
<data><![CDATA[This text contains a CEND ]]]]><![CDATA[>]]></data>
<!-- In this second case we put a ] at the end of the first CDATA block
and the ]> in the second CDATA block -->
<alternative><![CDATA[This text contains a CEND ]]]><![CDATA[]>]]></alternative>
</a>
EOF
)
expecteof "$clixon_util_xml -o" 0 "$XML" "^<a><description>An example of escaped CENDs</description><sometext>
<![CDATA[ They're saying \"x < y\" & that \"z > y\" so I guess that means that z > x ]]>
</sometext><data><![CDATA[This text contains a CEND ]]]]><![CDATA[>]]></data><alternative><![CDATA[This text contains a CEND ]]]><![CDATA[]>]]></alternative></a>$"
JSON=$(cat <<EOF
{"a":{"description":"An example of escaped CENDs","sometext":" They're saying \"x < y\" & that \"z > y\" so I guess that means that z > x ","data":"This text contains a CEND ]]>","alternative":"This text contains a CEND ]]>"}}
EOF
)
new "xml complex CDATA to json"
expecteofx "$clixon_util_xml -oj" 0 "$XML" "$JSON"
# --- Character encoding in content, quote characters, attribute quoting ---
XML=$(cat <<EOF
<message>Less than: < , greater than: > ampersand: & </message>
EOF
)
new "xml encode <>&"
expecteof "$clixon_util_xml -o" 0 "$XML" "$XML"
new "xml encode <>& to json"
expecteof "$clixon_util_xml -oj" 0 "$XML" '{"message":"Less than: < , greater than: > ampersand: & "}'
XML=$(cat <<EOF
<message>single-quote character ' represented as ' and double-quote character as "</message>
EOF
)
new "xml single and double quote"
expecteof "$clixon_util_xml -o" 0 "$XML" "<message>single-quote character ' represented as ' and double-quote character as \"</message>"
JSON=$(cat <<EOF
{"message":"single-quote character ' represented as ' and double-quote character as \""}
EOF
)
new "xml single and double quotes to json"
expecteofx "$clixon_util_xml -oj" 0 "$XML" "$JSON"
# Backslash sequences are plain content in XML but must be escaped in JSON.
new "xml backspace"
expecteofx "$clixon_util_xml -o" 0 "<a>a\b</a>" "<a>a\b</a>"
new "xml backspace to json"
expecteofx "$clixon_util_xml -oj" 0 "<a>a\b</a>" '{"a":"a\\b"}'
new "Double quotes for attributes"
expecteof "$clixon_util_xml -o" 0 '<x a="t"/>' '<x a="t"/>'
new "Single quotes for attributes (returns double quotes but at least parses right)"
expecteof "$clixon_util_xml -o" 0 "<x a='t'/>" '<x a="t"/>'
new "Mixed quotes"
expecteof "$clixon_util_xml -o" 0 "<x a='t' b=\"q\"/>" '<x a="t" b="q"/>'
# --- XML declaration, processing instructions, and XML namespaces ---
new "XMLdecl version"
expecteof "$clixon_util_xml -o" 0 '<?xml version="1.0"?><a/>' '<a/>'
new "XMLdecl version, single quotes"
expecteof "$clixon_util_xml -o" 0 "<?xml version='1.0'?><a/>" '<a/>'
new "XMLdecl version no element"
expecteof "$clixon_util_xml -o" 255 '<?xml version="1.0"?>' ''
new "XMLdecl no version"
expecteof "$clixon_util_xml -o" 255 '<?xml ?><a/>' ''
new "XMLdecl misspelled version"
expecteof "$clixon_util_xml -ol o" 255 '<?xml verion="1.0"?><a/>' ''
new "XMLdecl version + encoding"
expecteof "$clixon_util_xml -o" 0 '<?xml version="1.0" encoding="UTF-16"?><a/>' '<a/>'
new "XMLdecl version + misspelled encoding"
expecteof "$clixon_util_xml -ol o" 255 '<?xml version="1.0" encding="UTF-16"?><a/>' 'syntax error: at or before: e'
new "XMLdecl version + standalone"
expecteof "$clixon_util_xml -o" 0 '<?xml version="1.0" standalone="yes"?><a/>' '<a/>'
# Processing instructions are accepted and dropped from the output.
new "PI - Processing instruction empty"
expecteof "$clixon_util_xml -o" 0 '<?foo ?><a/>' '<a/>'
new "PI some content"
expecteof "$clixon_util_xml -o" 0 '<?foo something else ?><a/>' '<a/>'
new "prolog element misc*"
expecteof "$clixon_util_xml -o" 0 '<?foo something ?><a/><?bar more stuff ?><!-- a comment-->' '<a/>'
# We allow it as an internal necessity for parsing of xml fragments
#new "double element error"
#expecteof "$clixon_util_xml" 255 '<a/><b/>' ''
new "namespace: DefaultAttName"
expecteof "$clixon_util_xml -o" 0 '<x xmlns="n1">hello</x>' '<x xmlns="n1">hello</x>'
new "namespace: PrefixedAttName"
expecteof "$clixon_util_xml -o" 0 '<x xmlns:n2="urn:example:des"><n2:y>hello</n2:y></x>' '^<x xmlns:n2="urn:example:des"><n2:y>hello</n2:y></x>$'
new "First example 6.1 from https://www.w3.org/TR/2009/REC-xml-names-20091208"
XML=$(cat <<EOF
<?xml version="1.0"?>
<html:html xmlns:html='http://www.w3.org/1999/xhtml'>
<html:head><html:title>Frobnostication</html:title></html:head>
<html:body><html:p>Moved to
<html:a href='http://frob.example.com'>here.</html:a></html:p></html:body>
</html:html>
EOF
)
expecteof "$clixon_util_xml -o" 0 "$XML" "$XML"
new "Second example 6.1 from https://www.w3.org/TR/2009/REC-xml-names-20091208"
XML=$(cat <<EOF
<?xml version="1.0"?>
<!-- both namespace prefixes are available throughout -->
<bk:book xmlns:bk='urn:loc.gov:books'
xmlns:isbn='urn:ISBN:0-395-36341-6'>
<bk:title>Cheaper by the Dozen</bk:title>
<isbn:number>1568491379</isbn:number>
</bk:book>
EOF
)
expecteof "$clixon_util_xml -o" 0 "$XML" "$XML"
# Clean up the per-test working directory created by lib.sh.
rm -rf $dir
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.