text stringlengths 1 1.05M |
|---|
<reponame>mmvanheusden/ForgeHax
package dev.fiki.forgehax.asm.patches;
import dev.fiki.forgehax.api.asm.MapClass;
import dev.fiki.forgehax.api.asm.MapMethod;
import dev.fiki.forgehax.asm.hooks.ForgeHaxHooks;
import dev.fiki.forgehax.asm.hooks.PushHooks;
import dev.fiki.forgehax.asm.utils.ASMHelper;
import dev.fiki.forgehax.asm.utils.ASMPattern;
import dev.fiki.forgehax.asm.utils.asmtype.ASMMethod;
import dev.fiki.forgehax.asm.utils.transforming.Inject;
import dev.fiki.forgehax.asm.utils.transforming.Patch;
import net.minecraft.client.entity.player.ClientPlayerEntity;
import org.objectweb.asm.tree.*;
@MapClass(ClientPlayerEntity.class)
public class ClientEntityPlayerPatch extends Patch {
  /**
   * Patches {@code ClientPlayerEntity#aiStep}.
   * Finds the tail of the vanilla "should the player be slowed down" condition and
   * appends a call to {@code ForgeHaxHooks.shouldSlowdownPlayer(this)}; the injected
   * IFEQ branches to the vanilla skip label when the hook returns false, so the
   * slowdown code is bypassed.
   */
  @Inject
  @MapMethod("aiStep")
  public void aiStep(MethodNode main,
      @MapMethod(parentClass = ForgeHaxHooks.class, name = "shouldSlowdownPlayer") ASMMethod hook) {
    // Match the instruction sequence ending in IFNE; its jump label is where the
    // vanilla code goes when the slowdown should NOT be applied.
    AbstractInsnNode skipNode = ASMPattern.builder()
        .codeOnly()
        .opcodes(ALOAD, INVOKEVIRTUAL, IFEQ, ALOAD, INVOKEVIRTUAL, IFNE)
        .find(main)
        .getLast("could not find IFNE node");
    LabelNode skip = ((JumpInsnNode) skipNode).label;
    InsnList list = new InsnList();
    list.add(new VarInsnNode(ALOAD, 0)); // push 'this' for the static hook call
    list.add(ASMHelper.call(INVOKESTATIC, hook));
    list.add(new JumpInsnNode(IFEQ, skip)); // hook returned false -> jump past slowdown
    main.instructions.insert(skipNode, list);
  }

  /**
   * Patches {@code ClientPlayerEntity#tick} to bracket the sendPosition()
   * ("update walking player") call with pre/post hooks:
   * <pre>
   *   if (ForgeHaxHooks.onUpdateWalkingPlayerPre(this)) goto skip;
   *   this.sendPosition();
   *   ForgeHaxHooks.onUpdateWalkingPlayerPost(this);
   *   skip:
   * </pre>
   * A truthy pre-hook therefore suppresses both the vanilla call and the post hook.
   */
  @Inject
  @MapMethod("tick")
  public void tick(MethodNode main,
      @MapMethod(parentClass = ClientPlayerEntity.class, name = "sendPosition") ASMMethod onUpdateWalkingPlayer,
      @MapMethod(parentClass = ForgeHaxHooks.class, name = "onUpdateWalkingPlayerPre") ASMMethod updateWalkingPlayerPre,
      @MapMethod(parentClass = ForgeHaxHooks.class, name = "onUpdateWalkingPlayerPost") ASMMethod updateWalkingPlayerPost) {
    // <pre>
    // this.onUpdateWalkingPlayer();
    // <post>
    // skip label
    // Locate the invocation of sendPosition by matching any of its mapped names.
    AbstractInsnNode walkingUpdateCall = ASMPattern.builder()
        .custom(n -> {
          if (n instanceof MethodInsnNode) {
            return onUpdateWalkingPlayer.anyNameEqual(((MethodInsnNode) n).name);
          }
          return false;
        })
        .find(main)
        .getFirst("could not find node to onUpdateWalkingPlayer");
    LabelNode jmp = new LabelNode();
    InsnList pre = new InsnList();
    pre.add(new VarInsnNode(ALOAD, 0)); // this*
    pre.add(ASMHelper.call(INVOKESTATIC, updateWalkingPlayerPre));
    pre.add(new JumpInsnNode(IFNE, jmp)); // pre-hook true -> skip the call and the post hook
    InsnList post = new InsnList();
    post.add(new VarInsnNode(ALOAD, 0)); // this*
    post.add(ASMHelper.call(INVOKESTATIC, updateWalkingPlayerPost));
    post.add(jmp); // branch target sits after the post hook
    // insert above ALOAD
    // NOTE(review): assumes the instruction immediately preceding the call is the
    // ALOAD that pushes the receiver -- confirm against the mapped bytecode.
    main.instructions.insertBefore(walkingUpdateCall.getPrevious(), pre);
    // insert below call
    main.instructions.insert(walkingUpdateCall, post);
  }

  /**
   * Patches {@code ClientPlayerEntity#isHandsBusy} so that when
   * {@code ForgeHaxHooks.shouldNotRowBoat(this)} returns true the method
   * immediately returns false (hands reported free); otherwise the vanilla
   * body runs unchanged.
   */
  @Inject
  @MapMethod("isHandsBusy")
  public void isHandsBusy(MethodNode main,
      @MapMethod(parentClass = ForgeHaxHooks.class, name = "shouldNotRowBoat") ASMMethod hook) {
    AbstractInsnNode ret = ASMPattern.builder()
        .codeOnly()
        .opcode(IRETURN)
        .find(main)
        .getFirst("could not find return node");
    LabelNode end = new LabelNode();  // placed right before the first IRETURN
    LabelNode jump = new LabelNode(); // fall-through into the vanilla body
    InsnList list = new InsnList();
    list.add(new VarInsnNode(ALOAD, 0));
    list.add(ASMHelper.call(INVOKESTATIC, hook));
    list.add(new JumpInsnNode(IFEQ, jump)); // hook false -> run vanilla code
    list.add(new InsnNode(ICONST_0));       // hook true -> push false...
    list.add(new JumpInsnNode(GOTO, end));  // ...and jump to the IRETURN
    list.add(jump);
    main.instructions.insert(list); // prepend at method entry
    main.instructions.insertBefore(ret, end);
  }

  /**
   * Patches {@code ClientPlayerEntity#suffocatesAt} with the same early-return
   * pattern as isHandsBusy: when {@code PushHooks.onPushedByBlock(this)} returns
   * true, push false and jump to the first IRETURN, disabling block push.
   */
  @Inject
  @MapMethod("suffocatesAt")
  public void suffocatesAt(MethodNode node,
      @MapMethod(parentClass = PushHooks.class, name = "onPushedByBlock") ASMMethod onPushedByBlock) {
    InsnNode ret = ASMHelper.findReturn(IRETURN, node);
    LabelNode disabled = new LabelNode();    // placed right before the IRETURN
    LabelNode notDisabled = new LabelNode(); // fall-through into the vanilla body
    InsnList list = new InsnList();
    list.add(new VarInsnNode(ALOAD, 0));
    list.add(ASMHelper.call(INVOKESTATIC, onPushedByBlock));
    list.add(new JumpInsnNode(IFEQ, notDisabled)); // hook false -> vanilla behaviour
    list.add(new InsnNode(ICONST_0));              // hook true -> return false
    list.add(new JumpInsnNode(GOTO, disabled));
    list.add(notDisabled);
    node.instructions.insert(list); // prepend at method entry
    node.instructions.insertBefore(ret, disabled);
  }
}
|
<!DOCTYPE html>
<html>
<head>
  <title>Stock Prices</title>
  <script>
    // Placeholder: intended to fetch and display prices for the companies below.
    function getPrices() {
      // Code to get stock prices
    }
  </script>
</head>
<body>
  <h1>Stock Prices</h1>
  <!-- Companies whose prices will be shown -->
  <ul>
    <li>Apple</li>
    <li>Google</li>
    <li>Microsoft</li>
    <li>Tesla</li>
    <li>Facebook</li>
  </ul>
  <div>
    <!-- Triggers the (currently unimplemented) price lookup -->
    <button onclick="getPrices()">Get Prices</button>
  </div>
</body>
</html> |
<gh_stars>0
import java.util.*;

/**
 * Minimal binary-tree node holding an int value and left/right child links.
 * (Fix: the original import statement was missing its terminating semicolon,
 * which is a compile error in Java.)
 */
public class TreeNode {
    TreeNode left = null;   // left child, null when absent
    TreeNode right = null;  // right child, null when absent
    int val = 0;            // value stored at this node

    /**
     * Creates a leaf node holding {@code val}.
     *
     * @param val value to store at this node
     */
    TreeNode(int val) {
        this.val = val;
    }
}
|
from lxml import etree
def pt_dev_io_port_passthrough(board_etree, scenario_etree, allocation_etree):
    """Allocate every I/O port the scenario requires into the allocation tree.

    Args:
        board_etree: XML document (string/bytes) describing the board's ports.
        scenario_etree: XML document (string/bytes) listing ports the scenario needs.
        allocation_etree: XML document (string/bytes) of current port allocations.

    Returns:
        The updated allocation document serialized back to a unicode string.

    NOTE(review): despite the ``*_etree`` names, the arguments are parsed with
    ``etree.fromstring``, so callers must pass serialized XML rather than
    element trees -- confirm against call sites.
    NOTE(review): the original also computed the board's unallocated ports but
    never used them; required ports are not validated against board
    availability -- possibly an oversight in the original.
    """
    board_root = etree.fromstring(board_etree)
    scenario_root = etree.fromstring(scenario_etree)
    allocation_root = etree.fromstring(allocation_etree)

    # Port names the scenario asks for, and those already allocated.
    scenario_ports = [port.attrib['name'] for port in scenario_root.findall('.//port')]
    allocated_ports = [port.attrib['name'] for port in allocation_root.findall('.//port')]

    # Ports the scenario needs that are not yet allocated.
    required_ports = [port for port in scenario_ports if port not in allocated_ports]

    # Append a <port name="..."/> element for each newly required port.
    for port in required_ports:
        allocation_root.append(etree.Element('port', name=port))

    return etree.tostring(allocation_root, encoding='unicode')
/**
 * Prints every element of the 2-D array to standard out, one per line,
 * walking rows first and then columns (same order as a nested index loop).
 *
 * @param numbers rectangular or jagged 2-D int array to print
 */
public static void printArray(int[][] numbers) {
    for (int[] row : numbers) {
        for (int value : row) {
            System.out.println(value);
        }
    }
}
printArray(numbers); |
<reponame>osidorkin/Brunel<filename>etc/src/main/java/org/brunel/app/CookBookBuilder.java
/*
* Copyright (c) 2015 IBM Corporation and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.brunel.app;
/**
 * Builds the Brunel cookbook document: three sections (basic charts,
 * statistical graphics, information visualization), each rendered from a
 * resource file via the inherited {@code run(resource, format)} method.
 */
public class CookBookBuilder extends DocBuilder {
    private static final String ITEM_FORMAT = "**[%s](%s)** %s\n\n`%s`\n\n";
    public static final String BASIC = "/org/brunel/app/basic.txt";
    public static final String STATS = "/org/brunel/app/stats.txt";
    public static final String INFO_VIS = "/org/brunel/app/infovis.txt";

    public static void main(String[] args) throws Exception {
        new CookBookBuilder().run();
    }

    /** Emits all three sections, prints the document, then opens it in a browser. */
    private void run() throws Exception {
        appendSection("Basic Charts", BASIC);
        appendSection("Statistical Graphics", STATS);
        appendSection("Information Visualization", INFO_VIS);
        System.out.println(out.toString());
        display.showInBrowser();
    }

    /** Writes a horizontal rule and a heading, then renders the section's items. */
    private void appendSection(String heading, String resource) throws Exception {
        out.append("***\n");
        out.append("### " + heading + "\n");
        run(resource, ITEM_FORMAT);
    }

    @Override
    protected String format(String itemFormat, String target,
            String description, String image, String title, String brunel) {
        // ITEM_FORMAT expects the arguments in this order: title, target, description, brunel.
        return String.format(itemFormat, title, target, description, brunel);
    }
}
|
<reponame>DPechetti/node_base_project<filename>src/infra/logging/logger.js
// Thin wrapper around a pre-configured pino instance so callers get a
// stable { info, error } logging interface.
const pino = require('pino')({ prettyPrint: true });

function info(message) {
  return pino.info(message);
}

function error(message) {
  return pino.error(message);
}

module.exports = { info, error };
|
#!/bin/zsh
# My starter template for tmux layout

# Name the session after the current working directory.
SESSION=`basename $PWD`
# Start a detached session; -2 forces 256-colour mode.
tmux -2 new-session -d -s $SESSION

# first window will contain vim + two terminals
tmux rename-window -t $SESSION:1 IDE
tmux split-window -v
tmux select-pane -t 1
tmux resize-pane -D 10
tmux select-pane -t 2
tmux split-window -h
tmux select-pane -t 1

# second window contains monitors + utils
tmux new-window -t $SESSION -a -n general-purpose
tmux split-window -h
tmux select-pane -t 1
tmux split-window -v
tmux select-pane -t 1
tmux resize-pane -U 15
tmux select-pane -t 3
tmux split-window -v
tmux select-pane -t 3
tmux resize-pane -U 10
# Launch the file manager (pane 3) and htop (pane 4).
tmux send-keys "mc" C-m
tmux select-pane -t 4
tmux send-keys "htop" C-m
tmux select-pane -t 2
#tmux send-keys "bmon -o curses" C-m

# Return focus to the IDE window and attach to the session.
tmux select-window -t $SESSION:1
tmux -2 attach -t $SESSION
|
package task

import (
	"time"
)

// Meta carries bookkeeping information attached to a task.
type Meta struct {
	// Worker identifies the worker handling the task.
	Worker int `json:"worker"`
	// Timestamp records when this metadata was produced.
	Timestamp time.Time `json:"timestamp"`
}
|
def xor_two_hex(hex_1, hex_2):
    """XOR two hex strings and return the result as a hex string.

    Accepts strings with or without a ``0x`` prefix (``int(s, 16)``
    handles both). The returned string carries no ``0x`` prefix.

    Fix: the original used ``hex(xor).lstrip('0x')``, but ``lstrip``
    strips any leading character from the *set* {'0', 'x'}, so an XOR
    result of 0 came back as ``''`` instead of ``'0'``.
    """
    result = int(hex_1, 16) ^ int(hex_2, 16)
    # format(..., 'x') emits bare lowercase hex digits, with '0' for zero.
    return format(result, 'x')
|
#!/bin/bash
# Builds and pushes the svenruppert/maven-3.6.0-adopt-openj9 image unless the
# target tag already exists on Docker Hub.

# Succeeds (exit 0) when Docker Hub repository $1 already has a tag named $2.
function docker_tag_exists() {
    EXISTS=$(curl -s "https://hub.docker.com/v2/repositories/$1/tags/?page_size=10000" | jq -r "[.results | .[] | .name == \"$2\"] | any")
    # Quote the operand: with the unquoted original, an empty curl/jq result
    # made `test` fail with a syntax error instead of returning false.
    test "$EXISTS" = true
}

if docker_tag_exists svenruppert/maven-3.6.0-adopt-openj9 1.8.0-162; then
    echo skip building, image already existing - svenruppert/maven-3.6.0-adopt-openj9 1.8.0-162
else
    echo start building the images
    docker build -t svenruppert/maven-3.6.0-adopt-openj9 .
    docker tag svenruppert/maven-3.6.0-adopt-openj9:latest svenruppert/maven-3.6.0-adopt-openj9:1.8.0-162
    docker push svenruppert/maven-3.6.0-adopt-openj9:1.8.0-162
fi
# Remove the local copies once pushed (runs whether or not a build happened).
docker image rm svenruppert/maven-3.6.0-adopt-openj9:latest
docker image rm svenruppert/maven-3.6.0-adopt-openj9:1.8.0-162
<filename>fj36-webservice/src/br/com/caelum/payfast/rest/PagamentoService.java
package br.com.caelum.payfast.rest;

import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;

/**
 * JAX-RS application entry point, mounted at the context root ("/").
 * No classes are registered explicitly, so resource classes are discovered
 * automatically by the JAX-RS runtime.
 */
@ApplicationPath("/")
public class PagamentoService extends Application {
}
|
<filename>app/src/main/java/com/wizeline/recyclerview/di/AppBinder.java<gh_stars>1-10
package com.wizeline.recyclerview.di;

import com.wizeline.recyclerview.ui.main.MainActivity;
import com.wizeline.recyclerview.ui.main.MainModule;
import dagger.Module;
import dagger.android.ContributesAndroidInjector;

/**
 * Dagger module that declares which Android components receive injection.
 */
@Module
public abstract class AppBinder {
  /**
   * Generates an injector subcomponent for {@link MainActivity},
   * installing {@link MainModule} into its graph.
   */
  @ContributesAndroidInjector(modules = MainModule.class)
  abstract MainActivity bindMainActivity();
}
|
// Given an index k, return the kth row of the Pascal's triangle.
//
//
// For example, given k = 3,
// Return [1,3,3,1].
//
//
//
// Note:
// Could you optimize your algorithm to use only O(k) extra space?
/**
* @param {number} rowIndex
* @return {number[]}
*/
var getRow = function(rowIndex) {
    // Build Pascal's triangle row `rowIndex` in place using the additive
    // recurrence C(n, k) = C(n-1, k-1) + C(n-1, k), in O(k) extra space.
    //
    // Fix: the original computed each entry as a floating-point quotient
    // (m / n of running products), which loses precision as the products
    // grow; pure integer additions stay exact up to Number.MAX_SAFE_INTEGER.
    var row = new Array(rowIndex + 1).fill(1);
    for (var i = 2; i <= rowIndex; i++) {
        // Walk right-to-left so each slot still holds the previous row's value
        // when it is read.
        for (var j = i - 1; j > 0; j--) {
            row[j] += row[j - 1];
        }
    }
    return row;
};
|
'use strict';
// ThinkJS base controller for the admin module: loads site config, enforces
// login for all actions except login/logout, and exposes the CSRF token.
export default class extends think.controller.base {
  /**
   * some base method in here
   * Runs before every action: loads config, checks the login session, and
   * assigns the CSRF token to the view.
   */
  async __before() {
    await this.getConfig();
    // NOTE(review): thisUrl is built but never used (the permission check
    // that consumed it is commented out below).
    let thisUrl = this.http.module + "/" + this.http.controller + "/" + this.http.action;
    // login check
    let userinfo = await this.session("userInfo");
    // console.log(userinfo)
    // console.log(this.http.action)
    if ((this.http.action != 'login') && (this.http.action != 'logout')) {
      // NOTE(review): re-fetches and shadows the `userinfo` declared above.
      let userinfo = await this.session("userInfo");
      if (think.isEmpty(userinfo)) {
        return this.redirect("/admin/index/login");
      } else {
        this.assign('userinfo', userinfo);
      }
    }
    // end of login check
    // permission check (currently disabled)
    // let uinfo = await this.session('userInfo');
    // let username = uinfo.name;
    // let userData = await this.model('admin').findOne('user', {name: username});
    // let roleData = await this.model('admin').findOne('manage_role', {id: userData.role});
    // let permissions = (roleData.permission).split(",");
    // no permission
    // if (myurl != 'admin/mail/warning') {
    // if (permissions.indexOf(myurl) < 0) {
    // if (this.http.method === 'POST') {
    // return this.fail("抱歉,您没有权限访问,请与系统管理员联系!");
    // } else {
    // return this.display("admin/error/nopermission");
    // }
    // }
    // }
    // end of permission check
    // csrf: guard against forged form submissions
    let csrf = await this.session("__CSRF__");
    this.assign("csrf", csrf);
    //}
  }
  // Loads the 'web_setting' record and exposes it to templates as `_web`.
  async getConfig() {
    let sys_setting = await this.model('setting').where({sys_name:'web_setting'}).find()
    this.assign('_web', sys_setting);
  }
  /**
   * Whether the user is logged in.
   * @returns {boolean}
   * NOTE(review): actually returns the user id (truthy) when logged in,
   * false otherwise -- not a strict boolean.
   */
  async islogin() {
    // front-end login check
    let user = await this.session('userInfo');
    let res = think.isEmpty(user) ? false : user.id;
    return res;
  }
  // Redirects anonymous visitors of front-end pages to the login page.
  async weblogin() {
    let islogin = await this.islogin();
    if (!islogin) {
      // detect the client type
      this.redirect('/user/login')
      //if (checkMobile(this.userAgent())) {
      // // on mobile, go straight to the login page
      // this.redirect('/user/login')
      //} else {
      // // on PC, show the error page
      // return think.statusAction(700,this.http);
      //}
    }
  }
}
|
// ESLint configuration for a TypeScript project.
module.exports = {
  parser: '@typescript-eslint/parser', // parse TypeScript syntax
  extends: [
    'plugin:@typescript-eslint/recommended',
  ],
  parserOptions: {
    ecmaVersion: 2018,   // allow ES2018 syntax
    sourceType: 'module', // code uses ES module import/export
  },
  rules: {
    // TypeScript-specific rules switched off (0 = off).
    '@typescript-eslint/ban-ts-ignore': 0,
    '@typescript-eslint/consistent-type-assertions': 0,
    '@typescript-eslint/camelcase': 0,
    '@typescript-eslint/no-unused-vars': 0,
    // Core rules enforced as errors (2 = error).
    'no-script-url': 2,
    'no-self-compare': 2,
    'no-tabs': 2,
  },
};
|
#!/bin/sh
#
# Copyright 2018 The Prometheus Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Prints the path of every Go source file (vendor/ excluded) whose first
# three lines lack a copyright/generated marker.
check_license() {
  # NOTE(review): `local` is not strictly POSIX sh, though dash/bash/busybox
  # all support it -- confirm the target shells if portability matters.
  local file=""
  for file in $(find . -type f -iname '*.go' ! -path './vendor/*'); do
    head -n3 "${file}" | grep -Eq "(Copyright|generated|GENERATED)" || echo " ${file}"
  done
}

# Fail the check (exit 255) when any offending files were reported.
licRes=$(check_license)
if [ -n "${licRes}" ]; then
  echo "license header checking failed:"
  echo "${licRes}"
  exit 255
fi
|
package benchmarks.CLEVER.divide.Eq;
/**
 * Integer division with a guard for a zero divisor.
 */
public class oldV {
  /** Raw integer division; throws ArithmeticException when y == 0. */
  private int lib(int x, int y) { return x / y; }

  /**
   * Returns c / d, or 0 when d is zero (shielding {@link #lib} from a
   * division-by-zero exception).
   */
  public int client(int c, int d) {
    return d == 0 ? 0 : lib(c, d);
  }
}
<reponame>afialapis/dibi<filename>packages/conn/test/01.test_crud.js
// Integration tests for the Postgres connection wrapper: runs a full CRUD
// cycle (drop/create table, insert, update, delete, count, distinct, drop)
// against a scratch table named test_01.
import assert from 'assert'
import config from './config'
import {getConnection} from '../src'

// Shared connection handle, opened by the first describe block below.
let pgConn= undefined

// Fixture rows. Two records deliberately share counter 99 (exercised by the
// multi-row UPDATE test), and 'Jonny' is deleted individually later.
const TEST_RECORDS= [
  {name: 'Peter', description: 'A simple man', counter: 91},
  {name: 'Harry', description: 'A dirty man' , counter: 99},
  {name: 'James', description: 'A golden man', counter: 99},
  {name: 'Jonny', description: 'A rocker man', counter: 46},
]

// NOTE(review): the inner describe blocks are chained with commas (the comma
// operator); mocha registers them all regardless, but the commas do nothing.
describe('Postgres: Test some queries', function() {
  describe('Connection open', function() {
    // NOTE(review): no assertion -- only verifies getConnection() does not throw.
    it('should create the database connection', function() {
      pgConn = getConnection(config.db)
    })
  }),
  describe('clean', function() {
    it('should drop test_01 table if exists', async function() {
      const query = `DROP TABLE IF EXISTS test_01`
      await pgConn.execute(query)
    })
  }),
  describe('Create', function() {
    it('should create test_01 table', async function() {
      const query = `
        CREATE TABLE test_01 (
          id serial,
          name TEXT NOT NULL,
          description TEXT NULL,
          counter INTEGER
        )`
      await pgConn.execute(query)
    })
  }),
  describe('Insert', function() {
    it('should create test records', async function() {
      for (const rec of TEST_RECORDS) {
        const query= `
          INSERT INTO test_01
          (name, description, counter)
          VALUES
          ($1, $2, $3)
        `
        await pgConn.execute(query, [rec.name, rec.description, rec.counter])
      }
    })
  }),
  describe('Update', function() {
    // The CTE counts affected rows so the test can assert exactly one update.
    it('should update one record', async function() {
      const query = `
        WITH rows as (
          UPDATE test_01
          SET description = $1
          WHERE name = $2
          RETURNING 1
        ) SELECT count(*) AS cnt FROM rows`
      const res= await pgConn.select_one(query, ['A not so simple man', 'Peter'])
      assert.strictEqual(res.cnt, '1')
    })
  }),
  describe('Update', function() {
    // Renames both counter=99 rows (Harry and James) to 'Frederic'.
    it('should update several records', async function() {
      const query = `
        WITH rows as (
          UPDATE test_01
          SET name = $1
          WHERE counter = $2
          RETURNING 1
        ) SELECT count(*) AS cnt FROM rows`
      const res= await pgConn.select_one(query, ['Frederic', 99])
      assert.strictEqual(res.cnt, '2')
    })
  }),
  describe('Delete', function() {
    it('should delete one record', async function() {
      const query = `
        WITH rows as (
          DELETE
          FROM test_01
          WHERE name = $1
          RETURNING 1
        ) SELECT count(*) AS cnt FROM rows`
      const res= await pgConn.select_one(query, ['Jonny'])
      assert.strictEqual(res.cnt, '1')
    })
  }),
  describe('Count', function() {
    it('should count 3 records', async function() {
      const query = `
        SELECT COUNT(1) as cnt
        FROM test_01`
      const res= await pgConn.select_one(query)
      assert.strictEqual(res.cnt, '3')
    })
  }),
  describe('Count', function() {
    it('should count 2 records with name Frederic', async function() {
      const query = `
        SELECT COUNT(1) as cnt
        FROM test_01
        WHERE name = $1`
      const res= await pgConn.select_one(query, ['Frederic'])
      assert.strictEqual(res.cnt, '2')
    })
  }),
  describe('Count', function() {
    it('should count 2 distinct names, Frederic and Peter', async function() {
      const query = `
        SELECT COUNT(DISTINCT name) as cnt
        FROM test_01`
      const res= await pgConn.select_one(query)
      assert.strictEqual(res.cnt, '2')
    })
  }),
  describe('Distinct', function() {
    it('should return distinct names, Frederic and Peter', async function() {
      const query = `
        SELECT DISTINCT name as cnt
        FROM test_01`
      const res= await pgConn.select(query)
      assert.strictEqual(res.length, 2)
    })
  }),
  describe('Delete', function() {
    it('should delete other records', async function() {
      const query = `
        WITH rows as (
          DELETE
          FROM test_01
          RETURNING 1
        ) SELECT count(*) AS cnt FROM rows`
      const res= await pgConn.select_one(query)
      assert.strictEqual(res.cnt , '3')
    })
  }),
  describe('clean', function() {
    it('should drop test_01', async function() {
      const query = `DROP TABLE test_01`
      await pgConn.execute(query)
    })
  })
})
|
class BankAccount:
    """Simple in-memory bank account with a balance and an account number.

    Robustness fix: deposit/withdraw now reject negative amounts, which the
    original silently accepted (a negative deposit shrank the balance and a
    negative withdrawal grew it).
    """

    def __init__(self, name):
        self.name = name            # account holder's name
        self.balance = 0            # current balance, starts at zero
        self.account_number = ""    # assigned later via set_account_number()

    def deposit(self, amount):
        """Add ``amount`` to the balance.

        Raises:
            ValueError: if ``amount`` is negative.
        """
        if amount < 0:
            raise ValueError("deposit amount must not be negative")
        self.balance += amount

    def withdraw(self, amount):
        """Remove ``amount`` from the balance if funds allow.

        Prints a warning (original behaviour) instead of raising when the
        balance is insufficient.

        Raises:
            ValueError: if ``amount`` is negative.
        """
        if amount < 0:
            raise ValueError("withdrawal amount must not be negative")
        if self.balance >= amount:
            self.balance -= amount
        else:
            print("Insufficient funds")

    def get_balance(self):
        """Return the current balance."""
        return self.balance

    def set_account_number(self, account_number):
        """Assign the account's identifying number."""
        self.account_number = account_number

    def get_account_info(self):
        """Return a one-line human-readable summary of the account."""
        return f"Name: {self.name}, Account Number: {self.account_number}"
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Unit tests for the SigV4 authentication provider/authenticator used with
// the Cassandra driver. The environment-based region lookup is stubbed in
// #constructor(); everything else runs with fixed credentials and a fixed
// date so signatures are deterministic.
const lib = require('../../lib/sigv4-auth-provider.js');
const assert = require('assert');

describe('SigV4AuthProvider', () => {
  describe('#extractNonce()', () => {
    let SigV4AuthProvider = lib.SigV4AuthProvider;
    let expected = '0c0b0c6f3946d14ce1a49a8f8c86a888';
    it('should pull basic nonce=', function () {
      let buf = Buffer.from('nonce=0c0b0c6f3946d14ce1a49a8f8c86a888');
      assert.equal(SigV4AuthProvider.extractNonce(buf), expected);
    });
    it('should stop at a comma', function () {
      let buf = Buffer.from('nonce=0c0b0c6f3946d14ce1a49a8f8c86a888,,');
      assert.equal(SigV4AuthProvider.extractNonce(buf), expected);
    });
    // NOTE(review): despite the description, this only asserts the result is
    // not strictly `false`; it does not verify that `undefined` is returned.
    it('should return undefined when no nonce= is present', function () {
      let buf = Buffer.from('0b0c6f3946d14ce1a49a8f8c86a888,,');
      assert.notStrictEqual(SigV4AuthProvider.extractNonce(buf), false);
    });
  });
  describe('#constructor()', () => {
    let SigV4AuthProvider = lib.SigV4AuthProvider;
    // Stub the environment region lookup; the original is restored after each test.
    let originalFn = SigV4AuthProvider.getRegionFromEnv;
    let regionFromEnv;
    beforeEach(function () {
      regionFromEnv = "ENV_DEFAULT_REGION";
      SigV4AuthProvider.getRegionFromEnv = () => {
        return regionFromEnv
      };
    });
    afterEach(function () {
      SigV4AuthProvider.getRegionFromEnv = originalFn;
    });
    it('should use Region if Provided', () => {
      let provider = new SigV4AuthProvider({region: "us-east-23", accessKeyId:'key'});
      assert.equal(provider.region, "us-east-23");
    });
    it('should use default if Provided', () => {
      let provider = new SigV4AuthProvider({accessKeyId:'key'});
      assert.equal(provider.region, "ENV_DEFAULT_REGION");
    });
    it('should fail if no region retrievable', () => {
      regionFromEnv = null;
      let err = new Error(
        "[SIGV4_MISSING_REGION] No region provided. You must either provide a region or set "
        + "environment variable [AWS_REGION]");
      assert.throws(() => {new SigV4AuthProvider()}, err);
    });
  });
});

describe('SigV4Authenticator', () => {
  describe('#initialResponse()', () => {
    let target = new lib.SigV4AuthProvider({region: "region", accessKeyId:'key'}).newAuthenticator();
    it('should call callback function with Sigv4 buffer', () => {
      target.initialResponse((err, buf) => {
        if (err != null) {
          assert.fail("Error sent to callback");
        }
        // this is a style of buffer setup that is deprecated, however
        // it is consistent with older versions of js. We use it
        // here as a double-entry bookkeeping that our buffer is right.
        // NOTE(review): notStrictEqual compares identity for objects, so this
        // assertion can never fail for two distinct Buffers.
        assert.notStrictEqual(buf, new Buffer("SigV4\0\0", 'utf8'));
      })
    });
  });
  describe('#evaluateChallenge()', () => {
    // Fixed credentials and timestamp make the expected signature deterministic.
    let target = new lib.SigV4Authenticator({
      region: 'us-west-2',
      accessKeyId: 'UserID-1',
      secretAccessKey: 'UserSecretKey-1',
      date: new Date(1591742511000)
    });
    it('should call callback with Signed Request', () => {
      let nonceBuffer = Buffer.from("nonce=91703fdc2ef562e19fbdab0f58e42fe5");
      let expected = "signature=7f3691c18a81b8ce7457699effbfae5b09b4e0714ab38c1292dbdf082c9ddd87,access_key=UserID-1,amzdate=2020-06-09T22:41:51.000Z";
      let calledCallback = false;
      target.evaluateChallenge(nonceBuffer, (err, buff) => {
        assert.equal(buff.toString(), expected);
        calledCallback = true;
      });
      // Guards against the callback silently never firing.
      assert.equal(calledCallback, true);
    });
    it('should fail when Nonce is not found', () => {
      let nonceBuffer = Buffer.from("buffer1");
      let calledCallback = false;
      let expected = 'Error: [SIGV4_MISSING_NONCE] Did not find nonce in SigV4 '
        + 'challenge:[buffer1]';
      target.evaluateChallenge(nonceBuffer, (err, buff) => {
        assert.equal(expected, err.toString());
        calledCallback = true;
      });
      assert.equal(calledCallback, true);
    });
  });
});
|
/**
 * Builds a character -> occurrence-count map for the given string.
 * Iterates by Unicode code point (string spread), matching for...of semantics.
 *
 * @param {string} str input text
 * @returns {Object} plain object mapping each character to its count
 */
function createCharacterFrequencyTable(str) {
  return [...str].reduce((table, ch) => {
    table[ch] = (table[ch] || 0) + 1;
    return table;
  }, {});
}

const result = createCharacterFrequencyTable('Hello World');
console.log(result); // { H: 1, e: 1, l: 3, o: 2, ' ': 1, W: 1, r: 1, d: 1 }
<reponame>ecmwf/ecflow
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// Name : Request
// Author : Avi
// Revision : $Revision$
//
// Copyright 2009-2020 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//
// Description :
//============================================================================
#include <string>
#include <iostream>
#include <boost/test/unit_test.hpp>
#include "Defs.hpp"
#include "PersistHelper.hpp"
#include "Memento.hpp"
using namespace std;
using namespace ecf;
// ********************************************************************
// These test are used to check that MIGRATE is equivalent to check pt
// MIGRATE will be used for migration from old to new release
// MIGRATE is essentially the defs structure with state.
// The state is written out as comments
// It is loaded like a normal Defs, the parser detects MIGRATE
// and loads the state in.
//
// By default persistence/MIGRATE *ONLY* writes the state when it not the default.
// Hence the defaults should *NOT* change. These test will change the state
// to a non default value.
//
// Write the Defs with state and the compare with in memory defs
// Write the Defs as check pt an then compare with in memory defs
// Finally compare the two *RELOADED* defs file.
// ********************************************************************
BOOST_AUTO_TEST_SUITE( ParserTestSuite )
BOOST_AUTO_TEST_CASE( test_memento_persist_and_reload )
{
std::vector<ecf::Aspect::Type> aspects;
bool aspect_only = false;
cout << "AParser:: ...test_memento_persist_and_reload\n";
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
StateMemento memento2(NState::ABORTED);
defs.set_memento(&memento2,aspects,aspect_only);
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"StateMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
std::pair<NState::State,boost::posix_time::time_duration> state;
state.first = NState::ABORTED;
state.second = boost::posix_time::time_duration(1,1,1,0);
NodeStateMemento memento(state);
t->set_memento(&memento,aspects,aspect_only);
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeStateMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
NodeDefStatusDeltaMemento memento(DState::ABORTED);
t->set_memento(&memento,aspects,aspect_only);
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeDefStatusDeltaMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
SuspendedMemento memento(true);
t->set_memento(&memento,aspects,aspect_only);
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"SuspendedMemento failed: " << helper.errorMsg());
SuspendedMemento memento1;
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"SuspendedMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Event event(1);
NodeEventMemento memento(event);
t->set_memento(&memento,aspects,aspect_only); // add event
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeEventMemento failed: " << helper.errorMsg());
event.set_value(true);
NodeEventMemento memento1(event); // set event
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeEventMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Meter meter("meter",0,100);
NodeMeterMemento memento(meter);
t->set_memento(&memento,aspects,aspect_only); // add meter
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeMeterMemento failed: " << helper.errorMsg());
meter.set_value(100);
NodeMeterMemento memento1( meter); // change meter
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeMeterMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Label label("label","xxx");
NodeLabelMemento memento(label);
t->set_memento(&memento,aspects,aspect_only); // add label;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeLabelMemento failed: " << helper.errorMsg());
label.set_new_value("yyy");
NodeLabelMemento memento1( label ); // change label
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeLabelMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Expression exp("1 == 0");
NodeTriggerMemento memento(exp);
t->set_memento(&memento,aspects,aspect_only); // add trigger;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTriggerMemento failed: " << helper.errorMsg());
exp.setFree();
NodeTriggerMemento memento1( exp ); // free trigger
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTriggerMemento failed: " << helper.errorMsg());
exp.clearFree();
NodeTriggerMemento memento2( exp ); // clear trigger
t->set_memento(&memento2,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTriggerMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Expression exp("1 == 0");
NodeCompleteMemento memento(exp);
t->set_memento(&memento,aspects,aspect_only); // add trigger;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeCompleteMemento failed: " << helper.errorMsg());
exp.setFree();
NodeCompleteMemento memento1( exp ); // free trigger
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeCompleteMemento failed: " << helper.errorMsg());
exp.clearFree();
NodeCompleteMemento memento2( exp ); // clear trigger
t->set_memento(&memento2,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeCompleteMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Repeat repeat(RepeatDate("YMD",20090916,20090916,1) );
NodeRepeatMemento memento(repeat);
t->set_memento(&memento,aspects,aspect_only); // add repeat;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeRepeatMemento failed: " << helper.errorMsg());
repeat.increment();
NodeRepeatMemento memento1( repeat ); // change repeat
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeRepeatMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Repeat repeat(RepeatDateList("YMD",{20090916,20090916}));
NodeRepeatMemento memento(repeat);
t->set_memento(&memento,aspects,aspect_only); // add repeat;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeRepeatMemento failed: " << helper.errorMsg());
repeat.increment();
NodeRepeatMemento memento1( repeat ); // change repeat
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeRepeatMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Limit limit("suiteLimit",10);
NodeLimitMemento memento(limit);
t->set_memento(&memento,aspects,aspect_only); // add limit;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeLimitMemento failed: " << helper.errorMsg());
std::set<std::string> paths;
paths.insert("/s1/t1");
limit.set_state(20,2,paths);
NodeLimitMemento memento1( limit ); // change limit
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeLimitMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
InLimit inlimit("suiteLimit","/path/to/node",2);
NodeInLimitMemento memento(inlimit);
t->set_memento(&memento,aspects,aspect_only); // add inlimit only, no state
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeInLimitMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
Variable variable("name","value");
NodeVariableMemento memento(variable);
t->set_memento(&memento,aspects,aspect_only); // add variable;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeVariableMemento failed: " << helper.errorMsg());
variable.set_value("new value");
NodeVariableMemento memento1( variable ); // change variable
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeVariableMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
ecf::LateAttr lateAttr;
lateAttr.addSubmitted( ecf::TimeSlot(3,12) );
lateAttr.addActive( ecf::TimeSlot(3,12) );
lateAttr.addComplete( ecf::TimeSlot(4,12), true);
NodeLateMemento memento(lateAttr);
t->set_memento(&memento,aspects,aspect_only); // add late;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeLateMemento failed: " << helper.errorMsg());
lateAttr.setLate(true);
NodeLateMemento memento1( lateAttr ); // change late
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeLateMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
ecf::TodayAttr attr(ecf::TimeSlot(10,12)) ;
NodeTodayMemento memento(attr);
t->set_memento(&memento,aspects,aspect_only); // add today;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTodayMemento failed: " << helper.errorMsg());
attr.setFree();
NodeTodayMemento memento1( attr ); // change today
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTodayMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
ecf::TimeAttr attr(ecf::TimeSlot(10,12)) ;
NodeTimeMemento memento(attr);
t->set_memento(&memento,aspects,aspect_only); // add time;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTimeMemento failed: " << helper.errorMsg());
attr.setFree();
NodeTimeMemento memento1( attr ); // change time
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeTimeMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
DayAttr attr(DayAttr::MONDAY);
NodeDayMemento memento(attr);
t->set_memento(&memento,aspects,aspect_only); // add day;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeDayMemento failed: " << helper.errorMsg());
attr.setFree();
NodeDayMemento memento1( attr ); // change day
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeDayMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
DateAttr attr(1,2,2009);
NodeDateMemento memento(attr);
t->set_memento(&memento,aspects,aspect_only); // add date;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeDateMemento failed: " << helper.errorMsg());
attr.setFree();
NodeDateMemento memento1( attr ); // change date
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeDateMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
ecf::CronAttr attr;
ecf::TimeSlot start( 0, 0 );
ecf::TimeSlot finish( 10, 0 );
ecf::TimeSlot incr( 0, 5 );
std::vector<int> weekdays; for(int i=0;i<7;++i) weekdays.push_back(i);
std::vector<int> daysOfMonth;for(int i=1;i<32;++i) daysOfMonth.push_back(i);
std::vector<int> months; for(int i=1;i<13;++i) months.push_back(i);
attr.addTimeSeries(start,finish,incr);
attr.addWeekDays( weekdays );
attr.addDaysOfMonth(daysOfMonth);
attr.addMonths( months );
NodeCronMemento memento(attr);
t->set_memento(&memento,aspects,aspect_only); // add cron;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeCronMemento failed: " << helper.errorMsg());
attr.setFree();
NodeCronMemento memento1( attr ); // change cron
t->set_memento(&memento1,aspects,aspect_only);
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeCronMemento failed: " << helper.errorMsg());
}
{
Defs defs;
suite_ptr suite = defs.add_suite("s1");
node_ptr t = suite->add_task("t1");
std::vector<ecf::Child::CmdType> child_cmds = ecf::Child::list();
ZombieAttr attr(ecf::Child::USER, child_cmds, ecf::User::FOB,10);
NodeZombieMemento memento(attr);
t->set_memento(&memento,aspects,aspect_only); // add zombie;
PersistHelper helper;
BOOST_CHECK_MESSAGE( helper.test_state_persist_and_reload_with_checkpt(defs),"NodeZombieMemento failed: " << helper.errorMsg());
}
}
BOOST_AUTO_TEST_SUITE_END()
|
#!/usr/bin/env bash
# Build and deploy the "etcd" intermediate CA, signed by the root CA.
# Outputs: $BUILD_CA_DIR/etcd-ca{.pem,-key.pem,.csr}; deploys ca.key plus a
# root+intermediate bundle (ca.crt) into $DEPLOY_DIR.
cd "$(dirname "$0")"   # quoted so paths containing spaces do not word-split
PKI_BUILD_HOME="${PKI_BUILD_HOME:-./build/pki}";
PKI_DEPLOY_HOME="${PKI_DEPLOY_HOME:-./build/deploy/pki}";
BUILD_DIR="$PKI_BUILD_HOME"
BUILD_CA_DIR="$BUILD_DIR/CA"
ROOT_CA_NAME="root-ca"
DEPLOY_DIR="$PKI_DEPLOY_HOME/etc/etcd"
PREFIX="etcd"

# Bootstrap the root CA first if it has not been generated yet.
if [ ! -f "$BUILD_CA_DIR/$ROOT_CA_NAME.pem" ]; then
  ./root-ca.sh
fi

mkdir -p "$BUILD_CA_DIR"

# Generate the intermediate CA, then sign its CSR with the root CA.
if [ ! -f "$BUILD_CA_DIR/$PREFIX-ca.pem" ]; then
  echo "--- Generate the \"${PREFIX}\" CA ---";
  cfssl gencert -initca "./CA/$PREFIX-ca-csr.json" | \
    cfssljson -bare "$BUILD_CA_DIR/$PREFIX-ca"
  cfssl sign -ca="$BUILD_CA_DIR/$ROOT_CA_NAME.pem" -ca-key="$BUILD_CA_DIR/$ROOT_CA_NAME-key.pem" \
    -config=./CA/ca-config.json -profile="intermediate_ca" \
    "$BUILD_CA_DIR/$PREFIX-ca.csr" | \
    cfssljson -bare "$BUILD_CA_DIR/$PREFIX-ca"
fi
# cfssl-certinfo -cert "$BUILD_CA_DIR/$PREFIX-ca.pem"

mkdir -p "$DEPLOY_DIR"

# Deploy: private key, plus the certificate chain bundled root-first.
if [ ! -f "$DEPLOY_DIR/ca.key" ]; then
  echo "--- Deploy the \"${PREFIX}\" CA ---";
  cp -vf "$BUILD_CA_DIR/$PREFIX-ca-key.pem" \
    "$DEPLOY_DIR/ca.key"
  mkbundle -f "$DEPLOY_DIR/ca.crt" \
    "$BUILD_CA_DIR/$ROOT_CA_NAME.pem" \
    "$BUILD_CA_DIR/$PREFIX-ca.pem"
fi
|
import { Component, OnInit, OnChanges, Input, Output, EventEmitter } from '@angular/core';
import { FormItem, SurveyErrorStateMatcher, FormItemWidget } from '../index';
// Text-input form item: extends the base FormItem with a hint string
// (presumably rendered as field hint text — confirm in form-item-text.component.html).
export class FormItemText extends FormItem {
hint: string;
}
@Component({
selector: 'ammo-form-item-text',
templateUrl: './form-item-text.component.html',
styleUrls: ['./form-item-text.component.scss']
})
export class FormItemTextComponent implements FormItemWidget, OnInit {
@Input() item: FormItemText;
@Input() editable: boolean=true;
@Output() changes = new EventEmitter<FormItemText>();
matcher = new SurveyErrorStateMatcher();
constructor() { }
ngOnInit() {
this.matcher.item=this.item;
}
ngOnChanges() {
this.matcher.item=this.item;
}
checkRequired(placeholder) {
if (placeholder === 'Required') {
return true;
}
}
onValueChanges(item) {
this.changes.emit(item);
}
}
|
#!/usr/bin/env bash
# A very simple case - Error in this detected by mgsmith@netgate
# Enable modstate and save running on a simple system without upgrade callback
# Upgrade yang revision, but no other (upgrade) changes
# Then start from running with modstate enabled and the new revision
# Magic line must be first in script (see README.md)
s="$_" ; . ./lib.sh || if [ "$s" = $0 ]; then exit 0; else return 0; fi

APPNAME=simple

cfg=$dir/conf_yang.xml

# Create configuration
# Note CLICON_XMLDB_MODSTATE=true: module-state (including yang revision) is
# stored alongside the datastore, which is what this test exercises.
cat <<EOF > $cfg
<clixon-config xmlns="http://clicon.org/config">
<CLICON_CONFIGFILE>/usr/local/etc/clixon.xml</CLICON_CONFIGFILE>
<CLICON_FEATURE>*:*</CLICON_FEATURE>
<CLICON_YANG_DIR>/usr/local/share/clixon</CLICON_YANG_DIR>
<CLICON_YANG_DIR>$dir</CLICON_YANG_DIR>
<CLICON_YANG_MODULE_MAIN>$APPNAME</CLICON_YANG_MODULE_MAIN>
<CLICON_CLI_MODE>hello</CLICON_CLI_MODE>
<CLICON_CLISPEC_DIR>/usr/local/lib/hello/clispec</CLICON_CLISPEC_DIR>
<CLICON_SOCK>/usr/local/var/hello.sock</CLICON_SOCK>
<CLICON_BACKEND_PIDFILE>/usr/local/var/hello.pidfile</CLICON_BACKEND_PIDFILE>
<CLICON_XMLDB_DIR>$dir</CLICON_XMLDB_DIR>
<CLICON_XMLDB_MODSTATE>true</CLICON_XMLDB_MODSTATE>
<CLICON_STARTUP_MODE>init</CLICON_STARTUP_MODE>
<CLICON_MODULE_LIBRARY_RFC7895>false</CLICON_MODULE_LIBRARY_RFC7895>
</clixon-config>
EOF

# Initial yang module: single revision 2019-04-17.
cat <<EOF > $dir/$APPNAME.yang
module $APPNAME {
yang-version 1.1;
namespace "urn:example:simple";
prefix he;
revision 2019-04-17 {
description
"Clixon hello world example";
}
container hello{
container world{
presence true;
}
}
}
EOF

new "test params: -f $cfg"
# Bring your own backend
if [ $BE -ne 0 ]; then
    # kill old backend (if any)
    new "kill old backend"
    sudo clixon_backend -zf $cfg
    if [ $? -ne 0 ]; then
        err
    fi
    new "start backend -s init -f $cfg"
    start_backend -s init -f $cfg

    new "waiting"
    wait_backend
fi

# Commit some config so running (with modstate) is saved under the old revision.
new "add hello world (with modstate)"
expecteof "$clixon_netconf -qf $cfg" 0 "<rpc $DEFAULTNS><edit-config><target><candidate/></target><config><hello xmlns=\"urn:example:simple\"><world/></hello></config></edit-config></rpc>]]>]]>" "^<rpc-reply $DEFAULTNS><ok/></rpc-reply>]]>]]>$"

new "netconf commit"
expecteof "$clixon_netconf -qf $cfg" 0 "<rpc $DEFAULTNS><commit/></rpc>]]>]]>" "^<rpc-reply $DEFAULTNS><ok/></rpc-reply>]]>]]>$"

new "Kill backend"
# Check if premature kill
pid=$(pgrep -u root -f clixon_backend)
if [ -z "$pid" ]; then
    err "backend already dead"
fi
# kill backend
stop_backend -f $cfg

# Now add a new yang for hello
# Same module with a NEW top revision 2020-01-01 and no other changes; the
# datastore saved above still references the 2019 revision in its modstate.
cat <<EOF > $dir/$APPNAME.yang
module $APPNAME {
yang-version 1.1;
namespace "urn:example:simple";
prefix he;
revision 2020-01-01 {
description
"Test new revision";
}
revision 2019-04-17 {
description
"Clixon hello world example";
}
container hello{
container world{
presence true;
}
}
}
EOF

# Now start again from running with modstate enabled and new revision
if [ $BE -ne 0 ]; then
    new "start backend -s running -f $cfg"
    start_backend -s running -f $cfg

    new "waiting"
    wait_backend
fi

# The config written under the old revision must be returned intact.
new "netconf get config"
expecteof "$clixon_netconf -qf $cfg" 0 "<rpc $DEFAULTNS><get-config><source><candidate/></source></get-config></rpc>]]>]]>" "^<rpc-reply $DEFAULTNS><data><hello xmlns=\"urn:example:simple\"><world/></hello></data></rpc-reply>]]>]]>$"

new "Kill backend"
# Check if premature kill
pid=$(pgrep -u root -f clixon_backend)
if [ -z "$pid" ]; then
    err "backend already dead"
fi
# kill backend
stop_backend -f $cfg

rm -rf $dir
|
source ./lib/extended-oc.sh

# Environment-specific parameter file; its whitespace-separated contents become
# the positional arguments to runDeployAll (invoked at the bottom of the file).
PARAMS_FOLDER=./params/VENV/
PARAMS_FILE=admin-0.config
ARGS_FILE=${PARAMS_FOLDER}${PARAMS_FILE}

# ====================================================================================
# Order dependent
# No spaces
# Set these in the above config file
# (The values below are placeholder documentation of each slot; the real values
#  are read from ARGS_FILE at invocation time.)
TOOLS_PROJECT=the-tools-project
TARGET_PROJECT=the-target-destination-environment-project
ADMIN_SERVER_NODEJS_BUILD=the-build-tied-to-the-github-code-branch-for-the-admin-server
ADMIN_SERVER_NODEJS_DEPLOYMENT=the-deployment-of-the-admin-server-website
ADMIN_SERVER_MONGODB_DEPLOYMENT=the-database-for-the-admin-server
ADMIN_SERVER_MINIO_DEPLOYMENT=the-minio-filesystem-proxy-for-the-admin-server-filesystem
ADMIN_SERVER_NGINX_BUILD=the-build-of-the-network-routing-proxy-for-the-admin-server
ADMIN_SERVER_NGINX_DEPLOYMENT=the-deployment-of-the-network-routing-proxy-for-the-admin-server
ADMIN_SERVER_BC_TEMPLATE=the-admin-server-build-config-json-template
ADMIN_SERVER_BC_PARAMS=the-admin-server-build-config-settings
ADMIN_SERVER_DC_TEMPLATE=the-admin-server-deployment-config-json-template
ADMIN_SERVER_DC_PARAMS=the-admin-server-deployment-config-settings
NGINX_BC_TEMPLATE=the-nginx-build-config-json-template
NGINX_BC_PARAMS=the-nginx-build-config-settings
NGINX_DC_TEMPLATE=the-nginx-deployment-config-json-template
NGINX_DC_PARAMS=the-nginx-deployment-config-settings
# ====================================================================================
# Deploy the full admin-server stack: process the build-config templates in the
# tools project and the deployment-config templates in the target project.
# Takes 16 order-dependent positional arguments (see the *_TEMPLATE/*_PARAMS
# documentation block above); normally invoked with the contents of ARGS_FILE.
runDeployAll() {
  checkOpenshiftSession
  checkFileExists "config" "${ARGS_FILE}"

  local tools_project target_project admin_server_nodejs_build admin_server_nodejs_deployment \
    admin_server_mongodb_deployment admin_server_minio_deployment admin_server_nginx_build \
    admin_server_nginx_deployment admin_server_bc_template admin_server_bc_params admin_server_dc_template \
    admin_server_dc_params nginx_bc_template nginx_bc_params nginx_dc_template nginx_dc_params

  extractArgument tools_project "runDeployAll" "TOOLS_PROJECT" "${1}"
  extractArgument target_project "runDeployAll" "TARGET_PROJECT" "${2}"
  extractArgument admin_server_nodejs_build "runDeployAll" "ADMIN_SERVER_NODEJS_BUILD" "${3}"
  extractArgument admin_server_nodejs_deployment "runDeployAll" "ADMIN_SERVER_NODEJS_DEPLOYMENT" "${4}"
  extractArgument admin_server_mongodb_deployment "runDeployAll" "ADMIN_SERVER_MONGODB_DEPLOYMENT" "${5}"
  extractArgument admin_server_minio_deployment "runDeployAll" "ADMIN_SERVER_MINIO_DEPLOYMENT" "${6}"
  extractArgument admin_server_nginx_build "runDeployAll" "ADMIN_SERVER_NGINX_BUILD" "${7}"
  extractArgument admin_server_nginx_deployment "runDeployAll" "ADMIN_SERVER_NGINX_DEPLOYMENT" "${8}"
  extractArgument admin_server_bc_template "runDeployAll" "ADMIN_SERVER_BC_TEMPLATE" "${9}"
  extractArgument admin_server_bc_params "runDeployAll" "ADMIN_SERVER_BC_PARAMS" "${10}"
  extractArgument admin_server_dc_template "runDeployAll" "ADMIN_SERVER_DC_TEMPLATE" "${11}"
  extractArgument admin_server_dc_params "runDeployAll" "ADMIN_SERVER_DC_PARAMS" "${12}"
  extractArgument nginx_bc_template "runDeployAll" "NGINX_BC_TEMPLATE" "${13}"
  extractArgument nginx_bc_params "runDeployAll" "NGINX_BC_PARAMS" "${14}"
  extractArgument nginx_dc_template "runDeployAll" "NGINX_DC_TEMPLATE" "${15}"
  extractArgument nginx_dc_params "runDeployAll" "NGINX_DC_PARAMS" "${16}"

  checkProjectExists "${tools_project}"
  checkProjectExists "${target_project}"

  # BUG FIX: these calls previously used comma-separated arguments
  # ('checkFileExists "template", ${file}'), which bash passes as the literal
  # word 'template,' — shell function arguments are whitespace-separated only
  # (compare the correct 'checkFileExists "config" ...' call above).
  checkFileExists "template" "${admin_server_bc_template}"
  checkFileExists "template" "${admin_server_dc_template}"
  checkFileExists "template" "${nginx_bc_template}"
  checkFileExists "template" "${nginx_dc_template}"

  local admin_server_bc_params_path=${PARAMS_FOLDER}${admin_server_bc_params}
  local admin_server_dc_params_path=${PARAMS_FOLDER}${admin_server_dc_params}
  local nginx_bc_params_path=${PARAMS_FOLDER}${nginx_bc_params}
  local nginx_dc_params_path=${PARAMS_FOLDER}${nginx_dc_params}

  checkFileExists "parameters" "${admin_server_bc_params_path}"
  checkFileExists "parameters" "${admin_server_dc_params_path}"
  checkFileExists "parameters" "${nginx_bc_params_path}"
  checkFileExists "parameters" "${nginx_dc_params_path}"

  echo -e \\n"deploy-all: Starting deployment."\\n

  # Remember the caller's project so it can be restored afterwards.
  local original_namespace=$(oc project --short=true)

  oc project ${tools_project}
  oc -n ${tools_project} process -f ${admin_server_bc_template} --param-file=${admin_server_bc_params_path} | oc create -f -
  oc project ${target_project}
  oc -n ${target_project} process -f ${admin_server_dc_template} --param-file=${admin_server_dc_params_path} | oc create -f -
  oc project ${tools_project}
  oc -n ${tools_project} process -f ${nginx_bc_template} --param-file=${nginx_bc_params_path} | oc create -f -
  oc project ${target_project}
  oc -n ${target_project} process -f ${nginx_dc_template} --param-file=${nginx_dc_params_path} | oc create -f -
  oc project ${original_namespace}

  echo -e \\n"deploy-all: Completed deployment."\\n
}
# Word splitting of the file contents is intentional here: each
# whitespace-separated token becomes one positional argument.
runDeployAll $(<${ARGS_FILE})
|
def sort_abs(arr):
    """Return the absolute values of ``arr``, sorted in descending order."""
    return sorted((abs(num) for num in arr), reverse=True)


sort_abs([-5, 6, -2, 8, -7])
// Copyright (C) 2010, <NAME> <<EMAIL>>. All rights reserved.
package socketlog
import (
"bytes"
"net"
"net/url"
"sync"
l4g "github.com/ccpaging/nxlog4go"
"github.com/ccpaging/nxlog4go/cast"
"github.com/ccpaging/nxlog4go/driver"
"github.com/ccpaging/nxlog4go/patt"
)
// Appender is an Appender that sends output to an UDP/TCP server
type Appender struct {
	mu       sync.Mutex            // ensures atomic writes; protects the following fields
	rec      chan *driver.Recorder // entry channel
	runOnce  sync.Once             // starts the background run() goroutine exactly once
	waitExit *sync.WaitGroup       // non-nil while run() is active; nil after Close()

	level    int           // minimum level accepted by Enabled()
	layout   driver.Layout // format entry for output

	proto    string   // network protocol passed to net.Dial, e.g. "udp" or "tcp"
	hostport string   // server address as "host:port"
	sock     net.Conn // lazily dialed connection; nil until first output()
}
/* Bytes Buffer */
// bufferPool recycles encode buffers so output() avoids a per-record allocation.
var bufferPool *sync.Pool

func init() {
	bufferPool = &sync.Pool{
		New: func() interface{} {
			return new(bytes.Buffer)
		},
	}
	// Make this appender available to driver.Open under the "socket" scheme.
	driver.Register("socket", &Appender{})
}
// NewAppender creates a socket appender with proto and hostport.
// Records are buffered on a channel of capacity 32 and written by a
// background goroutine; the layout defaults to JSON.
func NewAppender(proto, hostport string) *Appender {
	return &Appender{
		rec:      make(chan *driver.Recorder, 32),
		layout:   patt.NewJSONLayout(),
		proto:    proto,
		hostport: hostport,
	}
}
// Open creates an Appender with DSN.
// The DSN has the form "proto://host:port"; missing pieces fall back to the
// defaults, and a blank or unparsable DSN yields udp to 127.0.0.1:12124.
func (*Appender) Open(dsn string, args ...interface{}) (driver.Appender, error) {
	// Defaults, overridden piecewise by whatever the DSN supplies.
	proto := "udp"
	hostport := "127.0.0.1:12124"

	if u, err := url.Parse(dsn); dsn != "" && err == nil {
		if u.Scheme != "" {
			proto = u.Scheme
		}
		if u.Host != "" {
			hostport = u.Host
		}
	}

	return NewAppender(proto, hostport).SetOptions(args...), nil
}
// Layout returns the output layout for the appender.
// Guarded by mu, so it is safe to call concurrently with SetLayout.
func (sa *Appender) Layout() driver.Layout {
	sa.mu.Lock()
	defer sa.mu.Unlock()
	return sa.layout
}
// SetLayout sets the output layout for the appender.
// Returns the appender to allow call chaining.
func (sa *Appender) SetLayout(layout driver.Layout) *Appender {
	sa.mu.Lock()
	defer sa.mu.Unlock()
	sa.layout = layout
	return sa
}
// SetOptions sets name-value pair options.
//
// Return the appender.
//
// Errors from individual Set calls are discarded, so one bad option does
// not prevent the remaining options from being applied.
func (sa *Appender) SetOptions(args ...interface{}) *Appender {
	ops, idx, _ := driver.ArgsToMap(args...)
	for _, k := range idx {
		sa.Set(k, ops[k])
	}
	return sa
}
// Enabled encodes log Recorder and output it.
// It queues the record for asynchronous output (starting the background
// writer on first use) and always returns false so the caller does not
// also write the record itself.
func (sa *Appender) Enabled(r *driver.Recorder) bool {
	// Drop records below the configured threshold.
	// (Replaces the former `!(r.Level >= sa.level)` with the direct form.)
	if r.Level < sa.level {
		return false
	}

	// Lazily start the background writer exactly once.
	sa.runOnce.Do(func() {
		sa.waitExit = &sync.WaitGroup{}
		sa.waitExit.Add(1)
		go sa.run(sa.waitExit)
	})

	// Write after closed: fall back to synchronous output.
	// NOTE(review): waitExit is read here without holding mu while Close()
	// clears it from another goroutine — possible data race; confirm the
	// intended concurrency contract for Enabled vs. Close.
	if sa.waitExit == nil {
		sa.output(r)
		return false
	}

	sa.rec <- r
	return false
}
// Write is the filter's output method. This will block if the output
// buffer is full.
//
// Currently a no-op stub: it discards b and reports zero bytes written;
// all real output flows through Enabled/output.
func (sa *Appender) Write(b []byte) (int, error) {
	return 0, nil
}
// run drains the record channel until it is closed (see closeChannel),
// then signals waitExit. Started once, from Enabled, as a goroutine.
func (sa *Appender) run(waitExit *sync.WaitGroup) {
	// Idiomatic replacement for the former single-case select loop:
	// ranging over the channel exits when it is closed and drained.
	for r := range sa.rec {
		sa.output(r)
	}
	waitExit.Done()
}
// closeChannel shuts down the background writer: close the channel,
// wait for run() to exit, then flush anything still queued.
func (sa *Appender) closeChannel() {
	// notify closing. See run()
	close(sa.rec)
	// waiting for running channel closed
	sa.waitExit.Wait()
	// Mark closed so Enabled() falls back to synchronous output.
	sa.waitExit = nil
	// drain channel
	for r := range sa.rec {
		sa.output(r)
	}
}
// Close the socket if it opened.
// Subsequent calls are no-ops: waitExit is nil once the channel has been
// closed, so the early return fires.
func (sa *Appender) Close() {
	if sa.waitExit == nil {
		return
	}
	sa.closeChannel()

	sa.mu.Lock()
	defer sa.mu.Unlock()
	if sa.sock != nil {
		sa.sock.Close()
	}
}
// Output a log recorder to a socket. Connecting to the server on demand.
// On a write failure the connection is dropped so the next call redials.
func (sa *Appender) output(r *driver.Recorder) {
	sa.mu.Lock()
	defer sa.mu.Unlock()

	var err error
	if sa.sock == nil {
		// Dial lazily: on first use, and again after a failed write.
		sa.sock, err = net.Dial(sa.proto, sa.hostport)
		if err != nil {
			l4g.LogLogError(err)
			return
		}
	}

	// Encode into a pooled buffer to avoid a per-record allocation.
	buf := bufferPool.Get().(*bytes.Buffer)
	buf.Reset()
	defer bufferPool.Put(buf)

	sa.layout.Encode(buf, r)
	_, err = sa.sock.Write(buf.Bytes())
	if err != nil {
		l4g.LogLogError(err)
		// Drop the broken connection; the next record triggers a redial.
		sa.sock.Close()
		sa.sock = nil
	}
}
// Set sets name-value option with:
//	level - The output level
//
// Pattern layout options:
//	pattern	 - Layout format pattern
//	...
//
// Return error
func (sa *Appender) Set(k string, v interface{}) (err error) {
	sa.mu.Lock()
	defer sa.mu.Unlock()

	var s string
	switch k {
	case "level":
		var n int
		if n, err = l4g.Level(l4g.INFO).IntE(v); err == nil {
			sa.level = n
		}
	case "protocol": // DEPRECATED. See Open function's dsn argument
		// Changing the protocol drops any live connection; output()
		// redials with the new settings on the next record.
		if s, err = cast.ToString(v); err == nil && len(s) > 0 {
			if sa.sock != nil {
				sa.sock.Close()
			}
			sa.proto = s
		}
	case "endpoint": // DEPRECATED. See Open function's dsn argument
		if s, err = cast.ToString(v); err == nil && len(s) > 0 {
			if sa.sock != nil {
				sa.sock.Close()
			}
			sa.hostport = s
		}
	default:
		// Unknown keys are forwarded to the layout (e.g. "pattern").
		return sa.layout.Set(k, v)
	}
	return
}
|
import { Model } from '@watheia/model';
import { findObjectById } from './find-object-by-id';
/**
 * Resolves the reference stored under `fieldName` on `object` by looking it
 * up (by id) in `objects`. Returns `object` unchanged when the field is
 * absent; otherwise returns a shallow copy with the field replaced by the
 * resolved object. `debugContext` accumulates the key path and object stack
 * for error reporting in `findObjectById`.
 */
export function resolveReferenceField(
  object: Model,
  fieldName: string,
  objects: Model[],
  debugContext: { keyPath: (string | number)[]; stack: Model[] } = { keyPath: [], stack: [] }
) {
  const hasField = fieldName in object;
  if (!hasField) {
    return object;
  }
  const childContext = {
    keyPath: [...debugContext.keyPath, fieldName],
    stack: [...debugContext.stack, object]
  };
  const resolved = findObjectById(object[fieldName], objects, childContext);
  return { ...object, [fieldName]: resolved };
}
|
package net.johnewart.gearman.example;
import net.johnewart.gearman.client.GearmanFunction;
import net.johnewart.gearman.net.Connection;
import org.apache.commons.lang3.ArrayUtils;
import net.johnewart.gearman.client.GearmanWorkerPool;
import net.johnewart.gearman.common.Job;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Demo: runs a two-thread Gearman worker pool that serves a byte-reversing
 * "reverse" function against a local Gearman server on port 4730.
 */
public class WorkerPoolDemo {
    // Loggers are per-class constants: declare the reference final.
    private static final Logger LOG = LoggerFactory.getLogger(WorkerPoolDemo.class);

    /** Worker function that reverses the job's payload bytes in place. */
    static class ReverseFunction implements GearmanFunction
    {
        @Override
        public byte[] process(Job job) {
            byte[] data = job.getData();
            String function = job.getFunctionName();
            LOG.debug("Got data for function " + function);
            ArrayUtils.reverse(data);
            return data;
        }
    }

    /**
     * Builds the pool, registers the "reverse" callback, and blocks
     * processing jobs until interrupted. Any failure is logged.
     */
    public static void main(String... args)
    {
        try {
            // (Removed an unused local byte[] "This is a test" left over
            // from an earlier client demo.)
            GearmanWorkerPool workerPool = new GearmanWorkerPool.Builder()
                    .threads(2)
                    .withConnection(new Connection("localhost", 4730))
                    .build();

            workerPool.registerCallback("reverse", new ReverseFunction());
            workerPool.doWork();
        } catch (Exception e) {
            LOG.error("Error: ", e);
        }
    }
}
|
<reponame>dmitric/studio
import ItemManager from './ItemManager.js'
import ASCIIQuadtreeShader from '../Shaders/ASCIIQuadtreeShader.js'
import ASCIIShader from '../Shaders/ASCIIShader.js'
import CircleQuadtreeShader from '../Shaders/CircleQuadtreeShader.js'
import CircleShader from '../Shaders/CircleShader.js'
import CircleSketchingShader from '../Shaders/CircleSketchingShader.js'
import CloseShader from '../Shaders/CloseShader.js'
import CrossSplitQuadtreeShader from '../Shaders/CrossSplitQuadtreeShader.js'
import CubeShader from '../Shaders/CubeShader.js'
import DiscShader from '../Shaders/DiscShader.js'
import EquilateralTriangleShader from '../Shaders/EquilateralTriangleShader.js'
import GhostSquareShader from '../Shaders/GhostSquareShader.js'
import GhostTriangleShader from '../Shaders/GhostTriangleShader.js'
import GlitchShader from '../Shaders/GlitchShader.js'
import PipeShader from '../Shaders/PipeShader.js'
import PixelFireShader from '../Shaders/PixelFireShader.js'
import PixelShader from '../Shaders/PixelShader.js'
import QuadtreeShader from '../Shaders/QuadtreeShader.js'
import RainbowShader from '../Shaders/RainbowShader.js'
import ScaledCircleShader from '../Shaders/ScaledCircleShader.js'
import ScaledTriangleShader from '../Shaders/ScaledTriangleShader.js'
import SketchingShader from '../Shaders/SketchingShader.js'
import SlantShader from '../Shaders/SlantShader.js'
import StarsAndBarsShader from '../Shaders/StarsAndBarsShader.js'
import TargetShader from '../Shaders/TargetShader.js'
import TriangleShader from '../Shaders/TriangleShader.js'
/**
 * ShaderManager
 *
 * Supplies the catalogue of built-in shaders, instantiated in the
 * established display order.
 */
export default class ShaderManager extends ItemManager {
  static defaultItems () {
    const shaderClasses = [
      PixelShader,
      CircleShader,
      TriangleShader,
      CubeShader,
      EquilateralTriangleShader,
      CrossSplitQuadtreeShader,
      ASCIIShader,
      ScaledCircleShader,
      GhostSquareShader,
      GlitchShader,
      SlantShader,
      CircleSketchingShader,
      CloseShader,
      PipeShader,
      ScaledTriangleShader,
      DiscShader,
      SketchingShader,
      StarsAndBarsShader,
      ASCIIQuadtreeShader,
      QuadtreeShader,
      CircleQuadtreeShader,
      RainbowShader,
      PixelFireShader,
      TargetShader,
      GhostTriangleShader
    ]
    // One fresh instance per class, preserving list order.
    return shaderClasses.map(ShaderClass => new ShaderClass())
  }
}
|
import Avatar from '@material-ui/core/Avatar';
import Button from '@material-ui/core/Button';
import Card from '@material-ui/core/Card';
import CardActions from '@material-ui/core/CardActions';
import CardContent from '@material-ui/core/CardContent';
import CardHeader from '@material-ui/core/CardHeader';
import { red } from '@material-ui/core/colors';
import IconButton from '@material-ui/core/IconButton';
import { createStyles, makeStyles, Theme } from '@material-ui/core/styles';
import Typography from '@material-ui/core/Typography';
import MoreVertIcon from '@material-ui/icons/MoreVert';
import React from 'react';
// Styles for the Profile card. Only `root` and `avatar` are referenced by the
// markup below; `media`/`expand`/`expandOpen` are currently unused — presumably
// kept from the original Material-UI card recipe for future use.
const useStyles = makeStyles((theme: Theme) =>
  createStyles({
    root: {
      maxWidth: 345,
    },
    media: {
      height: 0,
      paddingTop: '56.25%', // 16:9
    },
    expand: {
      transform: 'rotate(0deg)',
      marginLeft: 'auto',
      transition: theme.transitions.create('transform', {
        duration: theme.transitions.duration.shortest,
      }),
    },
    expandOpen: {
      transform: 'rotate(180deg)',
    },
    avatar: {
      backgroundColor: red[500],
    },
  }),
);
export interface ProfileProps extends React.HTMLAttributes<React.ReactNode> {
  /**
   * The name of the person; its first letter is used for the avatar.
   */
  name: string,
  /**
   * The title of the person, shown as the card subheader.
   */
  title: string,
  /**
   * A work summary of the person, shown in the card body.
   */
  workSummary: string
  /**
   * Handler for the "Hire" button click event.
   */
  onHire?: React.MouseEventHandler,
};
/**
 * Profile card: avatar (first letter of the name), name/title header,
 * work summary body, and a "Hire" action button wired to `onHire`.
 */
export function Profile({ name, title, workSummary, onHire }: ProfileProps) {
  const classes = useStyles();
  return (
    <Card className={classes.root}>
      <CardHeader
        avatar={
          <Avatar aria-label="recipe" className={classes.avatar}>
            {name && name.charAt(0).toUpperCase()}
          </Avatar>
        }
        action={
          <IconButton aria-label="settings">
            <MoreVertIcon />
          </IconButton>
        }
        title={name}
        subheader={title}
      />
      <CardContent>
        <Typography variant="body2" color="textSecondary" component="p">
          {workSummary}
        </Typography>
      </CardContent>
      <CardActions disableSpacing>
        <Button variant="contained" color="primary" onClick={onHire}>
          Hire
        </Button>
      </CardActions>
    </Card>
  );
};
|
#!/bin/bash
# Run a single dieharder randomness test (-d 101) against generator id 206,
# with a fixed seed (-S) so the run is reproducible.
dieharder -d 101 -g 206 -S 1065277196
|
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @description Creates a new ControllerDef, including the descriptor and action definitions.
* A ControllerDef instance is created as part of the ComponentDef initialization.
* @constructor
* @param {Object} config
* @export
*/
function ControllerDef(config){
    this.descriptor = config[Json.ApplicationKey.DESCRIPTOR];
    this.actionDefs = {};
    var actionDefs = config[Json.ApplicationKey.ACTIONDEFS];
    // Build an ActionDef for each configured action, keyed by action name.
    for(var i=0;i<actionDefs.length;i++){
        var actionDefConfig = actionDefs[i];
        var actionDef = $A.componentService.createActionDef(actionDefConfig);
        this.actionDefs[actionDef.getName()] = actionDef;
    }
}
/**
 * Gets the Controller Descriptor with the format <code>markup://aura:component</code>.
 * @returns {String} ControllerDef descriptor
 */
ControllerDef.prototype.getDescriptor = function(){
    return this.descriptor;
};
/**
 * Check if an action def exists.
 * @param {String} key - An action name defined on the controller.
 * @returns {Boolean} true if an action definition with that name exists.
 */
ControllerDef.prototype.hasActionDef = function(key){
    return this.actionDefs.hasOwnProperty(key);
};
/**
 * Gets the Action Definition.
 * @param {String} key - A action name which is defined on the controller.
 * @returns {ActionDef} an action definition, undefined if the definition does not exist.
 */
ControllerDef.prototype.getActionDef = function(key) {
    return this.actionDefs[key];
};
/**
 * Gets a new Action instance based on the given key.
 * @param {String} key - A action name which is defined on the controller.
 * @returns {Action} A new Action instance
 */
ControllerDef.prototype.get = function(key){
    // Throws a TypeError if the action does not exist; callers can guard
    // with hasActionDef(key).
    return this.getActionDef(key).newInstance();
};

Aura.Controller.ControllerDef = ControllerDef;
|
/*
Jameleon - An automation testing tool..
Copyright (C) 2005 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.event;
import net.sf.jameleon.TestCaseTag;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
 * Singleton registry of {@link TestCaseListener}s that broadcasts
 * begin/end events for test cases to every registered listener.
 */
public class TestCaseEventHandler{
    private static TestCaseEventHandler eventHandler;
    // Synchronized list; iteration still requires an explicit synchronized
    // block on the list (see beginTestCase/endTestCase).
    private final List testCaseListeners = Collections.synchronizedList(new LinkedList());

    private TestCaseEventHandler(){}

    /**
     * Returns the singleton instance, creating it on first use.
     * Synchronized so concurrent first calls cannot create two instances —
     * the listener list is already concurrency-aware, so the lazy singleton
     * must be too.
     */
    public static synchronized TestCaseEventHandler getInstance(){
        if (eventHandler == null) {
            eventHandler = new TestCaseEventHandler();
        }
        return eventHandler;
    }

    /**
     * Discards the singleton; the next getInstance() creates a fresh handler.
     * Locks the class (not this instance) so it cannot race with getInstance().
     */
    public void clearInstance(){
        synchronized (TestCaseEventHandler.class) {
            eventHandler = null;
        }
    }

    /** Registers a listener; nulls and duplicates are ignored. */
    public void addTestCaseListener(TestCaseListener tcl){
        if (tcl != null && !testCaseListeners.contains(tcl)){
            testCaseListeners.add(tcl);
        }
    }

    /**
     * Returns the live, internal listener list (not a copy); callers must
     * synchronize on it when iterating.
     */
    public List getTestCaseListeners(){
        return testCaseListeners;
    }

    /** Unregisters a listener; a no-op if it was never registered. */
    public void removeTestCaseListener(TestCaseListener tcl){
        testCaseListeners.remove(tcl);
    }

    /** Notifies every registered listener that the given test case is starting. */
    public void beginTestCase(TestCaseTag tct){
        TestCaseEvent tce = new TestCaseEvent(tct);
        synchronized(testCaseListeners){
            Iterator it = testCaseListeners.iterator();
            TestCaseListener tcl;
            while (it.hasNext()) {
                tcl = (TestCaseListener)it.next();
                tcl.beginTestCase(tce);
            }
        }
    }

    /** Notifies every registered listener that the given test case has ended. */
    public void endTestCase(TestCaseTag tct){
        TestCaseEvent tce = new TestCaseEvent(tct);
        synchronized(testCaseListeners){
            Iterator it = testCaseListeners.iterator();
            TestCaseListener tcl;
            while (it.hasNext()) {
                tcl = (TestCaseListener)it.next();
                tcl.endTestCase(tce);
            }
        }
    }
}
|
import React from 'react'
import { css, StyledContainer } from '@generates/swag'
import Spreadsheet from '../components/Spreadsheet.js'
// Demo page: renders an empty Spreadsheet with three sortable columns.
// isLoading is false but showLoading is true, so the loading indicator is
// forced on despite there being no data.
export default function NoDataPage () {
  return (
    <StyledContainer className={css({ fontFamily: 'sans-serif' })()}>
      <h1>
        swag-sheet
      </h1>
      <br />
      <div>
        <Spreadsheet
          columns={[
            {
              id: '<NAME>',
              disableSortBy: false
            },
            {
              id: 'Plate Number',
              disableSortBy: false
            },
            {
              id: 'Make and Model',
              disableSortBy: false
            }
          ]}
          isLoading={false}
          showLoading={true}
          css={{ table: { width: '100%' } }}
        />
      </div>
    </StyledContainer>
  )
}
|
#!/bin/sh
# Download, build, and install googletest 1.8.1 into $HOME/local/gtest.
set -e
cd "$(dirname "$0")"
SCRIPT_DIR=$(pwd)
# BUG FIX: was $(dirname $0), which yields the script's directory, not its name.
SCRIPT_NAME=$(basename "$0")
. ./utils/ensure_file.sh

# Pick a parallel build width portable across macOS and Linux.
if [ "$(uname)" = "Darwin" ]; then
    NPROC=$(sysctl -n hw.ncpu)
else
    NPROC=$(nproc)
fi

mkdir -p "$HOME/tmp"
cd "$HOME/tmp"

version=1.8.1
filename=googletest-release-${version}.tar.gz
folder=${filename%.tar.gz}
URL=https://github.com/google/googletest/archive/release-${version}.tar.gz

# Download unless a copy matching the expected SHA-1 already exists.
ensure_file 152b849610d91a9dfa1401293f43230c2e0c33f8 "$filename" "$URL"
tar -xf "$filename"
cd "$folder"

PREFIX=$HOME/local/gtest
mkdir -p build
cd build
cmake .. \
    -DGFLAGS_BUILD_SHARED_LIBS=ON \
    -DINSTALL_SHARED_LIBS=ON \
    -DCMAKE_INSTALL_PREFIX:PATH="$PREFIX"
make -j"${NPROC}" && make install
|
/*
* Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
*/
package com.linkedin.kafka.cruisecontrol.exception;
/**
 * This exception indicates that the percentage of partitions modeled in the load monitor is not enough.
 */
public class NotEnoughValidSnapshotsException extends KafkaCruiseControlException {

  /**
   * @param msg Detail message describing why there were not enough valid snapshots.
   */
  public NotEnoughValidSnapshotsException(String msg) {
    super(msg);
  }
}
|
#!/bin/bash
# LinuxGSM fix_sfc.sh function
# Author: Daniel Gibbs
# Website: https://linuxgsm.com
# Description: Resolves various issues with Zombie Master: Reborn.

functionselfname="$(basename "$(readlink -f "${BASH_SOURCE[0]}")")"

# The dedicated-server build ships its libraries as *_srv.so, but the engine
# loads them under their plain names. Symlink each one that is missing.
# (Refactor: the ten identical if/ln blocks collapsed into one loop; the set
# of libraries and the link commands are unchanged.)
for srvlib in datacache dedicated engine materialsystem replay shaderapiempty soundemittersystem studiorender vphysics scenefilecache; do
	if [ ! -f "${serverfiles}/bin/${srvlib}.so" ]; then
		ln -s "${serverfiles}/bin/${srvlib}_srv.so" "${serverfiles}/bin/${srvlib}.so"
	fi
done
|
import tensorflow as tf
import numpy as np  # Fix: `np` is used below but was never imported (NameError).

# Build a small stacked-LSTM network mapping a length-3 sequence to 3 outputs.
model = tf.keras.Sequential()
model.add(tf.keras.layers.LSTM(64, return_sequences=True, input_shape=(3, 1)))
model.add(tf.keras.layers.LSTM(64, activation='relu'))
model.add(tf.keras.layers.Dense(3))

# Adam optimizer; `learning_rate` replaces the deprecated `lr` keyword.
optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)

# Mean-squared-error regression loss.
model.compile(loss='mse', optimizer=optimizer)

# Fit on a single sample and predict a sequence from the same input.
# NOTE(review): the LSTM input_shape is (3, 1) (timesteps, features) but the
# arrays below are shaped (1, 3) — confirm an explicit reshape to (1, 3, 1)
# is not required by the TF version in use.
model.fit(np.expand_dims(np.array([1, 2, 3]), axis=0), np.expand_dims(np.array([1, 2, 3]), axis=0), epochs=5)
prediction = model.predict(np.expand_dims(np.array([1, 2, 3]), axis=0))
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2017 The Raven Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# NOTE(review): no shebang line; the script uses [[ ]] and `read -d`, so it
# must be invoked with bash explicitly — confirm callers do so.

# What to do
sign=false
verify=false
build=false
setupenv=false  # NOTE(review): never read below (--setup sets $setup) — appears vestigial

# Systems to build
linux=true
windows=true
osx=true

# Other Basic variables
SIGNER=        # GPG identity used to sign build asserts (positional arg 1)
VERSION=       # version/branch/commit to build (positional arg 2)
commit=false   # true when VERSION names a commit/branch rather than a tag (-c)
url=https://github.com/blastdev/blast-core
proc=2         # gitian parallel job count (-j)
mem=2000       # gitian VM memory in MiB (-m)
lxc=true       # build under LXC unless --kvm is passed
osslTarUrl=http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz
osslPatchUrl=https://bitcoin.org/cfields/osslsigncode-Backports-to-1.7.1.patch
scriptName=$(basename -- "$0")
signProg="gpg --detach-sign"
commitFiles=true

# Help Message (read -d '' captures the whole heredoc into $usage)
read -d '' usage <<- EOF
Usage: $scriptName [-c|u|v|b|s|B|o|h|j|m|] signer version
Run this script from the directory containing the blast, gitian-builder, gitian.sigs, and blast-detached-sigs.
Arguments:
signer GPG signer to sign each build assert file
version Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified
Options:
-c|--commit Indicate that the version argument is for a commit or branch
-u|--url Specify the URL of the repository. Default is https://github.com/blastdev/blast-core
-v|--verify Verify the Gitian build
-b|--build Do a Gitian build
-s|--sign Make signed binaries for Windows and Mac OSX
-B|--buildsign Build both signed and unsigned binaries
-o|--os Specify which Operating Systems the build is for. Default is lwx. l for linux, w for windows, x for osx
-j Number of processes to use. Default 2
-m Memory to allocate in MiB. Default 2000
--kvm Use KVM instead of LXC
--setup Set up the Gitian building environment. Uses KVM. If you want to use lxc, use the --lxc option. Only works on Debian-based systems (Ubuntu, Debian)
--detach-sign Create the assert file for detached signing. Will not commit anything.
--no-commit Do not commit anything to git
-h|--help Print this help message
EOF
# Get options and arguments
# Consume leading option flags; the loop breaks at the first non-option word,
# leaving the positional signer/version arguments for the code below.
while :; do
    case $1 in
    # Verify
    -v|--verify)
        verify=true
        ;;
    # Build
    -b|--build)
        build=true
        ;;
    # Sign binaries
    -s|--sign)
        sign=true
        ;;
    # Build then Sign
    -B|--buildsign)
        sign=true
        build=true
        ;;
    # PGP Signer
    -S|--signer)
        if [ -n "$2" ]
        then
            SIGNER=$2
            shift
        else
            echo 'Error: "--signer" requires a non-empty argument.'
            exit 1
        fi
        ;;
    # Operating Systems
    -o|--os)
        if [ -n "$2" ]
        then
            # Start from "build nothing", then enable each OS named in $2.
            linux=false
            windows=false
            osx=false
            if [[ "$2" = *"l"* ]]
            then
                linux=true
            fi
            if [[ "$2" = *"w"* ]]
            then
                windows=true
            fi
            if [[ "$2" = *"x"* ]]
            then
                osx=true
            fi
            shift
        else
            # Fix: the message previously ended with a literal '\n' (echo does
            # not interpret escapes without -e); drop the stray escape.
            echo 'Error: "--os" requires an argument containing an l (for linux), w (for windows), or x (for Mac OSX)'
            exit 1
        fi
        ;;
    # Help message
    -h|--help)
        echo "$usage"
        exit 0
        ;;
    # Commit or branch
    -c|--commit)
        commit=true
        ;;
    # Number of Processes
    -j)
        if [ -n "$2" ]
        then
            proc=$2
            shift
        else
            echo 'Error: "-j" requires an argument'
            exit 1
        fi
        ;;
    # Memory to allocate
    -m)
        if [ -n "$2" ]
        then
            mem=$2
            shift
        else
            echo 'Error: "-m" requires an argument'
            exit 1
        fi
        ;;
    # URL
    -u)
        if [ -n "$2" ]
        then
            url=$2
            shift
        else
            echo 'Error: "-u" requires an argument'
            exit 1
        fi
        ;;
    # kvm
    --kvm)
        lxc=false
        ;;
    # Detach sign
    --detach-sign)
        # `true` as the signing program makes gsign emit the assert without signing.
        signProg="true"
        commitFiles=false
        ;;
    # Commit files
    --no-commit)
        commitFiles=false
        ;;
    # Setup
    --setup)
        # NOTE(review): sets $setup (checked later); the $setupenv default at
        # the top of the file is never read — confirm it can be removed.
        setup=true
        ;;
    *) # Default case: If no more options then break out of the loop.
        break
    esac
    shift
done
# Set up LXC
if [[ $lxc = true ]]
then
    export USE_LXC=1
fi

# Check for OSX SDK
if [[ ! -e "gitian-builder/inputs/MacOSX10.11.sdk.tar.gz" && $osx == true ]]
then
    echo "Cannot build for OSX, SDK does not exist. Will build for other OSes"
    osx=false
fi

# Get signer
# Fix: was `[[ -n"$1" ]]` (no space) — that tests the literal string `-n$1`,
# which is always non-empty, so the branch ran even with no argument and could
# assign an empty SIGNER while still consuming a shift.
if [[ -n "$1" ]]
then
    SIGNER=$1
    shift
fi

# Get version
if [[ -n "$1" ]]
then
    VERSION=$1
    COMMIT=$VERSION
    shift
fi

# Check that a signer is specified
if [[ $SIGNER == "" ]]
then
    echo "$scriptName: Missing signer."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Check that a version is specified
if [[ $VERSION == "" ]]
then
    echo "$scriptName: Missing version."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Add a "v" if no -c
# Release tags are prefixed with "v"; raw commits/branches are used verbatim.
if [[ $commit = false ]]
then
    COMMIT="v${VERSION}"
fi
echo ${COMMIT}
# Setup build environment
# Installs host dependencies, clones the sigs/detached-sigs/builder repos, and
# creates the gitian base VM (LXC when USE_LXC is exported, KVM otherwise).
if [[ $setup = true ]]
then
    sudo apt-get install ruby apache2 git apt-cacher-ng python-vm-builder qemu-kvm qemu-utils
    git clone https://github.com/bitcoin-core/gitian.sigs.git
    git clone https://github.com/bitcoin-core/bitcoin-detached-sigs.git
    git clone https://github.com/devrandom/gitian-builder.git
    pushd ./gitian-builder
    if [[ -n "$USE_LXC" ]]
    then
        sudo apt-get install lxc
        bin/make-base-vm --suite trusty --arch amd64 --lxc
    else
        bin/make-base-vm --suite trusty --arch amd64
    fi
    popd
fi

# Set up build
# Check out the requested tag/commit in the source tree.
# NOTE(review): this enters ./blast, but every build step below references
# ../blast-core — confirm the expected directory layout (one of the two names
# looks stale).
pushd ./blast
git fetch
git checkout ${COMMIT}
popd
# Build
if [[ $build = true ]]
then
    # Make output folder
    mkdir -p ./blast-binaries/${VERSION}

    # Build Dependencies
    echo ""
    echo "Building Dependencies"
    echo ""
    pushd ./gitian-builder
    mkdir -p inputs
    # osslsigncode tarball + patch are gitian inputs needed by the Windows signer.
    wget -N -P inputs $osslPatchUrl
    wget -N -P inputs $osslTarUrl
    # Pre-fetch depends sources into the gitian cache so VM builds run offline.
    make -C ../blast-core/depends download SOURCES_PATH=`pwd`/cache/common

    # Linux
    if [[ $linux = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Linux"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit blast=${COMMIT} --url blast=${url} ../blast-core/contrib/gitian-descriptors/gitian-linux.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-linux --destination ../gitian.sigs/ ../blast-core/contrib/gitian-descriptors/gitian-linux.yml
        mv build/out/blast-*.tar.gz build/out/src/blast-*.tar.gz ../blast-binaries/${VERSION}
    fi
    # Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Windows"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit blast=${COMMIT} --url blast=${url} ../blast-core/contrib/gitian-descriptors/gitian-win.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-unsigned --destination ../gitian.sigs/ ../blast-core/contrib/gitian-descriptors/gitian-win.yml
        # Keep the unsigned tarball as an input for the later detached-signing step.
        mv build/out/blast-*-win-unsigned.tar.gz inputs/blast-win-unsigned.tar.gz
        mv build/out/blast-*.zip build/out/blast-*.exe ../blast-binaries/${VERSION}
    fi
    # Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit blast=${COMMIT} --url blast=${url} ../blast-core/contrib/gitian-descriptors/gitian-osx.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-unsigned --destination ../gitian.sigs/ ../blast-core/contrib/gitian-descriptors/gitian-osx.yml
        # Keep the unsigned tarball as an input for the later detached-signing step.
        mv build/out/blast-*-osx-unsigned.tar.gz inputs/blast-osx-unsigned.tar.gz
        mv build/out/blast-*.tar.gz build/out/blast-*.dmg ../blast-binaries/${VERSION}
    fi
    popd

    if [[ $commitFiles = true ]]
    then
        # Commit to gitian.sigs repo
        echo ""
        echo "Committing ${VERSION} Unsigned Sigs"
        echo ""
        pushd gitian.sigs
        git add ${VERSION}-linux/${SIGNER}
        git add ${VERSION}-win-unsigned/${SIGNER}
        git add ${VERSION}-osx-unsigned/${SIGNER}
        git commit -a -m "Add ${VERSION} unsigned sigs for ${SIGNER}"
        popd
    fi
fi
# Verify the build
# Re-checks each release's gitian signatures against the committed sigs repo.
if [[ $verify = true ]]
then
    # Linux
    pushd ./gitian-builder
    echo ""
    echo "Verifying v${VERSION} Linux"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-linux ../blast-core/contrib/gitian-descriptors/gitian-linux.yml
    # Windows
    echo ""
    echo "Verifying v${VERSION} Windows"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-unsigned ../blast-core/contrib/gitian-descriptors/gitian-win.yml
    # Mac OSX
    echo ""
    echo "Verifying v${VERSION} Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-unsigned ../blast-core/contrib/gitian-descriptors/gitian-osx.yml
    # Signed Windows
    # Fix: this step previously verified ${VERSION}-osx-signed against the OSX
    # signer descriptor (copy-paste error), so signed Windows builds were never
    # actually verified; use the win-signed release and win signer descriptor.
    echo ""
    echo "Verifying v${VERSION} Signed Windows"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-signed ../blast-core/contrib/gitian-descriptors/gitian-win-signer.yml
    # Signed Mac OSX
    echo ""
    echo "Verifying v${VERSION} Signed Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-signed ../blast-core/contrib/gitian-descriptors/gitian-osx-signer.yml
    popd
fi
# Sign binaries
# Attaches detached code signatures (from the *-detached-sigs repo checked out
# at the same COMMIT) to the unsigned Windows/OSX tarballs produced earlier.
if [[ $sign = true ]]
then
    pushd ./gitian-builder
    # Sign Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Windows"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../blast-core/contrib/gitian-descriptors/gitian-win-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-signed --destination ../gitian.sigs/ ../blast-core/contrib/gitian-descriptors/gitian-win-signer.yml
        mv build/out/blast-*win64-setup.exe ../blast-binaries/${VERSION}
        mv build/out/blast-*win32-setup.exe ../blast-binaries/${VERSION}
    fi
    # Sign Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../blast-core/contrib/gitian-descriptors/gitian-osx-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-signed --destination ../gitian.sigs/ ../blast-core/contrib/gitian-descriptors/gitian-osx-signer.yml
        mv build/out/blast-osx-signed.dmg ../blast-binaries/${VERSION}/blast-${VERSION}-osx.dmg
    fi
    popd

    if [[ $commitFiles = true ]]
    then
        # Commit Sigs
        pushd gitian.sigs
        echo ""
        echo "Committing ${VERSION} Signed Sigs"
        echo ""
        git add ${VERSION}-win-signed/${SIGNER}
        git add ${VERSION}-osx-signed/${SIGNER}
        git commit -a -m "Add ${VERSION} signed binary sigs for ${SIGNER}"
        popd
    fi
fi
|
var gulp = require('gulp');
var ngGulp = require('ng-gulp');
var gulpConnect = require('gulp-connect');
var path = require('path');

var cwd = process.cwd();

// Vendor assets shared verbatim by the development and production bundles.
// Each consumer gets its own copy (slice) so ng-gulp cannot mutate one list
// through the other — preserving the original's two-independent-arrays setup.
var commonVendorFiles = [
    'node_modules/angular/angular.js',
    'node_modules/angular-animate/angular-animate.js',
    'node_modules/angular-aria/angular-aria.js',
    'node_modules/angular-material/angular-material.js',
    'node_modules/angular-material/angular-material.css',
    'node_modules/angular-ui-router/release/angular-ui-router.js',
    'node_modules/@microfocus/ng-ias/dist/ng-ias.js',
    'node_modules/@microfocus/ng-ias/dist/ng-ias.css',
    'node_modules/ias-icons/dist/ias-icons.css',
    'node_modules/ias-icons/dist/**/*',
    'node_modules/jquery/dist/jquery.js',
    'node_modules/js-cookie/src/js.cookie.js'
];

// Register the shared ng-gulp task set. Libraries in `externals` are resolved
// from globals at runtime instead of being bundled.
ngGulp(gulp, {
    disableLiveReload: true,
    devServerPort: 8080,
    externals: {
        'angular-material': 'window["angular-material"]',
        'angular-ui-router': 'window["angular-ui-router"]',
        '@microfocus/ng-ias': 'window["ng-ias"]'
    },
    files: {
        indexProduction: 'src/index.production.html',
        vendorDevelopment: commonVendorFiles.slice(),
        vendorProduction: commonVendorFiles.slice(),
        vendorTest: [
            'node_modules/angular/angular.js',
            'node_modules/angular-mocks/angular-mocks.js',
            'node_modules/angular-animate/angular-animate.js',
            'node_modules/angular-aria/angular-aria.js',
            'node_modules/angular-material/angular-material.js',
            'node_modules/angular-material/angular-material.css',
            'node_modules/angular-ui-router/release/angular-ui-router.js',
            'node_modules/jquery/dist/jquery.js',
            'node_modules/js-cookie/src/js.cookie.js'
        ]
    }
});
//
// Define some of our own additional tasks
//

// Copy the OAuth popup page shipped inside gromit into the dist root.
gulp.task('copy:oauth.html', function() {
    return gulp
        .src(path.resolve(cwd, 'node_modules/unjar-from-url/node_modules/gromit/html/oauth.html'))
        .pipe(gulp.dest(path.resolve(cwd, 'dist')));
});

// Copy the entire gromit package into dist/gromit so it can be served as-is.
gulp.task('copy:gromit', function() {
    return gulp
        .src(path.resolve(cwd, 'node_modules/unjar-from-url/node_modules/gromit/**/*'))
        .pipe(gulp.dest(path.resolve(cwd, 'dist/gromit')));
});

// Aggregate task running both copies (gulp 3 dependency-array style).
gulp.task('copy:extras', ['copy:oauth.html', 'copy:gromit']);

// Ensure copy:extras is a part of the copy:development & copy:production base tasks:
// NOTE(review): this reaches into gulp 3 internals (gulp.tasks[...].dep) and
// breaks under gulp 4 — confirm the project is pinned to gulp 3.
gulp.tasks['copy:development'].dep.push('copy:extras');
gulp.tasks['copy:production'].dep.push('copy:extras');
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto  # type: ignore

from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec
from google.protobuf import timestamp_pb2  # type: ignore

# proto-plus module registration: every message named in `manifest` is exported
# from this module under that name. NOTE: this file follows the generated
# proto-plus layout — keep edits limited to comments.
__protobuf__ = proto.module(
    package="google.cloud.aiplatform.v1beta1",
    manifest={
        "Featurestore",
    },
)


class Featurestore(proto.Message):
    r"""Vertex AI Feature Store provides a centralized repository for
    organizing, storing, and serving ML features. The Featurestore
    is a top-level container for your features and their values.

    Attributes:
        name (str):
            Output only. Name of the Featurestore. Format:
            ``projects/{project}/locations/{location}/featurestores/{featurestore}``
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when this Featurestore
            was created.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when this Featurestore
            was last updated.
        etag (str):
            Optional. Used to perform consistent
            read-modify-write updates. If not set, a blind
            "overwrite" update happens.
        labels (Sequence[google.cloud.aiplatform_v1beta1.types.Featurestore.LabelsEntry]):
            Optional. The labels with user-defined
            metadata to organize your Featurestore.
            Label keys and values can be no longer than 64
            characters (Unicode codepoints), can only
            contain lowercase letters, numeric characters,
            underscores and dashes. International characters
            are allowed.
            See https://goo.gl/xmQnxf for more information
            on and examples of labels. No more than 64 user
            labels can be associated with one
            Featurestore(System labels are excluded)."
            System reserved label keys are prefixed with
            "aiplatform.googleapis.com/" and are immutable.
        online_serving_config (google.cloud.aiplatform_v1beta1.types.Featurestore.OnlineServingConfig):
            Required. Config for online serving
            resources.
        state (google.cloud.aiplatform_v1beta1.types.Featurestore.State):
            Output only. State of the featurestore.
        encryption_spec (google.cloud.aiplatform_v1beta1.types.EncryptionSpec):
            Optional. Customer-managed encryption key
            spec for data storage. If set, both of the
            online and offline data storage will be secured
            by this key.
    """

    class State(proto.Enum):
        r"""Possible states a Featurestore can have."""
        STATE_UNSPECIFIED = 0
        STABLE = 1
        UPDATING = 2

    class OnlineServingConfig(proto.Message):
        r"""OnlineServingConfig specifies the details for provisioning
        online serving resources.

        Attributes:
            fixed_node_count (int):
                The number of nodes for each cluster. The number of nodes
                will not scale automatically but can be scaled manually by
                providing different values when updating. Only one of
                ``fixed_node_count`` and ``scaling`` can be set. Setting one
                will reset the other.
            scaling (google.cloud.aiplatform_v1beta1.types.Featurestore.OnlineServingConfig.Scaling):
                Online serving scaling configuration. Only one of
                ``fixed_node_count`` and ``scaling`` can be set. Setting one
                will reset the other.
        """

        class Scaling(proto.Message):
            r"""Online serving scaling configuration. If min_node_count and
            max_node_count are set to the same value, the cluster will be
            configured with the fixed number of node (no auto-scaling).

            Attributes:
                min_node_count (int):
                    Required. The minimum number of nodes to
                    scale down to. Must be greater than or equal to
                    1.
                max_node_count (int):
                    The maximum number of nodes to scale up to. Must be greater
                    or equal to min_node_count.
            """

            min_node_count = proto.Field(
                proto.INT32,
                number=1,
            )
            max_node_count = proto.Field(
                proto.INT32,
                number=2,
            )

        # Mutually exclusive with `scaling` (see class docstring).
        fixed_node_count = proto.Field(
            proto.INT32,
            number=2,
        )
        scaling = proto.Field(
            proto.MESSAGE,
            number=4,
            message="Featurestore.OnlineServingConfig.Scaling",
        )

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    create_time = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    update_time = proto.Field(
        proto.MESSAGE,
        number=4,
        message=timestamp_pb2.Timestamp,
    )
    etag = proto.Field(
        proto.STRING,
        number=5,
    )
    labels = proto.MapField(
        proto.STRING,
        proto.STRING,
        number=6,
    )
    online_serving_config = proto.Field(
        proto.MESSAGE,
        number=7,
        message=OnlineServingConfig,
    )
    state = proto.Field(
        proto.ENUM,
        number=8,
        enum=State,
    )
    encryption_spec = proto.Field(
        proto.MESSAGE,
        number=10,
        message=gca_encryption_spec.EncryptionSpec,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -ex
#: Export OPENRESTY_PREFIX pointing at the Homebrew openresty-debug install.
export_or_prefix() {
    export OPENRESTY_PREFIX=$(brew --prefix openresty/brew/openresty-debug)
}
#: Install build/test dependencies (Homebrew packages, Test::Nginx, luacov).
before_install() {
    HOMEBREW_NO_AUTO_UPDATE=1 brew install perl cpanminus etcd luarocks openresty/brew/openresty-debug redis@3.2
    brew upgrade go
    # Disable redis auth so tests can connect without a password.
    sudo sed -i "" "s/requirepass/#requirepass/g" /usr/local/etc/redis.conf
    brew services start redis@3.2
    # Fix: was `GO111MOUDULE` (typo), so Go module mode was never enabled.
    export GO111MODULE=on
    sudo cpanm --notest Test::Nginx >build.log 2>&1 || (cat build.log && exit 1)
    export_or_prefix
    luarocks install --lua-dir=${OPENRESTY_PREFIX}/luajit luacov-coveralls --local --tree=deps
}
#: Fetch project dependencies, the test harness, lint tooling, and the gRPC
#: example server used by the test suite.
do_install() {
    export_or_prefix
    make deps
    git clone https://github.com/iresty/test-nginx.git test-nginx
    wget -P utils https://raw.githubusercontent.com/openresty/openresty-devel-utils/master/lj-releng
    chmod a+x utils/lj-releng
    wget https://github.com/iresty/grpc_server_example/releases/download/20200314/grpc_server_example-darwin-amd64.tar.gz
    tar -xvf grpc_server_example-darwin-amd64.tar.gz
    brew install grpcurl
}
#: Start backing services, smoke-test start/stop, then run the admin tests
#: under luacov. The sleep/stop sequence below is order-sensitive.
script() {
    export_or_prefix
    export PATH=$OPENRESTY_PREFIX/nginx/sbin:$OPENRESTY_PREFIX/luajit/bin:$OPENRESTY_PREFIX/bin:$PATH
    # Background dependencies for the test run.
    etcd --enable-v2=true &
    sleep 1
    ./grpc_server_example &
    make help
    make init
    # Smoke test: start, let it settle, stop, let it settle.
    sudo make run
    mkdir -p logs
    sleep 1
    sudo make stop
    sleep 1
    # lib64 symlink works around tooling that expects a lib64 directory.
    ln -sf $PWD/deps/lib $PWD/deps/lib64
    sudo mkdir -p /usr/local/var/log/nginx/
    sudo touch /usr/local/var/log/nginx/error.log
    sudo chmod 777 /usr/local/var/log/nginx/error.log
    APISIX_ENABLE_LUACOV=1 prove -Itest-nginx/lib -I./ -r t/admin/*.t
}
#: Upload the luacov coverage report to Coveralls.
after_success() {
    $PWD/deps/bin/luacov-coveralls
}
# Dispatch: first CLI argument selects the CI phase; remaining arguments are
# forwarded to the phase function. Unknown phases fall through silently.
case_opt=$1
shift

case ${case_opt} in
before_install)
    before_install "$@"
    ;;
do_install)
    do_install "$@"
    ;;
script)
    script "$@"
    ;;
after_success)
    after_success "$@"
    ;;
esac
|
"use strict";
const Koa = require("koa");
const Router = require("koa-router");
const Semver = require("..");
const request = require("supertest");
const _ = require("lodash");
// Build a Koa-style middleware that appends `message` to ctx.body (creating
// the array if needed). When `final` is false the middleware also yields to
// the next handler so chains can accumulate messages.
const handler = (message = "handler", final = true) => {
  return (ctx, next) => {
    const body = ctx.body || [];
    ctx.body = body.concat(message);
    if (!final) return next();
  };
};
// Integration tests for the Semver version-routing middleware: a fresh Koa app
// and listening server per test; supertest drives real HTTP requests.
describe("Semver", () => {
  let app;
  let server;
  beforeEach(() => {
    app = new Koa();
    server = app.listen();
  });
  afterEach(async () => {
    await server.close();
  });
  it("should only accept functions as handlers", () => {
    const version = new Semver();
    expect(() => version.use(1)).toThrow();
    expect(() => version.use("string")).toThrow();
    expect(() => version.use({})).toThrow();
    version.use(_.noop);
    expect(version.modes).toHaveLength(1);
  });
  it("must set a first argument on handlers", () => {
    expect(() => Semver.handlers.param()).toThrow();
    expect(() => Semver.handlers.header()).toThrow();
    expect(() => Semver.handlers.query()).toThrow();
  });
  it("should clone the array of modes as a different reference", () => {
    const version = new Semver();
    version.use(_.noop);
    version.use(_.noop);
    const cloned = version.clone();
    expect(cloned).not.toBe(version);
    expect(cloned.modes).not.toBe(version.modes);
  });
  it("should match first middleware when no method is set up", async () => {
    // With no extraction modes configured, the first registered range wins.
    const version = new Semver();
    app.use(version.match("^2.0.0", handler("2")));
    app.use(version.match("^2.1.0", handler("2")));
    app.use(version.match("^1.0.0", handler("1")));
    const response = await request(server).get("/");
    expect(response.body).toEqual(["2"]);
  });
  it("should allow continuations", async () => {
    // A matched middleware that calls next() lets later matching ranges run too.
    const version = new Semver();
    version.use(Semver.handlers.header("X-Semver"));
    app.use(
      version.match("^2.0.0", (ctx, next) => {
        ctx.body = ["This is a middleware"];
        return next();
      })
    );
    app.use(version.match("^2.1.0", handler("2")));
    app.use(version.match("^1.0.0", handler("1")));
    const response = await request(server).get("/").set("X-Semver", "2.4.0");
    expect(response.body).toEqual(["This is a middleware", "2"]);
  });
  describe("koa-router@next", () => {
    it("should match routes via path", async () => {
      // Version extracted from the :ver path segment; optional segment (`?`)
      // falls back to the first registered range.
      const router = new Router();
      const version = new Semver();
      version.use(Semver.handlers.param(":ver"));
      router.get("/:ver?/path", version.match("^4.0.0", handler("4")));
      router.get("/:ver/path", version.match("^2.0.0", handler("2")));
      router.get("/:ver/path", version.match("^1.0.0", handler("1")));
      app.use(router.routes());
      let response;
      response = await request(server).get("/1.4.0/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
      response = await request(server).get("/2.4.0/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["2"]);
      response = await request(server).get("/3.4.0/path");
      expect(response.status).toEqual(404);
      expect(response.body).toEqual({});
      response = await request(server).get("/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["4"]);
    });
    it("should match routes via path with *", async () => {
      // "*" acts as a catch-all for any version not claimed by an earlier range.
      const router = new Router();
      const version = new Semver();
      version.use(Semver.handlers.param(":ver"));
      router.get("/:ver/path", version.match("^1.0.0", handler("1")));
      router.get("/:ver/path", version.match("*", handler("*")));
      app.use(router.routes());
      let response;
      response = await request(server).get("/2.4.0/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["*"]);
      response = await request(server).get("/1.4.0/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
      response = await request(server).get("/0.4.0/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["*"]);
    });
    it("should match routes via headers", async () => {
      const router = new Router();
      const version = new Semver();
      version.use(Semver.handlers.header("X-Semver"));
      router.get("/path", version.match("^2.0.0", handler("2")));
      router.get("/path", version.match("^1.0.0", handler("1")));
      app.use(router.routes());
      let response;
      // No version on headers should return the latest (first one)
      response = await request(server).get("/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["2"]);
      response = await request(server).get("/path").set("X-Semver", "1.4.0");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
      response = await request(server).get("/path").set("X-Semver", "2.4.0");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["2"]);
      response = await request(server).get("/path").set("X-Semver", "3.4.0");
      expect(response.status).toEqual(404);
      expect(response.body).toEqual({});
    });
    it("should match routes via query", async () => {
      const router = new Router();
      const version = new Semver();
      version.use(Semver.handlers.query("Semver"));
      router.get("/path", version.match("^2.0.0", handler("2")));
      router.get("/path", version.match("^1.0.0", handler("1")));
      app.use(router.routes());
      let response;
      // No version on query should return the latest (first one)
      response = await request(server).get("/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["2"]);
      response = await request(server).get("/path").query({ Semver: "1.4.0" });
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
      response = await request(server).get("/path").query({ Semver: "2.4.0" });
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["2"]);
      response = await request(server).get("/path").query({ Semver: "3.4.0" });
      expect(response.status).toEqual(404);
      expect(response.body).toEqual({});
    });
    it("should respect hierarchy", async () => {
      // When several extraction modes are registered, earlier ones
      // (param, then query, then header) take precedence.
      const router = new Router();
      const version = new Semver();
      version.use(Semver.handlers.param(":Semver"));
      version.use(Semver.handlers.query("Semver"));
      version.use(Semver.handlers.header("Semver"));
      router.get("/:Semver/path", version.match("^2.0.0", handler("2")));
      router.get("/:Semver/path", version.match("^1.0.0", handler("1")));
      app.use(router.routes());
      let response;
      // No version on query should return the latest (first one)
      response = await request(server).get("/_/path");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["2"]);
      response = await request(server)
        .get("/_/path")
        .query({ Semver: "1.4.0" });
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
      response = await request(server).get("/_/path").set("Semver", "1.4.0");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
      response = await request(server)
        .get("/1.0.0/path")
        .set("Semver", "2.4.0");
      expect(response.status).toEqual(200);
      expect(response.body).toEqual(["1"]);
    });
  });
});
|
module.exports = {
requestShow() {
return 'from extend request';
}
} |
"""
Design a Python program to print out the first 10 prime numbers
"""
def get_prime_numbers(n):
prime_numbers = []
x = 2
while len(prime_numbers) < n:
is_prime = True
for num in prime_numbers:
if x % num == 0:
is_prime = False
break
if is_prime:
prime_numbers.append(x)
x += 1
return prime_numbers
if __name__ == '__main__':
result = get_prime_numbers(10)
print(result) |
#! /bin/sh
#: Install sh-stdlib.
#: sh-stdlib is a standard library for shell.

_LOCAL="$HOME/.local"
_LOCAL_SUBDIRS='bin lib opt share'
# TODO: do we need to honor POSIXSH_STDLIB_HOME here?
_SHSTDLIB_HOME="${POSIXSH_STDLIB_HOME:-${_LOCAL}/lib/shell/sh}"
_DOWNLOAD_CACHE=/tmp
_URL_LATEST=https://github.com/ya55en/sh-stdlib/releases/latest
_URL_DOWNLOAD_RE='^location: https://github.com/ya55en/sh-stdlib/releases/tag/v\(.*\)$'

# Resolve the latest released version by reading the Location header of the
# GitHub "latest release" redirect.
__latest__=$(curl -Is $_URL_LATEST | grep ^location | tr -d '\n\r' | sed "s|$_URL_DOWNLOAD_RE|\1|")
__version__="${1:-$__latest__}" # version passed as an argument for unreleased builds
_SHSTDLIB_FILENAME="sh-stdlib-v${__version__}.tgz"
_URL_DOWNLOAD="https://github.com/ya55en/sh-stdlib/releases/download/v${__version__}/${_SHSTDLIB_FILENAME}"
#: Terminate execution with given message and rc and.
#: Terminate execution with the given rc and message.
#: Usage: die RC MSG
die() {
    rc=$1
    msg="$2"
    # NOTE(review): _fatal is not defined in this script, and the stdlib being
    # installed may not be loaded yet — confirm it is in scope at call time
    # (the commented echo below looks like the previous self-contained version).
    _fatal "$msg" >&2
    #echo "${C_FATAL}FATAL: $msg${C_OFF}" >&2
    exit $rc
}
if [ "$DEBUG" = true ]; then
echo "DEBUG: HOME='${HOME}'"
echo "DEBUG: _SHSTDLIB_HOME='${_SHSTDLIB_HOME}'"
echo "DEBUG: _SHSTDLIB_FILENAME='${_SHSTDLIB_FILENAME}'"
echo "DEBUG: _URL_DOWNLOAD='${_URL_DOWNLOAD}'"
echo "DEBUG: __version__='${__version__}'"
fi
#: Create ~/.local/{bin,lib,opt,share}.
#: Create ~/.local/{bin,lib,opt,share}.
#: Existing directories are left untouched (a warning is printed instead).
create_dot_local() {
    if [ ! -e "$_LOCAL" ]; then
        echo "I: Creating directory ${_LOCAL}..."
        mkdir -p "${_LOCAL}"
    else
        echo "W: $_LOCAL already exists, skipping."
    fi
    for subdir in $_LOCAL_SUBDIRS; do
        target="${_LOCAL}/${subdir}"
        if [ ! -e "${target}" ]; then
            echo "I: Creating ${target}..."
            mkdir "${target}"
        else
            echo "W: ${target} already exists, skipping."
        fi
    done
}
#: Download sh-stdlib core tarball and install it.
#: Download sh-stdlib core tarball and install it.
#: Downloads into _DOWNLOAD_CACHE; skips the download when the file exists.
download_shstdlib_core() {
    target_file_path="${_DOWNLOAD_CACHE}/${_SHSTDLIB_FILENAME}"
    echo "install_shstdlib(): target_file_path=${target_file_path}"
    if [ -e "${target_file_path}" ]; then
        echo "Release file already downloaded, skipping."
    else
        echo "Downloading ${target_file_path}..."
        curl -sL "$_URL_DOWNLOAD" -o "${target_file_path}" ||
            die 9 "Download failed. (URL: $_URL_DOWNLOAD)"
    fi
}
#: Install sh-stdlib into `_SHSTDLIB_HOME`.
#: Install sh-stdlib into `_SHSTDLIB_HOME`.
#: Extracts the previously downloaded tarball; refuses to overwrite an
#: existing installation.
install_shstdlib_core() {
    target_file_path="${_DOWNLOAD_CACHE}/${_SHSTDLIB_FILENAME}"
    if [ -e "${_SHSTDLIB_HOME}" ]; then
        echo "Target directory already exists: ${_SHSTDLIB_HOME}, skipping."
        echo "(If you do want that (old) thing replaced, please remove it manually first.)"
    else
        echo "Extracting sh-stdlib archive $_SHSTDLIB_FILENAME to target directory ${_SHSTDLIB_HOME}..."
        mkdir -p "${_SHSTDLIB_HOME}"
        tar xf "${target_file_path}" -C "${_SHSTDLIB_HOME}"
    fi
}
#: Create ~/.bashrc.d/ .
#: Create ~/.bashrc.d/ .
#: Leaves an existing directory untouched (warning only).
create_bashrcd() {
    if [ ! -e "$HOME/.bashrc.d" ]; then
        echo "I: Creating $HOME/.bashrc.d/..."
        mkdir "$HOME/.bashrc.d"
    else
        echo "W: $HOME/.bashrc.d/ already exists, skipping."
    fi
}
#: Create bashrcd script ~/.bashrc.d/00-sh-stdlib-init.sh.
#: Create bashrcd script ~/.bashrc.d/00-sh-stdlib-init.sh.
#: Writes PATH and POSIXSH_* environment setup; never overwrites an existing
#: script. Heredoc escapes (\$) are deliberate: variables must expand at
#: shell-startup time, not at install time (unquoted $_SHSTDLIB_HOME and
#: $target_file_path expand now, by design).
create_bashrcd_script_00() {
    target_file_path="$HOME/.bashrc.d/00-sh-stdlib-init.sh"
    if [ -e "${target_file_path}" ]; then
        echo "Bashrcd script '00-sh-stdlib-init.sh' already exists, skipping. (${target_file_path})"
    else
        echo "Installing bashrcd script ${target_file_path}..."
        cat > "${target_file_path}" << EOS
# $target_file_path - sh-stdlib environment setup

_LOCAL="\$HOME/.local"
echo "\$PATH" | grep -q "\$_LOCAL/bin" || PATH="\$_LOCAL/bin:\$PATH"
EOS
        echo "Adding POSIXSH_STDLIB_HOME setup to bashrcd script ${target_file_path}..."
        cat >> "${target_file_path}" << EOS
# sh-stdlib: set POSIXSH_STDLIB_HOME and add sh-stdlib/bin to PATH
POSIXSH_STDLIB_HOME="$_SHSTDLIB_HOME" ; export POSIXSH_STDLIB_HOME
echo \$PATH | grep -q "\$POSIXSH_STDLIB_HOME/bin" || PATH="\$POSIXSH_STDLIB_HOME/bin:\$PATH"

# sh-stdlib: set POSIXSH_IMPORT_PATH
# (POSIXSH_IMPORT_PATH is where sys.sh 'import()' look for modules to import.)
echo "\$POSIXSH_IMPORT_PATH" | grep -q "\$POSIXSH_STDLIB_HOME/lib" || POSIXSH_IMPORT_PATH="\$POSIXSH_STDLIB_HOME:\$POSIXSH_IMPORT_PATH"
echo "\$POSIXSH_IMPORT_PATH" | grep -q "\$POSIXSH_STDLIB_HOME/unittest" || POSIXSH_IMPORT_PATH="\$POSIXSH_STDLIB_HOME/unittest:\$POSIXSH_IMPORT_PATH"
EOS
    fi
}
#: Create bashrcd script ~/.bashrc.d/99-sh-stdlib-import-path.sh.
#: Create bashrcd script ~/.bashrc.d/99-sh-stdlib-import-path.sh.
#: Runs last (99 prefix) so it exports variables assembled by earlier scripts.
create_bashrcd_script_99_import_path() {
    target_script_name='99-sh-stdlib-import-path.sh'
    target_file_path="$HOME/.bashrc.d/$target_script_name"
    if [ -e "${target_file_path}" ]; then
        echo "bashrcd script '$target_script_name' already exists, skipping."
    else
        echo "Installing bashrcd script ${target_file_path}..."
        cat > "${target_file_path}" << EOS
# $target_file_path - sh-stdlib: export variables set by other scripts.

export POSIXSH_STDLIB_HOME
export POSIXSH_IMPORT_PATH
export PATH
EOS
    fi
}
#: Add ~/.bashrc.d/ activation code to ~/.bashrc.
#: Add ~/.bashrc.d/ activation code to ~/.bashrc.
#: Idempotent: greps for the snippet's for-loop line before appending.
add_bashrcd_sourcing_snippet() {
    # The single-quoted pattern keeps \$ literal so grep matches the escaped
    # text written below, not an expanded value.
    # shellcheck disable=SC2016
    if grep -q 'for file in "\$HOME/\.bashrc.d/"\*\.sh; do' ~/.bashrc; then
        echo "bashrc.d sourcing snippet already set, skipping."
    else
        echo "Setting bashrc.d sourcing snippet..."
        # Unquoted heredoc: \$ keeps variables literal in the generated ~/.bashrc.
        cat >> "$HOME/.bashrc" << EOS
#: sh-stdlib: sourcing initializing scripts from ~/.bashrc.d/*.sh
if [ -d "\$HOME/.bashrc.d/" ]; then
for file in "\$HOME/.bashrc.d/"*.sh; do
. "\$file"
done
fi
EOS
    fi
}
#: Symlink the shtest runner into the user's local shell bin.
#: NOTE(review): not idempotent — plain `ln -s` fails if the link already
#: exists — and the destination directory is assumed to exist; confirm both
#: before re-enabling this step in main.
create_symlink() {
    ln -s "$_SHSTDLIB_HOME/unittest/shtest" "$_LOCAL/lib/shell/sh/bin"
}
#: Load the freshly installed bashrcd scripts into the current shell,
#: in the same order a login shell would source them.
source_bashrcd_scripts() {
    for rc_script in "$HOME/.bashrc.d/00-sh-stdlib-init.sh" \
                     "$HOME/.bashrc.d/99-sh-stdlib-import-path.sh"; do
        . "$rc_script"
    done
}
#: Run the bundled test suite and pretty-print the TAP stream with tapview.
self_test() {
    # tapview may not have kept its execute bit through the tar extraction.
    chmod +x "$_SHSTDLIB_HOME/bin/tapview"
    "$_SHSTDLIB_HOME/unittest/shtest" "$_SHSTDLIB_HOME/tests" | "$_SHSTDLIB_HOME/bin/tapview"
}
#: Print adequate instructions on the console.
instruct_user() {
# TODO: Think on having a refresh-env command to reload env
cat << EOS
IMPORTANT:
Please ** CLOSE and REOPEN ** all your terminals in order to refresh
the environment variables.
sh-stdlib installation: SUCCESSFUL.
EOS
}
#: Orchestrate the full installation; steps run in dependency order.
main() {
    create_dot_local
    download_shstdlib_core
    install_shstdlib_core
    create_bashrcd
    create_bashrcd_script_00
    create_bashrcd_script_99_import_path
    add_bashrcd_sourcing_snippet
    # create_symlink  # intentionally disabled
    source_bashrcd_scripts
    self_test
    instruct_user
}
main
|
import expect from 'expect';
import {getCurrentRefinements} from '../utils.js';
describe('currentRefinedValues', () => {
  // Facet values clicked to create refinements during the tests.
  const hierarchicalCategory = '#hierarchical-categories .item:nth-child(6)';
  const brand = '#brands .item:nth-child(8)';

  it('is empty', () =>
    getCurrentRefinements().then((refinements) => expect(refinements.length).toBe(0))
  );

  context('when we have some refinements', () => {
    beforeEach(() =>
      browser
        .click(hierarchicalCategory)
        .pause(500)
        .click(brand)
        .pause(500)
    );

    it('shows refinements', () =>
      getCurrentRefinements().then((refinements) => expect(refinements.length).toBe(2))
    );

    it('has a "Clear all" button', () =>
      browser
        .click('#current-refined-values .clear-all')
        .then(getCurrentRefinements)
        .then((refinements) => expect(refinements.length).toBe(0))
    );
  });
});
|
module.exports = {
compileProgram: require('./compileProgram'),
defaultValue: require('./defaultValue'),
extractAttributes: require('./extractAttributes'),
extractUniforms: require('./extractUniforms'),
generateUniformAccessObject: require('./generateUniformAccessObject'),
setPrecision: require('./setPrecision'),
mapSize: require('./mapSize'),
mapType: require('./mapType')
}; |
var merge = require("webpack-merge");
var path = require("path");
var webpack = require("webpack");
var HtmlWebpackPlugin = require("html-webpack-plugin");

// Build-mode markers.
var DEVELOPMENT = "DEVELOPMENT";
var PRODUCTION = "PRODUCTION";
var ENTRY_FILE = "./src/index.js"; // NOTE(review): defined but unused below — confirm

// Determine the build environment from the npm script that invoked webpack:
// "npm run build" => production, anything else => development.
var npmScript = process.env.npm_lifecycle_event;
var targetEnv = npmScript === "build" ? PRODUCTION : DEVELOPMENT;

/*
  Shared configuration for both dev and production
*/
var baseConfig = {
  output: {
    path: path.resolve(__dirname + "/dist"),
    filename: "index.js"
  },
  resolve: {
    // Resolve from src/ first, then node_modules; handle .js and .elm files.
    modules: [path.join(__dirname, "src"), "node_modules"],
    extensions: [".js", ".elm"]
  },
  module: {
    rules: [
      {
        // Compile Elm sources, skipping build artifacts and dependencies.
        test: /\.elm$/,
        exclude: [/elm-stuff/, /node_modules/],
        loader: "elm-webpack-loader",
        options: {
          cwd: path.resolve(__dirname, "src")
        }
      }
    ]
  },
  plugins: [
    // Generate index.html with links to webpack bundles
    new HtmlWebpackPlugin({
      title: "Example",
      xhtml: true,
      template: "src/index.html"
    })
  ]
};

// No dev-only overrides at the moment.
var devConfig = {};

// Additional webpack settings for prod env (when invoked via "npm run build"):
// hashed bundle names, emitted into the docs/ folder for GitHub Pages.
var prodConfig = {
  output: {
    path: path.resolve(__dirname + "/../docs"),
    filename: "[name]-[hash].js",
    publicPath: "/elm-select/"
  }
};

if (targetEnv === DEVELOPMENT) {
  console.log("Serving locally...");
  module.exports = merge(baseConfig, devConfig);
} else {
  console.log("Building for production...");
  module.exports = merge(baseConfig, prodConfig);
}
|
def findTriplets(arr, target=0):
    """Return every triplet of elements of ``arr`` whose sum equals ``target``.

    Generalizes the classic 3-sum (the previous hard-coded sum of 0 is the
    default, so existing callers are unaffected). Elements are combined in
    index order (i < j < k), exactly like the original triple nested loop,
    and duplicate values in ``arr`` may yield duplicate triplets.

    Args:
        arr: sequence of numbers.
        target: required sum of each triplet (default 0).

    Returns:
        List of 3-tuples ``(arr[i], arr[j], arr[k])`` with ``i < j < k``.
    """
    from itertools import combinations  # stdlib; local import keeps snippet self-contained
    return [triplet for triplet in combinations(arr, 3) if sum(triplet) == target]
#!/usr/bin/env bash
# Provision an Ubuntu web VM: neighbour host entries plus Apache for the app.

# Register neighbour VMs in /etc/hosts (leading blank line keeps it readable;
# quoted EOF prevents any expansion inside the block).
cat >> /etc/hosts << 'EOF'

192.168.1.122 entry entry.dev www.entry.dev
192.168.1.124 maria maria.dev
EOF

# Base system packages.
apt-get update
apt-get upgrade -y
apt-get -qq install -y curl wget vim apache2 apache2-utils

# Apache module and site configuration.
for apache_module in rewrite headers; do
    a2enmod "$apache_module"
done
a2dismod status
a2dissite 000-default.conf

# Run Apache as the "ubuntu" user and enable the dev vhost.
sed -i 's/www-data/ubuntu/g' /etc/apache2/envvars
ln -s /var/www/app.dev/app_dev.conf /etc/apache2/sites-enabled/app_dev.conf
/etc/init.d/apache2 restart

# Install log rotation for the application.
cp /var/www/app.dev/logrotate /etc/logrotate.d/appdev
#!/usr/bin/python
## image-to-gcode is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by the
## Free Software Foundation; either version 2 of the License, or (at your
## option) any later version. image-to-gcode is distributed in the hope
## that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
## warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
## the GNU General Public License for more details. You should have
## received a copy of the GNU General Public License along with image-to-gcode;
## if not, write to the Free Software Foundation, Inc., 59 Temple Place,
## Suite 330, Boston, MA 02111-1307 USA
##
## image-to-gcode.py is Copyright (C) 2005 <NAME>
## <EMAIL>
## image-to-gcode.py is Copyright (C) 2006 <NAME>
## <EMAIL>
#################################################################################
# image-to-gcode #
#################################################################################
import math
import sys
import operator
epsilon = 1e-5       # tolerance for float comparisons throughout this module
MAXINT = 1000000000  # sentinel "infinite" radius/distance (e.g. collinear points)
def ball_tool(r, rad):
    """Profile of a ball-nose tool of radius ``rad``: depth (negative height)
    of the sphere surface at distance ``r`` from the tool axis."""
    return -math.sqrt(rad ** 2 - r ** 2)
def endmill(r, dia, rough_offset=0.0):
    # Flat endmill profile: height is zero everywhere under the tool.
    # ``r``, ``dia`` and ``rough_offset`` are accepted for interface
    # compatibility with the other tool-profile functions but unused.
    return 0
def vee_common(angle, rough_offset=0.0):
    """Return a profile function ``profile(r, dia)`` for a vee-shaped bit
    with the given included ``angle`` in degrees."""
    # Slope of the flank: complement of the half-angle, converted to radians.
    slope = math.tan(math.pi / 2.0 - (angle / 2.0) * math.pi / 180.0)
    def profile(r, dia):
        # Height rises linearly with distance from the tip; ``dia`` is unused.
        return r * slope
    return profile
def make_tool_shape(NUMPY, f, wdia, resp, rough_offset=0.0):
    """Rasterize the tool profile ``f`` into a square height matrix.

    Args:
        NUMPY: truthy selects the numpy-backed matrix, else the list backend.
        f: profile function ``f(r, wrad)`` -> height at radius ``r``.
        wdia: working tool diameter.
        resp: pixel size (units per pixel).
        rough_offset: extra radius added for roughing passes.

    Returns:
        An Image_Matrix_* of size (2*rad+1) x (2*rad+1), normalized so its
        minimum (plus rough_offset) is zero; cells outside the tool are +inf.

    FIX(review): removed the original first dia x dia loop — it computed the
    same ``f(r, wrad)`` values into a list ``l`` that was immediately reset
    and never used (pure dead O(dia^2) work with no observable effect).
    """
    res = 1. / resp  # pixels per unit
    wrad = wdia / 2.0 + rough_offset
    rad = int(math.ceil((wrad - resp / 2.0) * res))
    if rad < 1:
        rad = 1
    dia = 2 * rad + 1
    hdia = rad  # index of the center pixel
    # Select the matrix backend (kept as an equality test to preserve the
    # original truthiness semantics exactly).
    if NUMPY == True:
        Image_Matrix = Image_Matrix_Numpy
    else:
        Image_Matrix = Image_Matrix_List
    TOOL = Image_Matrix(dia, dia)
    temp = []
    for x in range(dia):
        temp.append([])
        for y in range(dia):
            r = math.hypot(x - hdia, y - hdia) * resp
            if r < wrad:
                temp[x].append(float(f(r, wrad)))
            else:
                # Effectively +infinity: this pixel is outside the tool.
                temp[x].append(1e100000)
    TOOL.From_List(temp)
    # Shift so the deepest point of the tool sits at zero (minus the offset).
    TOOL.minus(TOOL.min() + rough_offset)
    return TOOL
def amax(seq):
    """Element of ``seq`` with the largest absolute value (0 for empty input).

    Ties keep the earliest winner, since only strictly larger magnitudes
    replace the current best."""
    best = 0
    for candidate in seq:
        if abs(candidate) > abs(best):
            best = candidate
    return best
def group_by_sign(seq, slop=math.sin(math.pi/18), key=lambda x:x):
    # Generator: split ``seq`` into runs whose key keeps (roughly) one sign.
    # A new group starts when the keyed value crosses zero by more than
    # ``slop`` against the current sign; the crossing element ends the old
    # group AND begins the new one (groups share that boundary point).
    # NOTE(review): indentation reconstructed from a whitespace-mangled
    # source; statement order preserved.
    sign = None          # sign of the current run; None until a nonzero key
    subseq = []          # current group being accumulated
    for i in seq:
        ki = key(i)
        if sign is None:
            subseq.append(i)
            if ki != 0:
                sign = ki / abs(ki)
        else:
            subseq.append(i)
            # Crossed zero decisively against the current sign: flush group.
            if sign * ki < -slop:
                sign = ki / abs(ki)
                yield subseq
                subseq = [i]
    if subseq: yield subseq
class Convert_Scan_Alternating:
    """Scan converter that alternates direction on every row (zig-zag)."""
    def __init__(self):
        # Number of rows processed so far; odd rows are reversed in place.
        self.st = 0
    def __call__(self, primary, items):
        self.st += 1
        count = self.st
        if count % 2:
            items.reverse()
        # Only the very first row is flagged True (triggers an entry cut).
        yield count == 1, items
    def reset(self):
        self.st = 0
class Convert_Scan_Increasing:
    """Scan converter that always emits rows in their natural order."""
    def __call__(self, primary, items):
        yield (True, items)
    def reset(self):
        # Stateless; nothing to reset.
        pass
class Convert_Scan_Decreasing:
    """Scan converter that always emits rows in reversed order."""
    def __call__(self, primary, items):
        # In-place reversal: callers observe the mutated list, as before.
        items.reverse()
        yield (True, items)
    def reset(self):
        # Stateless; nothing to reset.
        pass
class Convert_Scan_Upmill:
    """Order each sign-group of a row so the tool is always climbing."""
    def __init__(self, slop=math.sin(math.pi / 18)):
        # Hysteresis passed through to group_by_sign.
        self.slop = slop
    def __call__(self, primary, items):
        # Group by the sign of the third tuple element (dz along the row);
        # reverse any span that would otherwise descend.
        for span in group_by_sign(items, self.slop, operator.itemgetter(2)):
            if amax([point[2] for point in span]) < 0:
                span.reverse()
            yield True, span
    def reset(self):
        # Stateless; nothing to reset.
        pass
class Convert_Scan_Downmill:
    """Order each sign-group of a row so the tool is always descending."""
    def __init__(self, slop=math.sin(math.pi / 18)):
        # Hysteresis passed through to group_by_sign.
        self.slop = slop
    def __call__(self, primary, items):
        # Mirror image of Convert_Scan_Upmill: reverse spans that climb.
        for span in group_by_sign(items, self.slop, operator.itemgetter(2)):
            if amax([point[2] for point in span]) > 0:
                span.reverse()
            yield True, span
    def reset(self):
        # Stateless; nothing to reset.
        pass
class Reduce_Scan_Lace:
    # Wraps another scan converter and keeps only the sub-spans whose surface
    # slope passes a threshold test ("lace bordering"), padding each kept
    # region to a multiple of ``keep`` samples.
    # NOTE(review): indentation reconstructed from a whitespace-mangled source.
    def __init__(self, converter, slope, keep):
        self.converter = converter  # inner scan converter (rows/cols ordering)
        self.slope = slope          # slope threshold for keeping a sample
        self.keep = keep            # granularity (samples) of kept regions
    def __call__(self, primary, items):
        slope = self.slope
        keep = self.keep
        # Milldata tuples are (index, (x, y, z), dz_dx, dz_dy); the primary
        # direction tests the cross slope, the other tests the along slope.
        if primary:
            idx = 3
            test = operator.le
        else:
            idx = 2
            test = operator.ge
        def bos(j):
            # Round j down to a multiple of keep (beginning of segment).
            return j - j % keep
        def eos(j):
            # Round j up to a multiple of keep (end of segment).
            if j % keep == 0: return j
            return j + keep - j%keep
        for i, (flag, span) in enumerate(self.converter(primary, items)):
            subspan = []  # NOTE(review): never used — kept as in the original
            a = None      # start index of the region currently being kept
            for i, si in enumerate(span):
                ki = si[idx]
                if a is None:
                    # Not inside a kept region: open one when the test passes.
                    if test(abs(ki), slope):
                        a = b = i
                else:
                    if test(abs(ki), slope):
                        b = i
                    else:
                        # Allow short gaps (< keep samples) inside a region.
                        if i - b < keep: continue
                        yield True, span[bos(a):eos(b+1)]
                        a = None
            if a is not None:
                # Region still open at the end of the span: emit the tail.
                yield True, span[a:]
    def reset(self):
        self.converter.reset()
#############
class Reduce_Scan_Lace_new:
    # Variant of Reduce_Scan_Lace that filters by absolute cut depth instead
    # of surface slope: only samples deeper than ``depth`` are kept.
    # NOTE(review): indentation reconstructed from a whitespace-mangled source.
    def __init__(self, converter, depth, keep):
        self.converter = converter  # inner scan converter
        self.depth = depth          # z threshold; keep samples below this
        self.keep = keep            # granularity (samples) of kept regions
    def __call__(self, primary, items):
        keep = self.keep
        max_z_cut = self.depth # set a max z value to cut
        def bos(j):
            # Round j down to a multiple of keep.
            return j - j % keep
        def eos(j):
            # Round j up to a multiple of keep.
            if j % keep == 0: return j
            return j + keep - j%keep
        for i, (flag, span) in enumerate(self.converter(primary, items)):
            subspan = []  # NOTE(review): never used — kept as in the original
            a = None      # start index of the region currently being kept
            for i, si in enumerate(span):
                ki = si[1] # This is (x,y,z)
                z_value = ki[2] # Get the z value from ki
                if a is None:
                    if z_value < max_z_cut:
                        a = b = i
                else:
                    if z_value < max_z_cut:
                        b = i
                    else:
                        # Allow short gaps (< keep samples) inside a region.
                        if i - b < keep: continue
                        yield True, span[bos(a):eos(b+1)]
                        a = None
            if a is not None:
                yield True, span[a:]
    def reset(self):
        self.converter.reset()
#############
class Converter:
    # Core driver of the raster-to-gcode conversion: walks the height image
    # in rows and/or columns, emitting cuts through a Gcode writer, with
    # optional roughing passes, border passes and progress accounting.
    # NOTE(review): indentation reconstructed from a whitespace-mangled
    # source; statement order preserved from the original.
    def __init__(self, BIG,
            image, units, tool_shape, pixelsize, pixelstep, safetyheight, tolerance,
            feed, convert_rows, convert_cols, cols_first_flag, border, entry_cut,
            roughing_delta, roughing_feed, xoffset, yoffset, splitstep, header,
            postscript, edge_offset, disable_arcs):
        self.BIG = BIG                    # UI/progress object; only .update() is called here
        self.image = image                # height map (Image_Matrix_*), depths are <= 0
        self.units = units
        self.tool_shape = tool_shape      # tool height matrix from make_tool_shape
        self.pixelsize = pixelsize
        self.safetyheight = safetyheight
        self.tolerance = tolerance
        self.base_feed = feed
        self.convert_rows = convert_rows  # scan converter for rows, or None
        self.convert_cols = convert_cols  # scan converter for columns, or None
        self.cols_first_flag = cols_first_flag
        self.entry_cut = entry_cut        # entry strategy (SimpleEntryCut/ArcEntryCut)
        self.roughing_delta = roughing_delta
        self.roughing_feed = roughing_feed
        self.header = header
        self.postscript = postscript
        self.border = border
        self.edge_offset = edge_offset
        self.disable_arcs = disable_arcs
        self.xoffset = xoffset
        self.yoffset = yoffset
        # Split step stuff: splitstep > 0 widens the pixel step and offsets
        # alternate passes by ``splitpixels`` to interleave scan lines.
        splitpixels = 0
        if splitstep > epsilon:
            pixelstep = int(math.floor(pixelstep * splitstep * 2))
            splitpixels = int(math.floor(pixelstep * splitstep ))
        self.pixelstep = pixelstep
        self.splitpixels = splitpixels
        self.cache = {}                   # (x, y) -> tool-compensated height memo
        w, h = self.w, self.h = image.shape
        self.h1 = h
        self.w1 = w
        ###
        # Estimate the total number of scan lines (for progress reporting).
        row_cnt=0
        cnt_border = 0
        if self.convert_rows != None:
            row_cnt = math.ceil( self.w1 / pixelstep) + 2
        col_cnt = 0
        if self.convert_cols != None:
            col_cnt = math.ceil( self.h1 / pixelstep) + 2
        if self.roughing_delta != 0:
            # One pass per roughing increment down to the deepest point.
            cnt_mult = math.ceil(self.image.min() / -self.roughing_delta) + 1
        else:
            cnt_mult = 1
        if self.convert_cols != None or self.convert_rows != None:
            cnt_border = 2
        self.cnt_total = (row_cnt + col_cnt + cnt_border )* cnt_mult
        self.cnt = 0.0
    def one_pass(self):
        # Emit one full pass at the current depth limit self.rd: rows and/or
        # columns in the configured order, then optional border passes.
        g = self.g
        g.set_feed(self.feed)
        if self.convert_cols and self.cols_first_flag:
            self.g.set_plane(19)
            self.mill_cols(self.convert_cols, True)
            if self.convert_rows: g.safety()
        if self.convert_rows:
            self.g.set_plane(18)
            self.mill_rows(self.convert_rows, not self.cols_first_flag)
        if self.convert_cols and not self.cols_first_flag:
            self.g.set_plane(19)
            if self.convert_rows: g.safety()
            self.mill_cols(self.convert_cols, not self.convert_rows)
        g.safety()
        ## mill border ##
        if self.convert_cols:
            self.convert_cols.reset()
        if self.convert_rows:
            self.convert_rows.reset()
        # A huge pixelstep makes mill_rows/mill_cols emit only the outermost
        # lines, i.e. the border.
        step_save = self.pixelstep
        self.pixelstep = max(self.w1, self.h1) + 1
        if self.border == 1 and not self.convert_rows:
            if self.convert_cols:
                self.g.set_plane(18)
                self.mill_rows(self.convert_cols, True)
                g.safety()
        if self.border == 1 and not self.convert_cols:
            if self.convert_rows:
                self.g.set_plane(19)
                self.mill_cols(self.convert_rows, True)
                g.safety()
        self.pixelstep = step_save
        if self.convert_cols:
            self.convert_cols.reset()
        if self.convert_rows:
            self.convert_rows.reset()
        g.safety()
    def convert(self):
        # Run the whole conversion and return the generated g-code as a list
        # of lines (collected via the Gcode writer's target callback).
        output_gcode = []
        self.g = g = Gcode(safetyheight=self.safetyheight,
                           tolerance=self.tolerance,
                           units=self.units,
                           header=self.header,
                           postscript=self.postscript,
                           target=lambda s: output_gcode.append(s),
                           disable_arcs = self.disable_arcs)
        g.begin()
        #g.continuous(self.tolerance) #commented V0.7
        g.safety()
        if self.roughing_delta:
            ##########################################
            # Roughing: step the depth limit down by roughing_delta per pass,
            # with a final pass exactly at the image minimum.
            self.feed = self.roughing_feed
            r = -self.roughing_delta
            m = self.image.min()
            while r > m:
                self.rd = r
                self.one_pass()
                r = r - self.roughing_delta
            if r < m + epsilon:
                self.rd = m
                self.one_pass()
            ##########################################
        else:
            # Single finishing pass at full depth.
            self.feed = self.base_feed
            self.rd = self.image.min()
            self.one_pass()
            ##########################################
        g.end()
        return output_gcode
    def get_z(self, x, y):
        # Tool-compensated surface height at pixel (x, y), clamped to the
        # current depth limit self.rd and to 0 (never above the surface).
        # Results are memoized in self.cache.
        try:
            return min(0, max(self.rd, self.cache[x,y]))
        except KeyError:
            self.cache[x,y] = d = self.image.height_calc(x,y,self.tool_shape)
            return min(0.0, max(self.rd, d))
    def get_dz_dy(self, x, y):
        # Central-difference slope of the compensated surface along y.
        y1 = max(0, y-1)
        y2 = min(self.image.shape[0]-1, y+1)
        dy = self.pixelsize * (y2-y1)
        return (self.get_z(x, y2) - self.get_z(x, y1)) / dy
    def get_dz_dx(self, x, y):
        # Central-difference slope of the compensated surface along x.
        x1 = max(0, x-1)
        x2 = min(self.image.shape[1]-1, x+1)
        dx = self.pixelsize * (x2-x1)
        return (self.get_z(x2, y) - self.get_z(x1, y)) / dx
    def frange(self,start, stop, step):
        # range() equivalent that tolerates float steps; stop is exclusive.
        out = []
        i = start
        while i < stop:
            out.append(i)
            i += step
        return out
    def mill_rows(self, convert_scan, primary):
        # Scan the image row by row (constant y per scan line), build the
        # milldata tuples and feed them through the scan converter.
        global STOP_CALC
        w1 = self.w1
        h1 = self.h1
        pixelsize = self.pixelsize
        pixelstep = self.pixelstep
        pixel_offset = int(math.ceil(self.edge_offset / pixelsize))
        jrange = self.frange(self.splitpixels+pixel_offset, w1-pixel_offset, pixelstep)
        # Always include the first and last rows.
        if jrange[0] != pixel_offset: jrange.insert(0,pixel_offset)
        if w1-1-pixel_offset not in jrange: jrange.append(w1-1-pixel_offset)
        irange = range(pixel_offset,h1-pixel_offset)
        for j in jrange:
            self.cnt = self.cnt+1
            #progress(self.cnt, self.cnt_total, self.START_TIME, self.BIG )
            y = (w1-j-1) * pixelsize + self.yoffset
            scan = []
            for i in irange:
                self.BIG.update()  # keep the UI responsive during long scans
                #if STOP_CALC: return
                x = i * pixelsize + self.xoffset
                # milldata = (index, (x, y, z), along-slope, cross-slope)
                milldata = (i, (x, y, self.get_z(i, j)),
                            self.get_dz_dx(i, j), self.get_dz_dy(i, j))
                scan.append(milldata)
            for flag, points in convert_scan(primary, scan):
                if flag:
                    # New disconnected span: perform the entry move first.
                    self.entry_cut(self, points[0][0], j, points)
                for p in points:
                    self.g.cut(*p[1])
            self.g.flush()
    def mill_cols(self, convert_scan, primary):
        # Column-wise twin of mill_rows (constant x per scan line).
        global STOP_CALC
        w1 = self.w1
        h1 = self.h1
        pixelsize = self.pixelsize
        pixelstep = self.pixelstep
        pixel_offset = int(math.ceil(self.edge_offset / pixelsize))
        jrange = self.frange(self.splitpixels+pixel_offset, h1-pixel_offset, pixelstep)
        if jrange[0] != pixel_offset: jrange.insert(0,pixel_offset)
        if h1-1-pixel_offset not in jrange: jrange.append(h1-1-pixel_offset)
        irange = range(pixel_offset,w1-pixel_offset)
        # NOTE(review): the next guard duplicates the one two lines above and
        # can never append again; kept verbatim from the original.
        if h1-1-pixel_offset not in jrange: jrange.append(h1-1-pixel_offset)
        jrange.reverse()
        for j in jrange:
            self.cnt = self.cnt+1
            #progress(self.cnt, self.cnt_total, self.START_TIME, self.BIG )
            x = j * pixelsize + self.xoffset
            scan = []
            for i in irange:
                self.BIG.update()  # keep the UI responsive during long scans
                #if STOP_CALC: return
                y = (w1-i-1) * pixelsize + self.yoffset
                # Note the swapped index order vs mill_rows: z at (j, i).
                milldata = (i, (x, y, self.get_z(j, i)),
                            self.get_dz_dy(j, i), self.get_dz_dx(j, i))
                scan.append(milldata)
            for flag, points in convert_scan(primary, scan):
                if flag:
                    self.entry_cut(self, j, points[0][0], points)
                for p in points:
                    self.g.cut(*p[1])
            self.g.flush()
def convert(*args, **kw):
    """Convenience wrapper: build a Converter and run it in one call."""
    converter = Converter(*args, **kw)
    return converter.convert()
class SimpleEntryCut:
    """Entry strategy that rapids over the first point and plunges straight
    down, optionally at a dedicated entry feed rate."""
    def __init__(self, feed):
        # Entry feed rate; falsy means "keep the current feed".
        self.feed = feed
    def __call__(self, conv, i0, j0, points):
        first = points[0][1]  # (x, y, z) of the span's first point
        if self.feed:
            conv.g.set_feed(self.feed)
        conv.g.safety()
        conv.g.rapid(first[0], first[1])
        if self.feed:
            # Restore the converter's normal feed after the entry move.
            conv.g.set_feed(conv.feed)
# Calculate the portion of the arc to do so that none is above the
# safety height (that's just silly)
def circ(r, b):
    """Half-chord of a circle of radius ``r`` at height ``b`` above its
    bottom; clamps to 0 when the geometry is impossible (b outside [0, 2r])."""
    chord_sq = r ** 2 - (r - b) ** 2
    return math.sqrt(chord_sq) if chord_sq > 0 else 0.0
class ArcEntryCut:
    # Entry strategy that feeds into the material along a tangent arc (G2/G3)
    # instead of plunging straight down, limited to ``max_radius``.
    # NOTE(review): uses the Python 2 builtin ``cmp``; indentation
    # reconstructed from a whitespace-mangled source.
    def __init__(self, feed, max_radius):
        self.feed = feed              # entry feed; falsy keeps current feed
        self.max_radius = max_radius  # largest entry arc radius allowed
    def __call__(self, conv, i0, j0, points):
        if len(points) < 2:
            # Single point: degenerate span, fall back to a simple entry.
            p = points[0][1]
            if self.feed:
                conv.g.set_feed(self.feed)
            conv.g.safety()
            conv.g.rapid(p[0], p[1])
            if self.feed:
                conv.g.set_feed(conv.feed)
            return
        p1 = points[0][1]
        p2 = points[1][1]
        z0 = p1[2]
        # Scan up to max_radius away (in pixels) for material that limits
        # how large the entry arc may be.
        lim = int(math.ceil(self.max_radius / conv.pixelsize))
        r = range(1, lim)
        if self.feed:
            conv.g.set_feed(self.feed)
        conv.g.safety()
        x, y, z = p1
        pixelsize = conv.pixelsize
        # Direction of travel along each axis (+1/0/-1).
        cx = cmp(p1[0], p2[0])
        cy = cmp(p1[1], p2[1])
        radius = self.max_radius
        if cx != 0:
            # Row-wise span: arc lies in the XZ plane.
            h1 = conv.h1
            for di in r:
                dx = di * pixelsize
                i = i0 + cx * di
                if i < 0 or i >= h1: break
                z1 = conv.get_z(i, j0)
                dz = (z1 - z0)
                if dz <= 0: continue
                if dz > dx:
                    # Wall steeper than 45 degrees: radius limited to dx.
                    conv.g.write("(case 1)")
                    radius = dx
                    break
                # Largest circle that stays clear of the surface point.
                rad1 = (dx * dx / dz + dz) / 2
                if rad1 < radius:
                    radius = rad1
                if dx > radius:
                    break
            z1 = min(p1[2] + radius, conv.safetyheight)
            x1 = p1[0] + cx * circ(radius, z1 - p1[2])
            conv.g.rapid(x1, p1[1])
            conv.g.cut(z=z1)
            # I/K: arc center relative to the arc start point.
            I = - cx * circ(radius, z1 - p1[2])
            K = (p1[2] + radius) - z1
            conv.g.flush(); conv.g.lastgcode = None
            if cx > 0:
                #conv.g.write("G3 X%f Z%f R%f" % (p1[0], p1[2], radius)) #G3
                conv.g.write("G3 X%f Z%f I%f K%f" % (p1[0], p1[2], I, K))
            else:
                #conv.g.write("G2 X%f Z%f R%f" % (p1[0], p1[2], radius)) #G2
                conv.g.write("G2 X%f Z%f I%f K%f" % (p1[0], p1[2], I, K))
            # Keep the writer's position tracking in sync with the raw write.
            conv.g.lastx = p1[0]
            conv.g.lasty = p1[1]
            conv.g.lastz = p1[2]
        else:
            # Column-wise span: arc lies in the YZ plane (J/K words).
            w1 = conv.w1
            for dj in r:
                dy = dj * pixelsize
                j = j0 - cy * dj
                if j < 0 or j >= w1: break
                z1 = conv.get_z(i0, j)
                dz = (z1 - z0)
                if dz <= 0: continue
                if dz > dy:
                    radius = dy
                    break
                rad1 = (dy * dy / dz + dz) / 2
                if rad1 < radius: radius = rad1
                if dy > radius: break
            z1 = min(p1[2] + radius, conv.safetyheight)
            y1 = p1[1] + cy * circ(radius, z1 - p1[2])
            conv.g.rapid(p1[0], y1)
            conv.g.cut(z=z1)
            J = -cy * circ(radius, z1 - p1[2])
            K = (p1[2] + radius) - z1
            conv.g.flush(); conv.g.lastgcode = None
            if cy > 0:
                #conv.g.write("G2 Y%f Z%f R%f" % (p1[1], p1[2], radius)) #G2
                conv.g.write("G2 Y%f Z%f J%f K%f" % (p1[1], p1[2], J, K))
            else:
                #conv.g.write("G3 Y%f Z%f R%f" % (p1[1], p1[2], radius)) #G3
                conv.g.write("G3 Y%f Z%f J%f K%f" % (p1[1], p1[2], J, K))
            conv.g.lastx = p1[0]
            conv.g.lasty = p1[1]
            conv.g.lastz = p1[2]
        if self.feed:
            conv.g.set_feed(conv.feed)
class Image_Matrix_List: #Nested list (no Numpy)
    # Pure-Python height matrix backed by a list of lists. Mirrors the
    # Image_Matrix_Numpy interface so either backend can be used.
    # Note: literals like 1e1000000 overflow to float +/-inf by design.
    # NOTE(review): indentation reconstructed from a whitespace-mangled source.
    def __init__(self, width=0, height=0):
        self.width = width
        self.height = height
        self.matrix = []            # matrix[i][j] -> float height
        self.shape = [width, height]
    def __call__(self,i,j):
        # Matrix is callable for element access: m(i, j).
        return self.matrix[i][j]
    def Assign(self,i,j,val):
        self.matrix[i][j] = float(val)
    def From_List(self,input_list):
        # Load from a square nested list (used for tool shapes).
        s = len(input_list)
        self.width = s
        self.height = s
        for x in range(s):
            self.api()
            for y in range(s):
                self.apj(x,float(input_list[x][y]))
    def FromImage(self, im, pil_format):
        # Load from a PIL image or a Tk photo-image-like object.
        global STOP_CALC
        self.matrix = []
        if pil_format:
            him,wim = im.size
            for i in range(0,wim):
                self.api()
                for j in range(0,him):
                    pix = im.getpixel((j,i))
                    self.apj(i,pix)
        else:
            him = im.width()
            wim = im.height()
            for i in range(0,wim):
                self.api()
                for j in range(0,him):
                    # Tk .get() may return a "r g b" string or a tuple.
                    try: pix = im.get(j,i).split()
                    except: pix = im.get(j,i)
                    self.apj(i,pix[0])
        self.width = wim
        self.height = him
        self.shape = [wim, him]
        self.t_offset = 0
    def pad_w_zeros(self,tool):
        # Grow the matrix by the tool size, filling new cells with -inf so
        # the border never dominates a max() in height_calc.
        ts = tool.width
        for i in range(len(self.matrix),self.width+ts):
            self.api()
        for i in range(0,len(self.matrix)):
            for j in range(len(self.matrix[i]),self.height+ts):
                self.apj(i,-1e1000000)
    def height_calc(self,x,y,tool):
        # Highest point of (surface - tool profile) with the tool centered
        # at (x, y): the z where the tool first touches the surface.
        ts = tool.width
        d = -1e1000000
        ilow = (int)(x-(ts-1)/2)
        ihigh = (int)(x+(ts-1)/2+1)
        jlow = (int)(y-(ts-1)/2)
        jhigh = (int)(y+(ts-1)/2+1)
        icnt = 0
        for i in range( ilow , ihigh):
            jcnt = 0
            for j in range( jlow , jhigh):
                d = max( d, self(j,i) - tool(jcnt,icnt))
                jcnt = jcnt+1
            icnt = icnt+1
        return d
    def min(self):
        # Minimum over the nominal width x height window.
        minval = 1e1000000
        for i in range(0,self.width):
            for j in range(0,self.height):
                minval = min(minval,self.matrix[i][j])
        return minval
    def max(self):
        # Maximum over the nominal width x height window.
        maxval = -1e1000000
        for i in range(0,self.width):
            for j in range(0,self.height):
                maxval = max(maxval,self.matrix[i][j])
        return maxval
    def api(self):
        # Append a new (empty) row.
        self.matrix.append([])
    def apj(self,i,val):
        # Append a value to row i, coerced to float.
        fval = float(val)
        self.matrix[i].append(fval)
    def mult(self,val):
        # In-place scalar multiply of every cell.
        fval = float(val)
        icnt=0
        for i in self.matrix:
            jcnt = 0
            for j in i:
                self.matrix[icnt][jcnt] = fval * j
                jcnt = jcnt + 1
            icnt=icnt+1
    def minus(self,val):
        # In-place scalar subtraction from every cell.
        fval = float(val)
        icnt=0
        for i in self.matrix:
            jcnt = 0
            for j in i:
                self.matrix[icnt][jcnt] = j - fval
                jcnt = jcnt + 1
            icnt=icnt+1
class Image_Matrix_Numpy:
    # numpy-backed height matrix; same interface as Image_Matrix_List but
    # with vectorized min/max/mult/minus and height_calc.
    # NOTE(review): the 'Float32' dtype string and the integer semantics of
    # (ts-1)/2 assume Python 2 and an old numpy — confirm before porting.
    def __init__(self, width=2, height=2):
        import numpy
        self.width = width
        self.height = height
        self.matrix = numpy.zeros((width, height), 'Float32')
        self.shape = [width, height]
        self.t_offset = 0   # border width added by pad_w_zeros
    def __call__(self,i,j):
        # Callable element access, shifted by the pad offset.
        return self.matrix[i+self.t_offset,j+self.t_offset]
    def Assign(self,i,j,val):
        fval=float(val)
        self.matrix[i+self.t_offset,j+self.t_offset]=fval
    def From_List(self,input_list):
        # Load from a square nested list (used for tool shapes).
        import numpy
        s = len(input_list)
        self.width = s
        self.height = s
        self.matrix = numpy.zeros((s, s), 'Float32')
        for x in range(s):
            for y in range(s):
                self.matrix[x,y]=float(input_list[x][y])
    def FromImage(self, im, pil_format):
        # Load from a PIL image or a Tk photo-image-like object.
        import numpy
        global STOP_CALC
        self.matrix = []
        if pil_format:
            him,wim = im.size
            self.matrix = numpy.zeros((wim, him), 'Float32')
            for i in range(0,wim):
                for j in range(0,him):
                    pix = im.getpixel((j,i))
                    self.matrix[i,j] = float(pix)
        else:
            him = im.width()
            wim = im.height()
            self.matrix = numpy.zeros((wim, him), 'Float32')
            for i in range(0,wim):
                for j in range(0,him):
                    # Tk .get() may return a "r g b" string or a tuple.
                    try: pix = im.get(j,i).split()
                    except: pix = im.get(j,i)
                    self.matrix[i,j] = float(pix[0])
        self.width = wim
        self.height = him
        self.shape = [wim, him]
        self.t_offset = 0
    def pad_w_zeros(self,tool):
        # Surround the image with a -inf border as wide as the tool radius
        # so height_calc can slice without bounds checks.
        import numpy
        ts = tool.width
        self.t_offset = (ts-1)/2
        to = self.t_offset
        w, h = self.shape
        w1 = w + ts-1
        h1 = h + ts-1
        temp = numpy.zeros((w1, h1), 'Float32')
        for j in range(0, w1):
            for i in range(0, h1):
                temp[j,i] = -1e1000000
        temp[to:to+w, to:to+h] = self.matrix
        self.matrix = temp
    def height_calc(self,x,y,tool):
        # Vectorized: peak of (surface window - tool profile) at (x, y).
        to = self.t_offset
        ts = tool.width
        d= -1e100000
        m1 = self.matrix[y:y+ts, x:x+ts]
        d = (m1 - tool.matrix).max()
        return d
    def min(self):
        # Minimum over the unpadded interior window.
        return self.matrix[self.t_offset:self.t_offset+self.width,
                           self.t_offset:self.t_offset+self.height].min()
    def max(self):
        # Maximum over the unpadded interior window.
        return self.matrix[self.t_offset:self.t_offset+self.width,
                           self.t_offset:self.t_offset+self.height].max()
    def mult(self,val):
        self.matrix = self.matrix * float(val)
    def minus(self,val):
        self.matrix = self.matrix - float(val)
################################################################################
# Author.py #
# A component of emc2 #
################################################################################
# Compute the 3D distance from the line segment l1..l2 to the point p.
# (Those are lower case L1 and L2)
def dist_lseg(l1, l2, p):
    """3D distance from the line segment ``l1``..``l2`` to the point ``p``.

    Returns 0 when the segment is degenerate (l1 == l2), matching the
    original behavior."""
    x0, y0, z0 = l1
    xa, ya, za = l2
    xi, yi, zi = p
    dx, dy, dz = xa - x0, ya - y0, za - z0
    d2 = dx * dx + dy * dy + dz * dz
    if d2 == 0:
        return 0
    # Projection parameter of p onto the line, clamped to the segment.
    t = (dx * (xi - x0) + dy * (yi - y0) + dz * (zi - z0)) / d2
    t = min(1, max(0, t))
    dist2 = (xi - x0 - t * dx) ** 2 + (yi - y0 - t * dy) ** 2 + (zi - z0 - t * dz) ** 2
    return dist2 ** .5
def rad1(x1, y1, x2, y2, x3, y3):
    """Circumradius of the triangle (x1,y1),(x2,y2),(x3,y3).

    Returns MAXINT when the points are (nearly) collinear, i.e. when the
    doubled triangle area is below 1e-5."""
    x12, y12 = x1 - x2, y1 - y2
    x23, y23 = x2 - x3, y2 - y3
    x31, y31 = x3 - x1, y3 - y1
    den = abs(x12 * y23 - x23 * y12)  # twice the triangle area
    if abs(den) < 1e-5:
        return MAXINT
    # R = abc / (4 * area) = abc / (2 * den)
    side_a = math.hypot(float(x12), float(y12))
    side_b = math.hypot(float(x23), float(y23))
    side_c = math.hypot(float(x31), float(y31))
    return side_a * side_b * side_c / 2 / den
class Point:
    """Minimal 2D vector used by the arc-fitting helpers below.

    Supports +, -, scalar * (both sides), cross/dot products and magnitude."""
    def __init__(self, x, y):
        self.x = x
        self.y = y
    def __str__(self):
        return "<%f,%f>" % (self.x, self.y)
    def __sub__(self, other):
        return Point(self.x - other.x, self.y - other.y)
    def __add__(self, other):
        return Point(self.x + other.x, self.y + other.y)
    def __mul__(self, other):
        # ``other`` is a plain scalar.
        return Point(self.x * other, self.y * other)
    __rmul__ = __mul__
    def cross(self, other):
        """Z component of the 3D cross product (signed parallelogram area)."""
        return self.x * other.y - self.y * other.x
    def dot(self, other):
        """Scalar (dot) product."""
        return self.x * other.x + self.y * other.y
    def mag(self):
        """Euclidean length."""
        return math.hypot(self.x, self.y)
    def mag2(self):
        """Squared Euclidean length (avoids the sqrt)."""
        return self.x ** 2 + self.y ** 2
def cent1(x1,y1,x2,y2,x3,y3):
    # Circumcenter of the triangle through the three 2D points, computed
    # with barycentric weights. Returns (MAXINT, MAXINT) when the points
    # are (nearly) collinear, i.e. the doubled area is below 1e-5.
    P1 = Point(x1,y1)
    P2 = Point(x2,y2)
    P3 = Point(x3,y3)
    den = abs((P1-P2).cross(P2-P3))  # twice the triangle area
    if abs(den) < 1e-5: return MAXINT, MAXINT
    # Barycentric coordinates of the circumcenter.
    alpha = (P2-P3).mag2() * (P1-P2).dot(P1-P3) / 2 / den / den
    beta = (P1-P3).mag2() * (P2-P1).dot(P2-P3) / 2 / den / den
    gamma = (P1-P2).mag2() * (P3-P1).dot(P3-P2) / 2 / den / den
    Pc = alpha * P1 + beta * P2 + gamma * P3
    return Pc.x, Pc.y
def arc_center(plane, p1, p2, p3):
    """Circumcenter of three 3D points projected into G-code plane 17/18/19.

    Plane 17 = XY, 18 = XZ, 19 = YZ; any other plane falls through to None
    (implicitly), as in the original."""
    x1, y1, z1 = p1
    x2, y2, z2 = p2
    x3, y3, z3 = p3
    if plane == 17:
        return cent1(x1, y1, x2, y2, x3, y3)
    if plane == 18:
        return cent1(x1, z1, x2, z2, x3, z3)
    if plane == 19:
        return cent1(y1, z1, y2, z2, y3, z3)
def arc_rad(plane, P1, P2, P3):
    """Circumradius of three 3D points projected into G-code plane 17/18/19.

    Returns MAXINT ("no usable arc") when ``plane`` is None or unrecognized.

    FIX(review): the original fell through to ``return None, 0`` for an
    unknown plane — a tuple, inconsistent with every other (scalar) return,
    which would crash callers that compare ``rad < min_rad`` under Python 3.
    Return the MAXINT sentinel instead, matching the ``plane is None`` case.
    """
    if plane is None: return MAXINT
    x1, y1, z1 = P1
    x2, y2, z2 = P2
    x3, y3, z3 = P3
    if plane == 17: return rad1(x1,y1,x2,y2,x3,y3)
    if plane == 18: return rad1(x1,z1,x2,z2,x3,z3)
    if plane == 19: return rad1(y1,z1,y2,z2,y3,z3)
    return MAXINT
def get_pts(plane, x, y, z):
    """Project a 3D point into the 2D coordinates of G-code plane 17/18/19
    (XY, XZ, YZ respectively); None for any other plane."""
    return {17: (x, y), 18: (x, z), 19: (y, z)}.get(plane)
def one_quadrant(plane, c, p1, p2, p3):
    # True when the three points, relative to center ``c``, all lie in the
    # same quadrant of the chosen plane (axis-line points are merged into an
    # adjacent occupied quadrant). Returns None (falsy) otherwise.
    xc, yc = c
    x1, y1 = get_pts(plane, p1[0],p1[1],p1[2])
    x2, y2 = get_pts(plane, p2[0],p2[1],p2[2])
    x3, y3 = get_pts(plane, p3[0],p3[1],p3[2])
    def sign(x):
        # Three-way sign with a 1e-5 dead zone around zero.
        if abs(x) < 1e-5: return 0
        if x < 0: return -1
        return 1
    signs = set((
        (sign(x1-xc),sign(y1-yc)),
        (sign(x2-xc),sign(y2-yc)),
        (sign(x3-xc),sign(y3-yc))
    ))
    if len(signs) == 1: return True
    # Points on an axis (sign 0) are compatible with a neighbouring
    # quadrant: discard the axis entries dominated by a full quadrant.
    if (1,1) in signs:
        signs.discard((1,0))
        signs.discard((0,1))
    if (1,-1) in signs:
        signs.discard((1,0))
        signs.discard((0,-1))
    if (-1,1) in signs:
        signs.discard((-1,0))
        signs.discard((0,1))
    if (-1,-1) in signs:
        signs.discard((-1,0))
        signs.discard((0,-1))
    if len(signs) == 1: return True
def arc_dir(plane, c, p1, p2, p3):
    # Decide the arc direction through p1 -> p2 -> p3 around center ``c``:
    # returns True when the counterclockwise sweep from start through mid to
    # end stays under a full turn (i.e. the arc is CCW), False otherwise.
    xc, yc = c
    x1, y1 = get_pts(plane, p1[0],p1[1],p1[2])
    x2, y2 = get_pts(plane, p2[0],p2[1],p2[2])
    x3, y3 = get_pts(plane, p3[0],p3[1],p3[2])
    theta_start = math.atan2(y1-yc, x1-xc)
    theta_mid = math.atan2(y2-yc, x2-xc)
    theta_end = math.atan2(y3-yc, x3-xc)
    # Unwrap angles so start <= mid <= end measured counterclockwise.
    if theta_mid < theta_start:
        theta_mid = theta_mid + 2 * math.pi
    while theta_end < theta_mid:
        theta_end = theta_end + 2 * math.pi
    return theta_end < 2 * math.pi
def arc_fmt(plane, c1, c2, p1):
    """Format the arc-center words for the given plane: I/J/K values are the
    center (c1, c2) relative to the arc start point ``p1``."""
    x, y, z = p1
    if plane == 17:
        return "I%.4f J%.4f" % (c1 - x, c2 - y)
    if plane == 18:
        return "I%.4f K%.4f" % (c1 - x, c2 - z)
    if plane == 19:
        return "J%.4f K%.4f" % (c1 - y, c2 - z)
# Perform Douglas-Peucker simplification on the path 'st' with the specified
# tolerance. The '_first' argument is for internal use only.
#
# The Douglas-Peucker simplification algorithm finds a subset of the input points
# whose path is never more than 'tolerance' away from the original input path.
#
# If 'plane' is specified as 17, 18, or 19, it may find helical arcs in the given
# plane in addition to lines. Note that if there is movement in the plane
# perpendicular to the arc, it will be distorted, so 'plane' should usually
# be specified only when there is only movement on 2 axes
def douglas(st, tolerance=.001, plane=None, _first=True):
    # Recursive Douglas-Peucker simplification of the point list ``st``,
    # yielding ("G1"/"G2"/"G3", point, center-words-or-None) moves. When
    # ``plane`` is 17/18/19 it also tries to fit a single arc through the
    # span and emits G2/G3 when the arc fits better than the worst line
    # deviation. NOTE(review): indentation reconstructed from a
    # whitespace-mangled source.
    if len(st) == 1:
        yield "G1", st[0], None
        return
    l1 = st[0]
    l2 = st[-1]
    worst_dist = 0   # worst point-to-chord distance (line criterion)
    worst = 0        # index of that worst point
    min_rad = MAXINT # smallest circumradius seen (arc candidate)
    max_arc = -1     # index of the point giving min_rad
    ps = st[0]
    pe = st[-1]
    for i, p in enumerate(st):
        if p is l1 or p is l2: continue
        dist = dist_lseg(l1, l2, p)
        if dist > worst_dist:
            worst = i
            worst_dist = dist
        rad = arc_rad(plane, ps, p, pe)
        if rad < min_rad:
            max_arc = i
            min_rad = rad
    worst_arc_dist = 0
    if min_rad != MAXINT:
        c1, c2 = arc_center(plane, ps, st[max_arc], pe)
        lx, ly, lz = st[0]
        if one_quadrant(plane, (c1, c2), ps, st[max_arc], pe):
            # Measure how far every sample (and each segment midpoint)
            # strays from the candidate circle.
            for i, (x,y,z) in enumerate(st):
                if plane == 17: dist = abs(math.hypot(c1-x, c2-y) - min_rad)
                elif plane == 18: dist = abs(math.hypot(c1-x, c2-z) - min_rad)
                elif plane == 19: dist = abs(math.hypot(c1-y, c2-z) - min_rad)
                else: dist = MAXINT
                if dist > worst_arc_dist: worst_arc_dist = dist
                mx = (x+lx)/2
                my = (y+ly)/2
                mz = (z+lz)/2
                if plane == 17: dist = abs(math.hypot(c1-mx, c2-my) - min_rad)
                elif plane == 18: dist = abs(math.hypot(c1-mx, c2-mz) - min_rad)
                elif plane == 19: dist = abs(math.hypot(c1-my, c2-mz) - min_rad)
                else: dist = MAXINT
                # NOTE(review): this midpoint distance is computed but never
                # compared against worst_arc_dist — kept as in the original.
                lx, ly, lz = x, y, z
        else:
            worst_arc_dist = MAXINT
    else:
        worst_arc_dist = MAXINT
    if worst_arc_dist < tolerance and worst_arc_dist < worst_dist:
        # The arc fits better than the chord: emit a single G2/G3 move.
        ccw = arc_dir(plane, (c1, c2), ps, st[max_arc], pe)
        if plane == 18: ccw = not ccw  # XZ plane has mirrored handedness
        yield "G1", ps, None
        if ccw:
            yield "G3", st[-1], arc_fmt(plane, c1, c2, ps)
        else:
            yield "G2", st[-1], arc_fmt(plane, c1, c2, ps)
    elif worst_dist > tolerance:
        # Chord too coarse: split at the worst point and recurse.
        if _first: yield "G1", st[0], None
        for i in douglas(st[:worst+1], tolerance, plane, False):
            yield i
        yield "G1", st[worst], None
        for i in douglas(st[worst:], tolerance, plane, False):
            yield i
        if _first: yield "G1", st[-1], None
    else:
        # Whole span within tolerance of the chord: keep only endpoints.
        if _first: yield "G1", st[0], None
        if _first: yield "G1", st[-1], None
# For creating rs274ngc files
class Gcode:
def __init__(self, homeheight = 1.5, safetyheight = 0.04,
tolerance=0.001, units="G20", header="", postscript="",
target=lambda s: sys.stdout.write(s + "\n"),
disable_arcs = False):
self.lastx = self.lasty = self.lastz = self.lasta = None
self.lastgcode = self.lastfeed = None
self.homeheight = homeheight
self.safetyheight = self.lastz = safetyheight
self.tolerance = tolerance
self.units = units
self.cuts = []
self.write = target
self.time = 0
self.plane = None
self.header = header
self.postscript = postscript
self.disable_arcs = disable_arcs
def set_plane(self, p):
if (not self.disable_arcs):
assert p in (17,18,19)
if p != self.plane:
self.plane = p
self.write("G%d" % p)
# This function write header and move to safety height
def begin(self):
self.write(self.header)
#self.write(self.units)
if not self.disable_arcs:
self.write("G91.1")
#self.safety()
#self.rapid(z=self.safetyheight)
self.write("G0 Z%.4f" % (self.safetyheight))
#["G17 G40","G80 G90 G94 G91.1"]
# If any 'cut' moves are stored up, send them to the simplification algorithm
# and actually output them.
#
# This function is usually used internally (e.g., when changing from a cut
# to a rapid) but can be called manually as well. For instance, when
# a contouring program reaches the end of a row, it may be desirable to enforce
# that the last 'cut' coordinate is actually in the output file, and it may
# give better performance because this means that the simplification algorithm
# will examine fewer points per run.
def flush(self):
if not self.cuts: return
for move, (x, y, z), cent in douglas(self.cuts, self.tolerance, self.plane):
if cent:
self.write("%s X%.4f Y%.4f Z%.4f %s" % (move, x, y, z, cent))
self.lastgcode = None
self.lastx = x
self.lasty = y
self.lastz = z
else:
self.move_common(x, y, z, gcode="G1")
self.cuts = []
def end(self):
#"""End the program"""
self.flush()
self.safety()
self.write(self.postscript)
# """\
#Set exact path mode. Note that unless self.tolerance is set to zero,
#the simplification algorithm may still skip over specified points."""
#def exactpath(self):
# self.write("G61")
# Set continuous mode.
#def continuous(self, tolerance=0.0): #commented V0.7
# if tolerance > 0.0: #commented V0.7
# self.write("G64 P%.4f" % tolerance)#commented V0.7
# else: #commented V0.7
# self.write("G64") #commented V0.7
def rapid(self, x=None, y=None, z=None, a=None):
#"Perform a rapid move to the specified coordinates"
self.flush()
self.move_common(x, y, z, a, "G0")
def move_common(self, x=None, y=None, z=None, a=None, gcode="G0"):
#"An internal function used for G0 and G1 moves"
gcodestring = xstring = ystring = zstring = astring = ""
if x == None: x = self.lastx
if y == None: y = self.lasty
if z == None: z = self.lastz
if a == None: a = self.lasta
if x != self.lastx:
xstring = " X%.4f" % (x)
self.lastx = x
if y != self.lasty:
ystring = " Y%.4f" % (y)
self.lasty = y
if z != self.lastz:
zstring = " Z%.4f" % (z)
self.lastz = z
if a != self.lasta:
astring = " A%.4f" % (a)
self.lasta = a
if xstring == ystring == zstring == astring == "":
return
if gcode != self.lastgcode:
gcodestring = gcode
self.lastgcode = gcode
cmd = "".join([gcodestring, xstring, ystring, zstring, astring])
if cmd:
self.write(cmd)
    def set_feed(self, feed):
        #"Set the feed rate to the given value"
        # Flush first: queued cuts must be emitted under the old feed rate.
        self.flush()
        self.write("F%.4f" % feed)
def cut(self, x=None, y=None, z=None):
#"Perform a cutting move at the specified feed rate to the specified coordinates"
if self.cuts:
lastx, lasty, lastz = self.cuts[-1]
else:
lastx, lasty, lastz = self.lastx, self.lasty, self.lastz
if x is None: x = lastx
if y is None: y = lasty
if z is None: z = lastz
self.cuts.append([x,y,z])
    def home(self):
        #"Go to the 'home' height at rapid speed"
        self.flush()
        self.rapid(z=self.homeheight)
    def safety(self):
        #"Go to the 'safety' height at rapid speed"
        self.flush()
        self.rapid(z=self.safetyheight)
|
import { useStaticQuery, graphql } from 'gatsby';
import getOgpImage from '../utils/get-ogp-image';
// Hook: returns {title, socialImage, slug} for the articles whose slug is in
// `paths`, falling back to a generated OGP image when none was uploaded.
const useAllMarkdownRemarkForPopularList = (paths) => {
  const { allStrapiArticle } = useStaticQuery(
    graphql`
      query AllMarkdownRemarkForPopular {
        allStrapiArticle {
          nodes {
            slug
            title
            socialImage {
              publicURL
            }
          }
        }
      }`
  );
  return allStrapiArticle.nodes
    .filter((article) => paths.includes(article.slug))
    .map((article) => ({
      title: article.title,
      socialImage: article.socialImage
        ? article.socialImage.publicURL
        : getOgpImage(article.title),
      slug: article.slug
    }));
};
export default useAllMarkdownRemarkForPopularList;
|
#!/bin/bash
# Create a Keycloak client inside the running container and verify that the
# installation output mentions the new client as its "resource".
CLIENTNAME=my-app
# Fix: quote expansions — unquoted they undergo word splitting/globbing, and
# the unquoted `echo $OUTPUT` also collapsed the JSON's whitespace.
OUTPUT=$(docker exec keycloak /tmp/keycloak/create_client.sh "$CLIENTNAME")
if [[ $OUTPUT == *"\"resource\" : \"$CLIENTNAME\""* ]]; then
    echo "SUCCESS"
    echo "$OUTPUT"
    exit 0
else
    echo "FAILURE"
    echo "OUTPUT WAS: $OUTPUT"
    exit 1
fi
|
#!/usr/bin/env bash
# Raise Oracle XE connection limits, then restart so spfile changes apply.
# Increase the number of server processes (scope=spfile: needs a restart).
echo "alter system set processes=250 scope=spfile;" | sqlplus -s SYSTEM/oracle
# Reset "sessions" so Oracle re-derives it from the new process count.
echo "alter system reset sessions scope=spfile sid='*';" | sqlplus -s SYSTEM/oracle
service oracle-xe restart
echo "alter system disable restricted session;" | sqlplus -s SYSTEM/oracle
# Print the resulting values so the build log shows what took effect.
echo "show parameter sessions;" | sqlplus -s SYSTEM/oracle
echo "show parameter processes;" | sqlplus -s SYSTEM/oracle
import React from 'react';
export default class App extends React.Component {
state = {
products: [],
sortedProducts: [],
filteredProducts: [],
category: '',
sortMode: ''
};
componentDidMount() {
// fetch products from API
const products = [ ... ];
this.setState({
products: products,
sortedProducts: products,
filteredProducts: products
});
}
sortProducts = () => {
const copy = [ ...this.state.products ];
const sorted = [];
switch (this.state.sortMode) {
case 'ascending':
sorted = copy.sort((a, b) => a.name.localeCompare(b.name));
break;
case 'descending':
sorted = copy.sort((a, b) =>
b.name.localeCompare(a.name)
);
break;
default:
sorted = copy;
}
this.setState({ sortedProducts: sorted });
};
filterProducts = () => {
let { products, category } = this.state;
const filtered = products.filter(
product => product.category === category
);
this.setState({ filteredProducts: filtered });
};
render() {
return (
<div>
<h1>Product List</h1>
<p>
Sort products:
<select onChange={e => this.setState({ sortMode: e.target.value })}>
<option value="">None</option>
<option value="ascending">Ascending</option>
<option value="descending">Descending</option>
</select>
<button onClick={this.sortProducts}>Sort</button>
</p>
<p>
Filter products:
<select onChange={e => this.setState({ category: e.target.value })}>
<option value="">None</option>
<option value="books">Books</option>
<option value="clothing">Clothing</option>
</select>
<button onClick={this.filterProducts}>Filter</button>
</p>
{this.state.filteredProducts.map(product => (
<div key={product.id}>{product.name}</div>
))}
</div>
);
}
} |
import { calculateCost, Item } from './lib/calculateCost';
// Two sample items; `multiBuy: [n, m]` presumably reads "buy n, pay for m" —
// TODO confirm against calculateCost's contract.
const apple: Item = {
  id: 'apple',
  displayName: 'Apple',
  price: 60, // price in pence (integer) — assumed; see formatting below
  multiBuy: [2, 1],
};

const orange: Item = {
  id: 'orange',
  displayName: 'Orange',
  price: 25,
  multiBuy: [3, 2],
};

const basket = [apple, apple, orange, apple, orange, orange];

const price = calculateCost(basket);

// Format an integer pence total as pounds.pence; padStart(3, '0') guarantees
// at least one pound digit plus two pence digits (e.g. 5 -> "005" -> £0.05).
const priceString = price.toString().padStart(3, '0');
const pence = priceString.slice(priceString.length - 2);
const pounds = priceString.slice(0, priceString.length - 2);
console.log(`The total price of your basket is £${pounds}.${pence}.`);
|
#!/bin/bash
#==============================================================================
# emacs-config installation script
#
# 28 May 2021 -- Bob Yantosca -- yantosca@seas.harvard.edu
#==============================================================================

# Fix: abort on the first failing command. Previously a failed `cd`, `cmake`
# or `make` was ignored and the script still reported "Done!".
set -e

# Copy startup files ~/.emacs.d folder
# The user can customize these further
echo "---------------------------------------"
echo "Copying the init.org file to ~/.emacs.d"
echo "---------------------------------------"
echo ""
echo "1. Copying *.org files file to .emacs.d"
echo ""
cp -f ./emacs-config.org ~/.emacs.d
cp -f ./init.el ~/.emacs.d

# Skip building the emacs vterm if any argument is passed
if [[ "x${1}" != "x" ]]; then
    echo "2. Skip building emacs-libvterm..."
    echo ""
    echo "3. Done!"
    exit 0
fi

# Load all submodules
echo "2. Attempting to build the vterm module. If this fails"
echo " on your system, you can disable vterm by setting"
echo " '(setq enable-vterm nil)' in ~/.emacs.d/init.el."
echo ""
git submodule update --init --recursive

# Build the emacs-libvterm module
cd emacs-libvterm
# Fix: -p so a leftover build directory from a previous run doesn't abort.
mkdir -p build
cd build
cmake ..
make
cd ..
rm -rf build

# We're done!
echo ""
echo "3. Done!"
|
#include <unordered_map>
#include <memory>
#include <typeindex>
#include <stdexcept>
// Base class for all components attachable to an Entity.
// The virtual destructor makes deletion through a Component pointer safe and
// gives the hierarchy a vtable (required for dynamic_cast on components).
class Component {
public:
    using SharedPtr = std::shared_ptr<Component>;
    virtual ~Component() = default;  // = default: modern idiom for the empty body
};
// Type-indexed component container: at most one component per static type T.
class Entity {
public:
    // Registers `component` under the static type T, replacing any previous
    // component stored for T.
    template <typename T>
    void addComponent(Component::SharedPtr component) {
        components[typeid(T)] = component;
    }

    // Returns the component registered as T.
    // Throws std::out_of_range when no component is registered for T, and
    // std::bad_cast when the stored object is not actually a T.
    template <typename T>
    const T& getComponent() const {
        auto it = components.find(typeid(T));
        if (it == components.end()) {
            throw std::out_of_range("Component not found");
        }
        // Fix: static_cast to a reference of an unrelated derived type is
        // undefined behavior if the stored component was registered under the
        // wrong key; dynamic_cast checks the real type and throws instead.
        return dynamic_cast<const T&>(*it->second);
    }

private:
    std::unordered_map<std::type_index, Component::SharedPtr> components;
};
<filename>src/main/java/cn/gobyte/apply/service/user/UserService.java<gh_stars>1-10
package cn.gobyte.apply.service.user;
import cn.gobyte.apply.domain.ResponseBo;
import cn.gobyte.apply.pojo.user.User;
import cn.gobyte.apply.pojo.user.UserVo;
import cn.gobyte.apply.service.IService;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * User service.
 *
 * <p>NOTE(review): {@code @Service} on an interface is unusual — Spring
 * component scanning ignores interfaces, so the stereotype normally belongs on
 * the implementation class; left in place to avoid changing wiring.
 *
 * @author shanLan <EMAIL>
 * @date 2019/4/24 2:03
 */
@Service
public interface UserService extends IService<User> {
    /**
     * Registers a user. The ID-card number and email must not already be
     * registered.
     *
     * @param user the user to persist
     * @return ResponseBo — true on success, false on failure
     * @author shanLan <EMAIL>
     * @date 2019/3/28 21:53
     */
    ResponseBo register(User user);

    /**
     * Logs a user in.
     *
     * @param user the user (login credentials)
     * @author shanLan <EMAIL>
     * @date 2019/4/5 21:34
     */
    void login(UserVo user);

    /**
     * Finds a user by email address or ID-card number.
     *
     * @param username login name — either an ID-card number or an email
     * @return the matching user
     * @author shanLan <EMAIL>
     * @date 2019/4/11 1:28
     */
    User findByEmailOrIdNumber(String username);

    /**
     * Updates the user's last-login time.
     *
     * @param userName the user name to update
     * @author shanLan <EMAIL>
     * @date 2019/4/23 0:01
     */
    void updateLoginTimeByIdNumber(String userName);

    /**
     * Updates the user's login count by id.
     *
     * @param id     ID-card number
     * @param number login count
     * @author shanLan <EMAIL>
     * @date 2019/4/24 20:09
     */
    void updateLoginTotal(String id, String number);

    /**
     * Fetches user information by id.
     *
     * @param id ID-card number
     * @author shanLan <EMAIL>
     * @date 2019/4/25 1:32
     */
    User findById(String id);

    /**
     * Updates a user's profile.
     *
     * @param user the user
     * @return cn.gobyte.apply.domain.ResponseBo
     * @author shanLan <EMAIL>
     * @date 2019/4/25 20:12
     */
    ResponseBo updateUser(User user);

    /**
     * Changes a user's password.
     *
     * @param password <PASSWORD>
     * @param id       user name, usually the ID-card number
     * @return cn.gobyte.apply.domain.ResponseBo
     * @author shanLan <EMAIL>
     * @date 2019/4/26 0:59
     */
    ResponseBo updatePassword(String password, String id);

    /**
     * Looks up the security question by name and ID-card number.
     *
     * <p>NOTE(review): "seleteAnswer" (sic — likely "selectAnswer") is part of
     * the published interface; renaming would break implementors and callers.
     *
     * @param name name
     * @param id   ID-card number
     * @return cn.gobyte.apply.domain.ResponseBo
     * @author shanLan <EMAIL>
     * @date 2019/4/27 20:38
     */
    ResponseBo seleteAnswer(String name, String id);

    /**
     * Verifies name, ID-card number, and the security-question answer.
     *
     * @param name   name
     * @param id     ID-card number
     * @param answer security-question answer
     * @return cn.gobyte.apply.domain.ResponseBo
     * @author shanLan <EMAIL>
     * @date 2019/4/27 20:45
     */
    ResponseBo seleteAnswer(String name, String id, String answer);

    /**
     * Resets the password after verifying name, ID-card number and the
     * security-question answer.
     *
     * @param name      name
     * @param id        ID-card number
     * @param answer    security-question answer
     * @param password1 <PASSWORD>
     * @param password2 <PASSWORD>
     * @return cn.gobyte.apply.domain.ResponseBo
     * @author shanLan <EMAIL>
     * @date 2019/4/27 20:49
     */
    ResponseBo updatePassword(String name, String id, String answer, String password1, String password2);

    /**
     * Queries the user list by user name or ID-card number.
     *
     * @param user the user holding the query criteria
     * @return java.util.List&lt;cn.gobyte.apply.pojo.user.User&gt;
     * @author shanLan <EMAIL>
     * @date 2019/4/4 22:41
     */
    List<User> findUserByUsernameOrIdNumber(User user);

    /**
     * Physically deletes users by id.
     *
     * @param userIds user-id string; multiple ids separated by commas
     * @author shanLan <EMAIL>
     * @date 2019/4/7 11:02
     */
    void deleteUsers(String userIds);
}
|
<filename>lib/core/src/firebase/firestore/models/firemodel.ts
import * as admin from 'firebase-admin'
import { db, timestamp, serverTimestamp } from '../../core'
type CollectionReference = admin.firestore.CollectionReference;
type DocumentReference = admin.firestore.DocumentReference;
type DocumentSnapshot = admin.firestore.DocumentSnapshot
// Lightweight ActiveRecord-style wrapper around a single Firestore document.
// Subclasses are expected to override `collection`; this base class points at
// a placeholder collection named '_'.
export class Firemodel {
    id: string
    // Last known document payload (kept in sync by set/save/get).
    data: Record<string, unknown>
    // Only populated when the instance was constructed from a snapshot.
    snapshot: DocumentSnapshot

    // Accepts either a document id (string) or a DocumentSnapshot.
    // NOTE(review): `id` is untyped — presumably string | DocumentSnapshot; confirm.
    constructor(id?) {
        this.data = {}
        if (id) {
            if (typeof id === 'string') {
                this.id = id
            } else {
                const snapshot = id
                this.id = snapshot.id
                this.data = snapshot.data()
                this.snapshot = snapshot
            }
        }
    }

    // Collection this model lives in; override in subclasses.
    get collection(): CollectionReference {
        return db().collection('_')
    }

    // Reference to this instance's document (requires `id` to be set).
    get doc(): DocumentReference {
        return this.collection.doc(this.id)
    }

    // Create/overwrite the document under an explicit id, stamping both
    // createdAt and updatedAt with the server time.
    async set(id, data) {
        const serverStamp = serverTimestamp()
        const { writeTime } = await this.collection.doc(id).set({
            ...data,
            createdAt: serverStamp,
            updatedAt: serverStamp,
        })
        this.id = id
        // Mirror the write locally, substituting the resolved write time for
        // the server-timestamp sentinel.
        this.data = {
            ...data,
            createdAt: writeTime,
            updatedAt: writeTime,
        }
        return this
    }

    // Update the existing document when `id` is set, otherwise add a new one.
    // createdAt is never overwritten on update.
    async save(data) {
        const serverStamp = serverTimestamp()
        if (this.id) {
            // omit createdAt
            const copy = {}
            const keys = Object.keys(data)
            for (let i = 0, len = keys.length; i < len; i++) {
                const key = keys[i]
                if (key !== 'createdAt') copy[key] = data[key]
            }
            const { writeTime } = await this.doc.update({
                ...copy,
                updatedAt: serverStamp,
            })
            this.data = {
                ...this.data,
                ...data,
                updatedAt: writeTime,
            }
        } else {
            const result = await this.collection.add({
                ...data,
                createdAt: serverStamp,
                updatedAt: serverStamp,
            })
            // so we don't have to trigger a .get() just to see the new stamps
            const emulatedStamp = timestamp()
            this.id = result.id
            this.data = {
                ...data,
                createdAt: emulatedStamp,
                updatedAt: emulatedStamp,
            }
        }
        return this
    }

    // Reload the document payload from Firestore into `data`.
    async get() {
        if (!this.id) throw new Error('Could not get document, missing id property')
        const result = await this.doc.get()
        this.data = result.data()
        return this
    }

    // Delete the underlying document; resolves with the WriteResult.
    delete() {
        return this.doc.delete()
    }
}

export default Firemodel
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
# Copyright (C) 2017-2020 The LineageOS Project
#
# SPDX-License-Identifier: Apache-2.0
#
# Required!
export DEVICE=platina
export VENDOR=xiaomi
export DEVICE_BRINGUP_YEAR=2020

set -e

# Load extract_utils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
if [[ ! -d "${MY_DIR}" ]]; then MY_DIR="${PWD}"; fi

ANDROID_ROOT="${MY_DIR}/../../.."

HELPER="${ANDROID_ROOT}/tools/extract-utils/extract_utils.sh"
if [ ! -f "${HELPER}" ]; then
    echo "Unable to find helper script at ${HELPER}"
    exit 1
fi
source "${HELPER}"

# Default to sanitizing the vendor folder before extraction
CLEAN_VENDOR=true

ONLY_COMMON=
ONLY_DEVICE_COMMON=
ONLY_TARGET=
KANG=
SECTION=

# Parse flags; any non-flag argument becomes the extraction source (SRC).
while [ "${#}" -gt 0 ]; do
    case "${1}" in
        --only-common )
            ONLY_COMMON=true
            ;;
        --only-device-common )
            ONLY_DEVICE_COMMON=true
            ;;
        --only-target )
            ONLY_TARGET=true
            ;;
        -n | --no-cleanup )
            CLEAN_VENDOR=false
            ;;
        -k | --kang )
            KANG="--kang"
            ;;
        -s | --section )
            SECTION="${2}"; shift
            CLEAN_VENDOR=false
            ;;
        * )
            SRC="${1}"
            ;;
    esac
    shift
done

# Default to pulling blobs from a connected device via adb.
if [ -z "${SRC}" ]; then
    SRC="adb"
fi
# Per-file post-processing applied after extraction.
# $1 = blob's destination path (relative), $2 = extracted file on disk.
function blob_fixup() {
    case "${1}" in
        system_ext/etc/init/dpmd.rc)
            # dpmd binaries moved from /system/product to /system/system_ext
            sed -i "s|/system/product/bin/|/system/system_ext/bin/|g" "${2}"
            ;;
        system_ext/etc/permissions/com.qti.dpmframework.xml | system_ext/etc/permissions/dpmapi.xml | system_ext/etc/permissions/telephonyservice.xml)
            # same relocation for framework jar references in permission XMLs
            sed -i "s|/system/product/framework/|/system/system_ext/framework/|g" "${2}"
            ;;
        system_ext/etc/permissions/qcrilhook.xml)
            sed -i 's|/product/framework/qcrilhook.jar|/system_ext/framework/qcrilhook.jar|g' "${2}"
            ;;
        system_ext/lib64/libdpmframework.so)
            # inject shim library dependency for symbols missing on newer Android
            "${PATCHELF}" --add-needed "libshim_dpmframework.so" "${2}"
            ;;
    esac
}
if [ -z "${ONLY_TARGET}" ] && [ -z "${ONLY_DEVICE_COMMON}" ]; then
    # Initialize the helper for common device
    setup_vendor "${DEVICE_COMMON}" "${VENDOR}" "${ANDROID_ROOT}" true "${CLEAN_VENDOR}"
    # Consistency fix: quote "${KANG}" like every other extract invocation in
    # this script — unquoted it undergoes word splitting/globbing.
    extract "${MY_DIR}/proprietary-files.txt" "${SRC}" "${KANG}" --section "${SECTION}"
    extract "${MY_DIR}/proprietary-files-fm.txt" "${SRC}" "${KANG}" --section "${SECTION}"
fi

if [ -z "${ONLY_COMMON}" ] && [ -z "${ONLY_TARGET}" ] && [ -s "${MY_DIR}/../${DEVICE_SPECIFIED_COMMON}/proprietary-files.txt" ];then
    # Reinitialize the helper for device specified common
    source "${MY_DIR}/../${DEVICE_SPECIFIED_COMMON}/extract-files.sh"
    setup_vendor "${DEVICE_SPECIFIED_COMMON}" "${VENDOR}" "${ANDROID_ROOT}" false "${CLEAN_VENDOR}"
    extract "${MY_DIR}/../${DEVICE_SPECIFIED_COMMON}/proprietary-files.txt" "${SRC}" "${KANG}" --section "${SECTION}"
fi

if [ -z "${ONLY_COMMON}" ] && [ -z "${ONLY_DEVICE_COMMON}" ] && [ -s "${MY_DIR}/../${DEVICE}/proprietary-files.txt" ]; then
    # Reinitialize the helper for device
    source "${MY_DIR}/../${DEVICE}/extract-files.sh"
    setup_vendor "${DEVICE}" "${VENDOR}" "${ANDROID_ROOT}" false "${CLEAN_VENDOR}"
    extract "${MY_DIR}/../${DEVICE}/proprietary-files.txt" "${SRC}" "${KANG}" --section "${SECTION}"
fi

"${MY_DIR}/setup-makefiles.sh"
|
<filename>client/script.js
// Pick a random quote and display it in the page's <h2>.
const generate = () => {
    // BUG fix: `Math.random() * (quotes.length - 1)` could never select the
    // last quote; floor(random * length) covers every index 0..length-1.
    const message = quotes[Math.floor(Math.random() * quotes.length)];
    document.querySelector('h2').textContent = `"${message}"`;
}
window.addEventListener('load', generate);
document.getElementById('btn__more-quotes').addEventListener('click', generate);
|
import random
def generate_random_string(length):
    """Return a random alphanumeric string of the given length.

    Args:
        length: number of characters to generate; 0 or negative yields "".

    Note: uses ``random.choice``, which is NOT cryptographically secure —
    use the ``secrets`` module for tokens or passwords.
    """
    chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    # join over a generator avoids quadratic string concatenation in a loop
    return "".join(random.choice(chars) for _ in range(length))
# BUG fix: `length` was undefined at module level (NameError at import time);
# use an explicit demo length instead.
random_string = generate_random_string(16)
#!/bin/bash ../.port_include.sh
# SerenityOS-style port description for GNU less.
port=less
version=530
# Run ./configure before building.
useconfigure="true"
# Tarball, its detached GPG signature, and the GNU keyring used to verify it.
files="http://ftp.gnu.org/gnu/less/less-${version}.tar.gz less-${version}.tar.gz
http://ftp.gnu.org/gnu/less/less-${version}.tar.gz.sig less-${version}.tar.gz.sig
https://ftp.gnu.org/gnu/gnu-keyring.gpg gnu-keyring.gpg"
depends="ncurses"
# Authenticate the download against the detached signature above.
auth_type="sig"
auth_opts="--keyring ./gnu-keyring.gpg less-${version}.tar.gz.sig"
"use strict";
/* ***************************************************************************
*
* Copyright (c) 2021, the iexjs authors.
*
* This file is part of the iexjs library, distributed under the terms of
* the Apache License 2.0. The full license can be found in the LICENSE file.
*
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.marketPrevious = exports.marketYesterday = exports.previous = exports.yesterday = void 0;
const client_1 = require("../client");
const common_1 = require("../common");
/**
 * Fetch previous-day adjusted price data for one or more symbols.
 *
 * https://iexcloud.io/docs/api/#previous-day-prices
 *
 * @param {string} symbol ticker to request
 * @param {string} token Access token
 * @param {string} version API version
 * @param {string} filter https://iexcloud.io/docs/api/#filter-results
 * @param {string} format output format
 */
const yesterday = (symbol, { token, version, filter, format } = {}) => {
    common_1._raiseIfNotStr(symbol);
    const url = `stock/${common_1._quoteSymbols(symbol)}/previous`;
    return common_1._get({ url, token, version, filter, format });
};
exports.yesterday = yesterday;
// Attach as a Client method so `client.yesterday(symbol, opts)` forwards the
// client's stored token and API version automatically.
client_1.Client.prototype.yesterday = function (symbol, { filter, format } = {}) {
    return exports.yesterday(symbol, {
        token: this._token,
        version: this._version,
        filter,
        format,
    });
};
// `previous` is an alias of `yesterday` (both as export and Client method).
exports.previous = exports.yesterday;
client_1.Client.prototype.previous = client_1.Client.prototype.yesterday;
/**
 * Fetch previous-day adjusted price data for the whole market.
 *
 * @param {string} token Access token
 * @param {string} version API version
 * @param {string} filter https://iexcloud.io/docs/api/#filter-results
 * @param {string} format output format
 */
const marketYesterday = ({ token, version, filter, format } = {}) => {
    return common_1._get({
        url: `stock/market/previous`,
        token,
        version,
        filter,
        format,
    });
};
exports.marketYesterday = marketYesterday;
// Client method variant: forwards the client's stored token/version.
client_1.Client.prototype.marketYesterday = function ({ filter, format } = {}) {
    return exports.marketYesterday({
        token: this._token,
        version: this._version,
        filter,
        format,
    });
};
// `marketPrevious` is an alias of `marketYesterday`.
exports.marketPrevious = exports.marketYesterday;
client_1.Client.prototype.marketPrevious = client_1.Client.prototype.marketYesterday;
|
<reponame>msoxzw/toy-benchmark<filename>parallel.cpp
#include <cassert>
#include <chrono>
#include <cmath>      // sqrt, atan2, exp2 — previously only available via transitive includes
#include <execution>
#include <iostream>
#include <random>
#include <vector>
using namespace std;
// Benchmarks two element-wise kernels — algebraic sqrt(x*x + y*y) and
// transcendental atan2(y, x) — three ways: raw for loop, std::transform, and
// std::transform with the parallel execution policy.  Prints seconds and the
// effective bandwidth over the three-vector working set.
int main() {
    constexpr size_t N{100'000'000};
    // 2 input vectors + 1 output vector of doubles touched per kernel, in GiB.
    constexpr double working_set_GB{3.0 * N * sizeof(double) / exp2(30)};

    random_device rd;
    mt19937 gen{rd()};
    normal_distribution dis;

    // Z0/Z1 hold the for-loop reference results the later runs are checked against.
    vector<double> X(N), Y(N), Z(N), Z0(N), Z1(N);
    for (auto &&x:X) x = dis(gen);
    for (auto &&y:Y) y = dis(gen);

    cout << fixed;
    cout.precision(3);

    cout << "for loop version: \n";
    {
        auto t0 = chrono::high_resolution_clock::now();
        for (size_t i = 0; i < N; ++i) {
            Z0[i] = sqrt(X[i] * X[i] + Y[i] * Y[i]);
        }
        auto t1 = chrono::high_resolution_clock::now();
        chrono::duration<double> s = t1 - t0;
        auto gbs = working_set_GB / s.count();
        cout << "Time for an algebraic expression: "
            << s.count() << " s / " << gbs << " GB/s\n";
    }
    {
        auto t0 = chrono::high_resolution_clock::now();
        for (size_t i = 0; i < N; ++i) {
            Z1[i] = atan2(Y[i], X[i]);
        }
        auto t1 = chrono::high_resolution_clock::now();
        chrono::duration<double> s = t1 - t0;
        auto gbs = working_set_GB / s.count();
        cout << "Time for a transcendental expression: "
            << s.count() << " s / " << gbs << " GB/s\n";
    }

    cout << "STL algorithms version: \n";
    {
        auto t0 = chrono::high_resolution_clock::now();
        transform(begin(X), end(X), begin(Y), begin(Z),
            [](auto x, auto y) { return sqrt(x * x + y * y); });
        auto t1 = chrono::high_resolution_clock::now();
        chrono::duration<double> s = t1 - t0;
        auto gbs = working_set_GB / s.count();
        cout << "Time for an algebraic expression: "
            << s.count() << " s / " << gbs << " GB/s\n";
        // same operations in the same order: results must match bit-for-bit
        assert(Z == Z0);
    }
    {
        auto t0 = chrono::high_resolution_clock::now();
        transform(begin(X), end(X), begin(Y), begin(Z),
            [](auto x, auto y) { return atan2(y, x); });
        auto t1 = chrono::high_resolution_clock::now();
        chrono::duration<double> s = t1 - t0;
        auto gbs = working_set_GB / s.count();
        cout << "Time for a transcendental expression: "
            << s.count() << " s / " << gbs << " GB/s\n";
        assert(Z == Z1);
    }

    cout << "Parallel STL algorithms version:\n";
    {
        auto t0 = chrono::high_resolution_clock::now();
        transform(execution::par, begin(X), end(X), begin(Y), begin(Z),
            [](auto x, auto y) { return sqrt(x * x + y * y); });
        auto t1 = chrono::high_resolution_clock::now();
        chrono::duration<double> s = t1 - t0;
        auto gbs = working_set_GB / s.count();
        cout << "Time for an algebraic expression: "
            << s.count() << " s / " << gbs << " GB/s\n";
        assert(Z == Z0);
    }
    {
        auto t0 = chrono::high_resolution_clock::now();
        transform(execution::par, begin(X), end(X), begin(Y), begin(Z),
            [](auto x, auto y) { return atan2(y, x); });
        auto t1 = chrono::high_resolution_clock::now();
        chrono::duration<double> s = t1 - t0;
        auto gbs = working_set_GB / s.count();
        cout << "Time for a transcendental expression: "
            << s.count() << " s / " << gbs << " GB/s\n";
        assert(Z == Z1);
    }
}
|
<gh_stars>1000+
package socialcache
import (
"errors"
"sync"
. "github.com/Philipp15b/go-steam/protocol/steamlang"
. "github.com/Philipp15b/go-steam/steamid"
)
// FriendsList is a thread-safe map of friends, keyed by SteamId.
// It can be iterated over like so:
//	for id, friend := range client.Social.Friends.GetCopy() {
//		log.Println(id, friend.Name)
//	}
type FriendsList struct {
	mutex sync.RWMutex
	byId  map[SteamId]*Friend
}

// NewFriendsList returns a new, empty friends list.
func NewFriendsList() *FriendsList {
	return &FriendsList{byId: make(map[SteamId]*Friend)}
}

// Add adds a friend to the friend list.
// The Friend is passed by value, so the list stores its own private copy.
func (list *FriendsList) Add(friend Friend) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	_, exists := list.byId[friend.SteamId]
	if !exists { // don't overwrite an existing entry for this id
		list.byId[friend.SteamId] = &friend
	}
}
// Remove removes a friend from the friend list; unknown ids are a no-op.
func (list *FriendsList) Remove(id SteamId) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	delete(list.byId, id)
}

// GetCopy returns a snapshot of the friends map.
// Values are copied, so callers can't mutate the list's internal state.
func (list *FriendsList) GetCopy() map[SteamId]Friend {
	list.mutex.RLock()
	defer list.mutex.RUnlock()
	flist := make(map[SteamId]Friend)
	for key, friend := range list.byId {
		flist[key] = *friend
	}
	return flist
}

// ById returns a copy of the friend with the given SteamId,
// or an error when no such friend exists.
func (list *FriendsList) ById(id SteamId) (Friend, error) {
	list.mutex.RLock()
	defer list.mutex.RUnlock()
	if val, ok := list.byId[id]; ok {
		return *val, nil
	}
	return Friend{}, errors.New("Friend not found")
}

// Count returns the number of friends in the list.
func (list *FriendsList) Count() int {
	list.mutex.RLock()
	defer list.mutex.RUnlock()
	return len(list.byId)
}
// Setter methods.
// Each setter locks the list and updates one field of the stored *Friend
// in place; ids not present in the list are silently ignored.
func (list *FriendsList) SetName(id SteamId, name string) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.Name = name
	}
}

func (list *FriendsList) SetAvatar(id SteamId, hash []byte) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.Avatar = hash
	}
}

func (list *FriendsList) SetRelationship(id SteamId, relationship EFriendRelationship) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.Relationship = relationship
	}
}

func (list *FriendsList) SetPersonaState(id SteamId, state EPersonaState) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.PersonaState = state
	}
}

func (list *FriendsList) SetPersonaStateFlags(id SteamId, flags EPersonaStateFlag) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.PersonaStateFlags = flags
	}
}

func (list *FriendsList) SetGameAppId(id SteamId, gameappid uint32) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.GameAppId = gameappid
	}
}

func (list *FriendsList) SetGameId(id SteamId, gameid uint64) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.GameId = gameid
	}
}

func (list *FriendsList) SetGameName(id SteamId, name string) {
	list.mutex.Lock()
	defer list.mutex.Unlock()
	if val, ok := list.byId[id]; ok {
		val.GameName = name
	}
}
// A Friend, as tracked by FriendsList.
type Friend struct {
	SteamId           SteamId `json:",string"` // serialized as a string: JSON numbers can't hold 64-bit ids exactly
	Name              string
	Avatar            []byte // avatar hash
	Relationship      EFriendRelationship
	PersonaState      EPersonaState
	PersonaStateFlags EPersonaStateFlag
	GameAppId         uint32
	GameId            uint64 `json:",string"`
	GameName          string
}
|
const tap = require("tap");
const test = tap.test;
const { getDom, getBooon } = require("./dom");
// Verifies nodeBuilder attr()/id(): the attribute and id must survive both
// buildString() (HTML injection) and buildNode() (direct DOM construction).
test("attr", t => {
    t.plan(3);
    const booon = getBooon();
    const builder = booon.nodeBuilder("pre")
        .attr("data-mol", "kid")
        .id("joke");
    t.equal(booon("p").html(builder.buildString()).find("pre#joke").data("mol"), "kid");
    booon("p").html("");
    t.equal(builder.buildNode("p").id, "joke");
    t.equal(booon("p").find("pre#joke").data("mol"), "kid");
});
// Verifies clazz() accepts a single class, an array of classes, and a
// space-separated string — through both build paths.
test("class", t => {
    t.plan(10);
    const booon = getBooon();
    const builder = booon.nodeBuilder("pre")
        .clazz("one")
        .clazz(["two", "three"])
        .clazz("four five");
    booon("p").html(builder.buildString());
    ["one", "two", "three", "four", "five"].forEach(c => t.true(booon("p>pre").hasClass(c)));
    booon("p").html("");
    builder.buildNode("p");
    ["one", "two", "three", "four", "five"].forEach(c => t.true(booon("p>pre").hasClass(c)));
});
// Verifies html() accepts a raw HTML string and a nested nodeBuilder,
// through both buildString() and buildNode().
test("html", t => {
    t.plan(4);
    const booon = getBooon();
    let builder = booon.nodeBuilder("pre")
        .html("<h6 id=\"z\">king</h6>");
    booon("p").html(builder.buildString());
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    builder.buildNode("p");
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    builder = booon.nodeBuilder("pre")
        .html(booon.nodeBuilder("h6").id("z"));
    booon("p").html(builder.buildString());
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    builder.buildNode("p");
    t.equal(booon("p>pre>h6")[0].id, "z");
});
// Verifies node() accepts an HTML string, a nested nodeBuilder, and a real
// DOM node — through both buildString() and buildNode().
test("node", t => {
    t.plan(6);
    const booon = getBooon();
    let builder = booon.nodeBuilder("pre")
        .node("<h6 id=\"z\">king</h6>");
    booon("p").html(builder.buildString());
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    builder.buildNode("p");
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    builder = booon.nodeBuilder("pre")
        .node(booon.nodeBuilder("h6").id("z"));
    booon("p").html(builder.buildString());
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    builder.buildNode("p");
    t.equal(booon("p>pre>h6")[0].id, "z");
    booon("p").html("");
    let newNode = getDom().window.document.createElement("pre");
    newNode.innerHTML = "<h6 id=\"z\">king</h6>";
    builder = booon.nodeBuilder("pre")
        .node(newNode);
    booon("p").html(builder.buildString());
    t.equal(booon("p>pre>pre>h6")[0].id, "z");
    booon("p").html("");
    const dom = getDom();
    const b = dom.window.booon;
    // Fix: the original first assigned `builder = b.nodeBuilder("pre").node(newNode)`
    // using the PREVIOUS dom's node and then immediately overwrote it below —
    // the dead assignment is removed; the node must come from the fresh dom.
    newNode = dom.window.document.createElement("pre");
    newNode.innerHTML = "<h6 id=\"z\">king</h6>";
    builder = b.nodeBuilder("pre")
        .node(newNode);
    builder.buildNode("p");
    t.equal(b("p>pre>pre>h6")[0].id, "z");
});
|
# Sorts three values into descending order (largest first).
#
# BUG fix: the original pairwise swaps (a<->b, b<->c, a<->c) do not sort all
# permutations — e.g. (1, 2, 3) came back as (2, 3, 1) — so verificaTriangulo
# could be handed the wrong "longest side".  A full sort is used instead.
def ordenaVetor(a, b, c)
  return [a, b, c].sort.reverse
end
# Classifies a triangle from side lengths, assuming a >= b >= c (callers must
# sort first — see ordenaVetor).  Prints, in Portuguese: not-a-triangle, then
# right/obtuse/acute, then equilateral/isosceles where applicable.
def verificaTriangulo(a, b, c)
  if (a >= b + c) then
    puts "NAO FORMA TRIANGULO"
  else
    # Compare the square of the longest side against the sum of the other two
    # squares (law of cosines sign test).
    if (a**2 == b**2 + c**2) then
      puts "TRIANGULO RETANGULO"
    end
    if (a**2 > b**2 + c**2) then
      puts "TRIANGULO OBTUSANGULO"
    end
    if (a**2 < b**2 +c**2)
      puts "TRIANGULO ACUTANGULO"
    end
    if (a == b && b == c) then
      puts "TRIANGULO EQUILATERO"
    end
    # exactly two equal sides
    if ((a == b && a != c) || (b == c && a != b) || (a == c && a != b))
      puts "TRIANGULO ISOSCELES"
    end
  end
end
# Read three side lengths from one input line.  The reversed indexing is
# harmless: ordenaVetor sorts the values before classification anyway.
valores = gets.split
a = valores[2].to_f
b = valores[1].to_f
c = valores[0].to_f
a, b, c = ordenaVetor(a, b, c)
verificaTriangulo(a, b, c)
let facade = require('gamecloud')
let {EntityType, IndexType} = facade.const
let fetch = require("node-fetch");
/**
 * CP successfully-registered event.
 * The main chain pushes a CP registration notice; the CP record should then
 * be inserted into the local database.
 * @param {Object} data.msg { cid, name, url, address, ip, cls, grate, wid, account }
 *
 * @description A Promise must be returned, otherwise the event cannot act as
 * a synchronous event — an outer `await` would not actually block.
 */
function handle(data) {
    // On a CP registration event, record the CP info in the local database.
    return CreateRecord(data.msg, this).catch(e => {
        console.error(e);
    });
}
/**
 * Creates a new CP (content provider) record.  Invoked when the main chain
 * sends a notification, or during a system self-check — never directly by a
 * client:
 * 1. Collect details from the game vendor's open API.
 * 2. Merge the collected details with the main-chain info.
 * 3. Create (or update) the database record.
 * @param {Object} cpInfo { cid, name, url, address, ip, cls, grate, wid, account }
 */
async function CreateRecord(cpInfo, core) {
    if(!cpInfo || typeof cpInfo != 'object') {
        console.log('cp.CreateRecord: error cp info.');
        return {code: 0};
    }
    if(cpInfo.cid == "xxxxxxxx-game-gold-boss-xxxxxxxxxxxx") { // force-skip this special CP
        return {code: 0};
    }
    cpInfo.stock = cpInfo.stock || {}; // the main-chain registration message carries no stock info
    let content = {
        sort: 1, // `sort` int(4) sort order
        category_id: 1001, // `category_id` int(2) game category
        provider_id: 1002, // `provider_id` int(4) provider id
        provider_name: '红蝶游戏', // `provider_name` varchar(32) provider name
        ad_title: '孤胆车神:新奥尔良 - 在线开放世界游戏', // `ad_title` varchar(32) promo title
        star_level: 0, // `star_level` int(2) star rating
        player_count: 368, // `player_count` int(4) player count
        down_count: 0, // `down_count` int(4) download count
        comment_count: 1, // `comment_count` int(4) comment count
        game_link_url: '', // `game_link_url` varchar(255) game link
        cpid: cpInfo.cid, // `cpid` CP code
        category_title: cpInfo.cls, // `category_title` varchar(32) category name
        cpurl: cpInfo.url, // `cpurl` varchar(255)
        cp_addr: cpInfo.address, // `cp_addr` varchar(64) CP address
        cp_name: cpInfo.name, // `cp_name` varchar(32)
        grate: cpInfo.grate, // media revenue-share ratio
        stock_price: cpInfo.stock.hPrice || 0, // average holding price of certificates
        stock_sum: cpInfo.stock.hSum || 0, // total certificates in circulation
        hHeight: cpInfo.stock.hHeight || -1, // height of the first crowdfunding
        hBonus: cpInfo.stock.hBonus || 0, // historical dividends
        hAds: cpInfo.stock.hAds || 0, // historical revenue share
    };
    // Fetch CP details from the CP open API.
    // @warning if the open API is unreachable, CP registration aborts early.
    let res = {};
    try {
        res = await fetch(`${cpInfo.url}/info`, { mode: 'cors' });
        res = await res.json();
        let pics = '';
        if(typeof res.game.pic_urls == 'string') {
            res.game.pic_urls = JSON.parse(res.game.pic_urls);
        }
        if(Array.isArray(res.game.pic_urls)) {
            // comma-join the screenshot URL list
            pics = res.game.pic_urls.reduce((sofar,cur)=>{sofar = sofar==''? cur : sofar+','+cur; return sofar;},'');
        }
        content.game_version = res.game.version; // `game_version` varchar(16) version
        content.developer = res.game.provider; // `developer` varchar(64) developer
        content.create_time = res.game.publish_time; // `create_time` int(8) creation time
        content.update_time = res.game.update_time; // `update_time` int(8) update time
        content.game_title = res.game.game_title; // `game_title` varchar(64) title
        content.game_ico_uri = res.game.icon_url; // `game_ico_uri` varchar(255) icon URI
        content.update_desc = res.game.update_content; // `update_desc` varchar(255) update notes
        content.game_resource_uri = res.game.large_img_url; // `game_resource_uri` varchar(255) large image
        content.small_img_url = res.game.small_img_url; // `small_img_url` varchar(255) small image
        content.game_screenshots = pics; // `game_screenshots` varchar(255) screenshots
        content.game_desc = res.game.desc; // `game_desc` varchar(255) description
    } catch(e) {
        console.log('CP开放接口访问错误', e.message);
        return {code: 0};
    }
    let cpObj = core.GetObject(EntityType.blockgame, cpInfo.cid, IndexType.Domain);
    if(!!cpObj) { // a record with this cid already exists — update its content
        for(let key of Object.keys(content)) {
            cpObj.setAttr(key, content[key]);
        }
    } else { // no record yet — create a new entry
        content.store_status = 0; // `store_status` int(1) status
        content.ranking = 0, // `ranking` int(2) ranking
        await core.GetMapping(EntityType.blockgame).Create(content);
    }
    // Persist / refresh the crowdfunding (stock) info.
    if(cpInfo.stock.sum > 0 && cpInfo.stock.height > 0) {
        let stockList = core.GetMapping(EntityType.StockBase).groupOf()
            .where([['cid', cpInfo.cid]])
            .orderby('height', 'desc')
            .records();
        let content = {
            sum_left: cpInfo.stock.sum, // remaining issuance
        };
        let stock = stockList[0];
        if(!stock || cpInfo.stock.height > stock.orm.height) { // new record, or one at a greater height
            content.sum = cpInfo.stock.sum, // issued amount
            content.cid = cpInfo.cid, // CID
            content.height = cpInfo.stock.height, // issue height — remaining days derive from it
            content.price = cpInfo.stock.price, // issue price, in "dust" units
            // data obtained from the CP open API
            content.funding_text = res.crowd.funding_text;
            content.funding_project_text = res.crowd.funding_project_text;
            await core.GetMapping(EntityType.StockBase).Create(content);
        } else if (cpInfo.stock.height == stock.orm.height) { // update only at the same height
            for(let key of Object.keys(content)) {
                stock.orm[key] = content[key];
            }
        }
    }
    return { code: 0 };
}
module.exports.handle = handle;
|
import injectSheet, { Theme, WithStyles } from 'react-jss';
import { rule } from 'shared/helpers/style';
import { IProps } from './Metric';
// JSS style sheet for the Metric component.
// `theme` is pulled from the react-jss Theme's `extra` field; rules whose
// values are functions of IProps are resolved per component instance.
const styles = ({ extra: theme }: Theme) => ({
  root: rule({
    minHeight: '100%',
    display: 'flex',
  }),
  // Left-hand percent/pie area; padding collapses on sm–md viewports.
  percent: rule({
    display: 'flex',
    padding: '0.5rem 0',
    marginRight: '1.275rem',
    [theme.breakpoints.between('sm', 'md')]: rule({
      padding: 0,
    }),
  }),
  piechart: rule({}),
  content: rule({}),
  // Base typography rule; reused below via JSS `composes: '$text'`.
  text: rule({
    fontFamily: theme.typography.primaryFont,
    color: theme.palette.text.primary,
    fontSize: '0.75rem',
  }),
  name: rule({
    composes: '$text',
    display: 'flex',
    marginBottom: '0.625rem',
    minHeight: '1.125rem',
  }),
  hintIcon: rule({
    marginLeft: '0.5rem',
    fontSize: '1rem',
    color: theme.colors.gray,
  }),
  // Main metric figure; larger on md+ screens.
  value: rule({
    composes: '$text',
    fontWeight: 'bold',
    fontSize: '1.625rem',
    marginBottom: '0.475rem',
    [theme.breakpoints.up('md')]: rule({
      fontSize: '2.125rem',
    }),
  }),
  // Delta indicator: warning color for negative variation, positive otherwise
  // (a missing/zero variation also renders as positive).
  variation: rule({
    composes: '$text',
    display: 'flex',
    alignItems: 'center',
    fontWeight: 'bold',
    fontSize: '0.9375rem',
    color: ({ metric: { variation } }: IProps) => variation && variation < 0 ?
      theme.palette.text.warning : theme.palette.text.positive,
    [theme.breakpoints.up('md')]: rule({
      fontSize: '1.25rem',
    }),
  }),
  // Arrow points down (unrotated) for negative variation, up otherwise.
  arrowIcon: rule({
    transform: ({ metric: { variation } }: IProps) => variation && variation < 0 ? 'unset' : 'rotate(180deg)',
  }),
});
export const provideStyles = injectSheet(styles);
export type StylesProps = WithStyles<typeof styles>;
|
#!/bin/bash -e
# Generates a throwaway ssh key pair and publishes base64-encoded copies as
# Azure DevOps pipeline variables (sshPrivateKey / sshPublicKey).
#
# Options:
#   -p <passphrase>   passphrase for the generated private key
#
# Fix: the shebang was "#/bin/bash -e" (missing "!"), so it was treated as a
# plain comment and -e (exit on error) never applied.
while getopts p: flag
do
    case "${flag}" in
        p) passphrase=${OPTARG};;
    esac
done
# generate a new ssh key pair; the piped 'y' answers the overwrite prompt.
# "$passphrase" is quoted so an unset/empty passphrase still yields a valid
# (empty) argument for -N instead of ssh-keygen consuming nothing.
echo -e 'y' | ssh-keygen -f scratch -N "$passphrase"
# create base64 encoded versions of public & private ssh keys (-w 0: no wrapping)
priKey=$(base64 -w 0 ./scratch)
pubKey=$(base64 -w 0 ./scratch.pub)
# echo "sshPublicKey: $pubKey"
# echo "sshPrivateKey: $priKey"
# copy public key to new file
cat 'scratch.pub' > ./id_rsa.pub
# set pipeline environment variables
echo "##vso[task.setvariable variable=sshPrivateKey]$priKey"
echo "##vso[task.setvariable variable=sshPublicKey]$pubKey"
|
function filterCitiesByMultiple(cityArray, multiple) {
return cityArray.filter(city => parseInt(city.id) % multiple === 0);
}
// Exercise filterCitiesByMultiple with a small fixture of Taiwanese cities.
const cities = [
    { city: 'Taipei City', id: '1' },
    { city: 'New Taipei City', id: '5' },
    { city: 'Keelung City', id: '2' },
    { city: 'Yilan County', id: '21' },
];

const result = filterCitiesByMultiple(cities, 5);
console.log(result); // Output: [{"city":"New Taipei City","id":"5"}]
#!/bin/bash
set -e
set -x
# Builds blog and community into the site by cloning the website repo, copying blog/community dirs in, running hugo.
# Also builds previous versions unless BUILD_VERSIONS=no.
# - Results are written to site/ as normal.
# - Run as "./hack/build.sh serve" to run a local preview server on site/ afterwards (requires `npm install -g http-server`).
# Releasing a new version:
# 1) Make a release-NN branch as normal.
# 2) Update VERSIONS and RELEASE_BRANCHES below (on main) to include the new version, and remove the oldest
# Order matters :-), Most recent first.
VERSIONS=("1.4" "1.3" "1.2" "1.1") # Docs version, results in the url e.g. knative.dev/docs-0.23/..
RELEASE_BRANCHES=("knative-v1.4.0" "knative-v1.3.0" "knative-v1.2.0" "v1.1.0") # Release version for serving/eventing yaml files and api references.
# 4) PR the result to main.
# 5) Party.
# Docs branches map 1:1 onto VERSIONS as release-<version>; keep both arrays
# the same length and in the same (newest-first) order.
DOCS_BRANCHES=("release-${VERSIONS[0]}" "release-${VERSIONS[1]}" "release-${VERSIONS[2]}" "release-${VERSIONS[3]}")
latest=${VERSIONS[0]}
previous=("${VERSIONS[@]:1}")
GIT_SLUG="knative/docs"
readonly TEMP="$(mktemp -d)"
readonly SITE=$PWD/site
# Always start from a clean output directory.
rm -rf site/
if [ "$BUILD_VERSIONS" == "no" ]; then
# HEAD to /docs if we're not doing versioning.
mkdocs build -f mkdocs.yml -d site/docs
else
# Versioning: pre-release (HEAD): docs => development/
cp -r . $TEMP/docs-main
# Pull the generated API reference pages straight from the serving/eventing repos.
curl -f -L --show-error https://raw.githubusercontent.com/knative/serving/main/docs/serving-api.md -s > "$TEMP/docs-main/docs/reference/api/serving-api.md"
curl -f -L --show-error https://raw.githubusercontent.com/knative/eventing/main/docs/eventing-api.md -s > "$TEMP/docs-main/docs/reference/api/eventing-api.md"
pushd "$TEMP/docs-main"; mkdocs build -f mkdocs.yml -d $SITE/development; popd
# Latest release branch to /docs
git clone --depth 1 -b ${DOCS_BRANCHES[0]} https://github.com/${GIT_SLUG} "$TEMP/docs-$latest"
curl -f -L --show-error https://raw.githubusercontent.com/knative/serving/${DOCS_BRANCHES[0]}/docs/serving-api.md -s > "$TEMP/docs-$latest/docs/reference/api/serving-api.md"
curl -f -L --show-error https://raw.githubusercontent.com/knative/eventing/${DOCS_BRANCHES[0]}/docs/eventing-api.md -s > "$TEMP/docs-$latest/docs/reference/api/eventing-api.md"
pushd "$TEMP/docs-$latest"; KNATIVE_VERSION=${RELEASE_BRANCHES[0]//knative-} SAMPLES_BRANCH="${DOCS_BRANCHES[0]}" mkdocs build -d $SITE/docs; popd
# Previous release branches release-$version to /v$version-docs
versionjson=""
for i in "${!previous[@]}"; do
version=${previous[$i]}
# Accumulate the version-picker entries for versions.json below.
versionjson+="{\"version\": \"v$version-docs\", \"title\": \"v$version\", \"aliases\": [\"\"]},"
echo "Building for previous version $version"
git clone --depth 1 -b ${DOCS_BRANCHES[$i+1]} https://github.com/${GIT_SLUG} "$TEMP/docs-$version"
curl -f -L --show-error https://raw.githubusercontent.com/knative/serving/${DOCS_BRANCHES[i+1]}/docs/serving-api.md -s > "$TEMP/docs-$version/docs/reference/api/serving-api.md"
curl -f -L --show-error https://raw.githubusercontent.com/knative/eventing/${DOCS_BRANCHES[i+1]}/docs/eventing-api.md -s > "$TEMP/docs-$version/docs/reference/api/eventing-api.md"
pushd "$TEMP/docs-$version"; KNATIVE_VERSION=${RELEASE_BRANCHES[i+1]//knative-} SAMPLES_BRANCH="${DOCS_BRANCHES[i+1]}" VERSION_WARNING=true mkdocs build -d "$SITE/v$version-docs"; popd
done
# Set up the version file to point to the built docs.
cat << EOF > $SITE/versions.json
[
{"version": "docs", "title": "v$latest", "aliases": [""]},
$versionjson
{"version": "development", "title": "(Pre-release)", "aliases": [""]}
]
EOF
fi
# Create the blog
# TODO copy templates, stylesheets, etc. into blog directory
cp -r overrides blog/
cp -r docs/images docs/stylesheets blog/docs/
pushd blog; mkdocs build -f mkdocs.yml -d "$SITE/blog"; popd
# Handle Cookie consent
cp -r cookie-consent/js site/
# Copy go mod files so knative.dev/blahblah vanity URLs work
mkdir site/golang
cp golang/*.html site/golang/
cat golang/_redirects >> site/_redirects
# Home page is served from docs, so add a redirect.
cat << EOF > site/index.html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Redirecting</title>
<noscript>
<meta http-equiv="refresh" content="1; url=docs/" />
</noscript>
<script>
window.location.replace("docs/");
</script>
</head>
<body>
Redirecting to <a href="docs/">docs/</a>...
</body>
</html>
EOF
# Clean up
rm -rf $TEMP
if [ "$1" = "serve" ]; then
npx http-server site
else
echo "To serve the website run:"
echo "npx http-server site"
fi
|
#!/bin/bash
# Iterates over a set of AWS CLI workshop profiles: diffs the CDK stacks and
# lists Cloud9 environments for each.

# Lists Cloud9 environments (us-west-2) for the given profile.
# $1 - AWS CLI profile name
function list_envs() {
    # $(...) instead of legacy backticks; "$1" quoted against word splitting.
    ACC_ID=$(aws sts get-caller-identity --query "Account" --output text --profile "$1")
    echo "Listing Cloud9 PROFILE $1 / ACC_ID: $ACC_ID"
    # export AWS_DEFAULT_REGION="us-east-1"
    # cdk deploy iot-playground codepipeline devicedefender \
    # --require-approval never \
    # --profile $1
    # export AWS_DEFAULT_REGION="us-east-2"
    # cdk deploy iot-playground codepipeline devicedefender \
    # --require-approval never \
    # --profile $1
    export AWS_DEFAULT_REGION="us-west-2"
    aws cloud9 list-environments \
        --profile "$1"
    # export AWS_DEFAULT_REGION="eu-west-1"
    # cdk deploy iot-playground codepipeline devicedefender \
    # --require-approval never \
    # --profile $1
}

# Shows pending CDK stack changes (us-west-2) for the given profile.
# $1 - AWS CLI profile name
function diff_cdk_stacks() {
    echo "Listing CDK Stacks PROFILE $1"
    export AWS_DEFAULT_REGION="us-west-2"
    # NOTE(review): cdk picks its region from the environment/profile; verify
    # that this cdk version actually accepts a --region flag.
    cdk diff --profile "$1" --region "us-west-2"
}

diff_cdk_stacks "ws10"
diff_cdk_stacks "ws01"
diff_cdk_stacks "ws02"
diff_cdk_stacks "ws03"
diff_cdk_stacks "ws04"
diff_cdk_stacks "ws05"
diff_cdk_stacks "ws06"
diff_cdk_stacks "ws07"
diff_cdk_stacks "ws08"
diff_cdk_stacks "ws09"
list_envs "ws10"
list_envs "ws01"
list_envs "ws02"
list_envs "ws03"
list_envs "ws04"
list_envs "ws05"
list_envs "ws06"
list_envs "ws07"
list_envs "ws08"
list_envs "ws09"
|
<reponame>Judgeman/H2SpringFx
package de.judgeman.H2SpringFx.Tests.ServiceTests;
import de.judgeman.H2SpringFx.Services.LogService;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.TestPropertySource;
import java.io.*;
import java.util.*;
@SpringBootTest
@TestPropertySource(locations = "classpath:test.properties")
public class LogServiceTests {

    private static final String TEST_LOG_DIRECTORY = "testLogsWithLock";
    private static final String TEST_LOG_FILE_NAME = "testLogFile.log";

    // Marker strings written into log files and counted again afterwards.
    private static final String TEST_TEXT_1 = "Sam isst gerade ein 'Samich'.";
    private static final String TEST_TEXT_2 = "Ivy macht den Dancing Dog. ";
    private static final String TEST_TEXT_3 = "Malina lacht mit Mama.";
    private static final String TEST_TEXT_4 = String.format("%s%s", TEST_TEXT_2, TEST_TEXT_3);
    private static final String TEST_TEXT_5 = "One Coffee please C|_|";
    private static final String TEST_TEXT_6 = "Wie nennt man ein verschwundenes Rindtier?"; // Oxford :D

    // Per-marker occurrence counters. NOTE(review): initialized once for the
    // whole class (@BeforeAll), so counts accumulate across test methods; the
    // assertions below depend on each test writing into its own fresh file.
    // The assertSame calls compare boxed Integers and only work because small
    // values fall inside the JVM's Integer cache (-128..127).
    private static HashMap<String, Integer> searchMap;

    @BeforeAll
    public static void setupTestData() {
        searchMap = new HashMap<>();
        searchMap.put(TEST_TEXT_1, 0);
        searchMap.put(TEST_TEXT_2, 0);
        searchMap.put(TEST_TEXT_3, 0);
        searchMap.put(TEST_TEXT_4, 0);
        searchMap.put(TEST_TEXT_5, 0);
        searchMap.put(TEST_TEXT_6, 0);
    }

    /** A message logged via an SLF4J logger must end up in the active log file. */
    @Test
    public void loggerTest() {
        File currentDir = new File("");
        String fileName = LogService.generateNewFileName();
        File logFile = new File(String.format("%s/%s/%s", currentDir.getAbsolutePath(), LogService.LOG_DIRECTORY_NAME, fileName));
        LogService.setLogFilePrintStream(LogService.createNewLogFileAndPrintStream(LogService.LOG_DIRECTORY_NAME, fileName, true));
        Logger logger = LogService.getLogger(LogServiceTests.class);
        logger.info(TEST_TEXT_1);
        searchInLastLogFile(logFile.getAbsolutePath(), searchMap);
        Assertions.assertSame(searchMap.get(TEST_TEXT_1), 1);
    }

    /** printToLogFile with and without a line break must both reach the file. */
    @Test
    public void writeToConsoleViaLogServiceTest() {
        File currentDir = new File("");
        String fileName = LogService.generateNewFileName();
        File logFile = new File(String.format("%s/%s/%s", currentDir.getAbsolutePath(), LogService.LOG_DIRECTORY_NAME, fileName));
        LogService.setLogFilePrintStream(LogService.createNewLogFileAndPrintStream(LogService.LOG_DIRECTORY_NAME, fileName, true));
        LogService.printToLogFile(TEST_TEXT_2, false);
        LogService.printToLogFile(TEST_TEXT_3, true);
        searchInLastLogFile(logFile.getAbsolutePath(), searchMap);
        Assertions.assertSame(1, searchMap.get(TEST_TEXT_2));
        Assertions.assertSame(1, searchMap.get(TEST_TEXT_3));
        // TEXT_2 (no line break) and TEXT_3 end up on one line, matching TEXT_4.
        Assertions.assertSame(1, searchMap.get(TEST_TEXT_4));
    }

    /** System.out only reaches the log file after tieSystemOutAndErrToFileLogging(). */
    @Test
    public void tieSystemOutAndErrToFileLoggingTest() {
        File currentDir = new File("");
        String fileName = LogService.generateNewFileName();
        File logFile = new File(String.format("%s/%s/%s", currentDir.getAbsolutePath(), LogService.LOG_DIRECTORY_NAME, fileName));
        LogService.setLogFilePrintStream(LogService.createNewLogFileAndPrintStream(LogService.LOG_DIRECTORY_NAME, fileName, true));
        System.out.println(TEST_TEXT_5);
        searchInLastLogFile(logFile.getAbsolutePath(), searchMap);
        Assertions.assertSame(searchMap.get(TEST_TEXT_5), 0);
        LogService.tieSystemOutAndErrToFileLogging();
        System.out.println(TEST_TEXT_5);
        searchInLastLogFile(logFile.getAbsolutePath(), searchMap);
        Assertions.assertSame(searchMap.get(TEST_TEXT_5), 1);
    }

    /**
     * createNewLogFileAndPrintStream must return null when the directory may
     * not be created, and a usable stream when it may.
     */
    @Test
    public void createNewLogFileAndPrintStreamTest() {
        File currentDirectory = new File("");
        File tempDirectory = new File(String.format("%s/%s", currentDirectory.getAbsolutePath(), TEST_LOG_DIRECTORY));
        File testLogFile = new File(String.format("%s%s%s", tempDirectory, "/", TEST_LOG_FILE_NAME));
        removeTestDirectoryAndTestLogFile(tempDirectory, testLogFile);
        Assertions.assertFalse(testLogFile.exists());
        // createDirectory=false on a missing directory: no stream is created.
        PrintStream printStream = LogService.createNewLogFileAndPrintStream(TEST_LOG_DIRECTORY, TEST_LOG_FILE_NAME, false);
        LogService.setLogFilePrintStream(printStream);
        Assertions.assertNull(printStream);
        LogService.printToLogFile(TEST_TEXT_6, true);
        searchInLastLogFile(testLogFile.getPath(), searchMap);
        Assertions.assertSame(0, searchMap.get(TEST_TEXT_6));
        try {
            printStream = LogService.createNewLogFileAndPrintStream(TEST_LOG_DIRECTORY, TEST_LOG_FILE_NAME, true);
            Assertions.assertTrue(testLogFile.exists());
            assert printStream != null;
            printStream.println(TEST_TEXT_6);
            searchInLastLogFile(testLogFile.getAbsolutePath(), searchMap);
            Assertions.assertSame(1, searchMap.get(TEST_TEXT_6));
            LogService.setLogFilePrintStream(printStream);
            LogService.printToLogFile(TEST_TEXT_6, true);
            searchInLastLogFile(testLogFile.getAbsolutePath(), searchMap);
            // second time the search text is twice in the file and we are already found one line in the last search
            Assertions.assertSame(3, searchMap.get(TEST_TEXT_6));
        } finally {
            if (printStream != null) {
                printStream.close();
            }
            removeTestDirectoryAndTestLogFile(tempDirectory, testLogFile);
        }
    }

    /** Deletes the test log file and its directory if they exist (asserting success). */
    private void removeTestDirectoryAndTestLogFile(File tempDirectory, File testLogFile) {
        if (testLogFile.exists() && testLogFile.isFile()) {
            Assertions.assertTrue(testLogFile.delete());
        }
        if (tempDirectory.exists() && tempDirectory.isDirectory()) {
            Assertions.assertTrue(tempDirectory.delete());
        }
    }

    /**
     * Increments, for each key of {@code searchMap}, the counter by the number
     * of lines in the file at {@code path} containing that key. A missing or
     * unreadable file leaves the counters untouched (best-effort, as before).
     */
    private void searchInLastLogFile(String path, HashMap<String, Integer> searchMap) {
        // Fix: try-with-resources guarantees the reader is closed even when
        // readLine() throws; previously close() was skipped on exception.
        try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
            String line;
            while ((line = reader.readLine()) != null) {
                checkKeyFromSearchMapInLine(line, searchMap);
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /** Bumps the counter of every search key contained in {@code line} (null-safe). */
    private void checkKeyFromSearchMapInLine(String line, HashMap<String, Integer> searchMap) {
        if (line == null) {
            return;
        }
        for (String key : searchMap.keySet()) {
            if (line.contains(key)) {
                searchMap.put(key, searchMap.get(key) + 1);
            }
        }
    }
}
|
function reverseString(str) {
let reversedString = '';
for (let i = str.length -1; i >= 0; i--) {
reversedString += str[i];
}
return reversedString;
}
let userInputString = "hello";
let reversedString = reverseString(userInputString);
console.log(reversedString); // olleh |
import requests
from bs4 import BeautifulSoup
def scraper(url, timeout=10):
    """Scrape job postings from a listing page.

    Expects each job to be an ``<h2 class="job-title">`` element wrapping an
    ``<a>`` whose href ends in the job id.

    Args:
        url: Listing page to fetch.
        timeout: Seconds to wait for the HTTP response (new optional
            parameter; previously the request could hang indefinitely).

    Returns:
        A list of dicts with ``title``, ``url`` and ``id`` keys.

    Raises:
        requests.HTTPError: If the server answers with an error status,
            instead of silently parsing an error page.
    """
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'lxml')
    jobs = []
    for job_link in soup.find_all('h2', class_='job-title'):
        anchor = job_link.find('a')
        if anchor is None or not anchor.get('href'):
            # Skip malformed entries instead of crashing with a TypeError.
            continue
        job_url = anchor['href']
        # The job id is the final path segment of the link URL.
        job_id = job_url[job_url.rfind('/') + 1:]
        jobs.append({'title': anchor.text, 'url': job_url, 'id': job_id})
    return jobs
# Example usage: fetch and parse the demo listing (performs a live HTTP request).
url = 'https://www.example.com/jobs'
jobs = scraper(url)
-- Seed data for the users and gifts tables.
-- NOTE(review): string literals use double quotes, which ANSI SQL reserves
-- for identifiers; this script assumes MySQL (default sql_mode) or SQLite —
-- verify before running against another engine.
-- Users
INSERT INTO users (username, email) VALUES ("trickster", "<EMAIL>");
INSERT INTO users (username, email) VALUES ("jokester", "<EMAIL>");
INSERT INTO users (username, email) VALUES ("sabotage", "<EMAIL>");
INSERT INTO users (username, email) VALUES ("Santa", "<EMAIL>");
INSERT INTO users (username, email) VALUES ("Grinch", "<EMAIL>");
-- Gifts
-- NOTE(review): UserId presumably references the users inserted above by
-- 1-based auto-increment id — confirm against the schema definition.
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("THE VANISHING HALF", "by <NAME>", "The lives of twin sisters who run away from a Southern Black community at age 16 diverge as one returns and the other takes on a different racial identity but their fates intertwine.", "https://www.amazon.com/dp/0525536299?tag=NYTBSREV-20&tag=NYTBS-20", "https://images4.penguinrandomhouse.com/cover/9780593286104", 1);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("THE INVISIBLE LIFE OF ADDIE LARUE", "by <NAME>", "A Faustian bargain comes with a curse that affects the adventure Addie LaRue has across centuries.", "https://www.amazon.com/dp/0765387565?tag=NYTBSREV-20&tag=NYTBS-20", "https://m.media-amazon.com/images/I/91Ql48Y0mqL.jpg", 1);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("THE SEARCHER", "by <NAME>", "After a divorce, a former Chicago police officer resettles in an Irish village where a boy goes missing.", "https://www.amazon.com/dp/073522465X?tag=NYTBSREV-20&tag=NYTBS-20", "https://images-na.ssl-images-amazon.com/images/I/41L6pJljNUL._SX342_SY445_QL70_ML2_.jpg", 1);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("Welcome to Chechnya", "by <NAME>", "Inside the Russian Republic's Deadly War on Gays", "https://www.welcometochechnya.com/", "https://upload.wikimedia.org/wikipedia/en/e/e9/Welcome_to_Chechnya.jpeg", 1);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("<NAME>", "by <NAME>", "Based on a true story. In 1933, a Welsh journalist, <NAME>, traveled to the USSR to uncover the truth behind Stalin's propaganda covering up genocidal policies. This later inspired George Orwell's 'Animal Farm'.", "https://www.amazon.com/Mr-Jones-James-Norton/dp/B089XVJB9S", "https://cdn.ticketsource.co.uk/images/promoter/banner/15223-15803178065277.jpg", 1);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("The Skatalites", "The Skatalites", "A classic album by the original ska veterans", "https://music.apple.com/us/artist/the-skatalites/1220241", "https://is5-ssl.mzstatic.com/image/thumb/Music118/v4/90/5f/ea/905feadc-cd7d-a0b0-122f-485ab655c15d/00731452442024.rgb.jpg/380x380cc-60.jpg", 4);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("1984", "<NAME>", "<NAME>'s dystopian vision of a totalitarian dictatorship where nothing is true, war is peace, language is abused, and intimacy is forbidden", "https://www.amazon.com/Nineteen-Eighty-Four-Oxford-Worlds-Classics/dp/0198829191/ref=tmm_pap_swatch_0?_encoding=UTF8&qid=&sr=", "https://m.media-amazon.com/images/I/41E9Z5XaHcL.jpg", 3);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("The Dead Don't Die", "<NAME>", "<NAME>'s absurdist take on the genre of zombie movies, with a socially-critical subtext", "https://www.imdb.com/title/tt8695030/", "https://upload.wikimedia.org/wikipedia/en/7/75/The_Dead_Don%27t_Die.jpeg", 2);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("Sterling Silver Back Scratcher", "Things You Might Need 2.0", "That indispensable self-care tool for the itch that just won't go away", "https://www.scullyandscully.com/gift-ideas/luxury-mens-gifts/mens-accessories/sterling-silver-ebony-backscratcher.axd", "https://johnsurtees.co.uk/wp-content/uploads/2013/12/M074.jpg", 2);
INSERT INTO gifts (gift, author, gift_desc, gift_url, img_url, UserId)
VALUES ("Ast<NAME> DB11", "Her Majesty the Snow Queen", "That perfect car to show how sorry you are for being green and envious", "https://www.caranddriver.com/aston-martin/db11", "https://www.motortrend.com/uploads/sites/5/2018/05/2019-Aston-Martin-DB11-AMR-Signature-Edition-front-three-quarter-in-motion-02.jpg?fit=around%7C875:492", 5);
package kata
/**
link: https://www.codewars.com/kata/585d7d5adb20cf33cb000235
*/
/** SITUATION:
There is an array with some numbers. All numbers are equal except for one. Try to find it!
findUniq([ 1, 1, 1, 2, 1, 1 ]) === 2
findUniq([ 0, 0, 0.55, 0, 0 ]) === 0.55
It’s guaranteed that array contains at least 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
1. Find the unique number (this kata)
2. Find the unique string
3. Find The Unique
*/
// FindUniq returns the single value in arr that differs from all the others.
// The input is guaranteed to contain at least 3 numbers, all equal except one.
//
// Improvement: the previous frequency-map version allocated O(n) memory and
// always scanned the full slice; per the kata's note about very large inputs,
// this version uses O(1) memory and returns as soon as the outlier is found.
func FindUniq(arr []float32) float32 {
	// The common value must appear at least twice among the first three
	// elements, so it can be identified without scanning the whole slice.
	if arr[0] != arr[1] {
		// The outlier is one of the first two; arr[2] breaks the tie.
		if arr[0] == arr[2] {
			return arr[1]
		}
		return arr[0]
	}
	common := arr[0]
	for _, v := range arr[2:] {
		if v != common {
			return v
		}
	}
	// Parity with the original implementation: reached only if the input
	// violates the guarantee and no unique value exists.
	return 0.0
}
|
module Chikyu::Sdk
  # Abstract base class for API resources: shared response handling and URL
  # construction.
  class ApiResource
    # Unwraps an API response.
    #
    # On HTTP success the body is JSON-parsed (when it arrives as a raw
    # String) and the :data payload is returned; an application-level
    # :has_error flag raises ApiExecuteError. Non-success responses are
    # dumped for debugging and raise HttpError.
    def self.handle_response(path, params, res)
      if res.success?
        body = res.body
        data = body.instance_of?(String) ? JSON.parse(body, symbolize_names: true) : body
        if data[:has_error]
          # Fix: the message previously ended with a stray "}" after params.
          raise ApiExecuteError, "APIの実行に失敗: message=#{data[:message]} / path=#{path} / params=#{params}"
        end
        return data[:data]
      else
        p "response_code: #{res.status}"
        p "response_body: #{res.body}"
      end
      raise HttpError, "リクエストの送信に失敗しました: code=#{res.status} / body=#{res.body}"
    end

    # Builds "<host><env>/api/v2/<api_class>/<path>"; pass with_host=false for
    # a host-relative URL.
    def self.build_url(api_class, api_path, with_host = true)
      res = with_host ? build_host : ''
      # Strip a leading slash so the joined URL never contains "//".
      path = api_path.start_with?('/') ? api_path[1..-1] : api_path
      env = ApiConfig.env_name.empty? ? '' : "/#{ApiConfig.env_name}"
      "#{res}#{env}/api/v2/#{api_class}/#{path}"
    end

    # "protocol://host" prefix taken from the global ApiConfig.
    def self.build_host
      "#{ApiConfig.protocol}://#{ApiConfig.host}"
    end
  end
end
<reponame>psema4/Atomic-OS
/*==================================================== -*- C++ -*-
* tcl.js "A Tcl implementation in Javascript"
*
* Patched for Atomic OS use by <NAME> 2011 (<http://psema4.github.com/Atomic-OS/>)
*
* Released under the same terms as Tcl itself.
* (BSD license found at <http://www.tcl.tk/software/tcltk/license.html>)
*
* Based on Picol by <NAME> (<http://antirez.com/page/picol>)
* (c) <NAME> 2007
* <NAME> 2007: cleanup, additions
* vim: syntax=javascript autoindent softtabwidth=4
*/
_step = 0 // set to 1 for debugging
function TclInterp () {
this.callframe = new Array(new Object());
this.level = 0;
this.commands = new Object();
this.procs = new Array();
this.OK = 0;
this.RET = 1;
this.BRK = 2;
this.CNT = 3;
this.getVar = function(name) {
var nm = name.toString();
if (nm.match("^::")) {
var val = this.callframe[0][nm.substr(2)];
} else {
var val = this.callframe[this.level][name];
}
if (val == null) throw "No such variable: "+name;
return val;
}
this.setVar = function(name, val) {
var nm = name.toString();
if (nm.match("^::")) {
this.callframe[0][nm.substr(2)] = val;
} else {
this.callframe[this.level][name] = val;
}
return val;
}
this.incrLevel = function() {
this.callframe[++this.level] = new Object();
return this.level;
}
this.decrLevel = function() {
this.callframe[this.level] = null;
this.level--;
if (this.level<0) throw "Exit application";
this.result = null;
}
this.getCommand = function(name) {
try {
return this.commands[name];
} catch (e) {throw "No such command '"+name+"'";}
}
this.registerCommand = function(name, func, privdata) {
if (func == null) throw "No such function: "+name;
this.commands[name] = new TclCommand(func, privdata);
}
this.renameCommand = function (name, newname) {
this.commands[newname] = this.commands[name];
if (this.procs[name]) {
this.procs[name] = null;
this.procs[newname] = true;
}
this.commands[name] = null;
}
this.registerSubCommand = function(name, subcmd, func, privdata) {
if (func == null) throw "No such subcommand: "+ name +" " + subcmd;
var path = name.split(" ");
var ens;
name = path.shift();
var cmd = this.commands[name];
if (cmd == null) {
ens = new Object();
ens["subcommands"] = new TclCommand(Tcl.InfoSubcommands, null);
this.commands[name] = new TclCommand(Tcl.EnsembleCommand, null, ens);
}
ens = this.commands[name].ensemble;
if (ens == null) throw "Not an ensemble command: '"+name+"'";
// walks deeply into the subcommands tree
while (path.length > 0) {
name = path.shift();
cmd = ens[name];
if (cmd == null) {
cmd = new TclCommand(Tcl.EnsembleCommand, null, new Object());
ens[name] = cmd;
ens = cmd.ensemble;
ens["subcommands"] = new TclCommand(Tcl.InfoSubcommands, null);
}
}
ens[subcmd] = new TclCommand(func, privdata);
}
this.eval = function (code) {
try {
return this.eval2(code);
} catch (e) {
// SGE
// if (! confirm(e+"/" + e.description + "\nwhile evaluating "+code.substr(0,128)+"...") ) throw(e);
throw(e);
}
}
this.eval2 = function(code) {
this.code = this.OK;
var parser = new TclParser(code);
var args = new Array(0);
var first = true;
var text, prevtype, result;
result = "";
while (true) {
prevtype = parser.type;
try {
parser.getToken();
} catch (e) {break;}
if (parser.type == (parser.EOF)) break;
text = parser.getText();
if (parser.type == (parser.VAR)) {
try {
text = this.getVar(text);
} catch (e) {throw "No such variable '" + text + "'";}
} else if (parser.type == (parser.CMD)) {
try {
text = this.eval2(text);
} catch (e) {throw (e + "\nwhile parsing \"" + text + "\"");}
} else if (parser.type == (parser.ESC)) {
// escape handling missing!
} else if (parser.type == (parser.SEP)) {
prevtype = parser.type;
continue;
}
text = this.objectify(text);
if (parser.type ==parser.EOL || parser.type == parser.EOF) {
prevtype = parser.type;
if (args.length > 0) {
result = this.call(args);
if (this.code != this.OK) return this.objectify(result);
}
args = new Array();
continue;
}
if (prevtype == parser.SEP || prevtype == parser.EOL) {
args.push(text);
} else {
args[args.length-1] = args[args.length-1].toString() + text.toString();
}
}
if (args.length > 0) result = this.call(args);
return this.objectify(result);
}
//---------------------------------- Commands in alphabetical order
this.registerCommand("and", function (interp, args) {
this.requireExactArgc(args, 3);
var a = interp.objectify(args[1]).toBoolean();
var b = interp.objectify(args[2]).toBoolean();
return (a && b);
});
this.registerCommand("append", function (interp, args) {
this.requireMinArgc(args, 2);
var vname = args[1].toString();
if (interp.callframe[interp.level][vname] != null) {
var str = interp.getVar(vname);
} else var str = "";
for (var i = 2; i < args.length; i++) str += args[i].toString();
interp.setVar(vname, str);
return str;
});
this.registerCommand("break", function (interp, args) {
interp.code = interp.BRK;
return;
});
this.registerCommand("continue", function (interp, args) {
interp.code = interp.CNT;
return;
});
this.registerSubCommand("clock", "format", function (interp, args) {
var now = new Date();
now.setTime(args[1]);
return now.toString();
});
this.registerSubCommand("clock", "scan", function (interp, args) {
return Date.parse(args[1]);
});
this.registerSubCommand("clock", "seconds", function (interp, args) {
return (new Date()).valueOf();
});
if ( (typeof(jQuery) != 'undefined') || (typeof(Zepto) != 'undefined') ) {
console.log('Tcl found jQuery or Zepto during startup, registering dom command');
this.registerCommand("dom", function (interp, args) {
var selector = args[1].toString();
var fn = args[2].toString();
args = args.slice(3);
for (var i in args) args[i] = args[i].toString();
var q = $(selector);
q[fn].apply(q,args);
return "dom " + selector;
});
}
this.registerCommand("eval",function (interp, args) {
this.requireMinArgc(args, 2);
for (var i = 1; i < args.length; i++) args[i] = args[i].toString();
if (args.length == 2) var code = args[1];
else var code = args.slice(1).join(" ");
return interp.eval(code);
});
sqrt = Math.sqrt; // "publish" other Math.* functions as needed
this.registerCommand("expr", function (interp, args) {
return eval(args[1].toString());
});
this.registerCommand("for", function (interp, args) {
this.requireExactArgc(args, 5);
interp.eval(args[1].toString());
if(interp.code != interp.OK) return;
var cond = "set _ "+args[2].toString();
var step = args[3].toString();
var body = args[4].toString();
interp.inLoop = true;
interp.code = interp.OK;
while (true) {
test = interp.objectify(interp.eval(cond));
if (!test.toBoolean()) break;
interp.eval(body);
var ic = interp.code; // tested after step command
interp.eval(step);
if(ic == interp.BRK) break;
if(ic == interp.CNT) continue;
}
interp.inLoop = false;
if(interp.code == interp.BRK || interp.code == interp.CNT)
interp.code=interp.OK;
return "";
});
this.registerCommand("foreach", function (interp, args) {
this.requireExactArgc(args, 4);
var list = args[2].toList();
var body = args[3].toString();
var res = "";
interp.inLoop = true;
interp.code = interp.OK;
for(i in list) {
interp.setVar(args[1],interp.objectify(list[i]));
interp.eval(body);
if(interp.code == interp.BRK) break;
if(interp.code == interp.CNT) continue;
}
interp.inLoop = false;
if(interp.code == interp.BRK || interp.code == interp.CNT)
interp.code=interp.OK;
return "";
});
this.registerCommand("gets", function (interp, args) {
this.requireArgcRange(args, 2, 3);
var reply = prompt(args[1],"");
if(args[2] != null) {
interp.setVar(args[2],interp.objectify(reply));
return reply.length;
} else return reply;
});
this.registerCommand("if", function (interp, args) {
this.requireMinArgc(args, 3);
var test = interp.objectify(interp.eval("set _ "+args[1].toString()));
if (test.toBoolean()) return interp.eval(args[2].toString());
if (args.length == 3) return;
for (var i = 3; i < args.length; ) {
switch (args[i].toString()) {
case "else":
this.requireExactArgc(args, i + 2);
return interp.eval(args[i+1].toString());
case "elseif":
this.requireMinArgc(args, i + 3);
test = interp.objectify(interp.eval("set _ "+args[i+1].toString()));
if (test.toBoolean())
return interp.eval(args[i+2].toString());
i += 3;
break;
default:
throw "Expected 'else' or 'elseif', got "+ args[i];
}
}
});
this.registerCommand("incr", function (interp, args) {
this.requireArgcRange(args, 2, 3);
var name = args[1];
if (args.length == 2) var incr = 1;
else var incr = interp.objectify(args[2]).toInteger();
incr += interp.getVar(name).toInteger();
return interp.setVar(name, new TclObject(incr, "INTEGER"));
});
this.registerSubCommand("info", "body", function (interp, args) {
this.requireExactArgc(args, 2);
var name = args[1].toString();
if (!interp.procs[name]) throw "Not a procedure: "+name;
return interp.getCommand(name).privdata[1];
});
this.registerSubCommand("info", "commands", function (interp, args) {
return interp.mkList(interp.commands);
});
this.registerSubCommand("info", "globals", function (interp, args) {
return interp.mkList(interp.callframe[0]);
});
this.registerSubCommand("info", "isensemble", function (interp, args) {
this.requireExactArgc(args, 2);
var name = args[1].toString();
return (interp.getCommand(name).ensemble != null);
});
this.registerSubCommand("info", "procs", function (interp, args) {
return interp.mkList(interp.procs);
});
this.registerSubCommand("info", "vars", function (interp, args) {
return interp.mkList(interp.callframe[interp.level]);
});
this.registerCommand("jseval", function (interp, args) {
return eval(args[1].toString());
});
this.registerCommand("lappend", function (interp, args) {
this.requireMinArgc(args, 2);
var vname = args[1].toString();
if (interp.callframe[interp.level][vname] != null) {
var list = interp.getVar(vname);
} else var list = new TclObject([]);
list.toList();
for (var i = 2; i < args.length; i++) {
list.content.push(interp.objectify(args[i]));
}
interp.setVar(vname, list);
return list;
});
this.registerCommand("lindex", function (interp, args) {
this.requireMinArgc(args, 3);
var list = interp.objectify(args[1]);
for (var i = 2; i < args.length; i++) {
try {
var index = list.listIndex(args[i]);
} catch (e) {
if (e == "Index out of bounds") return "";
throw e;
}
list = list.content[index];
}
return interp.objectify(list);
});
this.registerCommand("list", function (interp, args) {
args.shift();
return new TclObject(args);
});
this.registerCommand("llength", function (interp, args) {
this.requireExactArgc(args, 2);
return args[1].toList().length;
});
this.registerCommand("lrange", function (interp, args) {
this.requireExactArgc(args, 4);
var list = interp.objectify(args[1]);
var start = list.listIndex(args[2]);
var end = list.listIndex(args[3])+1;
try {
return list.content.slice(start, end);
} catch (e) {return new Array();}
});
this.registerCommand("lset", function (interp, args) {
this.requireMinArgc(args, 4);
var list = interp.getVar(args[1].toString());
var elt = list;
for (var i = 2; i < args.length-2; i++) {
elt.toList();
elt = interp.objectify(elt.content[elt.listIndex(args[i])]);
}
elt.toList();
i = args.length - 2;
elt.content[elt.listIndex(args[i])] = interp.objectify(args[i+1]);
return list;
});
// lsort list -- return the list sorted with JavaScript's default ordering.
this.registerCommand("lsort", function (interp, args) {
    this.requireExactArgc(args, 2);
    var items = args[1].toList();
    return items.sort();
});
// not expr -- logical negation of a boolean value.
this.registerCommand("not", function (interp, args) {
    this.requireExactArgc(args, 2);
    var value = interp.objectify(args[1]).toBoolean();
    return !value;
});
// or a b -- logical disjunction of two boolean values.
this.registerCommand("or", function (interp, args) {
    this.requireExactArgc(args, 3);
    var lhs = interp.objectify(args[1]).toBoolean();
    var rhs = interp.objectify(args[2]).toBoolean();
    return lhs || rhs;
});
// puts string -- write to the shell's stdout.
this.registerCommand("puts", function (interp, args) {
    this.requireExactArgc(args, 2);
    // FIXME: Redirection support
    system.proc.wash.fd[1].write(args[1]);
});
// proc name arglist body -- define a scripted command backed by Tcl.Proc;
// the formal argument list and the body text travel as the command's
// private data.
this.registerCommand("proc", function (interp, args) {
    this.requireExactArgc(args, 4);
    var procName = args[1].toString();
    var formals = interp.parseList(args[2]);
    var bodyText = args[3].toString();
    interp.commands[procName] = new TclCommand(Tcl.Proc, [formals, bodyText]);
    interp.procs[procName] = true;
});
// regexp exp string -- true when the pattern matches anywhere in the string.
this.registerCommand("regexp", function (interp, args) {
    this.requireExactArgc(args, 3);
    var pattern = new RegExp(args[1].toString());
    return args[2].toString().search(pattern) > -1;
});
// rename oldName newName -- rename an existing command.
this.registerCommand("rename", function (interp, args) {
    this.requireExactArgc(args, 3);
    interp.renameCommand(args[1], args[2]);
});
// return ?value? -- stop evaluation of the current proc, yielding the value.
this.registerCommand("return", function (interp, args) {
    this.requireArgcRange(args, 1, 2);
    var result = args[1];
    interp.code = interp.RET;
    return result;
});
// set varName ?value? -- write (when a value is given) and/or read a variable.
this.registerCommand("set", function (interp, args) {
    this.requireArgcRange(args, 2, 3);
    var varName = args[1];
    if (args.length == 3)
        interp.setVar(varName, args[2]);
    return interp.getVar(varName);
});
// source url -- fetch a script over HTTP and evaluate it.
this.registerCommand("source", function (interp, args) {
    this.requireExactArgc(args, 2);
    return Tcl.Source(interp, args[1]);
});
// string equal a b -- exact string comparison.
this.registerSubCommand("string", "equal", function (interp, args) {
    this.requireExactArgc(args, 3);
    var left = args[1].toString();
    var right = args[2].toString();
    return left == right;
});
// string index s i -- the character at index i, or "" when out of range.
this.registerSubCommand("string", "index", function (interp, args) {
    this.requireExactArgc(args, 3);
    var text = args[1].toString();
    try {
        return text.charAt(args[1].stringIndex(args[2]));
    } catch (e) {
        return "";
    }
});
// string range s first last -- substring from first through last inclusive;
// "" for out-of-range or reversed indices.
this.registerSubCommand("string", "range", function (interp, args) {
    this.requireExactArgc(args, 4);
    var obj = args[1];
    try {
        var from = obj.stringIndex(args[2].toString());
        var to = obj.stringIndex(args[3].toString());
        if (from > to) return "";
        return obj.toString().substring(from, to + 1);
    } catch (e) {
        return "";
    }
});
// Millisecond timestamp used by the `time` command to measure durations.
// Bug fix: the old implementation returned getSeconds()*1000 +
// getMilliseconds(), which wraps around every minute and produced negative
// (or nonsense) timings whenever a minute boundary was crossed during the
// measured script.  getTime() (ms since the epoch) does not wrap.
function sec_msec () {
    return new Date().getTime();
}
// time script ?count? -- run the script `count` times (default 1) and report
// the average wall-clock duration per iteration.
this.registerCommand("time", function (interp, args) {
    this.requireArgcRange(args, 2, 3);
    var reps = (args.length == 3) ? args[2] : 1;
    var started = sec_msec();
    for (var i = 0; i < reps; i++)
        interp.eval(args[1].toString());
    return (sec_msec() - started) * 1000 / reps + " microseconds per iteration";
});
// unset varName -- remove a variable by storing null in it.
this.registerCommand("unset", function (interp, args) {
    this.requireExactArgc(args, 2);
    interp.setVar(args[1], null);
});
// while cond body -- evaluate `body` while `cond` is true, honouring the
// interpreter's break/continue return codes.
this.registerCommand("while", function (interp, args) {
    this.requireExactArgc(args, 3);
    // The condition is re-evaluated each iteration via `set _ <cond>` so
    // that variable and command substitution happen on every pass.
    var cond = "set _ " + args[1].toString();
    var body = args[2].toString();
    var res = "";
    interp.inLoop = true;
    interp.code = interp.OK;
    while (true) {
        // Bug fix: `test` was assigned without `var`, leaking a global that
        // nested while loops could clobber.
        var test = interp.objectify(interp.eval(cond));
        if (!test.toBoolean()) break;
        res = interp.eval(body);
        if (interp.code == interp.CNT) continue;
        if (interp.code != interp.OK) break;
    }
    interp.inLoop = false;
    if (interp.code == interp.BRK || interp.code == interp.CNT)
        interp.code = interp.OK;
    return interp.objectify(res);
});
// native cmdname {function(interp, args) {...}}
// Registers a JavaScript function, given as source text, as a Tcl command
// (one-word name) or subcommand (multi-word name).
this.registerCommand("native", function (interp, args) {
    this.requireExactArgc(args, 3);
    var cmd = args[1].toList();
    var func = eval(args[2].toString());
    if (cmd.length == 1) {
        interp.registerCommand(cmd[0].toString(), func);
        return;
    }
    // Bug fix: `base` was assigned without `var`, leaking a global.
    var base = cmd[0].toString();
    cmd.shift();
    interp.registerSubCommand(base, cmd.join(" "), eval(args[2].toString()));
    return;
});
// Apply a binary arithmetic/comparison operator to two already-converted
// JavaScript numbers; shared by all the operator commands registered below.
this.math = function (name, a, b) {
    switch (name) {
        case "+": return a + b;
        case "-": return a - b;
        case "*": return a * b;
        case "/": return a / b;
        case "%": return a % b;
        case "<": return a < b;
        case ">": return a > b;
        case "=": case "==": return a == b;
        case "!=": return a != b;
        default: throw "Unknown operator: '"+name+"'";
    }
}
// Register each arithmetic/comparison operator as a command of the same
// name.  The handler inspects its own name (args[0]) to select the
// operation, and preserves the INTEGER/REAL type tag when both operands
// agree on it.
var ops = ["+","-","*","/","%","<",">","=","==","!="];
// Bug fix: the loop variable was an implicit global (`for (i in ops)`).
for (var i in ops) this.registerCommand(ops[i], function (interp, args) {
    this.requireExactArgc(args, 3);
    var name = args[0].toString();
    var a = interp.objectify(args[1]);
    var b = interp.objectify(args[2]);
    var x = a.getNumber();
    var y = b.getNumber();
    if (a.isInteger() && b.isInteger())
        return new TclObject(interp.math(name, x, y),"INTEGER");
    if (a.isReal() && b.isReal())
        return new TclObject(interp.math(name, x, y),"REAL");
    return new TclObject(interp.math(name, x, y).toString());
});
// Return the enumerable property names of an object as a plain array.
this.mkList = function (x) {
    var names = [];
    for (var key in x) names.push(key);
    return names;
}
// Wrap a value in a TclObject unless it already is one; null becomes "".
this.objectify = function (text) {
    if (text instanceof TclObject) return text;
    if (text == null) text = "";
    return new TclObject(text);
}
// Strip one level of {...} or "..." quoting from a word and wrap it.
this.parseString = function (text) {
    text = text.toString();
    // First+last character together identify the quoting style.
    switch (text.charAt(0)+text.substr(text.length-1)) {
        case "{}":
        case "\"\"":
            text = text.substr(1,text.length-2);
            break;
    }
    return this.objectify(text);
}
// Like parseString, but a braced/quoted word becomes a one-element list
// instead of having its quoting stripped.
this.parseList = function (text) {
    text = text.toString();
    switch (text.charAt(0)+text.substr(text.length-1)) {
        case "{}":
        case "\"\"":
            text = new Array(text);
            break;
    }
    return this.objectify(text);
}
// Invoke a command (args[0]) with the given argument vector and interpret
// the resulting return code: OK/RET pass the value through, BRK/CNT are only
// legal inside a loop, anything else is an error.
this.call = function(args) {
    // `_step` is an external debugging flag; when set, every command call
    // must be confirmed interactively.
    if(_step && !confirm("this.call "+args)) throw "user abort";
    var func = this.getCommand(args[0].toString());
    var r = func.call(this,args);
    switch (this.code) {
        case this.OK:
        case this.RET:
            return r;
        case this.BRK:
            if (!this.inLoop) throw "Invoked break outside of a loop";
            break;
        case this.CNT:
            if (!this.inLoop) throw "Invoked continue outside of a loop";
            break;
        default:
            throw "Unknown return code " + this.code;
    }
    return r;
}
// When running inside a browser with jQuery available, redefine `puts` to
// append its output to the document body instead of writing to a terminal fd.
if(typeof(jQuery) != 'undefined') {
    this.eval('proc puts s {dom body appendTo \"<div style=\'font-family:Courier\'>$s</div>\";list}');
}
} // END TclInterp()
// Shared namespace: regular expressions and helper functions used by the
// interpreter, the parser and TclObject conversions.
var Tcl = new Object();
Tcl.isReal = new RegExp("^[+\\-]?[0-9]+\\.[0-9]*([eE][+\\-]?[0-9]+)?$"); // real literal
Tcl.isDecimal = new RegExp("^[+\\-]?[1-9][0-9]*$"); // decimal integer (no leading zero)
Tcl.isHex = new RegExp("^0x[0-9a-fA-F]+$"); // hexadecimal integer
Tcl.isOctal = new RegExp("^[+\\-]?0[0-7]*$"); // octal integer (leading zero)
Tcl.isHexSeq = new RegExp("[0-9a-fA-F]*"); // run of hex digits (escape sequences)
Tcl.isOctalSeq = new RegExp("[0-7]*"); // run of octal digits (escape sequences)
Tcl.isList = new RegExp("[\\{\\} ]"); // word contains list separators/braces?
Tcl.isNested = new RegExp("^\\{.*\\}$"); // word is already brace-quoted?
Tcl.getVar = new RegExp("^[a-zA-Z0-9_]+", "g"); // variable name after '$'
// Fetch a script synchronously over HTTP and evaluate it in `interp`.
Tcl.Source = function (interp, url) {
    var xhr_object = null;
    if(window.ActiveXObject) // Internet Explorer
        xhr_object = new ActiveXObject("Microsoft.XMLHTTP");
    else if(window.XMLHttpRequest) // Firefox
        xhr_object = new XMLHttpRequest();
    else { // browser does not support XMLHttpRequest
        alert("Your browser does not support XMLHTTP requests. " +
            "Sorry that we cannot deliver this page.");
        return;
    }
    // Synchronous request: eval only runs once the full body has arrived.
    xhr_object.open("GET", url, false);
    xhr_object.send(null);
    return interp.eval(xhr_object.responseText);
}
// Implementation behind scripted procs: bind the actual arguments to the
// formal argument list stored in privdata, evaluate the body in a new call
// frame, and pop the frame on both success and failure.
Tcl.Proc = function (interp, args) {
    var priv = this.privdata; // [formal argument list, body text]
    interp.incrLevel();
    var arglist = priv[0].toList();
    var body = priv[1];
    args.shift(); // drop the command name
    for (var i = 0; i < arglist.length; i++) {
        var name = arglist[i].toString();
        if (i >= args.length) {
            // Ran out of actual arguments: only acceptable for a trailing "args".
            if (name == "args") {
                interp.setVar("args", Tcl.empty);
                break;
            }
        }
        if (Tcl.isList.test(name)) {
            // {name default} pair: use the default when no actual was given.
            name = interp.parseString(name).toList();
            if (name[0] == "args") throw "'args' defaults to the empty string";
            if (i >= args.length)
                interp.setVar(name.shift(), interp.parseString(name.join(" ")));
            else interp.setVar(name[0], interp.objectify(args[i]));
        } else if (name == "args") {
            // "args" swallows all remaining actual arguments as a list.
            interp.setVar("args", new TclObject(args.slice(i, args.length)));
            break;
        }
        // NOTE(review): this setVar also runs after the {name default}
        // branch above (there is no `else`), re-binding with `name` now
        // being a list -- looks unintended; confirm before relying on
        // defaulted arguments.
        interp.setVar(name, interp.objectify(args[i]));
    }
    // `name` and `i` intentionally escape the loop (var scoping).
    if (name == "args" && i+1 < arglist.length)
        throw "'args' should be the last argument";
    try {
        var r = interp.eval(body);
        interp.code = interp.OK;
        interp.decrLevel();
        return r;
    } catch (e) {
        interp.decrLevel();
        throw "Tcl.Proc exception "+e;
    }
}
/** Manage subcommands: dispatch "main sub ..." to the handler registered
 *  under `sub` in this command's ensemble table. */
Tcl.EnsembleCommand = function (interp, args) {
    var sub = args[1].toString();
    // The joined name (e.g. "stringequal") becomes the handler's args[0]
    // and is used in the error message below.
    var main = args.shift().toString()+sub;
    args[0] = main;
    var ens = this.ensemble;
    if (ens == null || ens[sub] == null) {
        throw "Not an ensemble command: "+main;
    }
    return ens[sub].call(interp, args);
}
/** Get the subcommand names of the current ensemble command as a list. */
Tcl.InfoSubcommands = function (interp, args) {
    var names = [];
    for (var sub in this.ensemble) names.push(sub);
    return interp.objectify(names);
}
// A Tcl value with a cached representation: plain text, a list (array of
// TclObjects), an integer, a real, or a boolean.  The to*() methods convert
// the cached content in place and update `type`.
function TclObject(text) {
    // Type tags for `this.type`.
    this.TEXT = 0;
    this.LIST = 1;
    this.INTEGER = 2;
    this.REAL = 3;
    this.BOOL = 4;
    // NOTE(review): this switch inspects arguments[0] (the value itself),
    // not the optional second argument, so `new TclObject(v, "INTEGER")`
    // only gets a non-TEXT tag when v itself equals "INTEGER" etc.  It looks
    // like arguments[1] was intended -- confirm before changing, since the
    // operator commands rely on the current behaviour.
    switch (arguments[0]) {
        case "LIST":
        case "INTEGER":
        case "REAL":
        case "BOOL":
            this.type = this[arguments[0]];
            break;
        default:
            this.type = this.TEXT;
            if (text instanceof Array) this.type = this.LIST;
            else text = text.toString();
            break;
    }
    this.content = text;
    // Resolve an index spec against the string form ("end", "end-N", "3").
    this.stringIndex = function (i) {
        this.toString();
        return this.index(i, this.content.length);
    }
    // Resolve an index spec against the list form.
    this.listIndex = function (i) {
        this.toList();
        return this.index(i, this.content.length);
    }
    // Shared index resolver; throws "Index out of bounds" outside [0, len).
    this.index = function (i, len) {
        var index = i.toString();
        if (index.substring(0,4) == "end-")
            index = len - parseInt(index.substring(4)) - 1;
        else if (index == "end") index = len-1;
        else index = parseInt(index);
        if (isNaN(index)) throw "Bad index "+i;
        if (index < 0 || index >= len) throw "Index out of bounds";
        return index;
    }
    this.isInteger = function () {return (this.type == this.INTEGER);}
    this.isReal = function () {return (this.type == this.REAL);}
    // Join a list into its textual form, brace-quoting elements that contain
    // separators and are not already braced.
    this.getString = function (list, nested) {
        var res = new Array();
        for (var i in list) {
            res[i] = list[i].toString();
            if (Tcl.isList.test(res[i]) && !Tcl.isNested.test(res[i]))
                res[i] = "{" + res[i] + "}";
        }
        if (res.length == 1) return res[0];
        return res.join(" ");
    }
    // Convert the cached content to TEXT (in place) and return it.
    this.toString = function () {
        if (this.type != this.TEXT) {
            if (this.type == this.LIST)
                this.content = this.getString(this.content);
            else this.content = this.content.toString();
            this.type = this.TEXT;
        }
        return this.content;
    }
    // Split a string into an array of TclObjects using the list parser.
    this.getList = function (text) {
        if (text.charAt(0) == "{" && text.charAt(text.length-1) == "}")
            text = text.substring(1, text.length-1);
        if (text == "") return [];
        var parser = new TclParser(text.toString());
        var content = new Array();
        for (var i = 0; ; i++) {
            parser.parseList();
            content[i] = new TclObject(parser.getText());
            if (parser.type == parser.EOL || parser.type == parser.ESC)
                break;
        }
        return content;
    }
    // Convert the cached content to LIST (in place) and return the array.
    this.toList = function () {
        if (this.type != this.LIST) {
            // NOTE(review): for INTEGER/REAL/BOOL content this assigns to
            // content[0] of a primitive, which is a silent no-op in JS --
            // confirm whether `this.content = [this.content]` was intended.
            if (this.type != this.TEXT) this.content[0] = this.content;
            else this.content = this.getList(this.content);
            this.type = this.LIST;
        }
        return this.content;
    }
    // Convert to INTEGER, accepting decimal, 0x hex, and leading-0 octal.
    this.toInteger = function () {
        if (this.type == this.INTEGER) return this.content;
        this.toString();
        if (this.content.match(Tcl.isHex))
            this.content = parseInt(this.content.substring(2), 16);
        else if (this.content.match(Tcl.isOctal))
            this.content = parseInt(this.content, 8);
        else if (this.content.match(Tcl.isDecimal))
            this.content = parseInt(this.content);
        else throw "Not an integer: '"+this.content+"'";
        if (isNaN(this.content)) throw "Not an integer: '"+this.content+"'";
        this.type = this.INTEGER;
        return this.content;
    }
    // Parse a real literal strictly (parseFloat alone is too permissive).
    this.getFloat = function (text) {
        if (!text.toString().match(Tcl.isReal))
            throw "Not a real: '"+text+"'";
        return parseFloat(text);
    }
    // Convert to REAL (in place) and return the number.
    this.toReal = function () {
        if (this.type == this.REAL)
            return this.content;
        this.toString();
        // parseFloat doesn't control all the string, so need to check it
        this.content = this.getFloat(this.content);
        if (isNaN(this.content)) throw "Not a real: '"+this.content+"'";
        this.type = this.REAL;
        return this.content;
    }
    // Integer if possible, otherwise real.
    this.getNumber = function () {
        try {
            return this.toInteger();
        } catch (e) {return this.toReal();}
    }
    // Convert to BOOL: non-zero integer, or yes/true/on vs no/false/off.
    this.toBoolean = function () {
        if (this.type == this.BOOL) return this.content;
        try {
            this.content = (this.toInteger() != 0);
        } catch (e) {
            var t = this.content;
            if (t instanceof Boolean) return t;
            switch (t.toString().toLowerCase()) {
                case "yes":
                case "true":
                case "on":
                    this.content = true;
                    break;
                case "false":
                case "off":
                case "no":
                    this.content = false;
                    break;
                default:
                    throw "Boolean expected, got: '"+this.content+"'";
            }
        }
        this.type = this.BOOL;
        return this.content;
    }
} // END TclObject()
// A registered command: wraps the implementing function plus optional
// private data and ensemble table, and provides arg-count validation
// helpers shared by all command implementations.
function TclCommand(func, privdata) {
    if (func == null) throw "No such function";
    this.func = func;
    this.privdata = privdata;
    this.ensemble = arguments[2];
    // Invoke the command and mirror its (objectified) result into `_`.
    this.call = function (interp, args) {
        var result = interp.objectify((this.func)(interp, args));
        if (result != null) interp.setVar("_", result);
        return result;
    }
    this.requireExactArgc = function (args, argc) {
        if (args.length != argc)
            throw argc + " arguments expected, got " + args.length;
    }
    this.requireMinArgc = function (args, argc) {
        if (args.length < argc)
            throw argc + " arguments expected at least, got " + args.length;
    }
    this.requireArgcRange = function (args, min, max) {
        if (args.length < min || args.length > max)
            throw min + " to " + max + " arguments expected, got " + args.length;
    }
}
// Tokenizer for Tcl scripts and lists.  The current token is the inclusive
// substring text[start..end]; `type` classifies it (SEP/STR/EOL/EOF/ESC/
// CMD/VAR).
function TclParser(text) {
    this.OK = 0;
    // Token types.
    this.SEP = 0; // whitespace separating words
    this.STR = 1; // braced/quoted string
    this.EOL = 2; // end of a command
    this.EOF = 3; // end of input
    this.ESC = 4; // bare word (subject to substitution)
    this.CMD = 5; // [command] substitution
    this.VAR = 6; // $variable substitution
    this.text = text;
    this.start = 0;           // first index of the current token
    this.end = 0;             // last index of the current token (inclusive)
    this.insidequote = false; // currently inside "..."?
    this.index = 0;           // cursor into text
    this.len = text.length;   // characters remaining from the cursor
    this.type = this.EOL;
    this.cur = this.text.charAt(0); // character under the cursor
    // Return the text of the current token.
    this.getText = function () {
        return this.text.substring(this.start,this.end+1);
    }
    // Scan a bare or quoted word up to the next separator/substitution.
    this.parseString = function () {
        var newword = (this.type==this.SEP || this.type == this.EOL || this.type == this.STR);
        if (newword && this.cur == "{") return this.parseBrace();
        else if (newword && this.cur == '"') {
            this.insidequote = true;
            this.feedchar();
        }
        this.start = this.index;
        while (true) {
            if (this.len == 0) {
                this.end = this.index-1;
                this.type = this.ESC;
                return this.OK;
            }
            if (this.cur == "\\") {
                if (this.len >= 2) this.feedSequence();
            } else if ("$[ \t\n\r;".indexOf(this.cur)>=0) {
                // $ and [ always end the word; whitespace and ';' only
                // outside quotes.
                if ("$[".indexOf(this.cur)>=0 || !this.insidequote) {
                    this.end = this.index-1;
                    this.type = this.ESC;
                    return this.OK;
                }
            } else if (this.cur == '"' && this.insidequote) {
                this.end = this.index-1;
                this.type = this.ESC;
                this.feedchar();
                this.insidequote = false;
                return this.OK;
            }
            this.feedchar();
        }
        return this.OK; // unreached
    }
    // Scan one list element (whitespace-separated at brace level zero).
    this.parseList = function () {
        // Bug fix: `level` was assigned without `var` and leaked a global.
        var level = 0;
        this.start = this.index;
        while (true) {
            if (this.len == 0) {
                this.end = this.index;
                this.type = this.EOL;
                return;
            }
            switch (this.cur) {
                case "\\":
                    if (this.len >= 2) this.feedSequence();
                    break;
                case " ":
                case "\t":
                case "\n":
                case "\r":
                    if (level > 0) break;
                    this.end = this.index - 1;
                    this.type = this.SEP;
                    this.feedchar();
                    return;
                case '{':
                    level++;
                    break;
                case '}':
                    level--;
                    break;
            }
            this.feedchar();
        }
        // (The loop above always exits via return; dead trailing code removed.)
    }
    // Skip a run of whitespace between words.
    this.parseSep = function () {
        this.start = this.index;
        while (" \t\r\n".indexOf(this.cur)>=0) this.feedchar();
        this.end = this.index - 1;
        this.type = this.SEP;
        return this.OK;
    }
    // Skip command terminators (newline/';') and adjacent whitespace.
    this.parseEol = function () {
        this.start = this.index;
        while(" \t\n\r;".indexOf(this.cur)>=0) this.feedchar();
        this.end = this.index - 1;
        this.type = this.EOL;
        return this.OK;
    }
    // Scan a [command] substitution, tracking nested [ ] and { } (brackets
    // inside braces do not count toward bracket nesting).
    this.parseCommand = function () {
        var level = 1;
        var blevel = 0;
        this.feedcharstart();
        while (true) {
            if (this.len == 0) break;
            if (this.cur == "[" && blevel == 0)
                level++;
            else if (this.cur == "]" && blevel == 0) {
                level--;
                if (level == 0) break;
            } else if (this.cur == "\\") {
                this.feedSequence();
            } else if (this.cur == "{") {
                blevel++;
            } else if (this.cur == "}") {
                if (blevel != 0) blevel--;
            }
            this.feedchar();
        }
        this.end = this.index-1;
        this.type = this.CMD;
        if (this.cur == "]")
            this.feedchar();
        return this.OK;
    }
    // Scan a $variable name; a lone '$' degrades to a plain string token.
    this.parseVar = function () {
        this.feedcharstart();
        this.end = this.index + this.text.substring(this.index).match(Tcl.getVar).toString().length-1;
        if (this.end == this.index-1) {
            this.end = --this.index;
            this.type = this.STR;
        } else this.type = this.VAR;
        this.setPos(this.end+1);
        return this.OK;
    }
    // Scan a {braced} string, honouring nesting and backslash sequences.
    this.parseBrace = function () {
        var level = 1;
        this.feedcharstart();
        while (true) {
            if (this.len > 1 && this.cur == "\\") {
                this.feedSequence();
            } else if (this.len == 0 || this.cur == "}") {
                level--;
                if (level == 0 || this.len == 0) {
                    this.end = this.index-1;
                    if (this.len > 0) this.feedchar();
                    this.type = this.STR;
                    return this.OK;
                }
            } else if (this.cur == "{") level++;
            this.feedchar();
        }
        return this.OK; // unreached
    }
    // Skip a #-comment up to the end of the line.
    this.parseComment = function () {
        while (this.cur != "\n" && this.cur != "\r") this.feedchar();
    }
    // Produce the next token, dispatching on the current character.
    this.getToken = function () {
        while (true) {
            if (this.len == 0) {
                if (this.type == this.EOL) this.type = this.EOF;
                if (this.type != this.EOF) this.type = this.EOL;
                return this.OK;
            }
            switch (this.cur) {
                case ' ':
                case '\t':
                    if (this.insidequote) return this.parseString();
                    return this.parseSep();
                case '\n':
                case '\r':
                case ';':
                    if (this.insidequote) return this.parseString();
                    return this.parseEol();
                case '[':
                    return this.parseCommand();
                case '$':
                    return this.parseVar();
            }
            // Comments only start at the beginning of a command.
            if (this.cur == "#" && this.type == this.EOL) {
                this.parseComment();
                continue;
            }
            return this.parseString();
        }
        return this.OK; // unreached
    }
    // Decode the backslash sequence at the cursor; steal() removes the
    // consumed characters from the text.
    this.feedSequence = function () {
        if (this.cur != "\\") throw "Invalid escape sequence";
        var cur = this.steal(1);
        var specials = new Object();
        // NOTE(review): "\a" has no meaning in JavaScript (it is just "a");
        // Tcl's \a is BEL (\x07) -- confirm intended value.
        specials.a = "\a";
        specials.b = "\b";
        specials.f = "\f";
        specials.n = "\n";
        specials.r = "\r";
        specials.t = "\t";
        specials.v = "\v";
        switch (cur) {
            case 'u':
                var hex = this.steal(4);
                if (hex != Tcl.isHexSeq.exec(hex))
                    throw "Invalid unicode escape sequence: "+hex;
                cur = String.fromCharCode(parseInt(hex,16));
                break;
            case 'x':
                var hex = this.steal(2);
                if (hex != Tcl.isHexSeq.exec(hex))
                    throw "Invalid unicode escape sequence: "+hex;
                cur = String.fromCharCode(parseInt(hex,16));
                break;
            case "a":
            case "b":
            case "f":
            case "n":
            case "r":
            case "t":
            case "v":
                cur = specials[cur];
                break;
            default:
                if ("0123456789".indexOf(cur) >= 0) {
                    cur = cur + this.steal(2);
                    if (cur != Tcl.isOctalSeq.exec(cur))
                        throw "Invalid octal escape sequence: "+cur;
                    cur = String.fromCharCode(parseInt(cur, 8));
                }
                break;
        }
        // NOTE(review): `index` is undeclared here and JS strings are
        // immutable, so this assignment is a silent no-op -- the decoded
        // character is never written back into `text`.  Looks like a bug
        // (steal() also appears to drop the character before the backslash);
        // left untouched pending confirmation of the intended behaviour.
        this.text[index] = cur;
        this.feedchar();
    }
    // Remove the n characters after the cursor from the text and return them.
    this.steal = function (n) {
        var tail = this.text.substring(this.index+1);
        var word = tail.substr(0, n);
        this.text = this.text.substring(0, this.index-1) + tail.substring(n);
        return word;
    }
    // Advance past an opening delimiter and reset the token start.
    this.feedcharstart = function () {
        this.feedchar();
        this.start = this.index;
    }
    // Jump the cursor to an absolute position.
    this.setPos = function (index) {
        var d = index-this.index;
        this.index = index;
        this.len -= d;
        this.cur = this.text.charAt(this.index);
    }
    // Advance the cursor by one character.
    this.feedchar = function () {
        this.index++;
        this.len--;
        if (this.len < 0) throw "End of file reached";
        this.cur = this.text.charAt(this.index);
    }
} //END TclParser()
|
<reponame>totalgameplay/sittuyinai
#pragma strict
static class PieceValidMoves
{
private static var BlackAttackBoard : boolean[];
private static var blackKingPosition : byte;
private static var WhiteAttackBoard : boolean[];
private static var whiteKingPosition : byte;
// Score a pawn capture square.  dstPos reaches here only for diagonal
// squares (see CheckValidMovesPawn), so an occupant is required: updates the
// mover's attack board, credits DefendedValue/AttackedValue on the occupying
// piece, flags check when an enemy king is hit, and records the move when
// the square holds an enemy piece.
private static function AnalyzeMovePawn(board : BoardEV, dstPos : byte, pcMoving : PieceEV)
{
    var pcAttacked : PieceEV = board.pieces[dstPos];
    // An empty square cannot be captured by a pawn: nothing to record.
    if (pcAttacked == null)
        return;
    // Regardless of what is there I am attacking this square
    if (!pcMoving.Black)
    {
        WhiteAttackBoard[dstPos] = true;
        // A friendly piece is defended, not attacked; no move is added.
        if (pcAttacked.Black == pcMoving.Black)
        {
            pcAttacked.DefendedValue += pcMoving.PieceActionValue;
            return;
        }
        pcAttacked.AttackedValue += pcMoving.PieceActionValue;
        // If this is a king set it in check
        if (pcAttacked.type == PieceEV.PIECE_KING)
        {
            board.BlackCheck = true;
            // NYI - still add move
            pcMoving.ValidMoves.Push(dstPos);
        }
        else
        {
            // Add this as a valid move
            pcMoving.ValidMoves.Push(dstPos);
        }
    }
    else
    {
        // Mirror of the white branch for black pawns.
        BlackAttackBoard[dstPos] = true;
        if (pcAttacked.Black == pcMoving.Black)
        {
            pcAttacked.DefendedValue += pcMoving.PieceActionValue;
            return;
        }
        pcAttacked.AttackedValue += pcMoving.PieceActionValue;
        // If this is a king set it in check
        if (pcAttacked.type == PieceEV.PIECE_KING)
        {
            board.WhiteCheck = true;
            // NYI - still add move
            pcMoving.ValidMoves.Push(dstPos);
        }
        else
        {
            // Add this as a valid move
            pcMoving.ValidMoves.Push(dstPos);
        }
    }
    return;
}
// Score a non-pawn move/attack onto dstPos: marks the mover's attack board,
// records the move when the square is empty or enemy-occupied, credits
// defended/attacked value, and flags check when an enemy king is hit.
// Returns true when a sliding piece may continue past dstPos (the square was
// empty), false when any piece blocks further movement.
private static function AnalyzeMove(board : BoardEV, dstPos : byte, pcMoving : PieceEV) : boolean
{
    //If I am not a pawn everywhere I move I can attack
    if (!pcMoving.Black)
    {
        WhiteAttackBoard[dstPos] = true;
    }
    else
    {
        BlackAttackBoard[dstPos] = true;
    }
    // Empty square: add the move and let sliding pieces continue.
    if (board.pieces[dstPos] == null)
    {
        pcMoving.ValidMoves.Push(dstPos);
        return true;
    }
    var pcAttacked : PieceEV = board.pieces[dstPos];
    //if that piece is a different color
    if (pcAttacked.Black != pcMoving.Black)
    {
        pcAttacked.AttackedValue += pcMoving.PieceActionValue;
        //If this is a king set it in check
        if (pcAttacked.type == PieceEV.PIECE_KING)
        {
            if (pcAttacked.Black)
            {
                board.BlackCheck = true;
            }
            else
            {
                board.WhiteCheck = true;
            }
            // NYI - still add move
            pcMoving.ValidMoves.Push(dstPos);
        }
        else
        {
            //Add this as a valid move
            pcMoving.ValidMoves.Push(dstPos);
        }
        //We don't continue movement past this piece
        return false;
    }
    // Same color: I am defending it, and I cannot move there.
    pcAttacked.DefendedValue += pcMoving.PieceActionValue;
    return false;
}
// Enumerate a pawn's candidate destinations from a precomputed move table.
// Squares on a different file (dstPos%8 != srcPosition%8) are diagonal
// capture squares and go through AnalyzeMovePawn; same-file squares are
// straight pushes, legal only while the path ahead is empty (the first
// blocker ends the scan).
private static function CheckValidMovesPawn(moves : System.Collections.Generic.List.<byte>,
                                            pcMoving : PieceEV, srcPosition : byte,
                                            board : BoardEV, count : byte)
{
    var i : int;
    for (i = 0; i < count; i++)
    {
        var dstPos : byte = moves[i];
        if (dstPos%8 != srcPosition%8)
        {
            // Diagonal square: may capture; always counts as attacked.
            AnalyzeMovePawn(board, dstPos, pcMoving);
            if (!pcMoving.Black)
            {
                WhiteAttackBoard[dstPos] = true;
            }
            else
            {
                BlackAttackBoard[dstPos] = true;
            }
        }
        // if there is something in front, pawns can't move there
        else if (board.pieces[dstPos] != null)
        {
            return;
        }
        //if there is nothing in front of me (blocked == false)
        else
        {
            pcMoving.ValidMoves.Push(dstPos);
        }
    }
}
// Enumerate king moves for one side.  Relies on the opposing attack board
// having already been filled in by GenerateValidMoves: the king may not step
// onto an attacked square (though it still projects its own attack there).
private static function GenerateValidMovesKing(piece : PieceEV, board : BoardEV, srcPosition : byte)
{
    if (piece == null)
    {
        return;
    }
    var i : byte;
    for (i = 0; i < MoveArrays.KingTotalMoves[srcPosition]; i++)
    {
        var dstPos : byte = MoveArrays.KingMoves[srcPosition].Moves[i];
        if (!piece.Black)
        {
            // I can't move where I am being attacked...
            if (BlackAttackBoard[dstPos])
            {
                // ...but the square still counts as attacked by me.
                WhiteAttackBoard[dstPos] = true;
                continue;
            }
        }
        else
        {
            if (WhiteAttackBoard[dstPos])
            {
                BlackAttackBoard[dstPos] = true;
                continue;
            }
        }
        AnalyzeMove(board, dstPos, piece);
    }
}
// Rebuild both attack boards and every piece's ValidMoves list for the
// current position.  Kings are handled last, after the attack boards are
// complete, because their legal squares depend on which squares the other
// side attacks.
internal static function GenerateValidMoves(board : BoardEV)
{
    // Reset Board
    board.BlackCheck = false;
    board.WhiteCheck = false;
    WhiteAttackBoard = new boolean[64];
    BlackAttackBoard = new boolean[64];
    // Generate moves for every non-king piece, square by square.
    var x : byte;
    var i : byte;
    for (x = 0; x < 64; x++)
    {
        var p : PieceEV = board.pieces[x];
        if (p == null)
            continue;
        p.ValidMoves = new System.Collections.Generic.Stack.<byte>();
        switch (p.type)
        {
            case PieceEV.PIECE_NEL:
                // Pawn: per-colour precomputed tables (pawns move in
                // opposite directions).
                if (!p.Black)
                {
                    CheckValidMovesPawn(MoveArrays.WhitePawnMoves[x].Moves, p, x,
                                        board,
                                        MoveArrays.WhitePawnTotalMoves[x]);
                    break;
                }
                if (p.Black)
                {
                    CheckValidMovesPawn(MoveArrays.BlackPawnMoves[x].Moves, p, x,
                                        board,
                                        MoveArrays.BlackPawnTotalMoves[x]);
                    break;
                }
                break;
            case PieceEV.PIECE_HORSE:
                // Knight: jumps, no blocking.
                for (i = 0; i < MoveArrays.KnightTotalMoves[x]; i++)
                {
                    AnalyzeMove(board, MoveArrays.KnightMoves[x].Moves[i], p);
                }
                break;
            case PieceEV.PIECE_ELEPHANT:
                // Elephant: per-colour single-step tables, no blocking.
                if (!p.Black)
                {
                    for (i = 0; i < MoveArrays.WhiteElephantTotalMoves[x]; i++)
                    {
                        AnalyzeMove(board, MoveArrays.WhiteElephantMoves[x].Moves[i], p);
                    }
                }
                if (p.Black)
                {
                    for (i = 0; i < MoveArrays.BlackElephantTotalMoves[x]; i++)
                        AnalyzeMove(board, MoveArrays.BlackElephantMoves[x].Moves[i], p);
                }
                break;
            case PieceEV.PIECE_CASTLE:
                // Rook: four sliding rays; each ray stops at the first
                // blocker (AnalyzeMove returns false).
                for (i = 0; i < MoveArrays.RookTotalMoves1[x]; i++)
                {
                    if (!AnalyzeMove(board, MoveArrays.RookMoves1[x].Moves[i], p))
                        break;
                }
                for (i = 0; i < MoveArrays.RookTotalMoves2[x]; i++)
                {
                    if (!AnalyzeMove(board, MoveArrays.RookMoves2[x].Moves[i], p))
                        break;
                }
                for (i = 0; i < MoveArrays.RookTotalMoves3[x]; i++)
                {
                    if (!AnalyzeMove(board, MoveArrays.RookMoves3[x].Moves[i], p))
                        break;
                }
                for (i = 0; i < MoveArrays.RookTotalMoves4[x]; i++)
                {
                    if (!AnalyzeMove(board, MoveArrays.RookMoves4[x].Moves[i], p))
                        break;
                }
                break;
            case PieceEV.PIECE_SITKEL:
                for (i = 0; i < MoveArrays.SitkelTotalMoves[x]; i++)
                    AnalyzeMove(board, MoveArrays.SitkelMoves[x].Moves[i], p);
                break;
            case PieceEV.PIECE_KING:
                // Only remember where the kings are; their moves are
                // generated below once the attack boards are complete.
                if (!p.Black)
                {
                    whiteKingPosition = x;
                }
                else
                {
                    blackKingPosition = x;
                }
                break;
        }
    }
    // Generate king moves, the opponent's king first so its attack squares
    // are on the board before the side to move's king is evaluated.
    if (!board.BlackMove)
    {
        GenerateValidMovesKing(board.pieces[blackKingPosition], board,
                               blackKingPosition);
        GenerateValidMovesKing(board.pieces[whiteKingPosition], board,
                               whiteKingPosition);
    }
    else
    {
        GenerateValidMovesKing(board.pieces[whiteKingPosition], board,
                               whiteKingPosition);
        GenerateValidMovesKing(board.pieces[blackKingPosition], board,
                               blackKingPosition);
    }
}
} |
<filename>model_team_list.go<gh_stars>0
/*
* The User API
*
* API to manage teams, members and tokens
*
* API version: 1.3.11 lucky-fremont
* Contact: <EMAIL>
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package userapi
import (
"encoding/json"
)
// TeamList is the wrapper object holding the list of teams returned by the
// User API.
type TeamList struct {
	// Teams is optional; nil when the server omitted the field.
	Teams *[]Team `json:"teams,omitempty"`
}
// NewTeamList instantiates a new TeamList object.
// Properties with defaults are initialized here; the argument list tracks
// the set of required properties and will change if that set changes.
func NewTeamList() *TeamList {
	return &TeamList{}
}
// NewTeamListWithDefaults instantiates a new TeamList object, assigning only
// defaulted properties; required properties are left unset.
func NewTeamListWithDefaults() *TeamList {
	return &TeamList{}
}
// GetTeams returns the Teams field value if set, the zero value otherwise.
func (o *TeamList) GetTeams() []Team {
	if o != nil && o.Teams != nil {
		return *o.Teams
	}
	var zero []Team
	return zero
}
// GetTeamsOk returns a pointer to the Teams field and a flag reporting
// whether the field has been set.
func (o *TeamList) GetTeamsOk() (*[]Team, bool) {
	if o != nil && o.Teams != nil {
		return o.Teams, true
	}
	return nil, false
}
// HasTeams reports whether the Teams field has been set.
func (o *TeamList) HasTeams() bool {
	return o != nil && o.Teams != nil
}
// SetTeams stores a reference to the given []Team in the Teams field.
func (o *TeamList) SetTeams(v []Team) {
	o.Teams = &v
}
// MarshalJSON serializes only the fields that have been set.
func (o TeamList) MarshalJSON() ([]byte, error) {
	payload := map[string]interface{}{}
	if o.Teams != nil {
		payload["teams"] = o.Teams
	}
	return json.Marshal(payload)
}
// NullableTeamList wraps a TeamList and tracks whether it has been
// explicitly set, so JSON null can be distinguished from an absent value.
type NullableTeamList struct {
	value *TeamList
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableTeamList) Get() *TeamList {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableTeamList) Set(val *TeamList) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set or UnmarshalJSON has been called.
func (v NullableTeamList) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableTeamList) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableTeamList wraps val in an already-set NullableTeamList.
func NewNullableTeamList(val *TeamList) *NullableTeamList {
	return &NullableTeamList{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (JSON null when unset or nil).
func (v NullableTeamList) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks it as set.
func (v *NullableTeamList) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
|
<gh_stars>0
import * as aws from '@pulumi/aws';
import * as awsx from '@pulumi/awsx';
import * as eks from '@pulumi/eks';
import * as pulumi from '@pulumi/pulumi';
import * as k8s from '@pulumi/kubernetes';
import * as kx from '@pulumi/kubernetesx';
import { Config, Output } from '@pulumi/pulumi';
import * as random from '@pulumi/random';
// Stack-wide configuration and naming context; `pulumiStack` is appended to
// every resource name so multiple stacks can coexist in one account.
const config = new Config();
const kubeSystemNamespace = 'kube-system';
const pulumiStack = pulumi.getStack();
// Create a new VPC for the cluster.  A single NAT gateway keeps costs down,
// at the price of a single point of failure for private-subnet egress.
const vpc = new awsx.ec2.Vpc(`ai-eks-vpc-${pulumiStack}`, {
  numberOfNatGateways: 1,
  tags: {
    managedBy: 'aitomatic',
    stack: pulumiStack
  },
});
// IAM role assumed by the worker-node EC2 instances.
const role = new aws.iam.Role(`ai-eks-ngrole-${pulumiStack}`, {
  assumeRolePolicy: aws.iam.assumeRolePolicyForPrincipal({
    Service: 'ec2.amazonaws.com'
  }),
  tags: {
    managedBy: 'aitomatic',
    stack: pulumiStack
  }
});
// Attach the standard EKS worker policies to the node role.
const workerPolicyArns = [
  'arn:aws:iam::aws:policy/AmazonEKSWorkerNodePolicy',
  'arn:aws:iam::aws:policy/AmazonEKS_CNI_Policy',
  'arn:aws:iam::aws:policy/AmazonEC2ContainerRegistryReadOnly'
];
workerPolicyArns.forEach((policyArn, i) => {
  new aws.iam.RolePolicyAttachment(
    `ai-eks-ngrole-policy-${pulumiStack}-${i}`,
    { policyArn, role }
  );
});
// Create the EKS cluster itself without a default node group; workers are
// added separately as a managed node group below.
const cluster = new eks.Cluster(`ai-eks-cluster-${pulumiStack}`, {
  skipDefaultNodeGroup: true,
  vpcId: vpc.id,
  privateSubnetIds: vpc.privateSubnetIds,
  publicSubnetIds: vpc.publicSubnetIds,
  nodeAssociatePublicIpAddress: true,
  // The OIDC provider enables IAM roles for service accounts (IRSA), used
  // later by the cluster autoscaler.
  createOidcProvider: true,
  enabledClusterLogTypes: [
    'api',
    'audit',
    'authenticator',
    'controllerManager',
    'scheduler'
  ],
  tags: {
    managedBy: 'aitomatic',
    stack: pulumiStack
  },
  instanceRoles: [role],
  providerCredentialOpts: {
    profileName: config.get('aws:profile')
  }
});
// Create a simple AWS managed node group using the cluster as input.
// (The doubled hyphen in the resource name is kept as-is: changing it would
// force Pulumi to replace the node group.)
const managedNodeGroup = eks.createManagedNodeGroup(
  `ai-eks-mng--${pulumiStack}`,
  {
    cluster: cluster,
    nodeGroupName: `ai-eks-mng--${pulumiStack}`,
    nodeRoleArn: role.arn,
    labels: { ondemand: 'true' },
    tags: {
      org: 'pulumi',
      managedBy: 'aitomatic',
      // Bug fix: this tag was the literal string 'pulumiStack' instead of
      // the stack name used by every other resource in this program.
      stack: pulumiStack
    },
    scalingConfig: {
      minSize: 2,
      maxSize: 20,
      desiredSize: 2
    },
  },
  cluster
);
// Export the cluster's kubeconfig.
export const kubeconfig = cluster.kubeconfig;
// Create PostgreSQL database for System
const dbPassword = new random.RandomPassword('<PASSWORD>', {
length: 16,
special: false
});
const dbSubnetGroup = new aws.rds.SubnetGroup(`ai-db-sn-${pulumiStack}`, {
subnetIds: vpc.privateSubnetIds,
tags: {
Name: 'RDS Subnet Group',
managedBy: 'aitomatic',
stack: pulumiStack
}
});
const db = new aws.rds.Instance(`ai-db-${pulumiStack}`, {
allocatedStorage: 10,
maxAllocatedStorage: 100,
engine: 'postgres',
engineVersion: '11.10',
instanceClass: 'db.t3.medium',
password: <PASSWORD>,
skipFinalSnapshot: true,
vpcSecurityGroupIds: [
cluster.clusterSecurityGroup.id,
cluster.nodeSecurityGroup.id
],
username: 'postgres',
dbSubnetGroupName: dbSubnetGroup.name,
tags: {
managedBy: 'aitomatic',
stack: pulumiStack
}
});
// Create the application namespaces, each with Istio sidecar injection
// enabled, once the cluster and its node group are ready.
const makeAppNamespace = (name: string) =>
  new k8s.core.v1.Namespace(
    name,
    {
      metadata: {
        name,
        labels: { 'istio-injection': 'enabled' }
      }
    },
    {
      dependsOn: [cluster, managedNodeGroup],
      provider: cluster.provider
    }
  );
const aiSystemNs = makeAppNamespace('aitomatic-system');
const aiInfraNs = makeAppNamespace('aitomatic-infra');
const aiAppsNs = makeAppNamespace('aitomatic-apps');
// Deploy metrics-server (required by HPA and `kubectl top`) from the
// kubernetes-sigs Helm repo into the kube-system namespace.
const metricsServerChart = new k8s.helm.v3.Chart(
  'kubesys-ms',
  {
    chart: 'metrics-server',
    version: '3.5.0',
    namespace: kubeSystemNamespace,
    fetchOpts: {
      repo: 'https://kubernetes-sigs.github.io/metrics-server/'
    },
    values: {}
  },
  {
    dependsOn: [cluster, aiSystemNs],
    provider: cluster.provider
  }
);
// Setup the Kubernetes Cluster Autoscaler via IRSA: build a trust policy
// that lets the autoscaler's service account assume an IAM role through the
// cluster's OIDC provider.
const clusterOidcProvider = cluster.core.oidcProvider;
const clusterOidcProviderUrl = clusterOidcProvider.url;
const clusterOidcArn = clusterOidcProvider.arn;
const autoscalerAssumeRolePolicy = pulumi
  .all([clusterOidcProviderUrl, clusterOidcArn])
  .apply(([url, arn]) =>
    aws.iam.getPolicyDocument({
      statements: [
        {
          effect: 'Allow',
          principals: [
            {
              identifiers: [arn],
              type: 'Federated'
            }
          ],
          actions: ['sts:AssumeRoleWithWebIdentity'],
          // Restrict the role to the autoscaler's service account in
          // kube-system (the name the Helm chart creates).
          conditions: [
            {
              test: 'StringEquals',
              values: [
                'system:serviceaccount:kube-system:autoscaler-aws-cluster-autoscaler'
              ],
              variable: `${url}:sub`
            }
          ]
        }
      ]
    })
  );
// IAM role the autoscaler assumes via the OIDC trust policy above, plus the
// Auto Scaling permissions it needs to resize node groups.
const autoscalerRole = new aws.iam.Role('cluster-autoscaler', {
  assumeRolePolicy: autoscalerAssumeRolePolicy.json
});
const autoscalerPolicy = new aws.iam.Policy('autoscaler-policy', {
  description: pulumi.interpolate`Autoscaler policy for ${cluster.eksCluster.id}`,
  policy: JSON.stringify({
    Version: '2012-10-17',
    Statement: [
      {
        Effect: 'Allow',
        Action: [
          'autoscaling:DescribeAutoScalingGroups',
          'autoscaling:DescribeAutoScalingInstances',
          'autoscaling:DescribeLaunchConfigurations',
          'autoscaling:DescribeTags',
          'autoscaling:SetDesiredCapacity',
          'autoscaling:TerminateInstanceInAutoScalingGroup',
          'ec2:DescribeLaunchTemplateVersions'
        ],
        Resource: '*'
      }
    ]
  })
});
new aws.iam.RolePolicyAttachment('autoscaler-role-attach-policy', {
  policyArn: autoscalerPolicy.arn,
  role: autoscalerRole.name
});
// cluster-autoscaler Helm chart, annotated with the IRSA role created above
// and auto-discovering the node groups of this EKS cluster.
const autoscaler = new k8s.helm.v3.Chart(
'autoscaler',
{
namespace: kubeSystemNamespace,
chart: 'cluster-autoscaler',
fetchOpts: {
repo: 'https://kubernetes.github.io/autoscaler'
},
version: '9.10.7',
values: {
cloudProvider: 'aws',
rbac: {
serviceAccount: {
// Binds the pod's service account to the IAM role (IRSA).
annotations: {
'eks.amazonaws.com/role-arn': autoscalerRole.arn
}
}
},
awsRegion: config.get('aws.region'),
autoDiscovery: {
enabled: true,
clusterName: cluster.eksCluster.name
}
}
},
{
provider: cluster.provider,
// metrics-server must be up before the autoscaler can make decisions.
dependsOn: [cluster, metricsServerChart]
}
);
// Setup Istio
// Standard istio-system / istio-ingress namespaces (no istio-injection
// label here, unlike the Aitomatic namespaces above).
const aiIstioNs = new k8s.core.v1.Namespace(
'istio-system',
{ metadata: { name: 'istio-system' } },
{
provider: cluster.provider,
dependsOn: [cluster, managedNodeGroup]
}
);
const aiIstioIngressNs = new k8s.core.v1.Namespace(
'istio-ingress',
{ metadata: { name: 'istio-ingress' } },
{
provider: cluster.provider,
dependsOn: [cluster, managedNodeGroup]
}
);
// Grant the configured (or default "admin") user cluster-admin rights.
// NOTE(review): cluster-admin is very broad — confirm this is intended
// for production stacks.
new k8s.rbac.v1.ClusterRoleBinding(
'cluster-admin-binding',
{
metadata: { name: 'cluster-admin-binding' },
roleRef: {
apiGroup: 'rbac.authorization.k8s.io',
kind: 'ClusterRole',
name: 'cluster-admin'
},
subjects: [
{
apiGroup: 'rbac.authorization.k8s.io',
kind: 'User',
name: config.get('username') || 'admin'
}
]
},
{
dependsOn: [cluster],
provider: cluster.provider
}
);
// Observability / mesh stack, all installed into the istio-system namespace
// (aiIstioNs.id). Ordering via dependsOn: prometheus -> grafana; istio -> kiali.
const prometheus = new k8s.helm.v3.Release(
'aisys-prometheus',
{
chart: 'prometheus',
namespace: aiIstioNs.id,
repositoryOpts: {
repo: 'https://prometheus-community.github.io/helm-charts'
},
values: {},
},
{
dependsOn: [aiIstioNs, cluster],
provider: cluster.provider
}
);
const grafana = new k8s.helm.v3.Release(
'aisys-grafana',
{
chart: 'grafana',
namespace: aiIstioNs.id,
repositoryOpts: {
repo: 'https://grafana.github.io/helm-charts'
},
values: {}
},
{
// Grafana comes up after Prometheus so dashboards have a data source.
dependsOn: [prometheus, aiIstioNs, cluster],
provider: cluster.provider
}
);
// NOTE(review): this "istio" chart comes from the getindata mirror rather
// than the official istio release repo — confirm that is intentional.
const istio = new k8s.helm.v3.Release(
'aisys-istio',
{
chart: 'istio',
version: '1.11.1',
namespace: aiIstioNs.id,
repositoryOpts: {
repo: 'https://getindata.github.io/helm-charts/'
},
values: {}
},
{
dependsOn: [aiIstioNs, cluster],
provider: cluster.provider
}
);
const kiali = new k8s.helm.v3.Release(
'aisys-kiali',
{
chart: 'kiali-server',
namespace: aiIstioNs.id,
repositoryOpts: {
repo: 'https://kiali.org/helm-charts/'
},
values: {}
},
{
// Kiali visualizes the mesh, so it waits for istio itself.
dependsOn: [istio, cluster],
provider: cluster.provider
}
);
// Put DB secrets in the infra namespace so infra workloads can reach the
// shared RDS instance. Mirrors the apps-namespace secret below.
const secretInfra = new kx.Secret(
  'aitomatic-infradb-secrets',
  {
    stringData: {
      'aitomatic-db-user': db.username,
      'aitomatic-db-password': <PASSWORD>,
      'aitomatic-db-host': db.address,
      // Fixed: interpolate the port value — the original template was the
      // literal string "x" (`x` instead of `${x}`).
      'aitomatic-db-port': db.port.apply((x) => `${x}`),
      // Fixed: use the database name to match the apps-namespace secret;
      // db.id is the RDS instance identifier, not the database name.
      'aitomatic-db-dbname': db.name
    },
    metadata: {
      namespace: aiInfraNs.id
    }
  },
  {
    dependsOn: [cluster],
    provider: cluster.provider
  }
);
//Put DB Secrets in Apps Namespace
// Same credentials as the infra secret, exposed to application workloads.
const secretApps = new kx.Secret(
'aitomatic-appsdb-secrets',
{
stringData: {
'aitomatic-db-user': db.username,
'aitomatic-db-password': <PASSWORD>,
'aitomatic-db-host': db.address,
// Port is a number output; render it as a string for the Secret.
'aitomatic-db-port': db.port.apply((p) => `${p}`),
'aitomatic-db-dbname': db.name
},
metadata: {
namespace: aiAppsNs.id
}
},
{
dependsOn: [cluster],
provider: cluster.provider
}
);
// Install JenkinsX
// The jx-git-operator watches the configured git repo and reconciles the
// cluster from it; git credentials come from stack config.
const jxGitopNsName = 'jx-git-operator';
const jxGitopNs = new k8s.core.v1.Namespace(
jxGitopNsName,
{ metadata: { name: jxGitopNsName } },
{
provider: cluster.provider,
dependsOn: [cluster, managedNodeGroup]
}
);
const jxgit = new k8s.helm.v3.Chart(
'jxgo',
{
chart: 'jx-git-operator',
namespace: jxGitopNsName,
fetchOpts: {
repo: 'https://jenkins-x-charts.github.io/repo'
},
values: {
url: config.get("jx.giturl"),
username: config.get("jx.gitusername"),
// getSecret keeps the token encrypted in the Pulumi state.
password: config.getSecret("jx.gittoken"),
},
},
{
dependsOn: [managedNodeGroup, cluster],
provider: cluster.provider
}
);
// Seldon Core model-serving operator in the infra namespace, fronted by the
// Istio ingress gateway (Ambassador disabled).
const seldonChart = new k8s.helm.v3.Release(
'aiinfra-seldon',
{
chart: 'seldon-core-operator',
version: '1.11',
namespace: aiInfraNs.id,
repositoryOpts: {
repo: 'https://storage.googleapis.com/seldon-charts/'
},
values: {
istio: {
enabled: true,
gateway: 'istio-ingressgateway'
},
ambassador: {
enabled: false
},
usageMetrics: {
enabled: true
}
}
},
{
dependsOn: [cluster, istio, aiInfraNs],
provider: cluster.provider
}
);
// Install Spark Operator
// The operator runs in the infra namespace but watches/launches Spark jobs
// in the apps namespace (sparkJobNamespace).
const sparkOperatorRelease = new k8s.helm.v3.Chart(
'spark-operator',
{
chart: 'spark-operator',
version: '1.1.6',
namespace: aiInfraNs.id,
fetchOpts: {
repo: 'https://googlecloudplatform.github.io/spark-on-k8s-operator'
},
values: {
istio: {
enabled: true
},
image: {
tag: 'v1beta2-1.2.3-3.1.1'
},
sparkJobNamespace: aiAppsNs.id,
serviceAccounts: {
spark: {
name: 'spark'
},
sparkoperator: {
name: 'spark-operator'
}
}
}
},
{
dependsOn: [istio, cluster],
provider: cluster.provider
}
);
|
require('scss/index.scss');
import React from 'react';
import ReactDOM from 'react-dom';
import App from 'App.jsx';
ReactDOM.render(
<App />,
document.getElementById('app')
);
console.log('update');
|
require 'rails_helper'
require 'data_import/media'

# Specs for the DataImport::Media mixin, exercised through an anonymous
# including class so no concrete importer is needed.
RSpec.describe DataImport::Media do
  subject do
    Class.new { include DataImport::Media }.new
  end

  context 'without #get_media overridden' do
    describe '#get_media' do
      it 'should fail with an error message telling you to override' do
        # The base implementation is abstract and must tell callers so.
        expect { subject.get_media('1234-one-punch-man') }
          .to raise_error.with_message(/override/i)
      end
    end
  end

  describe '#get_multiple_media' do
    it 'should call #get_media for each and yield what it yields' do
      allow(subject).to receive(:get_media).and_yield('ohayou')
      expect { |probe| subject.get_multiple_media(%w[1234 5678], &probe) }
        .to yield_successive_args(%w[1234 ohayou], %w[5678 ohayou])
    end
  end
end
|
<reponame>abircb/acquire-module
const should = require('should')
const acquire = require('../')
const path = require('path')

// Every spec below asserts the same outcome: acquiring the target with the
// given options resolves to the fixture package named "validModule".
const expectValidModule = (target, options) => {
  acquire(target, options).name.should.be.exactly('validModule')
}

const m1 = path.resolve('test', 'test-node-modules', 'm1')
const m2 = path.resolve('test', 'test-node-modules', 'm2')

describe('relative file path', () => {
  it('should locate the package', () => {
    expectValidModule('./test/test-node-modules/m1/a-node-module/lib/some-main-file.js', { paths: '.' })
  })
})

describe('relative file path with absolute cwd', () => {
  it('should locate the package', () => {
    expectValidModule('./test/test-node-modules/m1/a-node-module/lib/some-main-file.js', { paths: process.cwd() })
  })
})

describe('relative dir', () => {
  it('should locate the package', () => {
    expectValidModule('./test/test-node-modules/m1/a-node-module/', { paths: '.' })
  })
})

describe('relative dir with absolute cwd', () => {
  it('should locate the package', () => {
    expectValidModule('./test/test-node-modules/m1/a-node-module/', { paths: process.cwd() })
  })
})

describe('relative dir with right and wrong paths (respectively)', () => {
  it('should locate the package', () => {
    expectValidModule('./test/test-node-modules/m1/a-node-module/', { paths: ['.', './some/where/wrong'] })
  })
})

describe('relative dir with wrong and right paths (respectively)', () => {
  it('should locate the package', () => {
    expectValidModule('./test/test-node-modules/m1/a-node-module/', { paths: ['./some/place', '.'] })
  })
})

describe('name with dir path', () => {
  it('should locate the package', () => {
    expectValidModule('another-module', { paths: path.resolve('./test/test-node-modules/m2') })
  })
})

describe('name with good and bad absolute paths', () => {
  it('should locate the package', () => {
    expectValidModule('another-module', { paths: [m1, m2] })
  })
})

describe('name with right and wrong absolute paths (respectively)', () => {
  it('should locate the package', () => {
    expectValidModule('a-node-module', { paths: [m1, m2] })
  })
})

describe('name with wrong and right absolute paths (respectively)', () => {
  it('should locate the package', () => {
    expectValidModule('another-module', { paths: [m1, m2] })
  })
})

describe('main-file as module name with the right absolute path', () => {
  it('should locate the package', () => {
    expectValidModule('some-main-file', { paths: [path.resolve('test', 'test-node-modules', 'm2', 'another-module', 'lib')] })
  })
})
|
#!/bin/bash
# Smoke tests for the Stardog Helm charts, intended to run against minikube.
set +x
# Helm release name and namespace used by every test below.
HELM_RELEASE_NAME="stardog-helm-tests"
NAMESPACE="stardog"
# Cluster sizing for the multi-node test.
NUM_STARDOGS="3"
NUM_ZKS="3"
# Populated later by install_stardog and set_stardog_ip respectively.
STARDOG_ADMIN=
STARDOG_IP=
function dependency_checks() {
# Fail fast if helm or kubectl is not on PATH.
echo "Checking for dependencies"
helm version >/dev/null 2>&1 || { echo >&2 "The helm tests require helm but it's not installed, exiting."; exit 1; }
kubectl version >/dev/null 2>&1 || { echo >&2 "The helm tests require kubectl but it's not installed, exiting."; exit 1; }
echo "Dependency check passed."
}
function minikube_start_tunnel() {
# Launch "minikube tunnel" detached in the background (via a tiny wrapper
# script under $HOME) so LoadBalancer services get a reachable IP.
pushd ~
echo "Starting minikube tunnel"
echo "sudo minikube tunnel" > ~/start-minikube-tunnel.sh
chmod u+x ~/start-minikube-tunnel.sh
# nohup + </dev/null keeps the tunnel alive after this script exits.
nohup ~/start-minikube-tunnel.sh > ~/minikube_tunnel.log 2> ~/minikube_tunnel.err < /dev/null &
echo "Minikube tunnel started in the background"
popd
}
function install_stardog() {
# Download and unpack the latest Stardog release so the stardog-admin CLI
# can be used by create_and_drop_db; exports STARDOG_ADMIN with its path.
mkdir -p ~/stardog-binaries/
pushd ~/stardog-binaries/
curl -Lo stardog-latest.zip https://downloads.stardog.com/stardog/stardog-latest.zip
unzip stardog-latest.zip
export STARDOG_ADMIN="$(ls ${HOME}/stardog-binaries/stardog-*/bin/stardog-admin)"
popd
}
function helm_setup_cluster() {
  # Create the target namespace and install the Stardog license secret
  # (expects ~/stardog-license-key.bin to exist on the host).
  echo "Creating stardog namespace"
  # Fixed: use ${NAMESPACE} instead of the hard-coded "stardog" so the
  # function stays correct if the NAMESPACE variable changes.
  kubectl create ns ${NAMESPACE}
  echo "Adding license"
  kubectl -n ${NAMESPACE} create secret generic stardog-license --from-file stardog-license-key.bin=${HOME}/stardog-license-key.bin
}
function helm_install_stardog_cluster_with_zookeeper() {
# Install an NUM_STARDOGS-node Stardog cluster plus NUM_ZKS ZooKeepers from
# the local chart, waiting up to 15 minutes; dumps pods/services on failure.
echo "Installing Stardog Cluster"
echo "Running helm install for ${HELM_RELEASE_NAME}"
# Refresh chart dependencies (e.g. the bundled ZooKeeper chart) first.
pushd charts/stardog/
helm dependencies update
popd
helm install ${HELM_RELEASE_NAME} charts/stardog \
--namespace ${NAMESPACE} \
--wait \
--timeout 15m0s \
-f ./tests/minikube.yaml \
--set "replicaCount=${NUM_STARDOGS}" \
--set "zookeeper.replicaCount=${NUM_ZKS}"
rc=$?
if [ ${rc} -ne 0 ]; then
echo "Helm install for Stardog Cluster failed, exiting"
# Surface cluster state to make CI failures debuggable.
echo "Listing pods"
kubectl -n ${NAMESPACE} get pods
echo "Listing services"
kubectl -n ${NAMESPACE} get svc
exit ${rc}
fi
echo "Stardog Cluster installed."
}
function helm_install_single_node_stardog() {
  # Install a single Stardog node (cluster mode and ZooKeeper disabled)
  # from the local chart, waiting up to 15 minutes; exits on failure.
  echo "Installing single node Stardog"
  echo "Running helm install for ${HELM_RELEASE_NAME}"
  # Refresh chart dependencies first.
  pushd charts/stardog/
  helm dependencies update
  popd
  helm install ${HELM_RELEASE_NAME} charts/stardog \
    --namespace ${NAMESPACE} \
    --wait \
    --timeout 15m0s \
    -f ./tests/minikube.yaml \
    --set "cluster.enabled=false" \
    --set "replicaCount=1" \
    --set "zookeeper.enabled=false"
  rc=$?
  if [ ${rc} -ne 0 ]; then
    # Fixed: the message previously said "Stardog Cluster" in the
    # single-node install path.
    echo "Helm install for single node Stardog failed, exiting"
    exit ${rc}
  fi
  echo "Single node Stardog installed."
}
function check_helm_release_exists() {
# Assert the release shows up in "helm ls"; exit non-zero otherwise.
echo "Checking if the Helm release exists"
helm ls --namespace ${NAMESPACE} | grep ${HELM_RELEASE_NAME}
rc=$?
if [ ${rc} -ne 0 ]; then
echo "The helm release ${HELM_RELEASE_NAME} is missing, exiting"
exit ${rc}
fi
echo "The helm release exists."
}
function check_helm_release_deleted() {
# Inverse of the above: assert the release is gone after deletion.
echo "Checking if the Helm release has been deleted"
helm ls --namespace ${NAMESPACE} | grep ${HELM_RELEASE_NAME}
rc=$?
if [ ${rc} -eq 0 ]; then
echo "The helm release ${HELM_RELEASE_NAME} wasn't deleted as expected, exiting"
exit 1
fi
echo "The helm release has been deleted."
}
function check_expected_num_stardog_pods() {
# $1: expected number of Stardog pods.
local -r num_stardogs=$1
echo "Checking if there are the expected number of Stardog pods (${num_stardogs})"
FOUND_STARDOGS=$(kubectl -n ${NAMESPACE} get pods -o wide | grep "${HELM_RELEASE_NAME}-stardog-" | wc -l)
# the post install pod for stardog will match here too, but it may disappear before this check runs,
# so either ${num_stardogs} or ${num_stardogs} + 1 is fine here
if [[ ${FOUND_STARDOGS} -lt ${num_stardogs} || ${FOUND_STARDOGS} -gt $((num_stardogs+1)) ]]; then
echo "Found ${FOUND_STARDOGS} but expected ${num_stardogs} Stardog pods, exiting"
exit 1
fi
echo "Found the correct number of Stardog pods."
}
function check_expected_num_zk_pods() {
# $1: expected number of ZooKeeper pods (exact match, unlike Stardog above).
local -r num_zookeepers=$1
echo "Checking if there are the expected number of ZooKeeper pods (${num_zookeepers})"
FOUND_ZKS=$(kubectl -n ${NAMESPACE} get pods -o wide | grep "${HELM_RELEASE_NAME}-zookeeper-" | wc -l)
if [ ${FOUND_ZKS} -ne ${num_zookeepers} ]; then
echo "Found ${FOUND_ZKS} but expected ${num_zookeepers} ZooKeeper pods, exiting"
exit 1
fi
echo "Found the correct number of ZooKeeper pods."
}
function set_stardog_ip() {
# Export the external IP of the Stardog service (column 4 of "get svc");
# requires the minikube tunnel so LoadBalancer IPs are assigned.
export STARDOG_IP=$(kubectl -n ${NAMESPACE} get svc | grep "${HELM_RELEASE_NAME}-stardog" | awk '{print $4}')
}
function create_and_drop_db() {
# End-to-end sanity check: create then drop a database named "testdb"
# against the deployed server using the stardog-admin CLI.
echo "Creating database on Stardog server ${STARDOG_IP}"
${STARDOG_ADMIN} --server http://${STARDOG_IP}:5820 db create -n testdb
rc=$?
if [ ${rc} -ne 0 ]; then
echo "Failed to create Stardog db on ${STARDOG_IP}, exiting"
exit ${rc}
fi
echo "Successfully created database."
echo "Dropping database on Stardog server ${STARDOG_IP}"
${STARDOG_ADMIN} --server http://${STARDOG_IP}:5820 db drop testdb
rc=$?
if [ ${rc} -ne 0 ]; then
echo "Failed to drop Stardog db on ${STARDOG_IP}, exiting"
exit ${rc}
fi
echo "Successfully dropped database."
}
function helm_delete_stardog_release() {
# Tear down the release between test scenarios; exit on failure.
echo "Deleting Stardog release"
helm delete ${HELM_RELEASE_NAME} --namespace ${NAMESPACE}
rc=$?
if [ ${rc} -ne 0 ]; then
echo "Helm failed to delete Stardog release, exiting"
exit ${rc}
fi
echo "Stardog release deleted."
}
# Test driver: environment setup, then two scenarios (3-node cluster with
# ZooKeeper; single node without), each verified and cleaned up.
echo "Starting the Helm smoke tests"
dependency_checks
minikube_start_tunnel
install_stardog
helm_setup_cluster
echo "Test: Stardog 3 node cluster with ZooKeeper"
helm_install_stardog_cluster_with_zookeeper
check_helm_release_exists
check_expected_num_stardog_pods ${NUM_STARDOGS}
check_expected_num_zk_pods ${NUM_ZKS}
set_stardog_ip
create_and_drop_db
echo "Cleaning up Helm deployment"
helm_delete_stardog_release
check_helm_release_deleted
echo "Test: single node Stardog without ZooKeeper"
helm_install_single_node_stardog
check_helm_release_exists
check_expected_num_stardog_pods 1
check_expected_num_zk_pods 0
echo "Cleaning up Helm deployment"
helm_delete_stardog_release
check_helm_release_deleted
echo "Helm smoke tests completed."
|
def isCollision(circle1, circle2):
    """Return True if two circles strictly overlap, False otherwise.

    Each circle is a mapping with keys "x", "y" (center coordinates) and
    "r" (radius). Tangent circles (center distance exactly equal to the
    radius sum) are NOT a collision, matching the original strict ``<``.
    """
    import math

    # math.hypot computes the Euclidean distance between the centers.
    distance_between_centers = math.hypot(
        circle1["x"] - circle2["x"], circle1["y"] - circle2["y"]
    )
    # Return the comparison directly instead of `if ...: return True`.
    return distance_between_centers < circle1["r"] + circle2["r"]
def extract_location_info(row):
    """Flatten one raw location record into a properties dict.

    ``row`` must carry "LocationNumber", "Name", a GeoJSON-style
    "Location" (coordinates as [lon, lat]) and an "ExtraData" mapping with
    address, phone and "Hours of operations" entries.
    """
    properties = {
        "ref": row["LocationNumber"],
        "name": row["Name"],
        "addr_full": row["ExtraData"]["Address"]["AddressNonStruct_Line1"],
        "city": row["ExtraData"]["Address"]["Locality"],
        "state": row["ExtraData"]["Address"]["Region"],
        "postcode": row["ExtraData"]["Address"]["PostalCode"],
        # GeoJSON order is [longitude, latitude].
        "lat": row["Location"]["coordinates"][1],
        "lon": row["Location"]["coordinates"][0],
        "phone": row["ExtraData"]["Phone"],
    }

    def parse_hours(hours_str):
        """Parse "Mon: 9am - 5pm, Tue: ..." into {day: time} pairs."""
        # NOTE(review): assumes ", " never appears inside a time range —
        # confirm against the upstream feed.
        parsed_hours = {}
        for day_hour in hours_str.split(", "):
            # maxsplit=1 keeps any further ": " occurrences inside the time
            # value intact (the original unpacking raised ValueError there).
            day, time = day_hour.split(": ", 1)
            parsed_hours[day] = time
        return parsed_hours

    properties["hours"] = parse_hours(row["ExtraData"]["Hours of operations"])
    return properties
class TicTacToe {
constructor() {
this.board = [
[null, null, null],
[null, null, null],
[null, null, null],
];
}
getBoard() {
return this.board;
}
placeMark(x, y, player) {
if (this.board[x][y] === null) {
this.board[x][y] = player;
return true;
} else {
return false;
}
}
checkWinner() {
let winner = null;
const board = this.board;
// Check rows
for (let i = 0; i < 3; i++) {
if (board[i][0] !== null && board[i][0] === board[i][1] && board[i][1] === board[i][2]) {
winner = board[i][0];
break;
}
}
// Check columns
for (let i = 0; i < 3; i++) {
if (board[0][i] !== null && board[0][i] === board[1][i] && board[1][i] === board[2][i]) {
winner = board[0][i];
break;
}
}
// Check diagonals
if (board[0][0] !== null && board[0][0] === board[1][1] && board[1][1] === board[2][2]) {
winner = board[0][0];
}
if (board[0][2] !== null && board[0][2] === board[1][1] && board[1][1] === board[2][0]) {
winner = board[0][2];
}
return winner;
}
} |
/*
* overload.sql
* Chapter 8, Oracle10g PL/SQL Programming
* by <NAME>, <NAME>, and <NAME>
*
* This version of InventoryOps demonstrates an overloaded procedure,
* StatusList.
*/
-- Package specification: public API for inventory maintenance. Two
-- overloads of StatusList return the matching ISBNs either as an
-- index-by table or as an opened ref cursor.
CREATE OR REPLACE PACKAGE InventoryOps AS
-- Modifies the inventory data for the specified book.
PROCEDURE UpdateISBN(p_ISBN IN inventory.isbn%TYPE,
p_Status IN inventory.status%TYPE,
p_StatusDate IN inventory.status_date%TYPE,
p_Amount IN inventory.amount%TYPE);
-- Deletes the inventory data for the specified book.
PROCEDURE DeleteISBN(p_ISBN IN inventory.isbn%TYPE);
-- Exception raised by UpdateISBN or DeleteISBN when the specified
-- ISBN is not in the inventory table.
e_ISBNNotFound EXCEPTION;
TYPE t_ISBNTable IS TABLE OF inventory.isbn%TYPE
INDEX BY BINARY_INTEGER;
-- Returns an array containing the books with the specified status.
PROCEDURE StatusList(p_Status IN inventory.status%TYPE,
p_Books OUT t_ISBNTable,
p_NumBooks OUT BINARY_INTEGER);
TYPE c_ISBNCur IS REF CURSOR;
-- Returns an opened cursor containing the books with the specified
-- status.
PROCEDURE StatusList(p_Status IN inventory.status%TYPE,
p_BookCur OUT c_ISBNCur);
END InventoryOps;
/
show errors
CREATE OR REPLACE PACKAGE BODY InventoryOps AS

  -- Validates the supplied status and raises ORA-20000 if it is
  -- not IN STOCK, BACKORDERED, or FUTURE.
  PROCEDURE ValidateStatus(p_Status IN inventory.status%TYPE) IS
  BEGIN
    IF p_Status = 'IN STOCK' OR
       p_Status = 'BACKORDERED' OR
       p_Status = 'FUTURE' THEN
      RETURN; -- No error
    ELSE
      -- Fixed: RAISE_APPLICATION_ERROR requires an error number in the
      -- range -20000..-20999; the original positive 20000 itself raised
      -- ORA-21000 ("error number argument ... is out of range") at runtime.
      RAISE_APPLICATION_ERROR(-20000,
        'Supplied status ' || p_Status || ' is not valid');
    END IF;
  END ValidateStatus;

  -- Modifies the inventory data for the specified book.
  -- Raises e_ISBNNotFound if no row matches p_ISBN.
  PROCEDURE UpdateISBN(p_ISBN IN inventory.isbn%TYPE,
                       p_Status IN inventory.status%TYPE,
                       p_StatusDate IN inventory.status_date%TYPE,
                       p_Amount IN inventory.amount%TYPE) IS
  BEGIN
    ValidateStatus(p_Status);
    UPDATE inventory
      SET status = p_Status, status_date = p_StatusDate, amount = p_Amount
      WHERE isbn = p_ISBN;
    -- Check for no books updated, and raise the exception.
    IF SQL%ROWCOUNT = 0 THEN
      RAISE e_ISBNNotFound;
    END IF;
  END UpdateISBN;

  -- Deletes the inventory data for the specified book.
  -- Raises e_ISBNNotFound if no row matches p_ISBN.
  PROCEDURE DeleteISBN(p_ISBN IN inventory.isbn%TYPE) IS
  BEGIN
    DELETE FROM inventory
      WHERE isbn = p_ISBN;
    -- Check for no books deleted, and raise the exception.
    IF SQL%ROWCOUNT = 0 THEN
      RAISE e_ISBNNotFound;
    END IF;
  END DeleteISBN;

  -- Returns an array containing the books with the specified status.
  PROCEDURE StatusList(p_Status IN inventory.status%TYPE,
                       p_Books OUT t_ISBNTable,
                       p_NumBooks OUT BINARY_INTEGER) IS
    v_ISBN inventory.isbn%TYPE;
    CURSOR c_Books IS
      SELECT isbn
        FROM inventory
        WHERE status = p_Status;
  BEGIN
    ValidateStatus(p_Status);
    /* p_NumBooks will be the array index. It will start at
     * 0, and be incremented each time through the fetch loop.
     * At the end of the loop, it will have the number of rows
     * fetched, and therefore the number of rows returned in
     * p_Books. */
    p_NumBooks := 0;
    OPEN c_Books;
    LOOP
      FETCH c_Books INTO v_ISBN;
      EXIT WHEN c_Books%NOTFOUND;
      p_NumBooks := p_NumBooks + 1;
      p_Books(p_NumBooks) := v_ISBN;
    END LOOP;
    CLOSE c_Books;
  END StatusList;

  -- Returns an opened cursor containing the books with the specified
  -- status. The caller is responsible for closing the cursor.
  PROCEDURE StatusList(p_Status IN inventory.status%TYPE,
                       p_BookCur OUT c_ISBNCur) IS
  BEGIN
    ValidateStatus(p_Status);
    OPEN p_BookCur FOR
      SELECT isbn
        FROM inventory
        WHERE status = p_Status;
  END StatusList;
END InventoryOps;
/
show errors
set serveroutput on
-- Demo block: exercises both StatusList overloads and prints the
-- backordered ISBNs via DBMS_OUTPUT.
DECLARE
v_BooksInStock InventoryOps.t_ISBNTable;
v_NumBooks BINARY_INTEGER;
v_BookCur InventoryOps.c_ISBNCur;
v_ISBN inventory.isbn%TYPE;
BEGIN
DBMS_OUTPUT.PUT_LINE('First version of StatusList:');
-- Fill the PL/SQL table with the ISBNs of the books which
-- are backordered.
InventoryOps.StatusList('BACKORDERED', v_BooksInStock, v_NumBooks);
-- And print them out.
FOR v_LoopCounter IN 1..v_NumBooks LOOP
DBMS_OUTPUT.PUT_LINE(' ISBN ' || v_BooksInStock(v_LoopCounter) ||
' is backordered');
END LOOP;
DBMS_OUTPUT.PUT_LINE('Second version of StatusList:');
-- Get an opened cursor with the ISBNs of the books which are
-- backordered.
InventoryOps.StatusList('BACKORDERED', v_BookCur);
-- And print them out.
LOOP
FETCH v_BookCur INTO v_ISBN;
EXIT WHEN v_BookCur%NOTFOUND;
DBMS_OUTPUT.PUT_LINE(' ISBN ' || v_ISBN || ' is backordered');
END LOOP;
-- The ref-cursor overload leaves closing to the caller.
CLOSE v_BookCur;
END;
/
|
<gh_stars>1-10
from setuptools import setup

# Read the package version from the VERSION file; the context manager
# closes the handle (the original left an open file object behind).
with open('VERSION') as version_file:
    version = version_file.read().strip()

setup(name='r12',
      version=version,
      description='Low-level interface for ST Robotics R12 robotic arm.',
      url='https://github.com/adamheins/r12',
      author='<NAME>',
      author_email='<EMAIL>',
      license='MIT',
      packages=['r12'],
      install_requires=[
          'colorama',
          'pyusb',
          'pyserial>=3',
      ],
      scripts=['r12/r12-shell'],
      include_package_data=True,
      zip_safe=False,
      )
|
/*
* Copyright (c) 2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __UVISOR_API_UVISOR_SPINLOCK_H__
#define __UVISOR_API_UVISOR_SPINLOCK_H__
#include "api/inc/uvisor_exports.h"
#include <stdbool.h>
/* Minimal busy-wait lock used by uVisor. The single flag is the entire
 * lock state; callers must initialize it with uvisor_spin_init before use. */
typedef struct {
    bool acquired;
} UvisorSpinlock;
/* This function is safe to call from interrupt context. */
UVISOR_EXTERN void uvisor_spin_init(UvisorSpinlock * spinlock);
/* Attempt to spin lock once. Return true if the lock was obtained, false if
 * otherwise. This function is safe to call from interrupt context. */
UVISOR_EXTERN bool uvisor_spin_trylock(UvisorSpinlock * spinlock);
/* Spin in a tight loop until the lock is obtained. This function is safe to
 * call from interrupt context, but probably not wise. */
UVISOR_EXTERN void uvisor_spin_lock(UvisorSpinlock * spinlock);
/* Unlock the spin lock. This function is safe to call from interrupt context.
 * */
UVISOR_EXTERN void uvisor_spin_unlock(UvisorSpinlock * spinlock);
#endif /* __UVISOR_API_UVISOR_SPINLOCK_H__ */
|
package view
import (
"github.com/ungerik/go-start/errs"
)
// IndirectURL encapsulates pointers to URL implementations.
// To break circular dependencies, addresses of URL implementing variables
// can be passed to this function that encapsulates it with an URL
// implementation that dereferences the pointers at runtime.
//
// Panics if urlPtr is not **Page, *ViewWithURL, or *URL.
func IndirectURL(urlPtr interface{}) URL {
switch s := urlPtr.(type) {
case **Page:
return &indirectPageURL{s}
case *ViewWithURL:
return IndirectViewWithURL(s)
case *URL:
return &indirectURL{s}
}
panic(errs.Format("%T not a pointer to a view.URL", urlPtr))
}
// indirectURL wraps a *URL and resolves it on each call,
// so the target may be assigned after this wrapper is created.
type indirectURL struct {
url *URL
}
func (self *indirectURL) URL(ctx *Context) string {
return (*self.url).URL(ctx)
}
// indirectPageURL is the **Page counterpart of indirectURL.
type indirectPageURL struct {
page **Page
}
func (self *indirectPageURL) URL(ctx *Context) string {
// self.page is a **Page; Go's selector auto-dereferences one level,
// so this is (*self.page).URL(ctx).
return self.page.URL(ctx)
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.