text stringlengths 1 1.05M |
|---|
package com.leetcode;
import org.testng.annotations.Test;
public class Solution_160Test {
@Test
public void testGetIntersectionNode() {
ListNode listNode1 = new ListNode(2);
ListNode listNode2 = new ListNode(6);
ListNode listNode3 = new ListNode(4);
ListNode listNode4 = new ListNode(1);
ListNode listNode5 = new ListNode(5);
listNode1.next = listNode2;
listNode2.next = listNode3;
listNode4.next = listNode5;
Solution_160 solution_160 = new Solution_160();
solution_160.getIntersectionNode(listNode1, listNode4);
}
@Test
public void testTestGetIntersectionNode() {
}
} |
#!/bin/bash
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This is a script that runs on npm install postinstall phase.
# It contains all prerequisites required to use the build system.

# Install front-end dependencies; --allow-root lets the hook also work
# when npm runs as root (e.g. inside containers).
./node_modules/.bin/bower install --allow-root

# Godep is required by the project. Install it in the tools directory.
# $(pwd) replaces the legacy backticks and is quoted so a working
# directory containing spaces cannot split the GOPATH assignment.
GOPATH="$(pwd)/.tools/go" go get github.com/tools/godep
|
#!/bin/bash
# Take an interactive screenshot. Previous tooling (kept for reference):
# using deepin-screenshot:
# deepin-screenshot -s ~/Pictures
# Now launches the Flameshot capture UI instead.
flameshot gui
|
#!/bin/bash
# Installs and configures a LEMP stack (Nginx, PHP-FPM, MySQL) on a
# Debian/Ubuntu system. Run as a regular user; privileged steps use sudo.

# "$USER" is quoted and compared with the portable '=' operator; the
# previous unquoted '==' form breaks when USER is empty and is a bashism.
if [ "$USER" = 'root' ]
then
    echo ""
    echo "********************************************"
    echo "* Run this as your user, NOT root or sudo! *"
    echo "********************************************"
else
    echo ""
    echo "************************************"
    echo "* Installing Nginx, PHP, and MySql *"
    echo "************************************"
    sudo apt install nginx php-fpm mysql-server php-mysql php-mbstring php-xml php-gd php-curl
    # Start services now and enable them at boot.
    sudo systemctl start nginx
    sudo systemctl enable nginx
    sudo systemctl start mysql
    sudo systemctl enable mysql
    echo ""
    echo ""
    echo "***********************"
    echo "* Setting Permissions *"
    echo "***********************"
    # www-data gets read/traverse access, the invoking user owns the tree,
    # and setgid on directories keeps new files in the www-data group.
    sudo chgrp -R www-data /var/www/html
    sudo find /var/www/html -type d -exec chmod g+rx {} +
    sudo find /var/www/html -type f -exec chmod g+r {} +
    sudo chown -R "$USER" /var/www/html
    sudo find /var/www/html -type d -exec chmod u+rwx {} +
    sudo find /var/www/html -type f -exec chmod u+rw {} +
    sudo find /var/www/html -type d -exec chmod g+s {} +
    # Drop a PHP info page so the install can be verified in a browser.
    printf "<?php\n\techo phpinfo();\n?>" > /var/www/html/info.php
    # Switch MySQL root to password auth so 'mysql -u root -p' works.
    sudo mysql -u root -e "update mysql.user set plugin = 'mysql_native_password' where User='root';"
    sudo mysql -u root -e "FLUSH PRIVILEGES;"
    echo ""
    echo ""
    echo "*********************************"
    echo "* Setting a root MySQL password *"
    echo "*********************************"
    mysql_secure_installation
    echo ""
    echo ""
    echo "****************************************************************"
    echo "* Install Complete! *"
    echo "****************************************************************"
    echo "* *"
    echo "* - Visit 'http://localhost/info.php' to confirm PHP is set up *"
    echo "* - Your document root is set to: '/var/www/html/' *"
    echo "* - Your nginx config is at: '/etc/nginx/' *"
    echo "* - Your php config is at: '/etc/php/7.x/' *"
    echo "* - Log into mysql with: 'mysql -u root -p' *"
    echo "* *"
    echo "* - Restart nginx: 'sudo systemctl restart nginx' *"
    echo "* - Restart mysql: 'sudo systemctl restart mysql' *"
    echo "* *"
    echo "****************************************************************"
fi
# In /etc/php/7.4/fpm/php.ini set:
# "session.gc_probability = 1"
# In /etc/php/7.4/fpm/pool.d/www.conf, set:
# pm.start_servers = 8
# pm.min_spare_servers = 8
# pm.max_spare_servers = 16
# pm.max_children = 100 |
# -*- encoding: utf-8 -*-
# Acceptance test for the razor "repos" CLI command.
# this is required because of the use of eval interacting badly with require_relative
require 'razor/acceptance/utils'
confine :except, :roles => %w{master dashboard database frictionless}
test_name 'Command - "repos"'
step 'https://testrail.ops.puppetlabs.net/index.php?/cases/view/446'
# Start from a clean slate so the "no items" assertion below holds.
reset_database
# With no repos defined, `razor repos` must report an empty result set.
agents.each do |agent|
step "Test empty query results on #{agent}"
text = on(agent, "razor repos").output
assert_match /There are no items for this query./, text
end
# After creating a single repo, it must appear in the listing.
razor agents, 'create-repo --name puppet-test-repo --url "http://provisioning.example.com/centos-6.4/x86_64/os/" --task centos' do |agent|
step "Test single query result on #{agent}"
text = on(agent, "razor repos").output
assert_match /puppet-test-repo/, text
end |
import React from 'react'
import {Link} from 'react-router-dom'

/**
 * Landing page shown to logged-out visitors: a welcome banner plus
 * Login and Sign Up links.
 *
 * NOTE(review): both <Link>s share id="login-signup"; duplicate ids are
 * invalid HTML. Verify nothing targets the id before changing it.
 */
export default class Home extends React.Component {
  render() {
    return (
      <div>
        <main>
          <h2 className="eBudget-header">Welcome to eBudget!</h2>
          <h3 className="eBudget-subheader">
            Log in or sign up to get started
          </h3>
          <ul className="center-align" id="login-signup-container">
            <Link
              to="/login"
              className="btn-large light-blue lighten-2"
              id="login-signup"
            >
              Login
            </Link>
            <Link to="/signup" className="btn-large orange" id="login-signup">
              Sign Up
            </Link>
          </ul>
        </main>
      </div>
    )
  }
}
|
const { exec } = require('child_process');
const getPackagePath = require('../utilities/getPackagePath');
const getPackages = require('../utilities/getPackages');
const runExec = (version, packageDir) =>
new Promise((resolve, reject) => {
exec(
`npm version ${version}`,
{
cwd: packageDir,
},
error => {
if (error) {
reject(error);
} else {
resolve();
}
},
);
});
module.exports = ({ packageNames, version }) => {
const packagePaths = getPackages();
const bumpVersion = packageName =>
runExec(version, getPackagePath(packageName));
return Promise.all([
packageNames,
packagePaths,
...packageNames.map(bumpVersion),
]);
};
|
#!/bin/bash
# Thin wrapper that delegates to this package's npm "stop" script.
npm stop |
#!/bin/bash
# Publishes this package to npm. Must be invoked via "npm run build:publish"
# so the npm_package_* environment variables are populated.
if [ "$npm_package_version" == "" ]; then
  echo "Not running in NPM, please run as 'npm run build:publish'"
  exit 1
fi

# check we're on the release branch
BRANCH=$(git branch --list release)
if [ "$BRANCH" != "* release" ]; then
  echo "Not on release branch, not publishing"
  exit 0 # exit 0 so we don't cause npm to show errors
fi

# do a build to make sure we're up to date
if npm run build; then
  # and now publish, after explicit confirmation
  echo -n "Ready to publish ${npm_package_name}:${npm_package_version}, enter Y to continue: "
  # -r keeps backslashes in the answer from being treated as escapes
  read -r line
  if [ "$line" == "Y" ]; then
    npm publish --access public
  else
    # typo fix: message previously read "Not publising"
    echo "Not publishing since you entered '${line}'"
  fi
else
  echo "Build failed, not publishing"
fi
<filename>bleich.go<gh_stars>0
package main
import (
"fmt"
"os"
"strconv"
"github.com/qrowsxi/modulo"
)
// server holds the RSA parameters of the decryption oracle: public
// modulus, private exponent and public exponent.
type server struct {
	pubkey, privkey, e int64
}

// leakyDecrypt decrypts cryptedMessage and leaks only the least
// significant bit of the plaintext — the side channel this toy attacks.
func (s *server) leakyDecrypt(cryptedMessage int64) int {
	message := modulo.RSADecrypt(cryptedMessage, s.privkey, s.pubkey)
	return int(message & int64(1))
}

// client knows only the public key material.
type client struct {
	pubkey, e int64
}

// encrypt RSA-encrypts message under the client's public key.
func (c *client) encrypt(message int64) int64 {
	return modulo.RSAEncrypt(message, c.e, c.pubkey)
}
// Filter returns the elements of array for which f reports true.
// The input slice is not modified; matches are collected into a freshly
// allocated slice, which is nil when nothing matches.
func Filter(array []int64, f func(elem int64) bool) []int64 {
	var kept []int64
	for _, candidate := range array {
		if !f(candidate) {
			continue
		}
		kept = append(kept, candidate)
	}
	return kept
}
// GuessFromLeaks will guess the password given the leaked bits
func GuessFromLeaks(leaks []int64, pubkey int64) []int64 {
var guess []int64
for i := int64(0); i < pubkey; i++ {
guess = append(guess, i)
}
for i, bit := range leaks {
if bit%2 == 0 {
guess = Filter(guess, func(elem int64) bool {
pow2, _ := modulo.PowerMod(int64(2), int64(i), pubkey)
if ((elem*pow2)%pubkey)%2 == 0 {
return true
}
return false
})
} else {
guess = Filter(guess, func(elem int64) bool {
pow2, _ := modulo.PowerMod(int64(2), int64(i), pubkey)
if ((elem*pow2)%pubkey)%2 != 0 {
return true
}
return false
})
}
fmt.Println(fmt.Sprintf("%d * message mod %d first bit is: %d", (2 << i), pubkey, bit))
fmt.Println("guess is in\n", guess)
}
return guess
}
// UsageError prints the command-line usage string for program `name`
// and terminates the process with exit status 1.
func UsageError(name string) {
	fmt.Println(fmt.Sprintf("usage: %s pubkey e privkey message", name))
	os.Exit(1)
}
// main drives the toy Bleichenbacher-style attack: a client encrypts a
// 1-byte message, the server leaks one plaintext bit per query, and the
// attacker reconstructs the candidate messages from the leaked bits.
func main() {
	var pubkey int64
	var e int64
	var privkey int64
	var message int64
	// os.Args[1..4] are all read below, so four arguments are required;
	// the previous check (< 2) allowed an index-out-of-range panic when
	// fewer were supplied.
	if len(os.Args) < 5 {
		UsageError("bleichenbacher-toy")
	}
	pubkey, err := strconv.ParseInt(os.Args[1], 10, 64)
	if err != nil {
		UsageError("bleichenbacher-toy")
	}
	e, err = strconv.ParseInt(os.Args[2], 10, 64)
	if err != nil {
		UsageError("bleichenbacher-toy")
	}
	privkey, err = strconv.ParseInt(os.Args[3], 10, 64)
	if err != nil {
		UsageError("bleichenbacher-toy")
	}
	message, err = strconv.ParseInt(os.Args[4], 10, 64)
	if err != nil {
		UsageError("bleichenbacher-toy")
	}
	// Toy sizes only: everything must fit in a single byte.
	if pubkey >= 256 || e >= 256 || privkey >= 256 || message >= 256 {
		fmt.Println("all value should be 1byte value (0,255)")
		os.Exit(1)
	}
	cl := client{pubkey, e}
	srv := server{pubkey, privkey, e}
	cryptedMessage := cl.encrypt(message)
	fmt.Println("crypted message known is: ", cryptedMessage)
	// Query the oracle 8 times (one per plaintext bit): multiplying the
	// ciphertext by (2^e)^i mod n multiplies the plaintext by 2^i.
	var leaks []int64
	for i := int64(0); i < 8; i++ {
		pow2, _ := modulo.PowerMod(2, i*cl.e, cl.pubkey)
		bit := srv.leakyDecrypt(cryptedMessage * pow2 % cl.pubkey)
		leaks = append(leaks, int64(bit))
	}
	fmt.Println("leaks: ", leaks)
	guess := GuessFromLeaks(leaks, cl.pubkey)
	fmt.Println("message is: ", message)
	fmt.Println("guess is: ", guess)
}
|
import { checkAuth, getGuildById, updateGuildById } from "../../../../utils/functions";

/**
 * API route managing a guild's economy store.
 * POST   – add an item ({ name, price }) to the store.
 * DELETE – remove the item named by ?name= from the store.
 * Any other method returns 405.
 */
export default async function handler(req, res) {
  const { method, query } = req;

  try {
    await checkAuth(req);
  } catch (e) {
    return res.json({ status: "error", error: e });
  }

  const guild = await getGuildById(query.id);
  const lang = await req.bot.getGuildLang(query.id);

  switch (method) {
    case "POST": {
      const body = JSON.parse(req.body);

      if (!body.name || !body.price) {
        return res.json({
          error: "Please fill in all fields",
          status: "error",
        });
      }

      const isNumber = /^\d+$/;
      const name = body.name.toLowerCase();

      // Validate the raw input: testing the converted Number let values
      // like "0x10" or "1e3" slip through via their decimal string form.
      if (!isNumber.test(body.price)) {
        return res.status(400).json({
          error: lang.ECONOMY.MUST_BE_NUMBER,
          status: "error",
        });
      }
      const price = Number(body.price);

      // Bug fix: duplicates must be checked against the store being
      // modified, not against custom_commands.
      if (guild.store?.find((x) => x.name === name)) {
        return res.status(400).json({
          error: lang.ECONOMY.ALREADY_EXISTS.replace("{item}", name),
          status: "error",
        });
      }

      await updateGuildById(query.id, {
        // Guard against a missing store array, matching the optional
        // chaining used elsewhere in this handler.
        store: [...(guild.store ?? []), { name: name, price: price }],
      });

      return res.json({ status: "success" });
    }
    case "DELETE": {
      const filtered = guild.store?.filter(
        (item) => item.name.toLowerCase() !== query.name.toLowerCase()
      );

      await updateGuildById(query.id, { store: filtered });

      return res.json({
        status: "success",
        message: lang.ECONOMY.REMOVED_FROM_STORE.replace("{item}", query.name),
      });
    }
    default: {
      return res.status(405).json({ error: "Method not allowed", status: "error" });
    }
  }
}
|
<filename>src/app/common/constants.js<gh_stars>0
// Locale used for all user-facing number formatting.
export const LOCALE_STRING = 'en-GB';
// Card-number masking window: keep the first START_INDEX characters and
// the last FINISH_INDEX_TO_END characters visible, mask the rest.
export const CARD_MASK_SETTINGS = {
  START_INDEX: 2,
  FINISH_INDEX_TO_END: 4,
};
export const MASK_SYMBOL = '*';
// Honorific prefix keyed by the gender value from the data source.
export const USER_PREFIX_BY_GENDER = {
  Male: 'Mr.',
  Female: 'Ms.',
};
export const FEMALE_GENDER = 'Female';
// CSS class names used when building DOM nodes.
export const CLASS_NAMES = {
  TABLE: {
    MAIN: 'main-table',
    BODY: 'main-table-body',
    HEAD: 'main-table-head',
  },
  SEARCH_INPUT: 'search-input',
  USER: {
    LINK: 'user-link',
    DETAILS: 'user-details',
  },
  HIDE: 'hide',
  CURRENCY_SELECT: 'currency-select',
};
export const DEFAULT_SORT_BY_PROPERTY = 'order';
// Label appended to the header of the currently sorted column.
export const SORTED_LABEL = '↓';
// Money rendering: decimal places and the placeholder for missing amounts.
export const MONEY_FLOAT_DIGITS = 2;
export const EMPTY_AMOUNT_TO_PRINT = 'n/a';
export const DEFAULT_CURRENCY = 'USD';
export const DEFAULT_CURRENCY_RATIO = 1;
|
<filename>src/main/java/br/com/alinesolutions/anotaai/model/produto/EstoqueMovimentacao.java
package br.com.alinesolutions.anotaai.model.produto;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.annotations.Any;
import org.hibernate.annotations.AnyMetaDef;
import org.hibernate.annotations.MetaValue;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import br.com.alinesolutions.anotaai.model.BaseEntity;
/**
 * Groups all stock movements of a product (balance, entry, sale,
 * breakage, return and reversal). Rows are soft-deleted: SQLDelete flips
 * ativo to false and the Where clause filters inactive rows from queries.
 *
 * @author gleidson
 */
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = EstoqueMovimentacao.class)
@NamedQueries({
})
@Entity
@Where(clause = "ativo = true")
@SQLDelete(sql = "update EstoqueMovimentacao set ativo = false where id = ?")
@XmlRootElement
public class EstoqueMovimentacao extends BaseEntity<Long, EstoqueMovimentacao> {

	private static final long serialVersionUID = 1L;

	// Stock this movement belongs to; always required.
	@NotNull
	@ManyToOne(optional = false)
	private Estoque estoque;

	// Polymorphic link to the concrete movement row; the discriminator
	// column tipo_movimentacao selects which target entity the id refers to.
	@NotNull
	@Any(metaColumn = @Column(name = "tipo_movimentacao", length = 16), fetch = FetchType.LAZY)
	@AnyMetaDef(
			idType = "long", metaType = "string",
			metaValues = {
					@MetaValue(targetEntity = ItemBalanco.class, value = "BALANCO"),
					@MetaValue(targetEntity = ItemEntrada.class, value = "ENTRADA"),
					@MetaValue(targetEntity = ItemVenda.class, value = "VENDA"),
					@MetaValue(targetEntity = ItemQuebra.class, value = "QUEBRA"),
					@MetaValue(targetEntity = ItemDevolucao.class, value = "DEVOLUCAO"),
					@MetaValue(targetEntity = ItemEstorno.class, value = "ESTORNO")
			}
	)
	@JoinColumn(name="movimentacao_id")
	private IMovimentacao movimentacao;

	public Estoque getEstoque() {
		return estoque;
	}
	public void setEstoque(Estoque estoque) {
		this.estoque = estoque;
	}
	public IMovimentacao getMovimentacao() {
		return movimentacao;
	}
	public void setMovimentacao(IMovimentacao movimentacao) {
		this.movimentacao = movimentacao;
	}
}
|
#!/bin/bash
# IBM_PROLOG_BEGIN_TAG
#
# Copyright 2003,2016 IBM International Business Machines Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IBM_PROLOG_END_TAG
# Prints the NUMA topology from sysfs: the node count, then one line per
# node with the count and ids of its *online* CPUs, e.g.
#   node=0,cpus_in_node=8,cpus:0:1:2:3:4:5:6:7
num_nodes=$(ls -d /sys/devices/system/node/node[0-9]* 2>/dev/null | wc -l )
printf "num_nodes=%d\n" $num_nodes
for node in $(ls -d /sys/devices/system/node/node[0-9]* 2>/dev/null | sort -n -t : -k 1.30)
do
#echo "node "$node
if [ -d $node ]; then
node_num=${node##*/node/node}
#cpus_in_node=$(ls -d $node/cpu[0-9]* 2>/dev/null | wc -l )
# Count only CPUs whose sysfs "online" flag is 1: build one grep command
# per cpu directory, run them, and count the matches.
cpus_in_node=$(find $node/ -name cpu[0-9]* | awk ' { printf("grep -H 1 %s/online\n",$1) } ' | sh | wc -l )
printf "node=%d,cpus_in_node=%d,cpus" ${node##*/node/node} $cpus_in_node
fi
str_len=$((${#node}+5))
# for cpu in $(ls -d $node/cpu[0-9]* 2>/dev/null | sort -n -t : -k 1.$str_len )
# Same online-only filter, then strip the path down to the numeric cpu id.
for cpu in $(find $node/ -name cpu[0-9]* | awk ' { printf("grep -H 1 %s/online\n",$1) } ' | sh | cut -d / -f 7 | awk -F 'cpu' ' {print $2} ' )
do
# if [ -d $cpu ]; then
# #echo "cpu "$cpu
# printf ":%d" ${cpu##*cpu}
# fi
echo -n ":$cpu"
done
printf "\n"
done
|
<reponame>FarooqAlaulddin/Autonomous-Book-Solution
package com.api.abs.user;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoint reporting the caller's login status at GET auth/isLoggedIn.
 *
 * NOTE(review): class name should be UserController per Java naming
 * conventions; left unchanged to avoid breaking references.
 */
@RestController
@RequestMapping(path = "auth/isLoggedIn")
public class userController {
	// Always reports logged-in; presumably unauthenticated requests are
	// rejected by upstream security before reaching here — TODO confirm.
	@GetMapping
	public boolean isLoggedIn(){
		return true;
	}
}
|
#!/usr/bin/env bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Regenerates the deepcopy/client/informer/lister code for the
# samplecontroller API group via k8s.io/code-generator.
set -o errexit
set -o nounset
set -o pipefail
SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
# Locate code-generator: prefer the vendored copy, fall back to a
# sibling checkout; CODEGEN_PKG may be pre-set to override.
CODEGEN_PKG=${CODEGEN_PKG:-$(cd "${SCRIPT_ROOT}"; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)}
CODEGEN_PKG=$SCRIPT_ROOT"/"$CODEGEN_PKG
echo $CODEGEN_PKG
# generate the code with:
# --output-base because this script should also be able to run inside the vendor dir of
# k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir
# instead of the $GOPATH directly. For normal projects this can be dropped.
bash "${CODEGEN_PKG}"/generate-groups.sh "deepcopy,client,informer,lister" \
../pkg/generated ../pkg/apis \
samplecontroller:v1alpha1 \
--output-base "../pkg" \
--go-header-file "./boilerplate.go.txt"
# To use your own boilerplate text append:
# --go-header-file "${SCRIPT_ROOT}"/hack/custom-boilerplate.go.txt
|
#!/bin/zsh
# Builds and pushes the ARMv7 docker-mailserver image, tagged with the
# short commit hash and "latest".

# Build filebeat for ARM first if it is not already present.
if [[ ! -a ./build-filebeat/filebeat ]]; then
./compile-filebeat-arm.sh
fi

VCS_REF=$(git rev-parse --short HEAD)
VCS_VERSION=$(git describe)

# Reuse $VCS_REF for the image tags instead of shelling out to
# `git rev-parse --verify --short HEAD` three more times — those calls
# produced the same value.
docker buildx build --platform linux/arm/v7 --build-arg VCS_REF=$VCS_REF --build-arg VCS_VERSION=$VCS_VERSION -f Dockerfile -t radicand/docker-mailserver:$VCS_REF -t radicand/docker-mailserver:latest .
docker push radicand/docker-mailserver:$VCS_REF
docker push radicand/docker-mailserver:latest
|
<filename>packages/browser-repl/src/components/editor.tsx
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import AceEditor from 'react-ace';
import { Autocompleter } from '@mongosh/browser-runtime-core';
import { AceAutocompleterAdapter } from './ace-autocompleter-adapter';
import 'brace/ext/language_tools';
import 'brace/mode/javascript';
import './ace-theme';
import ace from 'brace';

const tools = ace.acequire('ace/ext/language_tools');

// Shared no-op used as the default for every optional callback prop.
const noop = (): void => {
  //
};

// Callbacks and state the host component can wire into the editor.
interface EditorProps {
  onEnter?(): void | Promise<void>;
  onArrowUpOnFirstLine?(): void | Promise<void>;
  onArrowDownOnLastLine?(): void | Promise<void>;
  onChange?(value: string): void | Promise<void>;
  autocompleter?: Autocompleter;
  value?: string;
}

/**
 * Ace-based input editor for the browser REPL. Wraps react-ace with
 * REPL-style key behaviour: Enter notifies onEnter, Up on the first
 * line / Down on the last line notify the host (e.g. for history
 * navigation), and typing a word character or '.' opens autocompletion
 * when an Autocompleter is supplied.
 */
export class Editor extends Component<EditorProps> {
  static propTypes = {
    onEnter: PropTypes.func,
    onArrowUpOnFirstLine: PropTypes.func,
    onArrowDownOnLastLine: PropTypes.func,
    onChange: PropTypes.func,
    value: PropTypes.string
  };

  static defaultProps = {
    onEnter: noop,
    onArrowUpOnFirstLine: noop,
    onArrowDownOnLastLine: noop,
    onChange: noop,
    value: ''
  };

  // Underlying Ace editor instance, captured in onEditorLoad.
  private editor: any;

  // Capture the Ace instance and, when autocompletion is enabled, pop
  // the completion menu as soon as a word character or '.' is inserted.
  private onEditorLoad = (editor: any): void => {
    this.editor = editor;
    if (this.props.autocompleter) {
      editor.commands.on('afterExec', function(e) {
        if (e.command.name === 'insertstring' && /^[\w.]$/.test(e.args)) {
          editor.execCommand('startAutocomplete');
        }
      });
      // NOTE(review): setCompleters is global to Ace, so the last mounted
      // Editor's autocompleter wins — confirm only one instance is live.
      tools.setCompleters([new AceAutocompleterAdapter(this.props.autocompleter)]);
    }
  };

  render(): JSX.Element {
    return (<AceEditor
      showPrintMargin={false}
      showGutter={false}
      highlightActiveLine
      setOptions={{
        enableBasicAutocompletion: !!this.props.autocompleter,
        enableLiveAutocompletion: !!this.props.autocompleter,
        enableSnippets: false,
        showLineNumbers: false,
        tabSize: 2
      }}
      name={`mongosh-ace-${Date.now()}`}
      mode="javascript"
      theme="mongosh"
      onChange={this.props.onChange}
      onLoad={this.onEditorLoad}
      commands={[
        {
          name: 'return',
          bindKey: { win: 'Return', mac: 'Return' },
          exec: (): void => {
            this.props.onEnter();
          }
        },
        {
          // Up only bubbles to the host when the caret sits on the first
          // line with no selection; otherwise normal cursor movement.
          name: 'arrowUpOnFirstLine',
          bindKey: { win: 'Up', mac: 'Up' },
          exec: (): void => {
            const selectionRange = this.editor.getSelectionRange();
            if (!selectionRange.isEmpty() || selectionRange.start.row !== 0) {
              return this.editor.selection.moveCursorUp();
            }
            this.props.onArrowUpOnFirstLine();
          }
        },
        {
          // Mirror image of the Up handler for the last line.
          name: 'arrowDownOnLastLine',
          bindKey: { win: 'Down', mac: 'Down' },
          exec: (): void => {
            const selectionRange = this.editor.getSelectionRange();
            const lastRowIndex = this.editor.session.getLength() - 1;
            if (!selectionRange.isEmpty() || selectionRange.start.row !== lastRowIndex) {
              return this.editor.selection.moveCursorDown();
            }
            this.props.onArrowDownOnLastLine();
          }
        }
      ]}
      width="100%"
      maxLines={Infinity}
      editorProps={{
        $blockScrolling: Infinity
      }}
      value={this.props.value}
    />);
  }
}
|
<reponame>JoeQuattrone/LB-Client-Travis<filename>src/components/SongRow.test.js<gh_stars>0
import React from 'react'
import Enzyme, { render, shallow, mount, instance } from 'enzyme'
import Adapter from 'enzyme-adapter-react-16'
import SongRow from './SongRow'
import SongCard from './SongCard'
import sinon from "sinon";

Enzyme.configure({ adapter: new Adapter() })

describe('<SongRow />', () => {
  // Minimal fixture: two songs with the fields SongCard renders.
  const songs = [
    {track_name: "Lose Yourself", artist_name: "Eminem", genre: 'rap', likes: "5 likes"},
    {track_name: "Lose Yourself to Dance", artist_name: "<NAME>", genre: 'EDM', likes: "6 likes"}
  ]

  // can't mount SongRow because of link from songcard
  it('maps over songs and renders <SongCard />', () => {
    const wrapper = shallow(<SongRow songs={songs} />)
    // Expected children: one SongCard per song, keyed by index.
    const songCard1 = <SongCard song={songs[0]} key={0}/>
    const songCard2 = <SongCard song={songs[1]} key={1}/>
    expect(wrapper.props().children[0]).toEqual(songCard1);
    expect(wrapper.props().children[1]).toEqual(songCard2);
  })
})
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
import six
from decimal import Decimal
from ..config import ERROR
# Module API
def cast_integer(format, value, **options):
if isinstance(value, six.integer_types):
if value is True or value is False:
return ERROR
pass
elif isinstance(value, six.string_types):
if not options.get('bareNumber', _DEFAULT_BARE_NUMBER):
value = re.sub(r'((^\D*)|(\D*$))', '', value)
try:
value = int(value)
except Exception:
return ERROR
elif isinstance(value, float) and value.is_integer():
value = int(value)
elif isinstance(value, Decimal) and value % 1 == 0:
value = int(value)
else:
return ERROR
return value
# Internal
_DEFAULT_BARE_NUMBER = True
|
package com.resms.lightsentinel.common.service;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.utils.CloseableUtils;
import org.apache.curator.x.discovery.ServiceDiscovery;
import org.apache.curator.x.discovery.ServiceDiscoveryBuilder;
import org.apache.curator.x.discovery.ServiceInstance;
import org.apache.curator.x.discovery.details.JsonInstanceSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
/**
 * Embedded into an application to register its service instances with
 * ZooKeeper via Curator service discovery. All operations are gated on
 * {@link #isRunning()}: before {@link #start()} (or after {@link #close()})
 * mutating calls are no-ops and queries return {@code null}.
 *
 * @author sam
 */
public class LightSentinelServiceRegistry implements Closeable
{
	private static final Logger logger = LoggerFactory.getLogger(LightSentinelServiceRegistry.class);
	// Curator discovery facade; instances are serialized as JSON.
	private final ServiceDiscovery<ServiceInstanceDetail> serviceDiscovery;
	private final CuratorFramework client;
	// ZooKeeper path under which all instances are registered.
	private final String basePath;
	private boolean running = false;
	public LightSentinelServiceRegistry(CuratorFramework client, String basePath) throws Exception
	{
		this.client = client;
		this.basePath = basePath;
		serviceDiscovery = ServiceDiscoveryBuilder.builder(ServiceInstanceDetail.class)
				.client(this.client)
				.basePath(this.basePath)
				.serializer(new JsonInstanceSerializer<ServiceInstanceDetail>(ServiceInstanceDetail.class))
				.build();
	}
	// Registers the instance; silently logs and skips when not running.
	public void registerService(ServiceInstance<ServiceInstanceDetail> serviceInstance) throws Exception {
		if (isRunning()) {
			serviceDiscovery.registerService(serviceInstance);
		} else {
			logger.warn("LightSentinelServiceRegistry is not running");
		}
	}
	// Removes a previously registered instance; no-op when not running.
	public void unregisterService(ServiceInstance<ServiceInstanceDetail> serviceInstance) throws Exception {
		if (isRunning()) {
			serviceDiscovery.unregisterService(serviceInstance);
		} else {
			logger.warn("LightSentinelServiceRegistry is not running");
		}
	}
	// Re-publishes an instance's details; no-op when not running.
	public void updateService(ServiceInstance<ServiceInstanceDetail> serviceInstance) throws Exception {
		if (isRunning()) {
			serviceDiscovery.updateService(serviceInstance);
		} else {
			logger.warn("LightSentinelServiceRegistry is not running");
		}
	}
	// Returns all instances registered under the given service name, or
	// null when the registry is not running.
	public Collection<ServiceInstance<ServiceInstanceDetail>> queryForServiceInstances(String name) throws Exception {
		if (isRunning()) {
			return serviceDiscovery.queryForInstances(name);
		} else {
			logger.warn("LightSentinelServiceRegistry is not running");
		}
		return null;
	}
	// Returns the single instance with the given name/id, or null when
	// absent or when the registry is not running.
	public ServiceInstance<ServiceInstanceDetail> queryForVm(String name, String id) throws Exception {
		if (isRunning()) {
			return serviceDiscovery.queryForInstance(name, id);
		} else {
			logger.warn("LightSentinelServiceRegistry is not running");
		}
		return null;
	}
	// Starts the underlying discovery machinery and enables operations.
	public void start() throws Exception
	{
		serviceDiscovery.start();
		running = true;
	}
	@Override
	public void close() throws IOException
	{
		running = false;
		CloseableUtils.closeQuietly(serviceDiscovery);
	}
	public boolean isRunning() {
		return running;
	}
} |
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.commons.util;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.google.common.collect.ImmutableList;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.collection.CollectionReaderDescription;
import org.apache.uima.fit.component.NoOpAnnotator;
import org.apache.uima.fit.factory.AnalysisEngineFactory;
import org.apache.uima.fit.factory.TypeSystemDescriptionFactory;
import org.apache.uima.fit.pipeline.JCasIterable;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.tcas.Annotation;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import com.textocat.textokit.commons.cpe.XmiCollectionReader;
import java.io.File;
import java.util.List;
/**
 * Command-line tool: iterates an XMI corpus (-i directory) and prints the
 * covered text of every annotation of the given type (--annotation-type),
 * optionally loading extra type systems (-t, repeatable).
 *
 * @author <NAME>
 */
public class PrintAnnotations {
	// Names of additional UIMA type-system descriptors to load (-t).
	@Parameter(names = "-t")
	private List<String> typeSystemNames;
	// Directory containing the XMI corpus (-i), required.
	@Parameter(names = "-i", required = true)
	private File corpusDir;
	// Fully-qualified class name of the annotation type to print.
	@Parameter(names = "--annotation-type", required = true)
	private String annotationTypeName;
	private PrintAnnotations() {
	}
	public static void main(String[] args) throws Exception {
		PrintAnnotations launcher = new PrintAnnotations();
		// JCommander fills the @Parameter fields from args.
		new JCommander(launcher, args);
		launcher.run();
	}
	// A do-nothing analysis engine: the pipeline only needs the reader.
	private static AnalysisEngineDescription getNoOpAEDesc() throws ResourceInitializationException {
		return AnalysisEngineFactory.createEngineDescription(NoOpAnnotator.class);
	}
	public void run() throws ResourceInitializationException, ClassNotFoundException {
		@SuppressWarnings("unchecked")
		Class<? extends Annotation> annotationClass = (Class<? extends Annotation>) Class.forName(annotationTypeName);
		if (typeSystemNames == null) {
			typeSystemNames = ImmutableList.of();
		}
		TypeSystemDescription inputTSD = TypeSystemDescriptionFactory.createTypeSystemDescription(
				typeSystemNames.toArray(new String[typeSystemNames.size()]));
		CollectionReaderDescription colReaderDesc = XmiCollectionReader.createDescription(corpusDir, inputTSD);
		JCasIterable corpus = new JCasIterable(colReaderDesc, getNoOpAEDesc());
		// Print the covered text of each matching annotation per document.
		for (JCas doc : corpus) {
			for (Annotation anno : JCasUtil.select(doc, annotationClass)) {
				System.out.println(anno.getCoveredText());
			}
		}
	}
}
|
/*
 Unit tests for the MixClient API
 - *These tests use the transpiled files in the /dist folder*
 - You will need to run 'npm run prepublish' to transpile the files in the /src folder
 if you make any changes.
*/
var chai = require('chai'),
    expect = chai.expect,
    Web3 = require('web3-mock'); // Use web3-mock for testing

import MixClient from '../src/MixClient';

describe('Mix API',
    function(){
        // Shared across tests: the client connected in the first test is
        // reused by the later ones, so test order matters here.
        var mixClient = null,
            web3 = new Web3('https://localhost:8545');

        it('Should connect to a blockchain',
            function(){
                mixClient = new MixClient(null, web3);
                expect(mixClient.isConnected()).to.equal(true);
            }
        );

        // Expected values below come from web3-mock's canned responses.
        it('Should have retrieved the network stats',
            function(done){
                let stats = null;
                mixClient.getSystemStats().then(
                    function(stats){
                        expect(stats.state).to.equal('synchronised');
                        expect(stats.gasPrice).to.equal(10);
                        expect(stats.latestBlocks.length).to.equal(10);
                        done();
                    }
                );
            }
        );

        it('Should retrieve a transaction',
            function(done){
                var transaction = '0xc977a829b78f0a7c039441465202fff990687f10e3dbef0987ed5ded9bc511f7';
                mixClient.getTransaction(transaction).then(
                    function(transaction){
                        expect(transaction.to).to.equal('0xfa3caabc8eefec2b5e2895e5afbf79379e7268a7');
                        done();
                    }
                )
            }
        );

        it('Should retrieve a block',
            function(done){
                mixClient.getBlock(2742).then(
                    function(block){
                        expect(block.number).to.equal(2742);
                        done();
                    },
                    function(error){
                        console.error(error);
                        done();
                    }
                )
            }
        );

        it('Should retrieve the latest blocks',
            function(done){
                mixClient.getBlocks().then(
                    function(blocks){
                        expect(blocks.length).to.equal(10);
                        done();
                    }
                )
            }
        )
    }
);
|
const { config } = require("dotenv");
const { getInfo, getInfoFromPullRequest } = require("@changesets/get-github-info");

// Load GITHUB_TOKEN etc. from .env for the GitHub info lookups below.
config();

// Changesets changelog generator that decorates each release line with
// GitHub PR/commit/author links, honoring `pr:`, `commit:` and
// `author:`/`user:` overrides embedded in the changeset summary.
module.exports = {
  getDependencyReleaseLine: async (
  ) => {
    return ""
  },
  getReleaseLine: async (changeset, type, options) => {
    if (!options || !options.repo) {
      throw new Error(
        'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
      );
    }

    let prFromSummary;
    let commitFromSummary;
    let usersFromSummary = [];

    // Strip override directives from the summary while recording them.
    const replacedChangelog = changeset.summary
      .replace(/^\s*(?:pr|pull|pull\s+request):\s*#?(\d+)/im, (_, pr) => {
        let num = Number(pr);
        if (!isNaN(num)) prFromSummary = num;
        return "";
      })
      .replace(/^\s*commit:\s*([^\s]+)/im, (_, commit) => {
        commitFromSummary = commit;
        return "";
      })
      .replace(/^\s*(?:author|user):\s*@?([^\s]+)/gim, (_, user) => {
        usersFromSummary.push(user);
        return "";
      })
      .trim();

    const [firstLine, ...futureLines] = replacedChangelog
      .split("\n")
      .map(l => l.trimRight());

    // Resolve PR/commit/user links: prefer an explicit pr: override,
    // then an explicit or changeset commit, else leave everything null.
    const links = await (async () => {
      if (prFromSummary !== undefined) {
        let { links } = await getInfoFromPullRequest({
          repo: options.repo,
          pull: prFromSummary
        });
        if (commitFromSummary) {
          links = {
            ...links,
            commit: `[\`${commitFromSummary}\`](https://github.com/${options.repo}/commit/${commitFromSummary})`
          };
        }
        return links;
      }
      const commitToFetchFrom = commitFromSummary || changeset.commit;
      if (commitToFetchFrom) {
        let { links } = await getInfo({
          repo: options.repo,
          commit: commitToFetchFrom
        });
        return links;
      }
      return {
        commit: null,
        pull: null,
        user: null
      };
    })();

    // author:/user: overrides win over the GitHub-derived author link.
    const users = usersFromSummary.length
      ? usersFromSummary
        .map(
          userFromSummary =>
            `[@${userFromSummary}](https://github.com/${userFromSummary})`
        )
        .join(", ")
      : links.user;

    // Show the commit link only when there is no PR link.
    const pull = links.pull === null ? "" : ` ${links.pull}`
    const commit = !!pull || links.commit === null ? "" : ` ${links.commit}`
    const prefix = [
      pull,
      commit,
      users === null ? "" : ` by ${users}`
    ].join("");

    let lines = `${firstLine}\n${futureLines
      .map(l => `  ${l}`)
      .join("\n")}`;
    // A summary that is itself a bullet list needs an extra indent level.
    if (firstLine[0] === '-') {
      lines = `\n  ${firstLine}\n${futureLines
        .map(l => `  ${l}`)
        .join("\n")}`;
    }

    return `\n\n-${prefix ? `${prefix} –` : ""} ${lines}`;
  }
};
|
/* Copyright 2020 Freerware
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package header
import (
"errors"
"fmt"
"strings"
)
var (
	// ErrEmptyTCNValue is an error that indicates that the TCN value cannot
	// be empty.
	ErrEmptyTCNValue = errors.New("TCN value cannot be empty")
)

// ResponseType represents the type of a transparent negotiation response.
type ResponseType string

const (
	// ResponseTypeList indicates the response to the transparent negotiation
	// request contains a list of the available representations.
	ResponseTypeList ResponseType = "list"
	// ResponseTypeChoice indicates the response to the transparent negotiation
	// request contains a chosen representation using a server-side algorithm.
	ResponseTypeChoice ResponseType = "choice"
	// ResponseTypeAdhoc indicates the response to the transparent negotiation
	// request is acting in the interest of achieving compatibility with a
	// non-negotiating or buggy client.
	ResponseTypeAdhoc ResponseType = "adhoc"
)

// String provides the textual representation of the response type.
func (rt ResponseType) String() string {
	return string(rt)
}
// OverrideDirective represents a server-side override performed when producing
// a response during transparent negotiation.
type OverrideDirective string

const (
	// OverrideDirectiveReChoose indicates to the user agent it SHOULD use its
	// internal variant selection algorithm to choose, retrieve, and display
	// the best variant from this list.
	OverrideDirectiveReChoose OverrideDirective = "re-choose"
	// OverrideDirectiveKeep indicates to the user agent it should not renegotiate
	// on the response to the transparent negotiation request and use it directly.
	OverrideDirectiveKeep OverrideDirective = "keep"
)

// String provides the textual representation of the override directive.
func (od OverrideDirective) String() string {
	return string(od)
}
// TCNValue represents a single value specified within the TCN header.
type TCNValue string

// NewTCNValue constructs a new value for the TCN header. The empty string
// is rejected with ErrEmptyTCNValue.
func NewTCNValue(value string) (TCNValue, error) {
	if value == "" {
		return TCNValue(""), ErrEmptyTCNValue
	}
	return TCNValue(value), nil
}
// IsExtension indicates if the TCN value is an extension, i.e. it is neither
// one of the known override directives ("re-choose", "keep") nor one of the
// known response types ("list", "choice", "adhoc").
func (v TCNValue) IsExtension() bool {
	// Check the known values with switch statements rather than building two
	// throwaway maps on every call, which the previous implementation did.
	switch OverrideDirective(v) {
	case OverrideDirectiveReChoose, OverrideDirectiveKeep:
		return false
	}
	switch ResponseType(v) {
	case ResponseTypeList, ResponseTypeChoice, ResponseTypeAdhoc:
		return false
	}
	return true
}
// String provides the textual representation of the TCN value.
func (v TCNValue) String() string {
	return string(v)
}

var (
	// headerTCN is the header key for the TCN header.
	headerTCN = "TCN"
	// EmptyTCN is an empty TCN header.
	EmptyTCN = TCN([]TCNValue{})
)

// TCN represents the TCN header as an ordered list of its values.
type TCN []TCNValue
// NewTCN constructs a new TCN header with the values provided. An empty
// input yields EmptyTCN; any invalid (empty) value aborts construction and
// returns EmptyTCN along with the validation error.
func NewTCN(values []string) (TCN, error) {
	if len(values) == 0 {
		return EmptyTCN, nil
	}
	// Preallocate to the exact final size to avoid repeated slice growth.
	vals := make([]TCNValue, 0, len(values))
	for _, value := range values {
		val, err := NewTCNValue(value)
		if err != nil {
			return EmptyTCN, err
		}
		vals = append(vals, val)
	}
	return TCN(vals), nil
}
// String provides the textual representation of the TCN header value,
// formatted as "TCN: v1,v2,...".
func (t TCN) String() string {
	return fmt.Sprintf("%s: %s", headerTCN, t.ValuesAsString())
}
// ValuesAsStrings provides the string representation for each value of
// the TCN header, preserving order. Returns nil for an empty header,
// matching the previous behavior.
func (t TCN) ValuesAsStrings() []string {
	if len(t) == 0 {
		return nil
	}
	// Preallocate to the exact final size to avoid repeated slice growth.
	s := make([]string, 0, len(t))
	for _, v := range t {
		s = append(s, v.String())
	}
	return s
}

// ValuesAsString provides a single comma-separated string containing all of
// the values for the TCN header.
func (t TCN) ValuesAsString() string {
	return strings.Join(t.ValuesAsStrings(), ",")
}
|
package gov.cms.bfd.pipeline.rda.grpc.server;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Files;
import gov.cms.mpsm.rda.v1.fiss.FissClaim;
import java.io.BufferedWriter;
import java.io.File;
import java.util.NoSuchElementException;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code JsonMessageSource} covering construction from a single
 * JSON string, a multi-line string, a list of strings, and a file, plus the
 * {@code skip} operation and end-of-data / close semantics.
 */
public class JsonMessageSourceTest {
  // Sample FISS claim (DCN 63843470) as single-line JSON with four procedure codes.
  private static final String CLAIM_1 =
      "{"
          + "  \"dcn\": \"63843470\","
          + "  \"hicNo\": \"916689703543\","
          + "  \"currStatusEnum\": \"CLAIM_STATUS_PAID\","
          + "  \"currLoc1Enum\": \"PROCESSING_TYPE_MANUAL\","
          + "  \"currLoc2Unrecognized\": \"uma\","
          + "  \"totalChargeAmount\": \"3.75\","
          + "  \"currTranDtCymd\": \"2021-03-20\","
          + "  \"principleDiag\": \"uec\","
          + "  \"mbi\": \"c1ihk7q0g3i57\","
          + "  \"fissProcCodes\": ["
          + "    {"
          + "      \"procCd\": \"uec\","
          + "      \"procFlag\": \"nli\""
          + "    },"
          + "    {"
          + "      \"procCd\": \"egkkkw\","
          + "      \"procFlag\": \"hsw\","
          + "      \"procDt\": \"2021-02-03\""
          + "    },"
          + "    {"
          + "      \"procCd\": \"zhaj\","
          + "      \"procDt\": \"2021-01-07\""
          + "    },"
          + "    {"
          + "      \"procCd\": \"ods\","
          + "      \"procDt\": \"2021-01-03\""
          + "    }"
          + "  ],"
          + "  \"medaProvId\": \"oducjgzt67joc\""
          + "}";
  // Second sample FISS claim (DCN 2643602) with three procedure codes.
  private static final String CLAIM_2 =
      "{"
          + "  \"dcn\": \"2643602\","
          + "  \"hicNo\": \"640930211775\","
          + "  \"currStatusEnum\": \"CLAIM_STATUS_REJECT\","
          + "  \"currLoc1Enum\": \"PROCESSING_TYPE_OFFLINE\","
          + "  \"currLoc2Unrecognized\": \"p6s\","
          + "  \"totalChargeAmount\": \"55.91\","
          + "  \"recdDtCymd\": \"2021-05-14\","
          + "  \"currTranDtCymd\": \"2020-12-21\","
          + "  \"principleDiag\": \"egnj\","
          + "  \"npiNumber\": \"5764657700\","
          + "  \"mbi\": \"0vtc7u321x0se\","
          + "  \"fedTaxNb\": \"2845244764\","
          + "  \"fissProcCodes\": ["
          + "    {"
          + "      \"procCd\": \"egnj\","
          + "      \"procDt\": \"2021-05-13\""
          + "    },"
          + "    {"
          + "      \"procCd\": \"vvqtwoz\","
          + "      \"procDt\": \"2021-04-29\""
          + "    },"
          + "    {"
          + "      \"procCd\": \"fipyd\","
          + "      \"procFlag\": \"g\""
          + "    }"
          + "  ]"
          + "}";

  /** A source built from one JSON object yields exactly one claim. */
  @Test
  public void singleClaimString() throws Exception {
    JsonMessageSource<FissClaim> source =
        new JsonMessageSource<>(CLAIM_1, JsonMessageSource::parseFissClaim);
    assertTrue(source.hasNext());
    FissClaim claim = source.next();
    assertEquals("63843470", claim.getDcn());
    assertFalse(source.hasNext());
    assertNextPastEndOfDataThrowsException(source);
    assertMultipleCallsToCloseOk(source);
  }

  /** A source built from two newline-separated JSON objects yields both claims in order. */
  @Test
  public void twoClaimsString() throws Exception {
    JsonMessageSource<FissClaim> source =
        new JsonMessageSource<>(
            CLAIM_1 + System.lineSeparator() + CLAIM_2, JsonMessageSource::parseFissClaim);
    assertTrue(source.hasNext());
    FissClaim claim = source.next();
    assertEquals("63843470", claim.getDcn());
    assertTrue(source.hasNext());
    claim = source.next();
    assertEquals("2643602", claim.getDcn());
    assertFalse(source.hasNext());
    assertNextPastEndOfDataThrowsException(source);
    assertMultipleCallsToCloseOk(source);
  }

  /** A source built from a list of JSON strings yields the claims in list order. */
  @Test
  public void claimsList() throws Exception {
    JsonMessageSource<FissClaim> source =
        new JsonMessageSource<>(
            ImmutableList.of(CLAIM_1, CLAIM_2), JsonMessageSource::parseFissClaim);
    assertTrue(source.hasNext());
    FissClaim claim = source.next();
    assertEquals("63843470", claim.getDcn());
    assertTrue(source.hasNext());
    claim = source.next();
    assertEquals("2643602", claim.getDcn());
    assertFalse(source.hasNext());
    assertNextPastEndOfDataThrowsException(source);
    assertMultipleCallsToCloseOk(source);
  }

  /** A source built from a JSON-lines file yields the claims in file order. */
  @Test
  public void claimsFile() throws Exception {
    // Write the two claims to a temp file, read them back, then delete the file.
    final File jsonFile = File.createTempFile(getClass().getSimpleName(), ".jsonl");
    try {
      try (BufferedWriter writer = Files.newWriter(jsonFile, Charsets.UTF_8)) {
        writer.write(CLAIM_1);
        writer.write(System.lineSeparator());
        writer.write(CLAIM_2);
      }
      try (JsonMessageSource<FissClaim> source =
          new JsonMessageSource<>(jsonFile, JsonMessageSource::parseFissClaim)) {
        assertTrue(source.hasNext());
        FissClaim claim = source.next();
        assertEquals("63843470", claim.getDcn());
        assertTrue(source.hasNext());
        claim = source.next();
        assertEquals("2643602", claim.getDcn());
        assertFalse(source.hasNext());
        assertNextPastEndOfDataThrowsException(source);
        assertMultipleCallsToCloseOk(source);
      }
    } finally {
      jsonFile.delete();
    }
  }

  /** skip(1) drops the first claim and yields only the remainder. */
  @Test
  public void skip() throws Exception {
    MessageSource<FissClaim> source =
        new JsonMessageSource<>(
            ImmutableList.of(CLAIM_1, CLAIM_2), JsonMessageSource::parseFissClaim)
            .skip(1);
    assertTrue(source.hasNext());
    FissClaim claim = source.next();
    assertEquals("2643602", claim.getDcn());
    assertFalse(source.hasNext());
  }

  /**
   * Asserts that calling next() past the end throws NoSuchElementException and
   * that hasNext() remains safely false afterwards.
   */
  private void assertNextPastEndOfDataThrowsException(JsonMessageSource<?> source)
      throws Exception {
    try {
      source.next();
      fail("expected exception");
    } catch (NoSuchElementException ignored) {
      // expected
    }
    // ensures calling hasNext() multiple times past the end is safe
    assertFalse(source.hasNext());
    assertFalse(source.hasNext());
  }

  /** Asserts that close() is idempotent (safe to call twice). */
  private void assertMultipleCallsToCloseOk(JsonMessageSource<?> source) throws Exception {
    source.close();
    source.close();
  }
}
|
#!/bin/bash
# Throughput operations for a MongoDB API database and collection
# Fixes: restored the '#' in the shebang line, and corrected the
# "az cosmosdb mongod database throughput show" typo to "mongodb".

# Generate a unique 10 character alphanumeric string to ensure unique resource names
uniqueId=$(env LC_CTYPE=C tr -dc 'a-z0-9' < /dev/urandom | fold -w 10 | head -n 1)

# Variables for MongoDB API resources
resourceGroupName="Group-$uniqueId"
location='westus2'
accountName="cosmos-$uniqueId" #needs to be lower case
databaseName='database1'
collectionName='collection1'
originalThroughput=400
updateThroughput=500

# Create a resource group, Cosmos account, database and collection
az group create -n $resourceGroupName -l $location
az cosmosdb create -n $accountName -g $resourceGroupName --kind MongoDB
az cosmosdb mongodb database create -a $accountName -g $resourceGroupName -n $databaseName --throughput $originalThroughput

# Define a minimal index policy for the collection
idxpolicy=$(cat << EOF
[ {"key": {"keys": ["user_id"]}} ]
EOF
)
echo "$idxpolicy" > "idxpolicy-$uniqueId.json"

# Create a MongoDB API collection
az cosmosdb mongodb collection create -a $accountName -g $resourceGroupName -d $databaseName -n $collectionName --shard 'user_id' --throughput $originalThroughput --idx @idxpolicy-$uniqueId.json

# Clean up temporary index policy file
rm -f "idxpolicy-$uniqueId.json"

# Throughput operations for MongoDB API database
#   Read the current throughput
#   Read the minimum throughput
#   Make sure the updated throughput is not less than the minimum
#   Update the throughput

read -p 'Press any key to read current provisioned throughput on database'
az cosmosdb mongodb database throughput show \
    -g $resourceGroupName \
    -a $accountName \
    -n $databaseName \
    --query resource.throughput \
    -o tsv

read -p 'Press any key to read minimum throughput on database'
minimumThroughput=$(az cosmosdb mongodb database throughput show \
    -g $resourceGroupName \
    -a $accountName \
    -n $databaseName \
    --query resource.minimumThroughput \
    -o tsv)
echo $minimumThroughput

# Make sure the updated throughput is not less than the minimum allowed throughput
if [ $updateThroughput -lt $minimumThroughput ]; then
    updateThroughput=$minimumThroughput
fi

read -p 'Press any key to update Database throughput'
az cosmosdb mongodb database throughput update \
    -a $accountName \
    -g $resourceGroupName \
    -n $databaseName \
    --throughput $updateThroughput

# Throughput operations for MongoDB API collection
#   Read the current throughput
#   Read the minimum throughput
#   Make sure the updated throughput is not less than the minimum
#   Update the throughput

read -p 'Press any key to read current provisioned throughput on collection'
az cosmosdb mongodb collection throughput show \
    -a $accountName \
    -g $resourceGroupName \
    -d $databaseName \
    -n $collectionName \
    --query resource.throughput \
    -o tsv

read -p 'Press any key to read minimum throughput on collection'
minimumThroughput=$(az cosmosdb mongodb collection throughput show \
    -a $accountName \
    -g $resourceGroupName \
    -d $databaseName \
    -n $collectionName \
    --query resource.minimumThroughput \
    -o tsv)
echo $minimumThroughput

# Make sure the updated throughput is not less than the minimum allowed throughput
if [ $updateThroughput -lt $minimumThroughput ]; then
    updateThroughput=$minimumThroughput
fi

read -p 'Press any key to update collection throughput'
az cosmosdb mongodb collection throughput update \
    -a $accountName \
    -g $resourceGroupName \
    -d $databaseName \
    -n $collectionName \
    --throughput $updateThroughput
|
#!/bin/sh
# Run AutoGesture 3DCDC validation on the chosen input modality and log output.
MODALITY=M #RGB:M, Depth:K
GPU_IDS=0,1,2,3
FRAME=32
# tee fails if the log directory does not exist yet, so create it first.
mkdir -p ./log
python -u train_AutoGesture_3DCDC.py -m valid -t $MODALITY -g $GPU_IDS | tee ./log/model-$MODALITY-$FRAME.log
|
def reverseStack(s):
    """Reverse stack ``s`` in place, using only the call stack as scratch space.

    Pops every element while recursing, then re-inserts each held element at
    the bottom on the way back up, which reverses the original order. Relies
    on the external stack helpers isEmpty/top/pop and on insertAtBottom.
    """
    # Base case: nothing left to move.
    if isEmpty(s):
        return
    # Hold the current top in this call frame while the rest is reversed.
    held = top(s)
    pop(s)
    reverseStack(s)
    # Put the held element underneath everything processed so far.
    insertAtBottom(s, held)
def insertAtBottom(s, data):
    """Insert ``data`` at the bottom of stack ``s``.

    Temporarily pops every element into the call stack, pushes ``data`` once
    the stack is empty, then restores the held elements in their original
    order. Relies on the external stack helpers isEmpty/top/pop/push.
    """
    # Empty stack: the bottom is the top, so just push and stop recursing.
    if isEmpty(s):
        push(s, data)
        return
    # Hold the current top while recursing toward the bottom.
    held = top(s)
    pop(s)
    insertAtBottom(s, data)
    # Restore the held element on the way back up.
    push(s, held)
<gh_stars>10-100
/********************************************************************************************************
* @file gpio_8267.h
*
* @brief for TLSR chips
*
* @author BLE Group
* @date May. 12, 2018
*
* @par Copyright (c) Telink Semiconductor (Shanghai) Co., Ltd.
* All rights reserved.
*
* The information contained herein is confidential and proprietary property of Telink
* Semiconductor (Shanghai) Co., Ltd. and is available under the terms
* of Commercial License Agreement between Telink Semiconductor (Shanghai)
* Co., Ltd. and the licensee in separate contract or the terms described here-in.
* This heading MUST NOT be removed from this file.
*
* Licensees are granted free, non-transferable use of the information in this
* file under Mutual Non-Disclosure Agreement. NO WARRENTY of ANY KIND is provided.
*
*******************************************************************************************************/
#pragma once
#include "../common/types.h"
#include "../common/bit.h"
#include "../common/utility.h"
#include "../mcu/compiler.h"
#include "../mcu/register.h"
#if(__TL_LIB_8269__ || MCU_CORE_TYPE == MCU_CORE_8269)
#include "gpio_default_8269.h"
#else
#include "gpio_default_8267.h"
#endif
// GPIO pin identifiers. The high byte (0x000..0x500) selects the port
// (A..F) and the low byte is a one-hot bit mask within that port; the
// register accessor macros below decode the value as (pin>>8) / (pin&0xff).
// The extra names (PWM/UART/DMIC/SDM/...) are alternate-function aliases
// for the same pin value.
enum{
	GPIO_PA0 = 0x000 | BIT(0), GPIO_PWM0A0=GPIO_PA0, GPIO_DMIC_DI=GPIO_PA0,
	GPIO_PA1 = 0x000 | BIT(1), GPIO_DMIC_CK=GPIO_PA1,
	GPIO_PA2 = 0x000 | BIT(2), GPIO_PWM0NA2=GPIO_PA2, GPIO_DO=GPIO_PA2,
	GPIO_PA3 = 0x000 | BIT(3), GPIO_PWM1A3=GPIO_PA3, GPIO_DI=GPIO_PA3,
	GPIO_PA4 = 0x000 | BIT(4), GPIO_PWM1NA4=GPIO_PA4, GPIO_CK=GPIO_PA4,
	GPIO_PA5 = 0x000 | BIT(5), GPIO_PWM2NA5=GPIO_PA5, GPIO_CN=GPIO_PA5,
	GPIO_PA6 = 0x000 | BIT(6), GPIO_UTXA6=GPIO_PA6,
	GPIO_PA7 = 0x000 | BIT(7), GPIO_URXA7=GPIO_PA7, GPIO_SWM=GPIO_PA7,
	GPIO_PB0 = 0x100 | BIT(0), GPIO_PWM2B0=GPIO_PB0, GPIO_SWS=GPIO_PB0,
	GPIO_PB1 = 0x100 | BIT(1), GPIO_PWM2NB0=GPIO_PB1,
	GPIO_PB2 = 0x100 | BIT(2), GPIO_PWM3B2=GPIO_PB2, GPIO_UTXB2=GPIO_PB2,
	GPIO_PB3 = 0x100 | BIT(3), GPIO_PWM3NB3=GPIO_PB3, GPIO_URXB3=GPIO_PB3,
	GPIO_PB4 = 0x100 | BIT(4), GPIO_PWM4B4=GPIO_PB4,
	GPIO_PB5 = 0x100 | BIT(5), GPIO_PWM4NB5=GPIO_PB5,
	GPIO_PB6 = 0x100 | BIT(6), GPIO_PWM5B6=GPIO_PB6,
	GPIO_PB7 = 0x100 | BIT(7), GPIO_PWM5NB7=GPIO_PB7,
	GPIO_PC0 = 0x200 | BIT(0), GPIO_PWM0C0=GPIO_PC0,
	GPIO_PC1 = 0x200 | BIT(1), GPIO_PWM1C1=GPIO_PC1,
	GPIO_PC2 = 0x200 | BIT(2), GPIO_PWM2C2=GPIO_PC2, GPIO_UTXC2=GPIO_PC2,
	GPIO_PC3 = 0x200 | BIT(3), GPIO_PWM3C3=GPIO_PC3, GPIO_URXC3=GPIO_PC3,
	GPIO_PC4 = 0x200 | BIT(4), GPIO_PWM4C4=GPIO_PC4, GPIO_URTSC4=GPIO_PC4,
	GPIO_PC5 = 0x200 | BIT(5), GPIO_PWM4C5=GPIO_PC5, GPIO_UCTSC5=GPIO_PC5,
	GPIO_PC6 = 0x200 | BIT(6),
	GPIO_PC7 = 0x200 | BIT(7),
	GPIO_PD0 = 0x300 | BIT(0),
	GPIO_PD1 = 0x300 | BIT(1),
	GPIO_PD2 = 0x300 | BIT(2),
	GPIO_PD3 = 0x300 | BIT(3),
	GPIO_PD4 = 0x300 | BIT(4),
	GPIO_PD5 = 0x300 | BIT(5), GPIO_PWM0D5=GPIO_PD5,
	GPIO_PD6 = 0x300 | BIT(6), GPIO_PWM1D6=GPIO_PD6,
	GPIO_PD7 = 0x300 | BIT(7), GPIO_PWM2D7=GPIO_PD7,
	GPIO_PE0 = 0x400 | BIT(0), GPIO_PWM0E0=GPIO_PE0, GPIO_SDMP=GPIO_PE0,
	GPIO_PE1 = 0x400 | BIT(1), GPIO_PWM1E1=GPIO_PE1, GPIO_SDMN=GPIO_PE1,
	GPIO_PE2 = 0x400 | BIT(2), GPIO_DM=GPIO_PE2,
	GPIO_PE3 = 0x400 | BIT(3), GPIO_DP=GPIO_PE3,
	GPIO_PE4 = 0x400 | BIT(4), GPIO_MSDO=GPIO_PE4,
	GPIO_PE5 = 0x400 | BIT(5), GPIO_MCLK=GPIO_PE5,
	GPIO_PE6 = 0x400 | BIT(6), GPIO_MSCN=GPIO_PE6,
	GPIO_PE7 = 0x400 | BIT(7), GPIO_MSDI=GPIO_PE7,
	GPIO_PF0 = 0x500 | BIT(0),
	GPIO_PF1 = 0x500 | BIT(1),
	// Total number of GPIO pins (6 ports x 8 bits, partially populated).
	GPIO_MAX_COUNT = 56,
};
// Per-port GPIO register accessors. Each port occupies an 8-byte register
// window starting at 0x580 ((i>>8)<<3 selects the port window); the low
// byte of the pin value is the bit mask within that register.
#define reg_gpio_in(i)				REG_ADDR8(0x580+((i>>8)<<3))
#define reg_gpio_ie(i)				REG_ADDR8(0x581+((i>>8)<<3))
#define reg_gpio_oen(i)				REG_ADDR8(0x582+((i>>8)<<3))
#define reg_gpio_out(i)				REG_ADDR8(0x583+((i>>8)<<3))
#define reg_gpio_pol(i)				REG_ADDR8(0x584+((i>>8)<<3))
#define reg_gpio_ds(i)				REG_ADDR8(0x585+((i>>8)<<3))
#define reg_gpio_gpio_func(i)		REG_ADDR8(0x586+((i>>8)<<3))
#define reg_gpio_config_func(i)		REG_ADDR8(0x5b0 +(i>>8))	//5b0 5b1 5b2 5b3 5b4 5b5
#define reg_gpio_irq_wakeup_en(i)	REG_ADDR8(0x587+((i>>8)<<3))  // reg_irq_mask: FLD_IRQ_GPIO_EN
#define reg_gpio_irq_risc0_en(i)  	REG_ADDR8(0x5b8 + (i >> 8))	  // reg_irq_mask: FLD_IRQ_GPIO_RISC0_EN
#define reg_gpio_irq_risc1_en(i)  	REG_ADDR8(0x5c0 + (i >> 8))	  // reg_irq_mask: FLD_IRQ_GPIO_RISC1_EN
#define reg_gpio_irq_risc2_en(i)  	REG_ADDR8(0x5c8 + (i >> 8))   // reg_irq_mask: FLD_IRQ_GPIO_RISC2_EN
// Global GPIO wakeup/interrupt control register and its flag bits.
#define reg_gpio_wakeup_irq			REG_ADDR8(0x5b5)
enum{
	FLD_GPIO_CORE_WAKEUP_EN  	= BIT(2),
	FLD_GPIO_CORE_INTERRUPT_EN 	= BIT(3),
};
// Enables (en != 0) or disables GPIO-triggered wakeup of the core globally,
// by setting/clearing FLD_GPIO_CORE_WAKEUP_EN in reg_gpio_wakeup_irq.
static inline void gpio_core_wakeup_enable_all (int en)
{
	if (en) {
		BM_SET(reg_gpio_wakeup_irq, FLD_GPIO_CORE_WAKEUP_EN);
	}
	else {
		BM_CLR(reg_gpio_wakeup_irq, FLD_GPIO_CORE_WAKEUP_EN);
	}
}

// Enables (en != 0) or disables the GPIO core interrupt globally,
// by setting/clearing FLD_GPIO_CORE_INTERRUPT_EN in reg_gpio_wakeup_irq.
static inline void gpio_core_irq_enable_all (int en)
{
	if (en) {
		BM_SET(reg_gpio_wakeup_irq, FLD_GPIO_CORE_INTERRUPT_EN);
	}
	else {
		BM_CLR(reg_gpio_wakeup_irq, FLD_GPIO_CORE_INTERRUPT_EN);
	}
}
// Returns nonzero when output is enabled for 'pin'.
// The OEN register is active-low (0 = output enabled), hence the negation.
static inline int gpio_is_output_en(u32 pin){
	return !BM_IS_SET(reg_gpio_oen(pin), pin & 0xff);
}

// Returns nonzero when the input path is enabled for 'pin'.
static inline int gpio_is_input_en(u32 pin){
	return BM_IS_SET(reg_gpio_ie(pin), pin & 0xff);
}

// Enables (value != 0) or disables the output driver for 'pin'.
// OEN is active-low, so enabling output CLEARS the register bit.
static inline void gpio_set_output_en(u32 pin, u32 value){
	u8	bit = pin & 0xff;
	if(!value){
		BM_SET(reg_gpio_oen(pin), bit);
	}else{
		BM_CLR(reg_gpio_oen(pin), bit);
	}
}

// Enables (value != 0) or disables the input path for 'pin'.
static inline void gpio_set_input_en(u32 pin, u32 value){
	u8	bit = pin & 0xff;
	if(value){
		BM_SET(reg_gpio_ie(pin), bit);
	}else{
		BM_CLR(reg_gpio_ie(pin), bit);
	}
}

// Selects the output drive strength bit for 'pin' (meaning of 0/1 is
// hardware-defined; see the datasheet).
static inline void gpio_set_data_strength(u32 pin, u32 value){
	u8	bit = pin & 0xff;
	if(value){
		BM_SET(reg_gpio_ds(pin), bit);
	}else{
		BM_CLR(reg_gpio_ds(pin), bit);
	}
}
// Drives 'pin' high (value != 0) or low via the output data register.
static inline void gpio_write(u32 pin, u32 value){
	u8	bit = pin & 0xff;
	if(value){
		BM_SET(reg_gpio_out(pin), bit);
	}else{
		BM_CLR(reg_gpio_out(pin), bit);
	}
}

// Inverts the output level of 'pin' by XOR-ing its bit in the data register.
static inline void gpio_toggle(u32 pin) {
	reg_gpio_out(pin) ^= (pin & 0xFF);
}

// Reads the input level of 'pin'; returns nonzero (the masked bit) when high.
static inline u32 gpio_read(u32 pin){
	return BM_IS_SET(reg_gpio_in(pin), pin & 0xff);
}

// Reads the level of 'pin' from a snapshot previously filled by
// gpio_read_all(); p is indexed by port number (pin>>8).
static inline u32 gpio_read_cache(u32 pin, u8 *p){
	return p[pin>>8] & (pin & 0xff);
}

// Snapshots the input registers of all six ports (A..F, stride 8 bytes
// starting at 0x580) into p[0..5], for use with gpio_read_cache().
static inline void gpio_read_all(u8 *p){
	p[0] = REG_ADDR8(0x580);
	p[1] = REG_ADDR8(0x588);
	p[2] = REG_ADDR8(0x590);
	p[3] = REG_ADDR8(0x598);
	p[4] = REG_ADDR8(0x5a0);
	p[5] = REG_ADDR8(0x5a8);
}
// Sets the interrupt polarity for 'pin': falling edge when 'falling' is
// nonzero, rising edge otherwise.
static inline void gpio_set_interrupt_pol(u32 pin, u32 falling){
	u8	bit = pin & 0xff;
	if(falling){
		BM_SET(reg_gpio_pol(pin), bit);
	}else{
		BM_CLR(reg_gpio_pol(pin), bit);
	}
}

// Enables/disables the wakeup-interrupt source for 'pin'.
static inline void gpio_en_interrupt(u32 pin, int en){   // reg_irq_mask: FLD_IRQ_GPIO_EN
	u8	bit = pin & 0xff;
	if(en){
		BM_SET(reg_gpio_irq_wakeup_en(pin), bit);
	}
	else{
		BM_CLR(reg_gpio_irq_wakeup_en(pin), bit);
	}
}

// Enables the wakeup-interrupt source for 'pin' and sets its polarity
// (falling edge when 'falling' is nonzero) in one call.
static inline void gpio_set_interrupt(u32 pin, u32 falling){
	u8	bit = pin & 0xff;
	BM_SET(reg_gpio_irq_wakeup_en(pin), bit);
	if(falling){
		BM_SET(reg_gpio_pol(pin), bit);
	}else{
		BM_CLR(reg_gpio_pol(pin), bit);
	}
}
// The risc0/risc1/risc2 variants below mirror gpio_en_interrupt /
// gpio_set_interrupt but route the pin through the RISC0/RISC1/RISC2
// interrupt enable registers instead of the wakeup-interrupt register.
// Note: the polarity register is shared across all sources.

// Enables/disables the RISC0 interrupt source for 'pin'.
static inline void gpio_en_interrupt_risc0(u32 pin, int en){  // reg_irq_mask: FLD_IRQ_GPIO_RISC0_EN
	u8	bit = pin & 0xff;
	if(en){
		BM_SET(reg_gpio_irq_risc0_en(pin), bit);
	}
	else{
		BM_CLR(reg_gpio_irq_risc0_en(pin), bit);
	}
}

// Enables the RISC0 interrupt for 'pin' and sets its edge polarity.
static inline void gpio_set_interrupt_risc0(u32 pin, u32 falling){
	u8	bit = pin & 0xff;
	BM_SET(reg_gpio_irq_risc0_en(pin), bit);
	if(falling){
		BM_SET(reg_gpio_pol(pin), bit);
	}else{
		BM_CLR(reg_gpio_pol(pin), bit);
	}
}

// Enables/disables the RISC1 interrupt source for 'pin'.
static inline void gpio_en_interrupt_risc1(u32 pin, int en){  // reg_irq_mask: FLD_IRQ_GPIO_RISC1_EN
	u8	bit = pin & 0xff;
	if(en){
		BM_SET(reg_gpio_irq_risc1_en(pin), bit);
	}
	else{
		BM_CLR(reg_gpio_irq_risc1_en(pin), bit);
	}
}

// Enables the RISC1 interrupt for 'pin' and sets its edge polarity.
static inline void gpio_set_interrupt_risc1(u32 pin, u32 falling){
	u8	bit = pin & 0xff;
	BM_SET(reg_gpio_irq_risc1_en(pin), bit);
	if(falling){
		BM_SET(reg_gpio_pol(pin), bit);
	}else{
		BM_CLR(reg_gpio_pol(pin), bit);
	}
}

// Enables/disables the RISC2 interrupt source for 'pin'.
static inline void gpio_en_interrupt_risc2(u32 pin, int en){  // reg_irq_mask: FLD_IRQ_GPIO_RISC2_EN
	u8	bit = pin & 0xff;
	if(en){
		BM_SET(reg_gpio_irq_risc2_en(pin), bit);
	}
	else{
		BM_CLR(reg_gpio_irq_risc2_en(pin), bit);
	}
}

// Enables the RISC2 interrupt for 'pin' and sets its edge polarity.
static inline void gpio_set_interrupt_risc2(u32 pin, u32 falling){
	u8	bit = pin & 0xff;
	BM_SET(reg_gpio_irq_risc2_en(pin), bit);
	if(falling){
		BM_SET(reg_gpio_pol(pin), bit);
	}else{
		BM_CLR(reg_gpio_pol(pin), bit);
	}
}
// Applies the compile-time GPIO configuration (from gpio_default_826x.h):
// for each port A..E it programs input enable, output enable (active-low),
// output data, drive strength and GPIO-vs-alternate function in two packed
// 32-bit register writes, then programs the analog pull-up/pull-down
// selection for each pin via analog registers 0x08..0x12.
static inline void gpio_init(void){
	//return;
	// Port A: IE in bits 8..15, OEN (inverted) in 16..23, data out in 24..31.
	reg_gpio_pa_setting1 =
		(PA0_INPUT_ENABLE<<8) 	| (PA1_INPUT_ENABLE<<9)	| (PA2_INPUT_ENABLE<<10)	| (PA3_INPUT_ENABLE<<11) |
		(PA4_INPUT_ENABLE<<12)	| (PA5_INPUT_ENABLE<<13) | (PA6_INPUT_ENABLE<<14)	| (PA7_INPUT_ENABLE<<15) |
		((PA0_OUTPUT_ENABLE?0:1)<<16)	| ((PA1_OUTPUT_ENABLE?0:1)<<17) | ((PA2_OUTPUT_ENABLE?0:1)<<18)	| ((PA3_OUTPUT_ENABLE?0:1)<<19) |
		((PA4_OUTPUT_ENABLE?0:1)<<20)	| ((PA5_OUTPUT_ENABLE?0:1)<<21) | ((PA6_OUTPUT_ENABLE?0:1)<<22)	| ((PA7_OUTPUT_ENABLE?0:1)<<23) |
		(PA0_DATA_OUT<<24)	| (PA1_DATA_OUT<<25)	| (PA2_DATA_OUT<<26)	| (PA3_DATA_OUT<<27) |
		(PA4_DATA_OUT<<28)	| (PA5_DATA_OUT<<29)	| (PA6_DATA_OUT<<30)	| (PA7_DATA_OUT<<31) ;
	// Port A: drive strength in bits 8..15, GPIO-function select in 16..23.
	reg_gpio_pa_setting2 =
		(PA0_DATA_STRENGTH<<8)		| (PA1_DATA_STRENGTH<<9)| (PA2_DATA_STRENGTH<<10)	| (PA3_DATA_STRENGTH<<11) |
		(PA4_DATA_STRENGTH<<12)	| (PA5_DATA_STRENGTH<<13)	| (PA6_DATA_STRENGTH<<14)	| (PA7_DATA_STRENGTH<<15) |
		(PA0_FUNC==AS_GPIO ? BIT(16):0)	| (PA1_FUNC==AS_GPIO ? BIT(17):0)| (PA2_FUNC==AS_GPIO ? BIT(18):0)| (PA3_FUNC==AS_GPIO ? BIT(19):0) |
		(PA4_FUNC==AS_GPIO ? BIT(20):0)	| (PA5_FUNC==AS_GPIO ? BIT(21):0)| (PA6_FUNC==AS_GPIO ? BIT(22):0)| (PA7_FUNC==AS_GPIO ? BIT(23):0);
	// Port B, same layout as port A.
	reg_gpio_pb_setting1 =
		(PB0_INPUT_ENABLE<<8) 	| (PB1_INPUT_ENABLE<<9)	| (PB2_INPUT_ENABLE<<10)	| (PB3_INPUT_ENABLE<<11) |
		(PB4_INPUT_ENABLE<<12)	| (PB5_INPUT_ENABLE<<13)| (PB6_INPUT_ENABLE<<14)	| (PB7_INPUT_ENABLE<<15) |
		((PB0_OUTPUT_ENABLE?0:1)<<16)	| ((PB1_OUTPUT_ENABLE?0:1)<<17) | ((PB2_OUTPUT_ENABLE?0:1)<<18)	| ((PB3_OUTPUT_ENABLE?0:1)<<19) |
		((PB4_OUTPUT_ENABLE?0:1)<<20)	| ((PB5_OUTPUT_ENABLE?0:1)<<21) | ((PB6_OUTPUT_ENABLE?0:1)<<22)	| ((PB7_OUTPUT_ENABLE?0:1)<<23) |
		(PB0_DATA_OUT<<24)	| (PB1_DATA_OUT<<25)	| (PB2_DATA_OUT<<26)	| (PB3_DATA_OUT<<27) |
		(PB4_DATA_OUT<<28)	| (PB5_DATA_OUT<<29)	| (PB6_DATA_OUT<<30)	| (PB7_DATA_OUT<<31) ;
	reg_gpio_pb_setting2 =
		(PB0_DATA_STRENGTH<<8)	| (PB1_DATA_STRENGTH<<9)	| (PB2_DATA_STRENGTH<<10)	| (PB3_DATA_STRENGTH<<11) |
		(PB4_DATA_STRENGTH<<12)	| (PB5_DATA_STRENGTH<<13)	| (PB6_DATA_STRENGTH<<14)	| (PB7_DATA_STRENGTH<<15) |
		(PB0_FUNC==AS_GPIO ? BIT(16):0)	| (PB1_FUNC==AS_GPIO ? BIT(17):0)| (PB2_FUNC==AS_GPIO ? BIT(18):0)| (PB3_FUNC==AS_GPIO ? BIT(19):0) |
		(PB4_FUNC==AS_GPIO ? BIT(20):0)	| (PB5_FUNC==AS_GPIO ? BIT(21):0)| (PB6_FUNC==AS_GPIO ? BIT(22):0)| (PB7_FUNC==AS_GPIO ? BIT(23):0);
	// Port C, same layout.
	reg_gpio_pc_setting1 =
		(PC0_INPUT_ENABLE<<8) 	| (PC1_INPUT_ENABLE<<9)	| (PC2_INPUT_ENABLE<<10)	| (PC3_INPUT_ENABLE<<11) |
		(PC4_INPUT_ENABLE<<12)	| (PC5_INPUT_ENABLE<<13)| (PC6_INPUT_ENABLE<<14)	| (PC7_INPUT_ENABLE<<15) |
		((PC0_OUTPUT_ENABLE?0:1)<<16)	| ((PC1_OUTPUT_ENABLE?0:1)<<17) | ((PC2_OUTPUT_ENABLE?0:1)<<18)	| ((PC3_OUTPUT_ENABLE?0:1)<<19) |
		((PC4_OUTPUT_ENABLE?0:1)<<20)	| ((PC5_OUTPUT_ENABLE?0:1)<<21) | ((PC6_OUTPUT_ENABLE?0:1)<<22)	| ((PC7_OUTPUT_ENABLE?0:1)<<23) |
		(PC0_DATA_OUT<<24)	| (PC1_DATA_OUT<<25)	| (PC2_DATA_OUT<<26)	| (PC3_DATA_OUT<<27) |
		(PC4_DATA_OUT<<28)	| (PC5_DATA_OUT<<29)	| (PC6_DATA_OUT<<30)	| (PC7_DATA_OUT<<31) ;
	reg_gpio_pc_setting2 =
		(PC0_DATA_STRENGTH<<8)	| (PC1_DATA_STRENGTH<<9)	| (PC2_DATA_STRENGTH<<10)	| (PC3_DATA_STRENGTH<<11) |
		(PC4_DATA_STRENGTH<<12)	| (PC5_DATA_STRENGTH<<13)	| (PC6_DATA_STRENGTH<<14)	| (PC7_DATA_STRENGTH<<15) |
		(PC0_FUNC==AS_GPIO ? BIT(16):0)	| (PC1_FUNC==AS_GPIO ? BIT(17):0)| (PC2_FUNC==AS_GPIO ? BIT(18):0)| (PC3_FUNC==AS_GPIO ? BIT(19):0) |
		(PC4_FUNC==AS_GPIO ? BIT(20):0)	| (PC5_FUNC==AS_GPIO ? BIT(21):0)| (PC6_FUNC==AS_GPIO ? BIT(22):0)| (PC7_FUNC==AS_GPIO ? BIT(23):0);
	// Port D, same layout.
	reg_gpio_pd_setting1 =
		(PD0_INPUT_ENABLE<<8) 	| (PD1_INPUT_ENABLE<<9)	| (PD2_INPUT_ENABLE<<10)	| (PD3_INPUT_ENABLE<<11) |
		(PD4_INPUT_ENABLE<<12)	| (PD5_INPUT_ENABLE<<13)| (PD6_INPUT_ENABLE<<14)	| (PD7_INPUT_ENABLE<<15) |
		((PD0_OUTPUT_ENABLE?0:1)<<16)	| ((PD1_OUTPUT_ENABLE?0:1)<<17) | ((PD2_OUTPUT_ENABLE?0:1)<<18)	| ((PD3_OUTPUT_ENABLE?0:1)<<19) |
		((PD4_OUTPUT_ENABLE?0:1)<<20)	| ((PD5_OUTPUT_ENABLE?0:1)<<21) | ((PD6_OUTPUT_ENABLE?0:1)<<22)	| ((PD7_OUTPUT_ENABLE?0:1)<<23) |
		(PD0_DATA_OUT<<24)	| (PD1_DATA_OUT<<25)	| (PD2_DATA_OUT<<26)	| (PD3_DATA_OUT<<27) |
		(PD4_DATA_OUT<<28)	| (PD5_DATA_OUT<<29)	| (PD6_DATA_OUT<<30)	| (PD7_DATA_OUT<<31) ;
	reg_gpio_pd_setting2 =
		(PD0_DATA_STRENGTH<<8)	| (PD1_DATA_STRENGTH<<9)	| (PD2_DATA_STRENGTH<<10)	| (PD3_DATA_STRENGTH<<11) |
		(PD4_DATA_STRENGTH<<12)	| (PD5_DATA_STRENGTH<<13)	| (PD6_DATA_STRENGTH<<14)	| (PD7_DATA_STRENGTH<<15) |
		(PD0_FUNC==AS_GPIO ? BIT(16):0)	| (PD1_FUNC==AS_GPIO ? BIT(17):0)| (PD2_FUNC==AS_GPIO ? BIT(18):0)| (PD3_FUNC==AS_GPIO ? BIT(19):0) |
		(PD4_FUNC==AS_GPIO ? BIT(20):0)	| (PD5_FUNC==AS_GPIO ? BIT(21):0)| (PD6_FUNC==AS_GPIO ? BIT(22):0)| (PD7_FUNC==AS_GPIO ? BIT(23):0);
	// Port E, same layout.
	reg_gpio_pe_setting1 =
		(PE0_INPUT_ENABLE<<8) 	| (PE1_INPUT_ENABLE<<9)	| (PE2_INPUT_ENABLE<<10)	| (PE3_INPUT_ENABLE<<11) |
		(PE4_INPUT_ENABLE<<12)	| (PE5_INPUT_ENABLE<<13)| (PE6_INPUT_ENABLE<<14)	| (PE7_INPUT_ENABLE<<15) |
		((PE0_OUTPUT_ENABLE?0:1)<<16)	| ((PE1_OUTPUT_ENABLE?0:1)<<17) | ((PE2_OUTPUT_ENABLE?0:1)<<18)	| ((PE3_OUTPUT_ENABLE?0:1)<<19) |
		((PE4_OUTPUT_ENABLE?0:1)<<20)	| ((PE5_OUTPUT_ENABLE?0:1)<<21) | ((PE6_OUTPUT_ENABLE?0:1)<<22)	| ((PE7_OUTPUT_ENABLE?0:1)<<23) |
		(PE0_DATA_OUT<<24)	| (PE1_DATA_OUT<<25)	| (PE2_DATA_OUT<<26)	| (PE3_DATA_OUT<<27) |
		(PE4_DATA_OUT<<28)	| (PE5_DATA_OUT<<29)	| (PE6_DATA_OUT<<30)	| (PE7_DATA_OUT<<31);
	reg_gpio_pe_setting2 =
		(PE0_DATA_STRENGTH<<8)	| (PE1_DATA_STRENGTH<<9)	| (PE2_DATA_STRENGTH<<10)	| (PE3_DATA_STRENGTH<<11) |
		(PE4_DATA_STRENGTH<<12)	| (PE5_DATA_STRENGTH<<13)	| (PE6_DATA_STRENGTH<<14)	| (PE7_DATA_STRENGTH<<15) |
		(PE0_FUNC==AS_GPIO ? BIT(16):0)	| (PE1_FUNC==AS_GPIO ? BIT(17):0)| (PE2_FUNC==AS_GPIO ? BIT(18):0)| (PE3_FUNC==AS_GPIO ? BIT(19):0) |
		(PE4_FUNC==AS_GPIO ? BIT(20):0)	| (PE5_FUNC==AS_GPIO ? BIT(21):0)| (PE6_FUNC==AS_GPIO ? BIT(22):0)| (PE7_FUNC==AS_GPIO ? BIT(23):0);
	// Port F configuration is intentionally left disabled in this revision.
	/*
	reg_gpio_pf_setting1 =
		(PF0_INPUT_ENABLE<<8) 	| (PF1_INPUT_ENABLE<<9) |
		((PF0_OUTPUT_ENABLE?0:1)<<16)	| ((PF1_OUTPUT_ENABLE?0:1)<<17) |
		(PF0_DATA_OUT<<24)	| (PF1_DATA_OUT<<25);
	reg_gpio_pf_setting2 =
		(PF0_DATA_STRENGTH<<8)	| (PF1_DATA_STRENGTH<<9) |
		(PF0_FUNC==AS_GPIO ? BIT(16):0)	| (PF1_FUNC==AS_GPIO ? BIT(17):0);
	*/
	/* do later
	reg_gpio_config_func = ((PA0_FUNC==AS_DMIC||PA4_FUNC==AS_DMIC) ? BITS(0,7):0) | (PA1_FUNC==AS_PWM ? BIT(2):0) |
		((PA2_FUNC==AS_UART||PA3_FUNC==AS_UART) ? BITS(3,5):0) |
		(PA2_FUNC==AS_PWM ? BIT(4):0) | (PA3_FUNC==AS_PWM ? BIT(6):0) |
		((PB0_FUNC==AS_SDM||PB1_FUNC==AS_SDM||PB6_FUNC==AS_SDM||PB7_FUNC==AS_SDM) ? BIT_RNG(12,15):0) |
		((PA0_FUNC==AS_I2S||PA1_FUNC==AS_I2S||PA2_FUNC==AS_I2S||PA3_FUNC==AS_I2S||PA4_FUNC==AS_I2S) ? (BIT_RNG(21,23)|BIT_RNG(29,30)):0);
	*/
	// Pull resistor selection lives in analog registers, two bits per pin.
	// 0x0a holds PA0/PA1 in its upper nibble; the lower nibble is preserved.
	u8 areg = analog_read (0x0a) & 0x0f;
	analog_write (0x0a, areg | (PULL_WAKEUP_SRC_PA0<<4) |
					(PULL_WAKEUP_SRC_PA1<<6));
	analog_write (0x0b, PULL_WAKEUP_SRC_PA2 |
					(PULL_WAKEUP_SRC_PA3<<2) |
					(PULL_WAKEUP_SRC_PA4<<4) |
					(PULL_WAKEUP_SRC_PA5<<6));
	analog_write (0x0c, PULL_WAKEUP_SRC_PA6 |
					(PULL_WAKEUP_SRC_PA7<<2) |
					(PULL_WAKEUP_SRC_PB0<<4) |
					(PULL_WAKEUP_SRC_PB1<<6));
	analog_write (0x0d, PULL_WAKEUP_SRC_PB2 |
					(PULL_WAKEUP_SRC_PB3<<2) |
					(PULL_WAKEUP_SRC_PB4<<4) |
					(PULL_WAKEUP_SRC_PB5<<6));
	analog_write (0x0e, PULL_WAKEUP_SRC_PB6 |
					(PULL_WAKEUP_SRC_PB7<<2) |
					(PULL_WAKEUP_SRC_PC0<<4) |
					(PULL_WAKEUP_SRC_PC1<<6));
	analog_write (0x0f, PULL_WAKEUP_SRC_PC2 |
					(PULL_WAKEUP_SRC_PC3<<2) |
					(PULL_WAKEUP_SRC_PC4<<4) |
					(PULL_WAKEUP_SRC_PC5<<6));
	analog_write (0x10, PULL_WAKEUP_SRC_PC6 |
					(PULL_WAKEUP_SRC_PC7<<2) |
					(PULL_WAKEUP_SRC_PD0<<4) |
					(PULL_WAKEUP_SRC_PD1<<6));
	analog_write (0x11, PULL_WAKEUP_SRC_PD2 |
					(PULL_WAKEUP_SRC_PD3<<2) |
					(PULL_WAKEUP_SRC_PD4<<4) |
					(PULL_WAKEUP_SRC_PD5<<6));
	analog_write (0x12, PULL_WAKEUP_SRC_PD6 |
					(PULL_WAKEUP_SRC_PD7<<2) |
					(PULL_WAKEUP_SRC_PE0<<4) |
					(PULL_WAKEUP_SRC_PE1<<6));
	// 0x08 holds PE2/PE3 in its upper nibble; the lower nibble is preserved.
	areg = analog_read (0x08) & 0x0f;
	analog_write (0x08, areg | PULL_WAKEUP_SRC_PE2<<4 |
					(PULL_WAKEUP_SRC_PE3<<6) );
}
void gpio_set_wakeup(u32 pin, u32 level, int en);
void gpio_setup_up_down_resistor(u32 gpio, u32 up_down);
void gpio_set_func(u32 pin, u32 func);
|
<filename>src/engine/actorfactory.h
/*
This file is part of Granny's Bloodbath.
Granny's Bloodbath is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Granny's Bloodbath is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Granny's Bloodbath. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef _ACTORFACTORY_
#define _ACTORFACTORY_
#include "actor.h"
class Zombie;
class Dog;
class Fat;
class Spider;
class Boss;
class Bullet;
class Item;
class Game;
//! Carga y cachea todos los actores
/**
@author <NAME>
@version 1.0
Se utiliza para cargar los actores y minimizar el consumo de memoria y el número de accesos a disco.
Cuando queremos algún actor de un tipo lo obtenemos mediante su método correspondiente.
*/
class ActorFactory{
public:
	/*
	  Constructor.
	  @param game Pointer to the Game this factory is associated with
	*/
	ActorFactory(Game *game);
	/*
	  Destructor.
	  Releases resources.
	*/
	~ActorFactory();
	/*
	  @param x x position at which the zombie will be loaded
	  @param y y position at which the zombie will be loaded
	  @return Pointer to the zombie
	*/
	Zombie* get_zombie(int x, int y);
	/*
	  @param x x position at which the fat zombie will be loaded
	  @param y y position at which the fat zombie will be loaded
	  @return Pointer to the fat zombie
	*/
	Fat* get_fat(int x, int y);
	/*
	  @param x x position at which the dog will be loaded
	  @param y y position at which the dog will be loaded
	  @return Pointer to the dog
	*/
	Dog* get_dog(int x, int y);
	/*
	  @param x x position at which the spider will be loaded
	  @param y y position at which the spider will be loaded
	  @return Pointer to the spider
	*/
	Spider* get_spider(int x, int y);
	/*
	  @param x x position at which the boss will be loaded
	  @param y y position at which the boss will be loaded
	  @return Pointer to the boss
	*/
	Boss* get_boss(int x, int y);
	/*
	  @param x x position at which the pill item will be loaded
	  @param y y position at which the pill item will be loaded
	  @return Pointer to the pill item
	*/
	Item* get_pill(int x, int y);
	/*
	  @param x x position at which the teeth item will be loaded
	  @param y y position at which the teeth item will be loaded
	  @return Pointer to the teeth item
	*/
	Item* get_teeth(int x, int y);
	/*
	  @param x x position at which the ammo item will be loaded
	  @param y y position at which the ammo item will be loaded
	  @return Pointer to the ammo item
	*/
	Item* get_ammo(int x, int y);
private:
	// Copying is disallowed (declared but not defined).
	ActorFactory(const ActorFactory &a);
	ActorFactory& operator = (const ActorFactory& m);
	// Cached actor instances, one per supported type.
	Zombie *zombie;
	Fat* fat;
	Dog* dog;
	Spider* spider;
	Boss* boss;
	Item* pill;
	Item* teeth;
	Item* ammo;
	// Game the factory's actors are associated with.
	Game* g;
};
#endif
|
## Motion-capture cluster startup script: opens one terminal window with
## tabs for roscore, the 4-UAV formation simulation, the formation state
## node, and the square-formation mission launch.
# NOTE(review): the increasing sleeps give roscore time to start before the
# dependent nodes launch; gnome-terminal's -e option is deprecated in newer
# releases -- confirm it is still supported on the target system.
gnome-terminal --window -e 'bash -c "roscore; exec bash"' \
--tab -e 'bash -c "sleep 5; roslaunch prometheus_gazebo sitl_formation_4uav.launch; exec bash"' \
--tab -e 'bash -c "sleep 6; rosrun prometheus_mission formation_state; exec bash"' \
--tab -e 'bash -c "sleep 7; roslaunch prometheus_gazebo prometheus_formation_square.launch; exec bash"' \
|
# -*- coding: utf-8 -*-
"""URL configuration for the website.

Maps the site root to the home view and wires a custom i18n
set-language endpoint in place of Django's stock one.
"""
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# NOTE(review): patterns() with dotted-string views belongs to pre-1.8
# Django (removed in 1.10) -- this module targets an older release.
urlpatterns = patterns('',
    # Examples:
    url(r'^$', 'juliotrigo.views.home', name='home'),
    # url(r'^juliotrigo/', include('juliotrigo.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
# Copied and changed from django.conf.urls.i18n
urlpatterns += patterns('',
    url(r'^i18n/setlang/$', 'accounts.views.custom_i18n', name='set_language'),
)
|
// Country CRUD form wiring: create / update / delete via AJAX.
// NOTE(review): removeError, addError, showErrors, url, redirect, setMethod,
// inactiveElements and activeElements are helpers defined elsewhere -- confirm
// their contracts against that file.
$(function() {
    // Clear a field's validation message as soon as the field changes
    $('input, select').change(function(e){
        removeError('#' + $(this).prop('id'));
    });
    // Validate fields and request creation
    $('#countryCreate').click(function(e){
        e.preventDefault();
        if (!validatorFields()) {
            $.ajax({
                method: $('form').prop('method'),
                url: url('/country/store'),
                data: $('form').serialize(),
                dataType: 'json',
                beforeSend: function() {
                    inactiveElements();
                },
                success: function (data) {
                    if (data.error !== undefined && data.error != '') {
                        // error reported inside the response body
                        showErrors(data.error);
                    } else {
                        redirect('/country/index', 'message=Guardado correctamente');
                    }
                },
                complete: function() {
                    activeElements();
                }
            });
        }
    });
    // Validate fields and request update
    $('#countryUpdate').click(function(e){
        e.preventDefault();
        if (!validatorFields()) {
            setMethod('PUT');
            $.ajax({
                method: $('form').prop('method'),
                url: url('/country/update/' + $('#id').val()),
                data: $('form').serialize(),
                dataType: 'json',
                beforeSend: function() {
                    inactiveElements();
                },
                success: function (data) {
                    if (data.error !== undefined && data.error != '') {
                        // error reported inside the response body
                        showErrors(data.error);
                    } else {
                        redirect('/country/index', 'message=Actualizado correctamente');
                    }
                },
                complete: function() {
                    // Re-enable the fields for further editing
                    activeElements();
                }
            });
        }
    });
    // Country deletion triggered from the edit view
    $('#countryDelete').click(function(e){
        e.preventDefault();
        countryDelete($('#id').val());
    });
    // Country deletion triggered from the index view
    $('.countryDelete').click(function(e){
        e.preventDefault();
        countryDelete($(this).attr('id'));
    });
    /**
     * Validates the form fields.
     * @returns {boolean} true when at least one field is invalid
     *   (inverted sense -- callers guard with `if (!validatorFields())`).
     */
    function validatorFields() {
        var error = false;
        var id = $('#id').val();
        var name = $('#name').val();
        $('.help-block').html('');
        if (id == '') {
            error = addError('#id', 'Ingrese el código');
        }
        if (name == '') {
            error = addError('#name', 'Ingrese el nombre');
        }
        return error;
    }
    /**
     * Single deletion routine shared by the edit and index views.
     * Asks for confirmation before issuing the DELETE request.
     * @param string id
     */
    function countryDelete(id) {
        if (confirm('Está seguro?')) {
            setMethod('DELETE');
            $.ajax({
                method: $('form').prop('method'),
                url: url('/country/delete/' + id),
                data: $('form').serialize(),
                dataType: 'json',
                beforeSend: function() {
                    inactiveElements();
                },
                success: function (data) {
                    if (data.error !== undefined && data.error != '') {
                        // error reported inside the response body
                        showErrors(data.error);
                    } else {
                        redirect('/country/index', 'message=Eliminado correctamente');
                    }
                },
                complete: function() {
                    // Re-enable the fields for further editing
                    activeElements();
                }
            });
        }
    }
});
#!/bin/bash -eux
# Provisioning script: prepares the kernel-module build environment and
# installs the VirtualBox guest utilities.
# -eux: abort on error, treat unset variables as errors, echo each command.
# Install Module Assistant
sudo apt-get install -y build-essential
sudo apt-get install -y module-assistant
sudo apt-get install -y linux-headers-generic
sudo m-a prepare
# Install guest additions
sudo apt-get install -y virtualbox-guest-utils
|
<reponame>1Basile/pseudorandom_number_generators<gh_stars>0
#ifndef EXTENDED_GRADE_COMMON_DEVIDER_ALG_IMMPORTED
#define EXTENDED_GRADE_COMMON_DEVIDER_ALG_IMMPORTED
// C function for extended Euclidean Algorithm
/*
 * Extended Euclidean algorithm.
 *
 * Computes g = gcd(a, b) and Bezout coefficients *x, *y such that
 * a * (*x) + b * (*y) == g.  Returns g.
 */
int ext_gcd(int a, int b, int* x, int* y)
{
    /* Base case: gcd(0, b) = b = 0*a + 1*b. */
    if (a == 0) {
        *x = 0;
        *y = 1;
        return b;
    }

    /* Recurse on (b mod a, a); coefficients of the subproblem. */
    int px;
    int py;
    int g = ext_gcd(b % a, a, &px, &py);

    /* Back-substitute to get coefficients for (a, b). */
    *x = py - (b / a) * px;
    *y = px;
    return g;
}
// Driver Program
/* int main() */
/* { */
/* int x, y; */
/* int a = 35, b = 15; */
/* int g = ext_gcd(a, b, &x, &y); */
/* printf("gcd(%d, %d) = %d\n", a, b, g); */
/* printf("%d*%d + %d*%d = %d\n", a, x, b, y, g); */
/* return 0; */
/* } */
#endif
|
// Waterline (Sails.js) model definition for an audit/activity log entry.
// NOTE(review): the 'user' and 'requestlog' model names refer to sibling
// models defined elsewhere in the project -- confirm against that code.
module.exports = {
  primaryKey: 'id',
  attributes: {
    // Auto-incrementing numeric primary key.
    id: {
      type: 'number',
      autoIncrement: true
    },
    // Association to the acting user.
    user: {
      model: 'user'
    },
    // Association to the originating request log entry.
    request: {
      model: 'requestlog'
    },
    // Human-readable description of the logged action.
    description: {
      type: 'string',
      columnType: 'varchar(191)'
    },
    // Arbitrary structured payload for the entry.
    data: {
      type: 'json'
    },
    // Creation timestamp, set automatically; entries are never updated
    // (updatedAt is disabled below).
    createdAt: {
      type: 'ref',
      columnType: 'datetime',
      autoCreatedAt: true
    },
    updatedAt: false
  }
};
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.util;
import java.util.Locale ;
import java.util.regex.Matcher ;
import java.util.regex.Pattern ;
/**
* References:
* <ul>
* <li><a href="http://en.wikipedia.org/wiki/Roman_numbers">Wikipedia on Roman Numerals</a></li>
* <li><a href="http://www.therobs.com/uman/roman.shtml">Therobs Lex & Yacc Example: Roman Numerals</a>
* which is were the idea of working right to left, instead of looking ahead, originated for me.</li>
* </ul> */
/**
 * A roman numeral in the range 1 to 3999 (I to MMMCMXCIX).
 * <p>
 * References:
 * <ul>
 * <li><a href="http://en.wikipedia.org/wiki/Roman_numbers">Wikipedia on Roman Numerals</a></li>
 * <li><a href="http://www.therobs.com/uman/roman.shtml">Therobs Lex &amp; Yacc Example: Roman Numerals</a>
 * which is were the idea of working right to left, instead of looking ahead, originated for me.</li>
 * </ul> */
public class RomanNumeral
{
    // Cached integer value of this numeral.
    int intValue;

    /**
     * Builds a numeral from its lexical form (case-insensitive).
     * @throws NumberFormatException if the form is not a valid roman numeral
     */
    public RomanNumeral(String lexicalForm) {
        if ( !isValid(lexicalForm) )
            throw new NumberFormatException("Invalid Roman Numeral: " + lexicalForm);
        intValue = r2i(lexicalForm);
    }

    /**
     * Builds a numeral from an integer value.
     * @throws NumberFormatException if the value is outside 1-3999
     */
    public RomanNumeral(int i) {
        if ( i <= 0 || i > 3999 )
            throw new NumberFormatException("Roman numerals are 1-3999 (" + i + ")");
        intValue = i;
    }

    @Override
    public String toString() { return i2r(intValue); }

    /** @return the integer value of this numeral */
    public int intValue() { return intValue; }

    //String pat = "M*(CM|DC{0,3}|CD|C{0,3})(XC|LX{0,3}|XL|X{0,3})(IX|VI{0,3}|IV|I{0,3})" ;
    // Added I{0,4}
    static String numeralPattern = "M*(CM|DC{0,3}|CD|C{0,3})(XC|LX{0,3}|XL|X{0,3})(IX|VI{0,3}|IV|I{0,4})";
    static Pattern pattern = Pattern.compile(numeralPattern);

    /**
     * Checks a lexical form (case-insensitively) against the numeral grammar.
     * Excludes forms like IIIII but permits IIII.
     */
    public static boolean isValid(String lex) {
        // Locale.ROOT for locale-independent case mapping (was Locale.ENGLISH;
        // identical for A-Z, and now consistent with r2i).
        lex = lex.toUpperCase(Locale.ROOT);
        Matcher m = pattern.matcher(lex);
        return m.matches();
    }

    /** Parses a lexical form; does not validate -- see {@link #isValid}. */
    public static int parse(String lex) {
        return r2i(lex);
    }

    /**
     * Roman-to-int. It is easier working right to left: a symbol smaller
     * than the largest symbol seen so far subtracts, otherwise it adds.
     * This is overly permissive:
     * 1 - allows multiple reducing values
     * 2 - allows reducing values that are not 10^x in front of 5*10^x or 10^(x+1)
     * Use the validator first.
     */
    public static int r2i(String lex) {
        lex = lex.toUpperCase(Locale.ROOT);
        int current = 0;
        int v = 0;
        for ( int i = lex.length() - 1 ; i >= 0 ; i-- ) {
            char ch = lex.charAt(i);
            int x = charToNum(ch);
            if ( x < current )
                v = v - x;
            else {
                v = v + x;
                current = x;
            }
        }
        return v;
    }

    public static String asRomanNumerals(int i) { return i2r(i); }

    /**
     * Int-to-roman for values 1-3999.
     * @throws NumberFormatException if the value is outside 1-3999
     */
    public static String i2r(int i) {
        if ( i <= 0 || i > 3999 )
            throw new NumberFormatException("Roman numerals are 1-3999 (" + i + ")");
        // StringBuilder: unsynchronized drop-in replacement for StringBuffer.
        StringBuilder sbuff = new StringBuilder();
        i = i2r(sbuff, i, "M", 1000, "CM", 900, "D", 500, "CD", 400);
        i = i2r(sbuff, i, "C", 100, "XC", 90, "L", 50, "XL", 40);
        i = i2r(sbuff, i, "X", 10, "IX", 9, "V", 5, "IV", 4);
        while (i >= 1) {
            sbuff.append("I");
            i -= 1;
        }
        return sbuff.toString();
    }

    // Emits one decimal digit's worth of symbols: repeated "tens", then at
    // most one of the nine/five/four composite forms. Returns the remainder.
    private static int i2r(StringBuilder sbuff, int i,
                           String tens, int iTens,
                           String nines, int iNines,
                           String fives, int iFives,
                           String fours, int iFours)
    {
        while ( i >= iTens ) {
            sbuff.append(tens);
            i -= iTens;
        }
        if ( i >= iNines ) {
            sbuff.append(nines);
            i -= iNines;
        }
        if ( i >= iFives ) {
            sbuff.append(fives);
            i -= iFives;
        }
        if ( i >= iFours ) {
            sbuff.append(fours);
            i -= iFours;
        }
        return i;
    }

    // Symbol-to-value lookup; unknown characters map to 0.
    // Only subtract ten's C,X,I; only allow one of them;
    // one do 10^x from 10^(x+1): CM, CD, XC, XL, IX, IV.
    static private int charToNum(char ch)
    {
        if ( ch == 0 ) return 0;
        for ( int i = 0 ; i < RValue.table.length ; i++ )
        {
            if ( RValue.table[i].lex == ch )
                return RValue.table[i].val;
        }
        return 0;
    }

    /** Symbol/value pair table for the seven roman symbols. */
    static class RValue
    {
        static RValue[] table =
            new RValue[] { new RValue('M', 1000) ,
                           new RValue('D', 500) ,
                           new RValue('C', 100) ,
                           new RValue('L', 50) ,
                           new RValue('X', 10) ,
                           new RValue('V', 5) ,
                           new RValue('I', 1) } ;
        char lex ; int val ;
        RValue(char s, int v) { lex = s ; val = v ; }
    }
}
#!/usr/bin/env bash
# Runs the external calsoft iSCSI conformance suite against iscsi_tgt.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../../..)
source $rootdir/test/common/autotest_common.sh
source $rootdir/test/iscsi_tgt/common.sh
# The calsoft suite is an optional external dependency; skip cleanly when absent.
if [ ! -d /usr/local/calsoft ]; then
	echo "skipping calsoft tests"
	exit 0
fi
timing_enter calsoft
MALLOC_BDEV_SIZE=64
MALLOC_BLOCK_SIZE=512
rpc_py="python $rootdir/scripts/rpc.py"
calsoft_py="python $testdir/calsoft.py"
# Copy the calsoft config file to /usr/local/etc
mkdir -p /usr/local/etc
cp $testdir/its.conf /usr/local/etc/
cp $testdir/auth.conf /usr/local/etc/
timing_enter start_iscsi_tgt
# Start the target in the background and wait for its RPC listener.
$ISCSI_APP -c $testdir/iscsi.conf -m 0x1 &
pid=$!
echo "Process pid: $pid"
# Kill the target if the script is interrupted before the trap is cleared.
trap "killprocess $pid; exit 1 " SIGINT SIGTERM EXIT
waitforlisten $pid
echo "iscsi_tgt is listening. Running tests..."
timing_exit start_iscsi_tgt
$rpc_py add_portal_group $PORTAL_TAG $TARGET_IP:$ISCSI_PORT
$rpc_py add_initiator_group $INITIATOR_TAG $INITIATOR_NAME $NETMASK
$rpc_py construct_malloc_bdev -b MyBdev $MALLOC_BDEV_SIZE $MALLOC_BLOCK_SIZE
# "MyBdev:0" ==> use MyBdev blockdev for LUN0
# "1:2" ==> map PortalGroup1 to InitiatorGroup2
# "64" ==> iSCSI queue depth 64
# "0 0 0 1" ==> enable CHAP authentication using auth group 1
$rpc_py construct_target_node Target3 Target3_alias 'MyBdev:0' $PORTAL_TAG:$INITIATOR_TAG 64 -g 1
sleep 1
# Optional first argument is forwarded to calsoft.py
# (presumably a test selection filter -- confirm in calsoft.py).
if [ "$1" ]; then
	$calsoft_py "$output_dir" "$1"
	failed=$?
else
	$calsoft_py "$output_dir"
	failed=$?
fi
trap - SIGINT SIGTERM EXIT
killprocess $pid
timing_exit calsoft
exit $failed
|
def count_migration_operations(operations):
    """Count Django migration operations by type.

    Args:
        operations: iterable of ``(operation_name, model_name)`` pairs.

    Returns:
        dict mapping ``'CreateModel'``, ``'DeleteModel'`` and
        ``'AlterModelOptions'`` to their number of occurrences.
        Operation types outside those three are ignored.
    """
    operation_counts = {'CreateModel': 0, 'DeleteModel': 0, 'AlterModelOptions': 0}
    for operation, _model in operations:
        # Membership test replaces the original if/elif chain; only the
        # three tracked operation types are counted.
        if operation in operation_counts:
            operation_counts[operation] += 1
    return operation_counts
<gh_stars>10-100
import consola from 'consola'
import inquirer from 'inquirer'
import Blueprint from 'src/blueprint'
import * as ejectHelpers from 'src/cli/commands/eject'
import * as utils from 'src/utils'
import { resetUtilMocks as _resetUtilMocks } from 'test-utils'
jest.mock('src/utils')
const resetUtilMocks = utilNames => _resetUtilMocks(utils, utilNames)
jest.mock('inquirer')
jest.mock('src/blueprint')
jest.mock('src/cli/commands')
jest.mock('src/cli/commands/eject')
// Unit tests for the `eject` CLI command. All collaborators (utils,
// inquirer, Blueprint, the eject helpers) are jest-mocked above; the real
// command implementation is loaded per-test with jest.requireActual so it
// runs against the mocks.
describe('Commands.eject', () => {
  beforeAll(() => resetUtilMocks())
  afterEach(() => jest.resetAllMocks())
  // Missing template key argument is a fatal error.
  test('Commands.eject logs fatal error without key', async () => {
    utils.exists.mockReturnValue(true)
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = []
    const nuxt = {}
    const options = {}
    await Commands.eject(args, nuxt, options)
    expect(consola.fatal).toHaveBeenCalledTimes(1)
    expect(consola.fatal).toHaveBeenCalledWith('A template key identifying the template you wish to eject is required')
    resetUtilMocks(['exists'])
  })
  test('Commands.eject logs fatal error with empty dir and no blueprints', async () => {
    utils.exists.mockReturnValue(true)
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['blueprint/key']
    const nuxt = {}
    const options = { dir: '' }
    await Commands.eject(args, nuxt, options)
    expect(consola.fatal).toHaveBeenCalledTimes(1)
    expect(consola.fatal).toHaveBeenCalledWith(`Blueprint path '' is empty or does not exists`)
    resetUtilMocks(['exists'])
  })
  test('Commands.eject logs fatal error with unknown blueprint', async () => {
    utils.exists.mockReturnValue(true)
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['blueprint/key']
    const nuxt = {}
    const options = { blueprints: {} }
    await Commands.eject(args, nuxt, options)
    expect(consola.fatal).toHaveBeenCalledTimes(1)
    expect(consola.fatal).toHaveBeenCalledWith(`Unrecognized blueprint 'blueprint'`)
    resetUtilMocks(['exists'])
  })
  test('Commands.eject logs fatal error with autodiscover for blueprint path returns nothing', async () => {
    utils.exists.mockReturnValue(true)
    Blueprint.autodiscover.mockReturnValue(false)
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['blueprint/key']
    const nuxt = {}
    const options = {
      blueprints: {
        blueprint: '/var/nuxt'
      }
    }
    await Commands.eject(args, nuxt, options)
    expect(consola.fatal).toHaveBeenCalledTimes(1)
    expect(consola.fatal).toHaveBeenCalledWith(`Unrecognized blueprint path, autodiscovery failed for '/var/nuxt'`)
    resetUtilMocks(['exists'])
  })
  // The optional normalizeInput hook must be applied to the raw key.
  test('Commands.eject calls normalizeInput function & logs fatal error when prompt returns zero templates', async () => {
    utils.exists.mockReturnValue(true)
    Blueprint.autodiscover.mockReturnValue(true)
    inquirer.prompt.mockReturnValue({ templates: [] })
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['template-key']
    const nuxt = {}
    const options = {
      dir: '/var/nuxt',
      normalizeInput: jest.fn(_ => _)
    }
    await Commands.eject(args, nuxt, options)
    expect(options.normalizeInput).toHaveBeenCalledTimes(1)
    expect(options.normalizeInput).toHaveBeenCalledWith(`template-key`)
    expect(consola.fatal).toHaveBeenCalledTimes(1)
    expect(consola.fatal).toHaveBeenCalledWith(`Unrecognized template key 'template-key'`)
    resetUtilMocks(['exists'])
  })
  // The literal key 'theme' routes to the ejectTheme helper.
  test('Commands.eject calls ejectTheme helper', async () => {
    utils.exists.mockReturnValue(true)
    Blueprint.autodiscover.mockReturnValue(true)
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['theme']
    const nuxt = {}
    const options = {
      dir: '/var/nuxt'
    }
    await Commands.eject(args, nuxt, options)
    expect(ejectHelpers.ejectTheme).toHaveBeenCalledTimes(1)
    expect(ejectHelpers.ejectTheme).toHaveBeenCalledWith(undefined, '/var/nuxt')
    resetUtilMocks(['exists'])
  })
  test('Commands.eject calls ejectTemplate helper (options.templates.type)', async () => {
    utils.exists.mockReturnValue(true)
    Blueprint.autodiscover.mockReturnValue(true)
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['plugins']
    const nuxt = {}
    const options = {
      dir: '/var/nuxt',
      templates: {
        plugins: ['test-plugin.js']
      }
    }
    await Commands.eject(args, nuxt, options)
    expect(ejectHelpers.ejectTemplates).toHaveBeenCalledTimes(1)
    expect(ejectHelpers.ejectTemplates).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), ['test-plugin.js'])
    resetUtilMocks(['exists'])
  })
  test('Commands.eject calls ejectTemplate helper (template\'s dst path)', async () => {
    utils.exists.mockReturnValue(true)
    Blueprint.autodiscover.mockReturnValue({
      plugins: [{ dst: 'test-plugin.js' }]
    })
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['test-plugin.js']
    const nuxt = {}
    const options = {
      dir: '/var/nuxt'
    }
    await Commands.eject(args, nuxt, options)
    expect(ejectHelpers.ejectTemplates).toHaveBeenCalledTimes(1)
    expect(ejectHelpers.ejectTemplates).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), [{ dst: 'test-plugin.js' }])
    resetUtilMocks(['exists'])
  })
  // When the key matches nothing directly, the prompt selection
  // ([group, index] pairs) picks the templates to eject.
  test('Commands.eject calls ejectTemplate helper (selected with prompt)', async () => {
    utils.exists.mockReturnValue(true)
    Blueprint.autodiscover.mockReturnValue({
      plugins: [{ dst: 'test-plugin.js' }]
    })
    inquirer.prompt.mockReturnValue({ templates: [['plugins', 0]] })
    const { default: Commands } = jest.requireActual('src/cli/commands')
    const args = ['something-plugin.js']
    const nuxt = {}
    const options = {
      dir: '/var/nuxt'
    }
    await Commands.eject(args, nuxt, options)
    expect(ejectHelpers.ejectTemplates).toHaveBeenCalledTimes(1)
    expect(ejectHelpers.ejectTemplates).toHaveBeenCalledWith(expect.any(Object), expect.any(Object), [{ dst: 'test-plugin.js' }])
    resetUtilMocks(['exists'])
  })
})
|
package com.hapramp.models;
import com.hapramp.steem.models.Feed;
import com.hapramp.steem.models.Voter;
import java.util.ArrayList;
/**
 * View-model wrapping a {@link Feed} entry for competition ranking lists.
 * Flattens the feed fields and pre-computes the effective payout.
 */
public class RankableCompetitionFeedItem {
  // Sentinel for posts that declined payout.
  // NOTE(review): not referenced anywhere in this class as visible here --
  // presumably used by callers; confirm before removing.
  public static final double DECLINED_PAYOUT = -1;
  private int rank;
  // NOTE(review): itemId is initialized from the permlink, same as the
  // permlink field below -- apparently used as the list item identity.
  private String itemId;
  private String username;
  private String createdAt;
  private String permlink;
  private ArrayList<String> tags;
  private String featuredImageLink;
  private String title;
  private String prize;
  private String description;
  private ArrayList<Voter> voters;
  // Number of child comments.
  private int childrens;
  private double payout;

  /**
   * Copies the ranking-relevant fields out of a feed entry.
   */
  public RankableCompetitionFeedItem(Feed feed) {
    this.itemId = feed.getPermlink();
    this.username = feed.getAuthor();
    this.createdAt = feed.getCreatedAt();
    this.permlink = feed.getPermlink();
    this.tags = feed.getTags();
    this.featuredImageLink = feed.getFeaturedImageUrl();
    this.title = feed.getTitle();
    this.prize = feed.getPrize();
    this.description = feed.getCleanedBody();
    this.voters = feed.getVoters();
    this.childrens = feed.getChildren();
    this.payout = extractPayout(feed);
    this.rank = feed.getRank();
  }

  /**
   * Resolves the effective payout of a post: the pending payout while it is
   * positive, otherwise the settled author + curator total, otherwise 0.
   * Payout strings are of the form "<amount> <unit>"; only the leading
   * amount is parsed.
   */
  private double extractPayout(Feed feed) {
    double pendingPayoutValue = Double.parseDouble(feed.getPendingPayoutValue().split(" ")[0]);
    double totalPayoutValue = Double.parseDouble(feed.getTotalPayoutValue().split(" ")[0]);
    double curatorPayoutValue = Double.parseDouble(feed.getCuratorPayoutValue().split(" ")[0]);
    if (pendingPayoutValue > 0) {
      return pendingPayoutValue;
    } else if ((totalPayoutValue + curatorPayoutValue) > 0) {
      return totalPayoutValue + curatorPayoutValue;
    }
    return 0;
  }

  /** No-arg constructor for manual population via the setters. */
  public RankableCompetitionFeedItem() {
  }

  // Plain accessors below; no additional logic.

  public int getRank() {
    return rank;
  }

  public void setRank(int rank) {
    this.rank = rank;
  }

  public String getPermlink() {
    return permlink;
  }

  public void setPermlink(String permlink) {
    this.permlink = permlink;
  }

  public String getPrize() {
    return prize;
  }

  public void setPrize(String prize) {
    this.prize = prize;
  }

  public String getItemId() {
    return itemId;
  }

  public void setItemId(String itemId) {
    this.itemId = itemId;
  }

  public String getUsername() {
    return username;
  }

  public void setUsername(String username) {
    this.username = username;
  }

  public String getCreatedAt() {
    return createdAt;
  }

  public void setCreatedAt(String createdAt) {
    this.createdAt = createdAt;
  }

  public ArrayList<String> getTags() {
    return tags;
  }

  public void setTags(ArrayList<String> tags) {
    this.tags = tags;
  }

  public String getFeaturedImageLink() {
    return featuredImageLink;
  }

  public void setFeaturedImageLink(String featuredImageLink) {
    this.featuredImageLink = featuredImageLink;
  }

  public String getTitle() {
    return title;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  public ArrayList<Voter> getVoters() {
    return voters;
  }

  public void setVoters(ArrayList<Voter> voters) {
    this.voters = voters;
  }

  public int getChildrens() {
    return childrens;
  }

  public void setChildrens(int childrens) {
    this.childrens = childrens;
  }

  public double getPayout() {
    return payout;
  }

  public void setPayout(double payout) {
    this.payout = payout;
  }

  @Override
  public String toString() {
    return "RankableCompetitionFeedItem{" +
        "rank=" + rank +
        ", itemId='" + itemId + '\'' +
        ", username='" + username + '\'' +
        ", createdAt='" + createdAt + '\'' +
        ", permlink='" + permlink + '\'' +
        ", tags=" + tags +
        ", featuredImageLink='" + featuredImageLink + '\'' +
        ", title='" + title + '\'' +
        ", description='" + description + '\'' +
        ", voters=" + voters +
        ", childrens=" + childrens +
        ", payout='" + payout + '\'' +
        '}';
  }
}
|
package com.nike.cerberus.record;
import com.openpojo.reflection.PojoClass;
import com.openpojo.reflection.impl.PojoClassFactory;
import com.openpojo.validation.Validator;
import com.openpojo.validation.ValidatorBuilder;
import com.openpojo.validation.rule.impl.GetterMustExistRule;
import com.openpojo.validation.rule.impl.SetterMustExistRule;
import com.openpojo.validation.test.impl.GetterTester;
import com.openpojo.validation.test.impl.SetterTester;
import org.junit.Assert;
import org.junit.Test;
import java.util.List;
/**
 * Structural test for every POJO in the com.nike.cerberus.record package:
 * each must expose a working getter and setter for every field.
 */
public class RecordPojoTest {

  @Test
  public void test_pojo_structure_and_behavior() {
    List<PojoClass> pojoClasses = PojoClassFactory.getPojoClasses("com.nike.cerberus.record");
    // Hard-coded count guards against record classes being added to the
    // package without this test noticing them.
    Assert.assertEquals(14, pojoClasses.size());
    Validator validator = ValidatorBuilder.create()
        .with(new GetterMustExistRule())
        .with(new SetterMustExistRule())
        .with(new SetterTester())
        .with(new GetterTester())
        .build();
    validator.validate(pojoClasses);
  }
}
|
package org.jooby.internal.ehcache;
import static com.typesafe.config.ConfigValueFactory.fromAnyRef;
import static org.junit.Assert.assertEquals;
import java.util.List;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.config.CacheConfiguration.BootstrapCacheLoaderFactoryConfiguration;
import net.sf.ehcache.config.CacheConfiguration.CacheDecoratorFactoryConfiguration;
import net.sf.ehcache.config.CacheConfiguration.CacheEventListenerFactoryConfiguration;
import net.sf.ehcache.config.CacheConfiguration.CacheExceptionHandlerFactoryConfiguration;
import net.sf.ehcache.config.CacheConfiguration.CacheExtensionFactoryConfiguration;
import net.sf.ehcache.config.CacheConfiguration.CacheLoaderFactoryConfiguration;
import net.sf.ehcache.config.CacheConfiguration.TransactionalMode;
import net.sf.ehcache.config.CacheWriterConfiguration;
import net.sf.ehcache.config.CacheWriterConfiguration.WriteMode;
import net.sf.ehcache.config.NonstopConfiguration;
import net.sf.ehcache.config.PersistenceConfiguration;
import net.sf.ehcache.config.PersistenceConfiguration.Strategy;
import net.sf.ehcache.config.PinningConfiguration;
import net.sf.ehcache.config.PinningConfiguration.Store;
import net.sf.ehcache.config.SizeOfPolicyConfiguration;
import net.sf.ehcache.config.SizeOfPolicyConfiguration.MaxDepthExceededBehavior;
import net.sf.ehcache.config.TerracottaConfiguration;
import net.sf.ehcache.config.TerracottaConfiguration.Consistency;
import net.sf.ehcache.config.TimeoutBehaviorConfiguration;
import net.sf.ehcache.constructs.blocking.BlockingCache;
import net.sf.ehcache.constructs.readthrough.ReadThroughCache;
import net.sf.ehcache.constructs.scheduledrefresh.ScheduledRefreshCacheExtension;
import net.sf.ehcache.event.NotificationScope;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;
import org.jooby.internal.ehcache.CacheConfigurationBuilder;
import org.junit.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
public class CacheConfigurationBuilderTest {
@Test
public void cacheLoaderTimeout() {
Config config = ConfigFactory
.empty()
.withValue("cacheLoaderTimeout", fromAnyRef("3s"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(3000, cache.getCacheLoaderTimeoutMillis());
}
@Test
public void cacheLoaderTimeoutMillis() {
Config config = ConfigFactory
.empty()
.withValue("cacheLoaderTimeoutMillis", fromAnyRef(500));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(500, cache.getCacheLoaderTimeoutMillis());
}
@Test
public void clearOnFlush() {
Config config = ConfigFactory
.empty()
.withValue("clearOnFlush", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(true, cache.isClearOnFlush());
}
@Test
public void copyOnRead() {
Config config = ConfigFactory
.empty()
.withValue("copyOnRead", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(true, cache.isCopyOnRead());
}
@Test
public void diskAccessStripes() {
Config config = ConfigFactory
.empty()
.withValue("diskAccessStripes", fromAnyRef(10));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(10, cache.getDiskAccessStripes());
}
@Test
public void diskExpiryThreadInterval() {
Config config = ConfigFactory
.empty()
.withValue("diskExpiryThreadInterval", fromAnyRef("1m"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(60, cache.getDiskExpiryThreadIntervalSeconds());
}
@Test
public void diskExpiryThreadIntervalSeconds() {
Config config = ConfigFactory
.empty()
.withValue("diskExpiryThreadIntervalSeconds", fromAnyRef(60));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(60, cache.getDiskExpiryThreadIntervalSeconds());
}
@Test
public void diskSpoolBufferSizeMB() {
Config config = ConfigFactory
.empty()
.withValue("diskSpoolBufferSizeMB", fromAnyRef(5));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(5, cache.getDiskSpoolBufferSizeMB());
}
@Test
public void eternal() {
Config config = ConfigFactory
.empty()
.withValue("eternal", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(true, cache.isEternal());
}
@Test
public void logging() {
Config config = ConfigFactory
.empty()
.withValue("logging", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(true, cache.getLogging());
}
@Test
public void copyOnWrite() {
Config config = ConfigFactory
.empty()
.withValue("copyOnWrite", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(true, cache.isCopyOnWrite());
}
@Test
public void maxBytesLocalDisk() {
Config config = ConfigFactory
.empty()
.withValue("maxBytesLocalDisk", fromAnyRef("1k"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1024, cache.getMaxBytesLocalDisk());
}
@Test
public void maxBytesLocalHeap() {
Config config = ConfigFactory
.empty()
.withValue("maxBytesLocalHeap", fromAnyRef("1k"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1024, cache.getMaxBytesLocalHeap());
}
@Test
public void maxBytesLocalOffHeap() {
Config config = ConfigFactory
.empty()
.withValue("maxBytesLocalOffHeap", fromAnyRef("1k"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1024, cache.getMaxBytesLocalOffHeap());
}
@Test
public void maxElementsInMemory() {
Config config = ConfigFactory
.empty()
.withValue("maxElementsInMemory", fromAnyRef(99));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(99, cache.getMaxEntriesLocalHeap());
}
@Test
public void maxElementsOnDisk() {
Config config = ConfigFactory
.empty()
.withValue("maxElementsOnDisk", fromAnyRef(99));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(99, cache.getMaxEntriesLocalDisk());
}
@Test
public void maxEntriesInCache() {
Config config = ConfigFactory
.empty()
.withValue("maxEntriesInCache", fromAnyRef(99));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(99, cache.getMaxEntriesInCache());
}
@Test
public void maxEntriesLocalDisk() {
Config config = ConfigFactory
.empty()
.withValue("maxEntriesLocalDisk", fromAnyRef(99));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(99, cache.getMaxEntriesLocalDisk());
}
@Test
public void maxEntriesLocalHeap() {
Config config = ConfigFactory
.empty()
.withValue("maxEntriesLocalHeap", fromAnyRef(99));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(99, cache.getMaxEntriesLocalHeap());
}
@Test
public void maxMemoryOffHeap() {
Config config = ConfigFactory
.empty()
.withValue("maxMemoryOffHeap", fromAnyRef(99));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(99, cache.getMaxBytesLocalOffHeap());
}
@Test
public void memoryStoreEvictionPolicy() {
Config config = ConfigFactory
.empty()
.withValue("memoryStoreEvictionPolicy", fromAnyRef("LRU"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(MemoryStoreEvictionPolicy.LRU, cache.getMemoryStoreEvictionPolicy());
}
@Test
public void overflowToOffHeap() {
Config config = ConfigFactory
.empty()
.withValue("overflowToOffHeap", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(true, cache.isOverflowToOffHeap());
}
@Test
public void timeToIdle() {
Config config = ConfigFactory
.empty()
.withValue("timeToIdle", fromAnyRef("1s"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1, cache.getTimeToIdleSeconds());
}
@Test
public void timeToIdleSeconds() {
Config config = ConfigFactory
.empty()
.withValue("timeToIdleSeconds", fromAnyRef(1));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1, cache.getTimeToIdleSeconds());
}
@Test
public void timeToLive() {
Config config = ConfigFactory
.empty()
.withValue("timeToLive", fromAnyRef("1s"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1, cache.getTimeToLiveSeconds());
}
@Test
public void timeToLiveSeconds() {
Config config = ConfigFactory
.empty()
.withValue("timeToLiveSeconds", fromAnyRef(1));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(1, cache.getTimeToLiveSeconds());
}
@Test
public void transactionalMode() {
Config config = ConfigFactory
.empty()
.withValue("transactionalMode", fromAnyRef("local"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
assertEquals(TransactionalMode.LOCAL, cache.getTransactionalMode());
}
@Test
public void persistence() {
Config config = ConfigFactory
.empty()
.withValue("persistence.strategy", fromAnyRef("LOCALRESTARTABLE"))
.withValue("persistence.synchronousWrites", fromAnyRef(true));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
PersistenceConfiguration persistence = cache.getPersistenceConfiguration();
assertEquals(Strategy.LOCALRESTARTABLE, persistence.getStrategy());
assertEquals(true, persistence.getSynchronousWrites());
}
  // Verifies the bootstrapCacheLoaderFactory mapping: the "class" key becomes the
  // factory class path, and all remaining keys are serialized as "k=v;..." properties.
  @Test
  public void bootstrapCacheLoaderFactory() {
    Config config = ConfigFactory
        .empty()
        .withValue("bootstrapCacheLoaderFactory.class",
            fromAnyRef("net.sf.ehcache.distribution.RMIBootstrapCacheLoaderFactory"))
        .withValue("bootstrapCacheLoaderFactory.bootstrapAsynchronously",
            fromAnyRef(true))
        .withValue("bootstrapCacheLoaderFactory.maximumChunkSizeBytes",
            fromAnyRef(5000000));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    BootstrapCacheLoaderFactoryConfiguration bootstrapCacheLoader = cache
        .getBootstrapCacheLoaderFactoryConfiguration();
    assertEquals("net.sf.ehcache.distribution.RMIBootstrapCacheLoaderFactory",
        bootstrapCacheLoader.getFullyQualifiedClassPath());
    assertEquals("bootstrapAsynchronously=true;maximumChunkSizeBytes=5000000",
        bootstrapCacheLoader.getProperties());
  }
  // Verifies the single (non-namespaced) cacheDecoratorFactory form.
  @Test
  public void cacheDecoratorFactory() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheDecoratorFactory.class",
            fromAnyRef(BlockingCache.class.getName()));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheDecoratorFactoryConfiguration> decorators = cache.getCacheDecoratorConfigurations();
    assertEquals(1, decorators.size());
    assertEquals(BlockingCache.class.getName(), decorators.iterator().next()
        .getFullyQualifiedClassPath());
  }
  // Verifies the namespaced form (cacheDecoratorFactory.<id>.*) yields one decorator per id.
  // NOTE(review): get(0)/get(1) assume the builder emits decorators in key order
  // ("blocking" before "readT") — confirm the builder sorts/preserves config order.
  @Test
  public void cacheDecoratorFactories() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheDecoratorFactory.blocking.class",
            fromAnyRef(BlockingCache.class.getName()))
        .withValue("cacheDecoratorFactory.blocking.p1",
            fromAnyRef(BlockingCache.class.getName()))
        .withValue("cacheDecoratorFactory.readT.class",
            fromAnyRef(ReadThroughCache.class.getName()));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheDecoratorFactoryConfiguration> decorators = cache.getCacheDecoratorConfigurations();
    assertEquals(2, decorators.size());
    assertEquals(BlockingCache.class.getName(), decorators.get(0).getFullyQualifiedClassPath());
    assertEquals(ReadThroughCache.class.getName(), decorators.get(1).getFullyQualifiedClassPath());
  }
  // Verifies the single cacheEventListenerFactory form, including the
  // "listenFor" -> NotificationScope conversion.
  @SuppressWarnings("unchecked")
  @Test
  public void cacheEventListenerFactory() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheEventListenerFactory.class",
            fromAnyRef("my.Listener"))
        .withValue("cacheEventListenerFactory.listenFor",
            fromAnyRef("local"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheEventListenerFactoryConfiguration> listeners = cache
        .getCacheEventListenerConfigurations();
    assertEquals(1, listeners.size());
    assertEquals("my.Listener", listeners.get(0).getFullyQualifiedClassPath());
    assertEquals(NotificationScope.LOCAL, listeners.get(0).getListenFor());
  }
  // Verifies the namespaced listener form (cacheEventListenerFactory.<id>.*).
  @SuppressWarnings("unchecked")
  @Test
  public void cacheEventListenerFactories() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheEventListenerFactory.l1.class",
            fromAnyRef("my.Listener"))
        .withValue("cacheEventListenerFactory.l1.listenFor",
            fromAnyRef("local"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheEventListenerFactoryConfiguration> listeners = cache
        .getCacheEventListenerConfigurations();
    assertEquals(1, listeners.size());
    assertEquals("my.Listener", listeners.get(0).getFullyQualifiedClassPath());
    assertEquals(NotificationScope.LOCAL, listeners.get(0).getListenFor());
  }
  // Verifies the cacheExceptionHandlerFactory "class" mapping.
  @Test
  public void cacheExceptionHandlerFactory() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheExceptionHandlerFactory.class",
            fromAnyRef("my.ExceptionHandler"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    CacheExceptionHandlerFactoryConfiguration ex = cache
        .getCacheExceptionHandlerFactoryConfiguration();
    assertEquals("my.ExceptionHandler", ex.getFullyQualifiedClassPath());
  }
  // Verifies the single cacheExtensionFactory form.
  @SuppressWarnings("unchecked")
  @Test
  public void cacheExtensionFactory() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheExtensionFactory.class",
            fromAnyRef(ScheduledRefreshCacheExtension.class.getName()));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheExtensionFactoryConfiguration> extensions = cache.getCacheExtensionConfigurations();
    assertEquals(ScheduledRefreshCacheExtension.class.getName(), extensions.get(0)
        .getFullyQualifiedClassPath());
  }
  // Verifies the namespaced extension form (cacheExtensionFactory.<id>.*).
  @SuppressWarnings("unchecked")
  @Test
  public void cacheExtensionFactories() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheExtensionFactory.e1.class",
            fromAnyRef("Ext1"))
        .withValue("cacheExtensionFactory.e2.class",
            fromAnyRef("Ext2"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheExtensionFactoryConfiguration> extensions = cache.getCacheExtensionConfigurations();
    assertEquals("Ext1", extensions.get(0).getFullyQualifiedClassPath());
    assertEquals("Ext2", extensions.get(1).getFullyQualifiedClassPath());
  }
  // Verifies the single cacheLoaderFactory form.
  @SuppressWarnings("unchecked")
  @Test
  public void cacheLoaderFactory() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheLoaderFactory.class",
            fromAnyRef("CacheLoaderFactory1"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheLoaderFactoryConfiguration> extensions = cache.getCacheLoaderConfigurations();
    assertEquals("CacheLoaderFactory1", extensions.get(0).getFullyQualifiedClassPath());
  }
  // Verifies the namespaced loader form (cacheLoaderFactory.<id>.*).
  @SuppressWarnings("unchecked")
  @Test
  public void cacheLoaderFactories() {
    Config config = ConfigFactory
        .empty()
        .withValue("cacheLoaderFactory.f1.class",
            fromAnyRef("CacheLoaderFactory1"))
        .withValue("cacheLoaderFactory.f2.class",
            fromAnyRef("CacheLoaderFactory2"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    List<CacheLoaderFactoryConfiguration> extensions = cache.getCacheLoaderConfigurations();
    assertEquals("CacheLoaderFactory1", extensions.get(0).getFullyQualifiedClassPath());
    assertEquals("CacheLoaderFactory2", extensions.get(1).getFullyQualifiedClassPath());
  }
@Test
public void cacheWriter() {
Config config = ConfigFactory
.empty()
.withValue("cacheWriter.maxWriteDelay", fromAnyRef(100))
.withValue("cacheWriter.minWriteDelay", fromAnyRef(10))
.withValue("cacheWriter.notifyListenersOnException", fromAnyRef(true))
.withValue("cacheWriter.rateLimitPerSecond", fromAnyRef(1))
.withValue("cacheWriter.retryAttemptDelay", fromAnyRef("1m"))
.withValue("cacheWriter.retryAttempts", fromAnyRef(5))
.withValue("cacheWriter.writeBatching", fromAnyRef(true))
.withValue("cacheWriter.writeBatchSize", fromAnyRef(10))
.withValue("cacheWriter.writeBehindConcurrency", fromAnyRef(3))
.withValue("cacheWriter.writeBehindMaxQueueSize", fromAnyRef(31))
.withValue("cacheWriter.writeCoalescing", fromAnyRef(true))
.withValue("cacheWriter.writeMode", fromAnyRef("WRITE_BEHIND"));
CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
CacheConfiguration cache = builder.build(config);
CacheWriterConfiguration writer = cache.getCacheWriterConfiguration();
assertEquals(100, writer.getMaxWriteDelay());
assertEquals(10, writer.getMinWriteDelay());
assertEquals(true, writer.getNotifyListenersOnException());
assertEquals(1, writer.getRateLimitPerSecond());
assertEquals(60, writer.getRetryAttemptDelaySeconds());
assertEquals(5, writer.getRetryAttempts());
assertEquals(true, writer.getWriteBatching());
assertEquals(10, writer.getWriteBatchSize());
assertEquals(3, writer.getWriteBehindConcurrency());
assertEquals(WriteMode.WRITE_BEHIND, writer.getWriteMode());
}
  // Verifies that "pinning.store" parses case-insensitively ("localMemory" -> Store.LOCALMEMORY).
  @Test
  public void pinning() {
    Config config = ConfigFactory
        .empty()
        .withValue("pinning.store", fromAnyRef("localMemory"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    PinningConfiguration pinning = cache.getPinningConfiguration();
    assertEquals(Store.LOCALMEMORY, pinning.getStore());
  }
  // Verifies the sizeOfPolicy mapping, including the case-insensitive
  // "abort" -> MaxDepthExceededBehavior.ABORT conversion.
  @Test
  public void sizeOfPolicy() {
    Config config = ConfigFactory
        .empty()
        .withValue("sizeOfPolicy.maxDepth", fromAnyRef(100))
        .withValue("sizeOfPolicy.maxDepthExceededBehavior", fromAnyRef("abort"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    SizeOfPolicyConfiguration size = cache.getSizeOfPolicyConfiguration();
    assertEquals(100, size.getMaxDepth());
    assertEquals(MaxDepthExceededBehavior.ABORT, size.getMaxDepthExceededBehavior());
  }
  // Exercises the full terracotta.* mapping (nonstop sub-config, timeout behavior
  // properties, plus all scalar terracotta flags).
  @SuppressWarnings("deprecation")
  @Test
  public void terracota() {
    Config config = ConfigFactory
        .empty()
        .withValue("terracotta.nonstop.timeout.type", fromAnyRef("noop"))
        .withValue("terracotta.nonstop.timeout.p1", fromAnyRef("v1"))
        .withValue("terracotta.nonstop.searchTimeoutMillis", fromAnyRef("1s"))
        .withValue("terracotta.nonstop.bulkOpsTimeoutMultiplyFactor", fromAnyRef(99))
        .withValue("terracotta.nonstop.enabled", fromAnyRef(true))
        .withValue("terracotta.nonstop.immediateTimeout", fromAnyRef(true))
        .withValue("terracotta.nonstop.timeoutMillis", fromAnyRef("5s"))
        .withValue("terracotta.cacheXA", fromAnyRef(false))
        .withValue("terracotta.clustered", fromAnyRef(false))
        .withValue("terracotta.coherent", fromAnyRef(true))
        .withValue("terracotta.compressionEnabled", fromAnyRef(true))
        .withValue("terracotta.concurrency", fromAnyRef(87))
        .withValue("terracotta.consistency", fromAnyRef("STRONG"))
        .withValue("terracotta.localCacheEnabled", fromAnyRef(true))
        .withValue("terracotta.localKeyCache", fromAnyRef(true))
        .withValue("terracotta.localKeyCacheSize", fromAnyRef(99))
        .withValue("terracotta.orphanEviction", fromAnyRef(true))
        .withValue("terracotta.orphanEvictionPeriod", fromAnyRef(98))
        .withValue("terracotta.synchronousWrites", fromAnyRef(true));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    TerracottaConfiguration terracota = cache.getTerracottaConfiguration();
    NonstopConfiguration nonstop = terracota.getNonstopConfiguration();
    TimeoutBehaviorConfiguration timeout = nonstop.getTimeoutBehavior();
    assertEquals("noop", timeout.getType());
    assertEquals("p1=v1", timeout.getProperties());
    // NOTE(review): searchTimeoutMillis is configured as "1s" but asserted as 5000,
    // and timeoutMillis is configured as "5s" but asserted as 30000. Presumably the
    // builder does not apply these keys and the asserted values are defaults —
    // TODO confirm against CacheConfigurationBuilder; otherwise these asserts hide a bug.
    assertEquals(5000, nonstop.getSearchTimeoutMillis());
    assertEquals(99, nonstop.getBulkOpsTimeoutMultiplyFactor());
    assertEquals(true, nonstop.isEnabled());
    assertEquals(true, nonstop.isImmediateTimeout());
    assertEquals(30000, nonstop.getTimeoutMillis());
    assertEquals(false, terracota.isCacheXA());
    assertEquals(false, terracota.isClustered());
    assertEquals(true, terracota.isCoherent());
    assertEquals(true, terracota.isCompressionEnabled());
    assertEquals(87, terracota.getConcurrency());
    assertEquals(Consistency.STRONG, terracota.getConsistency());
    assertEquals(true, terracota.isLocalCacheEnabled());
    assertEquals(true, terracota.getLocalKeyCache());
    assertEquals(99, terracota.getLocalKeyCacheSize());
    assertEquals(true, terracota.getOrphanEviction());
    assertEquals(98, terracota.getOrphanEvictionPeriod());
    assertEquals(true, terracota.isSynchronousWrites());
  }
  // Verifies the consistency mapping on its own (no explicit "coherent" flag).
  @Test
  public void terracotaNoCoherent() {
    Config config = ConfigFactory
        .empty()
        .withValue("terracotta.consistency", fromAnyRef("EVENTUAL"));
    CacheConfigurationBuilder builder = new CacheConfigurationBuilder("c1");
    CacheConfiguration cache = builder.build(config);
    TerracottaConfiguration terracota = cache.getTerracottaConfiguration();
    assertEquals(Consistency.EVENTUAL, terracota.getConsistency());
  }
}
|
#!/bin/sh
# Initialize CKAN's default database schema via paster.
init_db(){
  ${CKAN_HOME}/bin/paster --plugin=ckan db init -c /etc/ckan/default/production.ini
}
# Create the datastore role/database and apply CKAN's datastore permissions.
init_datastore(){
  # Export libpq connection variables from the linked "DB" container's env.
  export PGUSER=$DB_ENV_POSTGRES_USER;
  export PGPASSWORD=$DB_ENV_POSTGRES_PASS;
  export PGHOST=$DB_PORT_5432_TCP_ADDR;
  export PGDATABASE=$DB_ENV_POSTGRES_DB;
  export PGPORT=$DB_PORT_5432_TCP_PORT;
  # Create role and database for the datastore.
  # NOTE(review): the role password 'pass' is hard-coded — confirm it is
  # overridden for non-development deployments.
  psql -c "CREATE ROLE datastore_default WITH PASSWORD 'pass';"
  psql -c "CREATE DATABASE datastore_default OWNER $DB_ENV_POSTGRES_USER;"
  # Pipe the generated permission SQL into psql; abort on the first SQL error.
  $CKAN_HOME/bin/paster --plugin=ckan datastore set-permissions -c /etc/ckan/default/production.ini | psql --set ON_ERROR_STOP=1
}
printf "Inicializando bases de datos... "
# Configure and initialize the "DATASTORE" plugin.
init_datastore
rids=$?
# Initialize CKAN's default database.
init_db
ridb=$?
# Sum both exit codes so a single zero-check covers both steps.
exit_code=$(($ridb + $rids))
# Fixed: the original used the bash-only [[ ... ]] (with a bare, unexpanded
# `exit_code`) under a #!/bin/sh shebang; use a POSIX test instead.
if [ "$exit_code" -eq 0 ]
then
    printf "[OK]\nBases de datos funcionales y listas!\n"
    exit 0
else
    printf "[FALLO]\nImposible inicializar las bases de datos!\n"
    exit 1
fi
#!/bin/bash
# file snapshot (local directory)
TSTAMP=$(date +'%Y%m%d%H%M')
IGNOREPATHS=.git
# NOTE(review): IGNOREDIRS is still unused — TODO wire into pruning or drop.
IGNOREDIRS=/sys:/run:/dev
# Fixed: IGNOREPATHS was declared but never used, so .git internals polluted
# the snapshot; prune any directory matching it before printing.
find ./ -name "$IGNOREPATHS" -prune -o -printf "%Cy.%Cm.%Cd %CH:%CM %M %p %b %l \n" > ".fs_snapshot_$TSTAMP"
|
import React, { useEffect, useState } from 'react';
import ScrollableGrid from 'src/components-shared/ScrollableGrid';
import { useQueryDataContext } from 'src/hooks/useQueryData';
import PaddedTypography from 'src/components-shared/PaddedTypography';
import LogsList from 'src/components/LogsList';
import SchedulesList from 'src/components/SchedulesList';
import SearchInput from 'src/components/SearchInput';
import { AppContainer } from 'src/styles';
import { AppBar, CircularProgress, Grid, Toolbar, Typography } from '@mui/material';
import isEmpty from 'src/utils/isEmpty';
// Root application component: loads schedules and logs through the query-data
// context, then renders a searchable schedules list beside the selected
// schedule's logs.
const App = () => {
  const { fetchAll, selectSchedule, toggleScheduleRetire, schedules, selectedLogs, selectedSchedule, loading, error } =
    useQueryDataContext();
  // Text typed into the AppBar search input; filters SchedulesList below.
  const [searchTerm, setSearchTerm] = useState('');
  // Fetch all data once on mount.
  // NOTE(review): `fetchAll` is omitted from the dependency array — confirm it
  // is referentially stable (e.g. memoized in the context) to avoid stale closures.
  useEffect(() => {
    fetchAll();
  }, []);
  // Error state short-circuits the whole UI.
  if (!isEmpty(error)) {
    return <Typography>I'm a friendly error :)</Typography>;
  }
  return (
    <AppContainer>
      <AppBar color={'default'} position="static">
        <Toolbar>
          <SearchInput label={'Schedules Search'} onChange={setSearchTerm} />
          {selectedSchedule && <PaddedTypography color="black">{selectedSchedule.name}</PaddedTypography>}
        </Toolbar>
      </AppBar>
      {loading ? (
        <CircularProgress />
      ) : (
        <Grid container>
          <ScrollableGrid item xs={8} sm={4}>
            <SchedulesList
              selectSchedule={selectSchedule}
              toggleScheduleRetire={toggleScheduleRetire}
              schedules={schedules}
              searchTerm={searchTerm}
            />
          </ScrollableGrid>
          <ScrollableGrid item xs>
            <LogsList logs={selectedLogs} selectedSchedule={selectedSchedule} />
          </ScrollableGrid>
        </Grid>
      )}
    </AppContainer>
  );
};
export default App;
|
<reponame>lyueyang/tp<filename>src/main/java/seedu/address/model/person/passenger/exceptions/PassengerNotFoundException.java
package seedu.address.model.person.passenger.exceptions;
/**
 * Signals that the operation is unable to find the specified passenger.
 *
 * <p>Extends {@link RuntimeException}, so callers are not forced to declare
 * or catch it.
 */
public class PassengerNotFoundException extends RuntimeException {}
|
#!/bin/sh
# This script changes the Gnome/XDM desktop to show a web page instead of a
# color or image: start the web-desktop window if absent, otherwise focus it.
terminal_wm_class="pythonwebdesktop.py"
terminal_exec="python /home/vagrant/src/pythonwebdesktop/pythonwebdesktop.py http://localhost/"
# Fixed: use $(...) instead of backticks, quote expansions, and grep for the
# configured WM class instead of a duplicated literal string.
if [ -z "$(wmctrl -lx | grep "$terminal_wm_class")" ]; then
    # Intentionally unquoted: $terminal_exec must word-split into command + args.
    $terminal_exec &
else
    wmctrl -x -a "$terminal_wm_class"
fi
|
#!/bin/bash
# Fixed: guard against a missing version argument — without it the script
# would run `rm -rfv docs/` and tag/commit with an empty version.
if [ -z "$1" ]; then
  echo "usage: $0 <version>" >&2
  exit 1
fi

SOURCE_PATH=schema
ROOT_PATH=docs/$1
DOCS_PATH=$ROOT_PATH/docs
SCHEMA_PATH=$ROOT_PATH/schema

# Start from a clean per-version output directory.
rm -rfv "$ROOT_PATH"

# install_dependencies.sh

echo Building schema documentation...
jsonschema2md -d "$SOURCE_PATH" -o "$DOCS_PATH" -x "$SCHEMA_PATH"

# ******************************************************************************
# * Workaround for issue https://github.com/benbalter/jekyll-relative-links/issues/63
# ******************************************************************************
# $DOCS_PATH/**/*.md stays unquoted on purpose so the shell expands the glob.
replace-in-file $DOCS_PATH/**/*.md --configFile=markdownUrlReplaceOptions.js --verbose
# ******************************************************************************

# Version-tag the schema sources (submodule-style checkout), then commit the
# generated documentation in the outer repository.
cd "$SOURCE_PATH" || exit 1
git add .
git commit -s -m "build: automatically version incremented to $1" -m "Automatically version update."
git tag "$1" HEAD
cd ..
git add "$SOURCE_PATH"
git add "$ROOT_PATH"
git commit -s -m "build: automatically generated documentation $1" -m "Automatically generated documentation."
|
<?php
/**
 * Prints $_GET['passwordCount'] random passwords of $_GET['passwordLength']
 * characters, one per line.
 */

// Alphabet allowed in generated passwords.
const PASSWORD_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!?=@#$^&*(){}[]";

/**
 * Builds one random password.
 *
 * @param int    $length Number of characters to generate.
 * @param string $chars  Alphabet to draw from (non-empty).
 * @return string The generated password.
 */
function generatePassword($length, $chars = PASSWORD_CHARS) {
    $password = '';
    $max = strlen($chars) - 1;
    for ($x = 0; $x < $length; $x++) {
        // Fixed: random_int() is a CSPRNG; rand() is predictable and must not
        // be used to generate passwords.
        $password .= $chars[random_int(0, $max)];
    }
    return $password;
}

// `?? 0` avoids an undefined-index warning when the parameters are absent.
$passwordCount = intval($_GET['passwordCount'] ?? 0);
$passwordLength = intval($_GET['passwordLength'] ?? 0);
for ($i = 0; $i < $passwordCount; $i++) {
    echo generatePassword($passwordLength) . "\n";
}
?>
# frozen_string_literal: true
# Spec for Layout/EmptyLineAfterMultilineCondition: the cop requires a blank
# line after any condition that spans multiple lines. Covered constructs:
# `if`/`elsif` (plain and modifier form), `while`/`until` (plain and
# post-`begin` modifier form), `case/when` with a multiline value list, and
# `rescue` with a multiline exception list. Single-line conditions and
# conditions already followed by a blank line must not be flagged.
RSpec.describe RuboCop::Cop::Layout::EmptyLineAfterMultilineCondition do
  subject(:cop) { described_class.new }
  it 'registers an offense when no new line after `if` with multiline condition' do
    expect_offense(<<~RUBY)
      if multiline &&
      ^^^^^^^^^^^^ Use empty line after multiline condition.
         condition
        do_something
      end
    RUBY
  end
  it 'does not register an offense when new line after `if` with multiline condition' do
    expect_no_offenses(<<~RUBY)
      if multiline &&
         condition

        do_something
      end
    RUBY
  end
  it 'does not register an offense for `if` with single line condition' do
    expect_no_offenses(<<~RUBY)
      if singleline
        do_something
      end
    RUBY
  end
  it 'registers an offense when no new line after modifier `if` with multiline condition' do
    expect_offense(<<~RUBY)
      do_something if multiline &&
      ^^^^^^^^^^^^ Use empty line after multiline condition.
                      condition
      do_something_else
    RUBY
  end
  it 'does not register an offense when new line after modifier `if` with multiline condition' do
    expect_no_offenses(<<~RUBY)
      do_something if multiline &&
                      condition

      do_something_else
    RUBY
  end
  it 'does not register an offense when modifier `if` with multiline condition'\
     'is the last child of its parent' do
    expect_no_offenses(<<~RUBY)
      def m
        do_something if multiline &&
                        condition
      end
    RUBY
  end
  it 'registers an offense when no new line after `elsif` with multiline condition' do
    expect_offense(<<~RUBY)
      if condition
        do_something
      elsif multiline &&
      ^^^^^^^^^^^^ Use empty line after multiline condition.
            condition
        do_something_else
      end
    RUBY
  end
  it 'does not register an offense when new line after `elsif` with multiline condition' do
    expect_no_offenses(<<~RUBY)
      if condition
        do_something
      elsif multiline &&
            condition

        do_something_else
      end
    RUBY
  end
  it 'registers an offense when no new line after `while` with multiline condition' do
    expect_offense(<<~RUBY)
      while multiline &&
      ^^^^^^^^^^^^ Use empty line after multiline condition.
            condition
        do_something
      end
    RUBY
  end
  it 'registers an offense when no new line after `until` with multiline condition' do
    expect_offense(<<~RUBY)
      until multiline &&
      ^^^^^^^^^^^^ Use empty line after multiline condition.
            condition
        do_something
      end
    RUBY
  end
  it 'does not register an offense when new line after `while` with multiline condition' do
    expect_no_offenses(<<~RUBY)
      while multiline &&
            condition

        do_something
      end
    RUBY
  end
  it 'does not register an offense for `while` with single line condition' do
    expect_no_offenses(<<~RUBY)
      while singleline
        do_something
      end
    RUBY
  end
  it 'registers an offense when no new line after modifier `while` with multiline condition' do
    expect_offense(<<~RUBY)
      begin
        do_something
      end while multiline &&
      ^^^^^^^^^^^^ Use empty line after multiline condition.
                condition
      do_something_else
    RUBY
  end
  it 'does not register an offense when new line after modifier `while` with multiline condition' do
    expect_no_offenses(<<~RUBY)
      begin
        do_something
      end while multiline &&
                condition

      do_something_else
    RUBY
  end
  it 'does not register an offense when modifier `while` with multiline condition'\
     'is the last child of its parent' do
    expect_no_offenses(<<~RUBY)
      def m
        begin
          do_something
        end while multiline &&
                  condition
      end
    RUBY
  end
  it 'registers an offense when no new line after `when` with multiline condition' do
    expect_offense(<<~RUBY)
      case x
      when foo,
      ^^^^^^^^^ Use empty line after multiline condition.
           bar
        do_something
      end
    RUBY
  end
  it 'does not register an offense when new line after `when` with multiline condition' do
    expect_no_offenses(<<~RUBY)
      case x
      when foo,
           bar

        do_something
      end
    RUBY
  end
  it 'does not register an offense for `when` with singleline condition' do
    expect_no_offenses(<<~RUBY)
      case x
      when foo, bar
        do_something
      end
    RUBY
  end
  it 'registers an offense when no new line after `rescue` with multiline exceptions' do
    expect_offense(<<~RUBY)
      begin
        do_something
      rescue FooError,
      ^^^^^^^^^^^^^^^^ Use empty line after multiline condition.
             BarError
        handle_error
      end
    RUBY
  end
  it 'does not register an offense when new line after `rescue` with multiline exceptions' do
    expect_no_offenses(<<~RUBY)
      begin
        do_something
      rescue FooError,
             BarError

        handle_error
      end
    RUBY
  end
  it 'does not register an offense for `rescue` with singleline exceptions' do
    expect_no_offenses(<<~RUBY)
      begin
        do_something
      rescue FooError
        handle_error
      end
    RUBY
  end
end
|
<gh_stars>0
// The module 'vscode' contains the VS Code extensibility API
// Import the module and reference it with the alias vscode in your code below
import * as vscode from 'vscode';
import * as jszip from 'jszip';
/**
 * Mapping from Android density qualifiers to Flutter asset-variant folders:
 *   images/0.75x/my_icon.png (ldpi inside 0.75x folder)
 *   images/my_icon.png       (mdpi directly inside images)
 *   images/1.5x/my_icon.png  (hdpi inside 1.5x folder)
 *   images/2.0x/my_icon.png  (xhdpi inside 2.0x folder)
 *   images/3.0x/my_icon.png  (xxhdpi inside 3.0x folder)
 *   images/4.0x/my_icon.png  (xxxhdpi inside 4.0x folder)
 * mdpi maps to "" so those files land directly in the images root.
 */
const _dirMap: Map<string, string> = new Map([
    ["ldpi", "0.75x"],
    ["mdpi", ""],
    ["hdpi", "1.5x"],
    ["xhdpi", "2.0x"],
    ["xxhdpi", "3.0x"],
    ["xxxhdpi", "4.0x"],
    ["xxxxhdpi", "5.0x"],
]);

/**
 * Returns the base file name of `file` (last path segment, text before the
 * first "."), or undefined for an empty input.
 */
function fname(file: string): string | undefined {
    if (!file) {
        return;
    }
    const fragments = file.split("/");
    return fragments[fragments.length - 1].split(".")[0];
}

/**
 * Maps a zip-entry path like "mipmap-xhdpi/icon.png" to the matching Flutter
 * variant folder ("2.0x"), or undefined when the density is unrecognized.
 */
function dname(path: string): string | undefined {
    if (!path) {
        return;
    }
    // Fixed: strict equality (===) instead of loose ==.
    if (path[0] === '/') {
        path = path.substring(1);
    }
    const rawDir = path.split("/")[0].replace('mipmap-', '').replace('drawable-', '');
    return _dirMap.get(rawDir);
}
// Extracts every file entry from the picked zip into the workspace image root,
// renaming each to `name` + its original extension and routing it to the
// density folder derived from its path (see dname/_dirMap).
function unzip(file: vscode.Uri, dest: vscode.Uri, name: string) {
    vscode.workspace.fs.readFile(file).then(data => {
        jszip.loadAsync(data).then(zip => {
            zip.forEach((path, data) => {
                if (!data.dir) {
                    // Keep the original extension; replace the base name.
                    let ext = data.name.substring(data.name.lastIndexOf("."));
                    let dir = dname(path);
                    if (dir != undefined) {
                        let fullUri = vscode.Uri.joinPath(dest, dir, name + ext);
                        console.log(`${data.name} -> ${fullUri.fsPath}`);
                        data.async("uint8array").then(dat => {
                            vscode.workspace.fs.writeFile(fullUri, dat).then(() => { }, (e) => {
                                vscode.window.showErrorMessage(`import ${fullUri.path} err:${e}`);
                            });
                        }).catch(e => {
                            vscode.window.showErrorMessage(`import ${fullUri.path} err:${e}`);
                        });
                    } else {
                        console.log(`${data.name} ignore(no match target dir)`);
                    }
                }
            });
            // NOTE(review): shown when iteration finishes, not when the async
            // writeFile calls above complete (they are not awaited) — confirm
            // whether "done" should wait for all writes.
            vscode.window.showInformationMessage('import job done.');
        }).catch(e => {
            vscode.window.showErrorMessage(`import image failed:${e}`);
        });
    })
}
// this method is called when your extension is activated
// your extension is activated the very first time the command is executed
//
// Command flow for "lhscaffold.import-images": ensure the image root exists ->
// let the user pick a zip -> ask for the target base name (defaulting to the
// zip's base name) -> warn on a name collision -> unzip into density folders.
export function activate(context: vscode.ExtensionContext) {
    console.log('Congratulations, your extension "lhscaffold" is now active!');
    let importCmd = vscode.commands.registerCommand('lhscaffold.import-images', () => {
        // Requires an open workspace to resolve the image root against.
        if (!vscode.workspace.workspaceFolders) {
            return;
        }
        // Image root is configurable; falls back to the 'assets/images' convention.
        let imageRoot = vscode.workspace.getConfiguration('', vscode.workspace.workspaceFolders[0]).get<string>("lhscaffold.default-dir") || 'assets/images';
        let root = vscode.workspace.workspaceFolders[0].uri;
        let dest = vscode.Uri.joinPath(root, imageRoot);
        console.log(`dest dir:${dest.fsPath}`);
        vscode.workspace.fs.createDirectory(dest).then(() => {
            console.log("make dest folder success");
            vscode.window.showOpenDialog({ canSelectMany: false, filters: { 'zip': ['zip'] } }).then(uri => {
                if (uri?.length) {
                    let file = uri[0];
                    console.log(`picked file:${file.fsPath}`);
                    vscode.window.showInputBox({ prompt: "File Name", value: fname(file.path) }).then((name) => {
                        // Empty/cancelled input aborts the import.
                        if (!name) {
                            vscode.window.showWarningMessage("import canceled");
                            return;
                        }
                        console.log(`the new name is:${name}`);
                        // Collision check: compare against base names of files
                        // already present directly in the image root.
                        vscode.workspace.fs.readDirectory(dest).then(entity => {
                            entity.forEach((x) => console.log('entry in image root:', x[0], x[1]));
                            let fns = entity.map(x => x[1] == vscode.FileType.File ? x[0].split(".", 2)[0] : undefined).filter(x => x != undefined);
                            if (fns.indexOf(name) > -1) {
                                vscode.window.showQuickPick(["No", "Yes"], { canPickMany: false, placeHolder: 'filename already exists,continue import(same extension will override)?' }).then(x => {
                                    if ('Yes' == x) {
                                        unzip(file, dest, name);
                                    } else {
                                        vscode.window.showWarningMessage("import canceled");
                                    }
                                })
                            } else {
                                unzip(file, dest, name);
                            }
                        }, (e) => {
                            vscode.window.showErrorMessage(`open zip file failed:${e}`);
                        });
                    });
                }
            });
        }, (e) => {
            vscode.window.showWarningMessage(`make image root dir err:${e}`);
        });
    });
    context.subscriptions.push(importCmd);
}
// this method is called when your extension is deactivated
// (no explicit cleanup needed: the command registration from `activate` is
// disposed through context.subscriptions)
export function deactivate() { }
|
#include <string.h>
/*
 * strlen - count the bytes of the NUL-terminated string s, excluding the
 * terminator itself. Marked pure: the result depends only on the argument
 * and the memory it points to, enabling call-folding by the compiler.
 */
__attribute__((pure))
size_t strlen(const char* s)
{
	const char* p = s;

	while (*p != '\0')
		p++;

	return (size_t)(p - s);
}
|
<filename>spec/filters/grok/timeout2.rb
require "test_utils"
require "grok-pure"
require "timeout"
# Regression specs for user-reported grok patterns that previously took
# pathologically long to match (catastrophic backtracking). The elapsed-time
# assertions are commented out pending a proper performance-test harness.
describe "grok known timeout failures" do
  extend LogStash::RSpec

  describe "user reported timeout" do
    config <<-'CONFIG'
      filter {
        grok {
          match => [ "message", "%{SYSLOGBASE:ts1} \[\#\|%{TIMESTAMP_ISO8601:ts2}\|%{DATA} for %{PATH:url} = %{POSINT:delay} ms.%{GREEDYDATA}" ]
        }
      }
    CONFIG
    # `start` is captured at config-build time so `duration` spans setup + match.
    start = Time.now
    line = 'Nov 13 19:23:34 qa-api1 glassfish: [#|2012-11-13T19:23:25.604+0000|INFO|glassfish3.1.2|com.locusenergy.platform.messages.LocusMessage|_ThreadID=59;_ThreadName=Thread-2;|API TIMER - Cache HIT user: null for /kiosks/194/energyreadings/data?tz=America/New_York&fields=kwh&type=gen&end=2012-11-13T23:59:59&start=2010-12-16T00:00:00-05:00&gran=yearly = 5 ms.|#]'
    sample line do
      duration = Time.now - start
      # insist { duration } < 0.03 #TODO refactor performance tests
    end
  end

  describe "user reported timeout" do
    config <<-'CONFIG'
      filter {
        grok {
          pattern => [
            "%{DATA:http_host} %{IPORHOST:clientip} %{USER:ident} %{USER:http_auth} \[%{HTTPDATE:http_timestamp}\] \"%{WORD:http_method} %{DATA:http_request} HTTP/%{NUMBER:http_version}\" %{NUMBER:http_response_code} (?:%{NUMBER:bytes}|-) \"(?:%{URI:http_referrer}|-)\" %{QS:http_user_agent} %{QS:http_x_forwarded_for} %{USER:ssl_chiper} %{NUMBER:request_time} (?:%{DATA:gzip_ratio}|-) (?:%{DATA:upstream}|-) (?:%{NUMBER:upstream_time}|-) (?:%{WORD:geoip_country}|-)",
            "%{DATA:http_host} %{IPORHOST:clientip} %{USER:ident} %{USER:http_auth} \[%{HTTPDATE:http_timestamp}\] \"%{WORD:http_method} %{DATA:http_request} HTTP/%{NUMBER:http_version}\" %{NUMBER:http_response_code} (?:%{NUMBER:bytes}|-) \"(?:%{URI:http_referrer}|-)\" %{QS:http_user_agent} %{QS:http_x_forwarded_for} %{USER:ssl_chiper} %{NUMBER:request_time} (?:%{DATA:gzip_ratio}|-) (?:%{DATA:upstream}|-) (?:%{NUMBER:upstream_time}|-)"
          ]
        }
      }
    CONFIG
    #TODO fixme
    # start = Time.now
    # sample 'www.example.com 10.6.10.13 - - [09/Aug/2012:16:19:39 +0200] "GET /index.php HTTP/1.1" 403 211 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.8.1.12) Gecko/20080201 Firefox/2.0.0.12" "-" - 0.019 - 10.6.10.12:81 0.002 US' do
    #   duration = Time.now - start
    #   # insist { duration } < 1 #TODO refactor performance tests
    #   puts( subject["tags"])
    #   reject { subject["tags"] }.include?("_grokparsefailure")
    #   insist { subject["geoip_country"] } == ["US"]
    # end
    # sample 'www.example.com 10.6.10.13 - - [09/Aug/2012:16:19:39 +0200] "GET /index.php HTTP/1.1" 403 211 "-" "Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.8.1.12) Gecko/20080201 Firefox/2.0.0.12" "-" - 0.019 - 10.6.10.12:81 0.002 -' do
    #   duration = Time.now - start
    #   # insist { duration } < 1 #TODO refactor performance tests
    #   reject { subject["tags"] }.include?("_grokparsefailure")
    #   insist { subject["geoip_country"].nil? } == true
    # end
  end
end
__END__
|
#!/bin/sh
# Argument-validation checks shared by root and non-root runs: each fuidshift
# invocation below is malformed and must fail. `fail` flips to 1 if any of
# them unexpectedly succeeds; the final test is the function's return status.
test_common_fuidshift() {
  # test some bad arguments
  fail=0
  fuidshift > /dev/null 2>&1 && fail=1
  fuidshift -t > /dev/null 2>&1 && fail=1
  fuidshift /tmp -t b:0 > /dev/null 2>&1 && fail=1
  fuidshift /tmp -t x:0:0:0 > /dev/null 2>&1 && fail=1
  [ "${fail}" -ne 1 ]
}
# Unprivileged fuidshift checks: shifting our own uid/gid into a container map
# must work, shifting a uid outside the map must be rejected (-1 -1), and the
# reverse (-r) shift must map container root back to our own ids.
# NOTE(review): relies on `fail` being left at 0 by test_common_fuidshift.
test_nonroot_fuidshift() {
  test_common_fuidshift

  LXD_FUIDMAP_DIR=$(mktemp -d -p "${TEST_DIR}" XXX)
  u=$(id -u)
  g=$(id -g)
  u1=$((u+1))
  g1=$((g+1))

  touch "${LXD_FUIDMAP_DIR}/x1"
  fuidshift "${LXD_FUIDMAP_DIR}/x1" -t "u:${u}:100000:1" "g:${g}:100000:1" | tee /dev/stderr | grep "to 100000 100000" > /dev/null || fail=1
  if [ "${fail}" -eq 1 ]; then
    echo "==> Failed to shift own uid to container root"
    false
  fi
  # ids one above ours are outside the map, so the shift must yield -1 -1.
  fuidshift "${LXD_FUIDMAP_DIR}/x1" -t "u:${u1}:10000:1" "g:${g1}:100000:1" | tee /dev/stderr | grep "to -1 -1" > /dev/null || fail=1
  if [ "${fail}" -eq 1 ]; then
    echo "==> Wrongly shifted invalid uid to container root"
    false
  fi

  # unshift it
  chown 100000:100000 "${LXD_FUIDMAP_DIR}/x1"
  fuidshift "${LXD_FUIDMAP_DIR}/x1" -r -t "u:${u}:100000:1" "g:${g}:100000:1" | tee /dev/stderr | grep "to 0 0" > /dev/null || fail=1
  if [ "${fail}" -eq 1 ]; then
    echo "==> Failed to unshift container root back to own uid"
    false
  fi
}
# Root variant currently just reuses the unprivileged checks.
test_root_fuidshift() {
  test_nonroot_fuidshift

  # Todo - test ranges
}
# Entry point: dispatch to the root or non-root variant based on effective uid.
test_fuidshift() {
  if [ "$(id -u)" -ne 0 ]; then
    test_nonroot_fuidshift
  else
    test_root_fuidshift
  fi
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.facebookOfficial = void 0;
var facebookOfficial = {
"viewBox": "0 0 1536 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M1451 128q35 0 60 25t25 60v1366q0 35-25 60t-60 25h-391v-595h199l30-232h-229v-148q0-56 23.5-84t91.5-28l122-1v-207q-63-9-178-9-136 0-217.5 80t-81.5 226v171h-200v232h200v595h-735q-35 0-60-25t-25-60v-1366q0-35 25-60t60-25h1366z"
}
}]
};
exports.facebookOfficial = facebookOfficial; |
/**
 * Simple value holder for a merchant's bank account record.
 *
 * Usage pattern visible here: populate via createAccount(), optionally set a
 * status via setStatus(), then read everything back with getDetails().
 */
class MerchantBankAccounts {
    // Account display name.
    public $name;
    // External bank-account identifier.
    public $account_id;
    // Owning merchant's identifier.
    public $merchant_id;
    // Store the account is attached to.
    public $store_id;
    // Current status; remains null until setStatus() is called.
    public $status;
    // Identifier of the user who created the record.
    public $created_by;

    /**
     * Populates the account fields (does not touch $status).
     */
    public function createAccount($name, $account_id, $merchant_id, $store_id, $created_by) {
        $this->name = $name;
        $this->account_id = $account_id;
        $this->merchant_id = $merchant_id;
        $this->store_id = $store_id;
        $this->created_by = $created_by;
    }

    /**
     * Sets the account status.
     */
    public function setStatus($status) {
        $this->status = $status;
    }

    /**
     * Returns all fields as an associative array; 'status' is null when
     * setStatus() was never called.
     */
    public function getDetails() {
        return [
            'name' => $this->name,
            'account_id' => $this->account_id,
            'merchant_id' => $this->merchant_id,
            'store_id' => $this->store_id,
            'status' => $this->status,
            'created_by' => $this->created_by
        ];
    }
}
/*
* Copyright 2013 Twitter inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.tormenta.scheme.avro
import org.scalacheck.Arbitrary
import com.twitter.bijection.Injection
import scala.math.Equiv
import org.scalacheck.Prop._
import org.apache.avro.generic.GenericRecord
import org.apache.avro.specific.SpecificRecordBase
import scala.collection.JavaConverters._
import com.twitter.tormenta.scheme.Scheme
import java.nio.ByteBuffer
/**
* @author <NAME>
* @since 9/14/13
*/
// Shared ScalaCheck helpers for avro Scheme round-trip properties: an encoded
// record must deserialize to exactly one tuple whose first field equals the
// original record (or, for the negative property, a known fallback record).
trait BaseAvroProperties {
  // Plain equality for generic avro records.
  implicit val genericRecordEq = new Equiv[GenericRecord] {
    def equiv(x: GenericRecord, y: GenericRecord): Boolean = x == y
  }
  // Plain equality for specific (generated) avro records.
  implicit val specificRecordEq = new Equiv[SpecificRecordBase] {
    def equiv(x: SpecificRecordBase, y: SpecificRecordBase): Boolean = x == y
  }
  // Derives Arbitrary[B] by mapping an Arbitrary[A] through fn.
  def arbitraryViaFn[A, B](fn: A => B)(implicit arb: Arbitrary[A]): Arbitrary[B] =
    Arbitrary {
      arb.arbitrary.map {
        fn
      }
    }
  // Property: encode with inj, deserialize with scheme, expect exactly one
  // tuple whose head equals the original record.
  def isAvroRecordDecoded[A](implicit arba: Arbitrary[A], scheme: Scheme[A],
    inj: Injection[A, Array[Byte]], eqa: Equiv[A]) =
    forAll {
      (a: A) =>
        val b = inj(a)
        val deserialize = scheme.deserialize(ByteBuffer.wrap(b))
        val c = deserialize.asScala
        !c.isEmpty && c.size == 1 && eqa.equiv(c.head.get(0).asInstanceOf[A], a)
    }
  // Property: decoding fails in a controlled way, yielding `failedRecord`
  // instead of the original.
  def isAvroRecordNotDecoded[A](implicit arba: Arbitrary[A], scheme: Scheme[A],
    failedRecord: A, inj: Injection[A, Array[Byte]], eqa: Equiv[A]) =
    forAll {
      (a: A) =>
        val b = inj(a)
        val c = scheme.deserialize(ByteBuffer.wrap(b)).asScala
        !c.isEmpty && c.size == 1 && eqa.equiv(c.head.get(0).asInstanceOf[A], failedRecord)
    }
}
|
<gh_stars>10-100
export const SET_CONFIG = 'SET_CONFIG'
export const SET_CONFIG_TREE = 'SET_CONFIG_TREE'

/**
 * Validate a hierarchy configuration object and wrap it in a SET_CONFIG
 * action. Checks run in a fixed order and throw on the first missing field.
 *
 * @param {Object} config - must provide hierarchy, relationship,
 *   entityLabelKey and data.
 * @returns {{type: string, config: Object}} the Redux action
 * @throws {TypeError} when a required field is absent or malformed
 */
export const setConfig = (config) => {
  const { hierarchy, relationship, entityLabelKey, data } = config;

  if (!hierarchy || hierarchy.length === 0) {
    throw new TypeError("config must contain a 'hierarchy' array");
  }
  const relationshipOk =
    relationship && relationship.parentType && relationship.sourceRef && relationship.targetRef;
  if (!relationshipOk) {
    throw new TypeError("config must contain a 'relationship' object with fields 'parentType', 'sourceRef', 'targetRef'. See docs.");
  }
  if (!entityLabelKey) {
    throw new TypeError("config must contain a 'entityLabelKey' field that specifies what property of the entity to use for display");
  }
  if (!(data && data.entities && data.relationships)) {
    throw new TypeError("config must contain a 'data' object with fields 'entities' and 'relationships'. See docs.");
  }

  return { type: SET_CONFIG, config };
}
/**
 * Validate a tree configuration object and wrap it in a SET_CONFIG_TREE
 * action. Unlike setConfig, no hierarchy and no relationship.parentType are
 * required. Checks run in a fixed order and throw on the first missing field.
 *
 * @param {Object} config - must provide relationship, entityLabelKey and data.
 * @returns {{type: string, config: Object}} the Redux action
 * @throws {TypeError} when a required field is absent or malformed
 */
export const setConfigTree = (config) => {
  const { relationship, entityLabelKey, data } = config;

  if (!(relationship && relationship.sourceRef && relationship.targetRef)) {
    throw new TypeError("config must contain a 'relationship' object with fields 'sourceRef' and 'targetRef'. See docs.");
  }
  if (!entityLabelKey) {
    throw new TypeError("config must contain a 'entityLabelKey' field that specifies what property of the entity to use for display");
  }
  if (!(data && data.entities && data.relationships)) {
    throw new TypeError("config must contain a 'data' object with fields 'entities' and 'relationships'. See docs.");
  }

  return { type: SET_CONFIG_TREE, config };
}
|
import { BasicButtonState, BasicRadioButton } from "../src";
import { getTestMaterialSet, testMaterialVisible } from "./TestMaterial";
describe("BasicRadioButton", () => {
// One material set and one button instance are shared across all tests;
// resetButton() (below) returns the button to a known state first.
const mat = getTestMaterialSet();
const button = new BasicRadioButton(mat);
// A freshly constructed button is unfrozen and unselected.
test("constructor", () => {
expect(button).toBeTruthy();
expect(button.frozen).toBe(false);
expect(button.selection).toBeFalsy();
});
test("select", () => {
resetButton(button);
button.emit("pointerover");
testMaterialVisible(mat, BasicButtonState.NORMAL_OVER);
button.selectButton();
testMaterialVisible(mat, BasicButtonState.SELECT);
// Selecting again while already selected is ignored.
button.selectButton();
testMaterialVisible(mat, BasicButtonState.SELECT);
});
test("select and over", () => {
resetButton(button);
button.emit("pointerover");
testMaterialVisible(mat, BasicButtonState.NORMAL_OVER);
// pointerdown must not change the state while the button is selected.
button.selectButton();
button.emit("pointerdown");
testMaterialVisible(mat, BasicButtonState.SELECT);
});
// A disabled button stays DISABLE even when a select is requested.
test("disable", () => {
resetButton(button);
button.disableButton();
button.selectButton();
testMaterialVisible(mat, BasicButtonState.DISABLE);
});
});
/**
 * Restore the shared button to its baseline state between tests:
 * deselected, enabled, and with the pointer outside.
 *
 * NOTE(review): the call order (deselect before re-enable, pointerout last)
 * appears deliberate for the button's internal state machine — confirm
 * before reordering.
 */
function resetButton(btn: BasicRadioButton): void {
btn.deselectButton();
btn.enableButton();
btn.emit("pointerout");
}
|
#!/usr/bin/env ruby
# file: maygion-ipcam.rb
require 'net/http'
require 'open-uri'
# Minimal client for a MayGion IP camera: logs in, keeps the session cookie,
# and exposes pan/tilt (PTZ) movement plus JPEG snapshot retrieval.
class MayGionIPCam
  attr_reader :resp, :cookie

  # Movement-duration multiplier: each unit step holds the PTZ motor for
  # this many ticks.
  Factor = 500

  # opt keys: :username, :password, :address, :port, :cookie.
  # When no cookie is supplied, logs in immediately and captures the
  # session cookie from the Set-Cookie response header.
  def initialize(opt)
    o = {username: 'user', password: '<PASSWORD>', address: '192.168.1.14', \
         port: 80, cookie: ''}.merge(opt)
    @addr, @port = o[:address], o[:port]
    @raw_url = "http://#{@addr}/cgi.cmd?cmd=moveptz&user=" + o[:username] \
        + "&pwd=" + o[:password]
    @cookie = o[:cookie]
    if @cookie.empty?
      # login returns a single Net::HTTPResponse; the old
      # `@resp, data = login(...)` always left `data` nil.
      @resp = login(o[:username], o[:password])
      @cookie = @resp.response['set-cookie'].scan(/\w+\=[^=]+(?=;)/).join(';')
    end
  end

  # Pan/tilt helpers; `i` scales how long the camera moves.
  def left(i=1)
    move_camera "&dir=btnPtzLeft&tick=%s&nPtzTimes=1" % [i * Factor]
  end

  def right(i=1)
    move_camera "&dir=btnPtzRight&tick=%s&nPtzTimes=1" % [i * Factor]
  end

  def up(i=1)
    move_camera "&dir=btnPtzUp&tick=%s&nPtzTimes=1" % [i * Factor]
  end

  def down(i=1)
    move_camera "&dir=btnPtzDown&tick=%s&nPtzTimes=1" % [i * Factor]
  end

  # POST the login form; returns the Net::HTTPResponse (the session cookie
  # is in its 'set-cookie' header).
  def login(username, password)
    http = Net::HTTP.new(@addr, @port)
    path = '/cgi-bin/cgicmd'
    # POST request -> logging in
    data = "szAccount=#{username}&szPassword=" + password +
        "&cmd=Login&urlOnSuccess=/mobile.asp"
    headers = {
      'Referer' => "http://#{@addr}/login.asp",
      'Content-Type' => 'application/x-www-form-urlencoded'
    }
    http.post(path, data, headers)
  end

  # Fire a PTZ command against the camera's CGI endpoint and return the body.
  # Kernel#open no longer accepts URLs as of Ruby 3.0 — use URI.open
  # (open-uri is required at the top of this file).
  # The header name was fixed from 'UserAgent' to the standard 'User-Agent'.
  def move_camera(instruction)
    url = @raw_url + instruction
    URI.open(url, 'User-Agent' => 'Ruby IPCO (IP Camera Operator)', \
             'Cookie' => @cookie){|x| x.read}
  end

  # Fetch a single JPEG frame from the camera.
  def snap()
    url = "http://#{@addr}/snap.jpg"
    URI.open(url, 'User-Agent' => 'Ruby IPCO (IP Camera Operator)', \
             'Cookie' => @cookie){|x| x.read}
  end
end
|
<filename>src/replayToMovie.js<gh_stars>0
const AWSXRay = require('aws-xray-sdk-core');
const Canvas = require('canvas');
const fs = require('fs');
const os = require('os');
const request = require("request");
const spawn = require('child_process').spawn;
const { debug } = require('./utils');
// Bundled ffmpeg binary shipped with the Lambda package.
const FFMPEG_PATH = "binaries/ffmpeg/ffmpeg";
// Truthy when running under `sam local`; used to surface ffmpeg output.
const LOCAL = process.env.AWS_SAM_LOCAL;
// H.264 constant-rate-factor; lower is higher quality.
const CRF = process.env.QUALITY || 23;
const FRAME_LIMIT = 30 * 10; // 10 seconds @ 30 fps
const SPRITE_LIMIT = 25;

// Allow binaries to run out of the bundle.
// Fix: this append used to appear twice in this file, adding
// LAMBDA_TASK_ROOT to PATH twice; it now runs exactly once.
process.env['PATH'] += ':' + process.env['LAMBDA_TASK_ROOT'];

const SPRITE_S3_BASE = "http://s3.amazonaws.com/cdo-curriculum/images/sprites/spritesheet_tp2/";
const SPRITE_BASE = "./sprites/";
// Cache of loaded p5 animations, keyed by sprite (costume) name.
const ANIMATIONS = {};
// Output video dimensions in pixels.
const WIDTH = 400;
const HEIGHT = 400;

// Some effects don't currently work, and should be skipped
const BROKEN_FOREGROUND_EFFECTS = [
  "pizzas",
  "smile_face",
  "emojis",
  "paint_drip"
];
const BROKEN_BACKGROUND_EFFECTS = [
  "kaleidoscope",
  "quads"
];
// Mock the browser environment for p5.
// Note: must be done before requiring danceParty
global.window = global;
window.performance = {now: Date.now};
// Minimal DOM stand-in: only what p5 touches. createElement supports
// canvas elements exclusively; everything else is a no-op stub.
window.document = {
hasFocus: () => {},
getElementsByTagName: () => [],
createElement: type => {
if (type !== 'canvas') {
throw new Error('Cannot create type.');
}
const created = Canvas.createCanvas();
// stub ctx.scale to prevent any attempt at scaling down to 0, since that
// breaks node canvas (even though it works fine in the browser). Instead
// just scale down to something really, really small.
//
// See https://github.com/Automattic/node-canvas/issues/702
const context = created.getContext('2d');
const origScale = context.scale;
context.scale = function(x, y) {
if (x === 0) {
x = 0.001;
}
if (y === 0) {
y = 0.001;
}
return origScale.call(this, x, y);
};
// p5 expects DOM elements to expose a style object.
created.style = {};
return created;
},
body: {
appendChild: () => {}
}
};
window.screen = {};
// Event wiring is irrelevant off-screen; stub it out.
window.addEventListener = () => {};
window.removeEventListener = () => {};
// Route p5's image classes to node-canvas implementations.
window.Image = Canvas.Image;
window.ImageData = Canvas.ImageData;
// dance-party must be loaded after the window mocks above are in place.
const danceParty = require('@code-dot-org/dance-party');
const Effects = danceParty.Effects;
const MOVE_NAMES = danceParty.constants.MOVE_NAMES;
const P5 = require('@code-dot-org/p5');
P5.disableFriendlyErrors = true;
require('@code-dot-org/p5.play/lib/p5.play');
// Headless p5 instance sized to the output video.
// _fixedSpriteAnimationFrameSizes is presumably a dance-party/p5.play flag
// controlling sprite frame sizing — confirm against those libraries.
const p5Inst = new P5(function (p5obj) {
p5obj._fixedSpriteAnimationFrameSizes = true;
p5obj.width = WIDTH;
p5obj.height = HEIGHT;
});
// Create an initial rendering canvas; this will be replaced by the first
// request, but we need a renderer applied to the p5 instance for the Effects
// constructor to work, and the renderer needs a canvas. So here we are.
createNewP5RenderingCanvas();
// Second argument is presumably an opacity/alpha factor — confirm against
// the dance-party Effects constructor.
const backgroundEffects = new Effects(p5Inst, 1);
const foregroundEffects = new Effects(p5Inst, 0.8);
/**
 * Build a fresh emulated canvas and install it as p5's 2D rendering target.
 *
 * A brand-new canvas is required on every request: under sustained traffic
 * a reused canvas can "freeze" and repeat one frame for a whole video.
 * See https://github.com/code-dot-org/dance-party/issues/514 for context.
 *
 * @returns {Canvas} the canvas now backing p5's renderer
 */
function createNewP5RenderingCanvas() {
  const canvas = window.document.createElement('canvas');
  const renderer = new P5.Renderer2D(canvas, p5Inst, false);
  p5Inst._renderer = renderer;
  renderer.resize(WIDTH, HEIGHT);
  return canvas;
}
/**
 * Load the sprite-sheet image and its frame metadata for one
 * (sprite, move) pair and wrap them in a p5 spriteSheet.
 *
 * Both the PNG and the JSON are tried from the local ./sprites/ bundle
 * first, falling back to the public S3 bucket when missing locally.
 *
 * @param {string} spriteName - costume/skin name
 * @param {string} moveName - dance move name
 * @returns {Promise} resolves to the p5 spriteSheet
 */
function loadNewSpriteSheet(spriteName, moveName) {
debug(`loading ${spriteName}@${moveName}`);
// Image: local file first, S3 on failure.
const image = new Promise((resolve, reject) => {
const localFile = SPRITE_BASE + spriteName + "_" + moveName + ".png";
p5Inst.loadImage(localFile, resolve, () => {
debug(`could not find ${spriteName}@${moveName} image locally, loading from S3`);
const s3File = SPRITE_S3_BASE + spriteName + "_" + moveName + ".png";
p5Inst.loadImage(s3File, resolve, reject);
});
});
// Frame metadata: same local-then-S3 fallback.
const jsonData = new Promise((resolve, reject) => {
const localFile = SPRITE_BASE + spriteName + "_" + moveName + ".json";
fs.readFile(localFile, (err, data) => {
if (err) {
debug(`could not find ${spriteName}@${moveName} json locally, loading from S3`);
const s3File = SPRITE_S3_BASE + spriteName + "_" + moveName + ".json";
request({
url: s3File,
json: true
}, (error, response, body) => {
if (!error && response.statusCode === 200) {
resolve(body);
} else {
reject(error);
}
});
} else {
resolve(JSON.parse(data));
}
});
});
return Promise.all([image, jsonData]).then(([image, jsonData]) => {
// from https://github.com/code-dot-org/dance-party/blob/763de665816848b81f93f7e194d9ae0a35f5d1b7/src/p5.dance.js#L175-L178:
// Passing true as the 3rd arg to loadSpriteSheet() indicates that we want
// it to load the image as a Image (instead of a p5.Image), which avoids
// a canvas creation. This makes it possible to run on mobile Safari in
// iOS 12 with canvas memory limits.
// TODO elijah: see if this makes a perf difference on lambda, either way
return p5Inst.loadSpriteSheet(
image,
jsonData.frames,
true
);
});
}
/**
 * Ensure every move animation for `spriteName` is loaded into the
 * ANIMATIONS cache. Subsequent calls for the same sprite return instantly.
 *
 * @param {string} spriteName - costume/skin name
 */
async function loadSprite(spriteName) {
  // Already cached (or being populated) — nothing to do.
  if (ANIMATIONS[spriteName]) {
    return;
  }
  debug(`loading animations for ${spriteName}`);
  const animations = [];
  ANIMATIONS[spriteName] = animations;
  // Sequential awaits preserved: sheets load one move at a time, in
  // MOVE_NAMES order, so animation indexes line up with move indexes.
  for (const move of MOVE_NAMES) {
    const spriteSheet = await loadNewSpriteSheet(spriteName, move.name);
    animations.push(p5Inst.loadAnimation(spriteSheet));
  }
}
// Standalone export entry point: renders `replay` to an mp4 at `outputPath`,
// tracing the work under an X-Ray segment.
// NOTE(review): on encoding failure this calls process.exit(1) inside the
// catch, so exportSegment.close() is skipped — presumably acceptable for the
// test harness; confirm.
module.exports.runTestExport = async (outputPath, replay, parentSegment = new AWSXRay.Segment('runExportStandalone')) => {
const exportSegment = new AWSXRay.Segment('runExport', parentSegment.trace_id, parentSegment.id);
// renderVideo returns ffmpeg's stdin plus a promise that settles when
// encoding finishes.
let [pipe, promise] = module.exports.renderVideo(outputPath, exportSegment);
// _handle.setBlocking is an internal Node API; presumably used so frame
// writes to ffmpeg don't buffer unboundedly — confirm.
pipe._handle.setBlocking(true);
await module.exports.renderImages(replay, pipe, exportSegment);
await promise.catch(err => {
exportSegment.addError(err);
// eslint-disable-next-line no-console
console.error(err);
process.exit(1);
});
exportSegment.close();
};
/**
 * Replay a recorded dance frame-by-frame and stream each rendered frame,
 * as a raw WIDTH x HEIGHT x 4-byte buffer, into `writer` (ffmpeg's stdin).
 *
 * @param {Array} replay - per-frame state: sprites, bg/fg effect ids, palette
 * @param {stream.Writable} writer - receives one canvas.toBuffer('raw') per frame
 * @param {Object} parentSegment - X-Ray segment tracing is attached to
 */
module.exports.renderImages = async (replay, writer, parentSegment) => {
const renderSegment = new AWSXRay.Segment('renderImages', parentSegment.trace_id, parentSegment.id);
const sprites = [];
let lastBackground;
let lastForeground;
// Fresh canvas per request; see createNewP5RenderingCanvas for why.
const canvas = createNewP5RenderingCanvas();
// Cap video length and sprite count to bound render time.
replay.length = Math.min(replay.length, FRAME_LIMIT);
for (const frame of replay) {
// Load sprites and set state
frame.sprites.length = Math.min(frame.sprites.length, SPRITE_LIMIT);
for (let i = 0; i < frame.sprites.length; i++) {
const entry = frame.sprites[i];
if (!(entry.style && entry.animationLabel)) {
// A sprite was created without a dancer skin or a dance animation;
// this could be because a non-dancer sprite was incorrectly logged as
// a dancer.
continue;
}
// First appearance of this slot: create the sprite and attach every
// move animation for its costume.
if (!sprites[i]) {
sprites[i] = p5Inst.createSprite();
await loadSprite(entry.style);
ANIMATIONS[entry.style].forEach(function (animation, j) {
sprites[i].addAnimation("anim" + j, animation);
});
}
// Copy the recorded per-frame state onto the live sprite.
const sprite = sprites[i];
sprite.changeAnimation(entry.animationLabel);
sprite.mirrorX(entry.mirrorX);
sprite.rotation = entry.rotation;
sprite.scale = entry.scale;
// Ignoring tint for now; it causes perf issues
//sprite.tint = entry.tint === undefined ? undefined : "hsb(" + (Math.round(entry.tint) % 360) + ", 100%, 100%)";
sprite.setFrame(entry.animationFrame);
sprite.x = entry.x;
sprite.y = entry.y;
sprite.height = entry.height;
sprite.width = entry.width;
sprite.visible = entry.visible;
}
// Draw frame
p5Inst.background('#fff');
if (frame.palette) {
backgroundEffects.currentPalette = frame.palette;
}
if (!BROKEN_BACKGROUND_EFFECTS.includes(frame.bg)) {
const effect = backgroundEffects[frame.bg] || backgroundEffects.none;
try {
// Re-init the effect only when it changed since the previous frame.
if (lastBackground != frame.bg && effect.init) {
effect.init();
}
lastBackground = frame.bg;
effect.draw(frame.context);
} catch (err) {
// A broken effect must not kill the whole video; record and move on.
renderSegment.addError(err);
}
}
p5Inst.drawSprites();
if (frame.fg && !BROKEN_FOREGROUND_EFFECTS.includes(frame.fg)) {
p5Inst.push();
p5Inst.blendMode(foregroundEffects.blend);
try {
const effect = foregroundEffects[frame.fg] || foregroundEffects.none;
if (lastForeground != frame.fg && effect.init) {
effect.init();
}
lastForeground = frame.fg;
effect.draw(frame.context);
} catch (err) {
renderSegment.addError(err);
}
p5Inst.pop();
}
// Write an image.
writer.write(canvas.toBuffer('raw'));
}
// Release p5.play sprites and signal end-of-stream to ffmpeg.
sprites.forEach(sprite => sprite.remove());
writer.end();
debug('finished');
renderSegment.close();
};
/**
 * Spawn ffmpeg configured to read raw 4-byte-per-pixel frames on stdin and
 * encode them into a broadly compatible H.264 mp4 at `outputFile`.
 *
 * @param {string} outputFile - destination mp4 path
 * @returns {[stream.Writable, Promise<void>]} ffmpeg's stdin (write one
 *   WIDTH*HEIGHT*4-byte buffer per frame) and a promise that resolves on a
 *   clean exit and rejects with an Error otherwise (previously it rejected
 *   with no value, so callers logged `undefined`).
 */
module.exports.renderVideo = (outputFile) => {
  // Spawn the ffmpeg process.
  const args = [
    '-f', 'rawvideo',
    '-r', '30',
    // Match the native-endian byte order node-canvas emits for toBuffer('raw').
    '-pix_fmt', (os.endianness() === 'LE' ? 'bgra' : 'argb'),
    '-s', `${WIDTH}x${HEIGHT}`,
    '-frame_size', (WIDTH * HEIGHT * 4),
    '-i', 'pipe:0',
    '-f', 'mp4',
    // https://trac.ffmpeg.org/wiki/Encode/H.264#crf
    '-crf', CRF.toString(),
    // https://trac.ffmpeg.org/wiki/Encode/H.264#faststartforwebvideo
    '-movflags', 'faststart',
    // https://trac.ffmpeg.org/wiki/Encode/H.264#Preset
    '-preset', 'ultrafast',
    // https://trac.ffmpeg.org/wiki/Encode/H.264#Tune
    '-tune', 'zerolatency',
    // https://trac.ffmpeg.org/wiki/Encode/H.264#Alldevices
    '-profile:v', 'baseline',
    '-level', '3.0',
    // https://trac.ffmpeg.org/wiki/Encode/H.264#Encodingfordumbplayers
    '-pix_fmt', 'yuv420p',
    '-y',
    outputFile
  ];
  // Surface ffmpeg output when running under SAM local; discard it in Lambda.
  const stdout = LOCAL ? 'inherit' : 'ignore';
  const options = {
    stdio: ['pipe', stdout, stdout],
  };
  debug(`${FFMPEG_PATH} ${args.join(' ')}`);
  const child = spawn(FFMPEG_PATH, args, options);
  const promise = new Promise((resolve, reject) => {
    debug('Waiting for ffmpeg to encode');
    child.on('error', function(err) {
      // eslint-disable-next-line no-console
      console.error('Error during encoding: ' + err);
      // Reject with a real Error so callers get a useful reason.
      reject(new Error('Error during encoding: ' + err));
    });
    child.on('exit', function(val) {
      debug('Encoding complete with return value ' + val);
      if (val === 0) {
        resolve();
      } else {
        reject(new Error('ffmpeg exited with status ' + val));
      }
    });
  });
  return [child.stdin, promise];
};
|
/*
Copyright 2019-2020 Netfoundry, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
'use strict';
/**
 * Userland brand checks for typed arrays, based on the
 * %TypedArray%.prototype @@toStringTag getter: it returns the concrete
 * constructor name for typed-array receivers and undefined for anything
 * else, without relying on instanceof.
 */
function uncurryThis(func) {
  // Turn a `this`-dependent function into one taking the receiver first.
  return function (...args) {
    return func.call(...args);
  };
}

// The prototype shared by every concrete typed-array constructor.
const TypedArrayPrototype = Object.getPrototypeOf(Uint8Array.prototype);

const TypedArrayProto_toStringTag =
  uncurryThis(
    Object.getOwnPropertyDescriptor(TypedArrayPrototype,
      Symbol.toStringTag).get);

// True when `value` is a typed array whose brand is exactly `name`.
function hasTypedArrayTag(value, name) {
  return TypedArrayProto_toStringTag(value) === name;
}

function isTypedArray(value) {
  return TypedArrayProto_toStringTag(value) !== undefined;
}

function isUint8Array(value) {
  return hasTypedArrayTag(value, 'Uint8Array');
}

function isUint8ClampedArray(value) {
  return hasTypedArrayTag(value, 'Uint8ClampedArray');
}

function isUint16Array(value) {
  return hasTypedArrayTag(value, 'Uint16Array');
}

function isUint32Array(value) {
  return hasTypedArrayTag(value, 'Uint32Array');
}

function isInt8Array(value) {
  return hasTypedArrayTag(value, 'Int8Array');
}

function isInt16Array(value) {
  return hasTypedArrayTag(value, 'Int16Array');
}

function isInt32Array(value) {
  return hasTypedArrayTag(value, 'Int32Array');
}

function isFloat32Array(value) {
  return hasTypedArrayTag(value, 'Float32Array');
}

function isFloat64Array(value) {
  return hasTypedArrayTag(value, 'Float64Array');
}

function isBigInt64Array(value) {
  return hasTypedArrayTag(value, 'BigInt64Array');
}

function isBigUint64Array(value) {
  return hasTypedArrayTag(value, 'BigUint64Array');
}
// Public API: userland mirror of the helpers this module replaces from
// Node's internal `internalBinding('types')`.
module.exports = {
// ...internalBinding('types'),
isArrayBufferView: ArrayBuffer.isView,
isTypedArray,
isUint8Array,
isUint8ClampedArray,
isUint16Array,
isUint32Array,
isInt8Array,
isInt16Array,
isInt32Array,
isFloat32Array,
isFloat64Array,
isBigInt64Array,
isBigUint64Array
};
|
<reponame>palantir/checks<filename>vendor/github.com/palantir/godel/app.go
// Copyright 2016 <NAME>, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package godel
import (
"github.com/nmiyake/pkg/errorstringer"
"github.com/palantir/pkg/cli"
"github.com/palantir/godel/cmd"
"github.com/palantir/godel/cmd/checkpath"
"github.com/palantir/godel/cmd/clicmds"
"github.com/palantir/godel/cmd/githooks"
"github.com/palantir/godel/cmd/githubwiki"
"github.com/palantir/godel/cmd/godel"
"github.com/palantir/godel/cmd/idea"
"github.com/palantir/godel/cmd/packages"
"github.com/palantir/godel/cmd/verify"
)
// App builds the gödel CLI application: global flags, version information,
// the built-in subcommands, and the configuration-driven CLI commands.
func App(gödelPath string) *cli.App {
	app := cli.NewApp(cli.DebugHandler(errorstringer.StackWithInterleavedMessages))
	app.Name = "godel"
	app.Usage = "Run tasks for coding, checking, formatting, testing, building and publishing Go code"
	app.Flags = append(app.Flags, cmd.GlobalCLIFlags()...)
	app.Version = godel.Version

	// Built-in subcommands, followed by the commands derived from CLI config.
	subcommands := []cli.Command{
		godel.VersionCommand(),
		godel.InstallCommand(),
		godel.UpdateCommand(),
		checkpath.Command(),
		githooks.Command(),
		githubwiki.Command(),
		idea.Command(),
		packages.Command(),
		verify.Command(gödelPath),
	}
	subcommands = append(subcommands, clicmds.CfgCliCommands(gödelPath)...)
	app.Subcommands = subcommands

	return app
}
|
import IEditAchievementDTO from 'modules/dashboard/domain/dtos/IEditAchievementDTO';
import IAchievement from 'modules/dashboard/domain/entities/IAchievement';
import IAchievementsRepository from 'modules/dashboard/domain/repositories/IAchievementsRepository';
// Result shape for EditAchievementService.execute: on success only
// `achievement` is set; on failure `error` carries the message and
// `shouldLogout` is copied from the thrown error.
interface IExecute {
achievement?: IAchievement;
error?: string;
shouldLogout?: boolean;
}
/**
 * Use case: edit an existing achievement through the injected repository.
 * Repository failures are converted into a plain result object rather than
 * rethrown, so callers branch on `error` / `shouldLogout`.
 */
export default class EditAchievementService {
  constructor(private achievementsRepository: IAchievementsRepository) {}

  public async execute(data: IEditAchievementDTO): Promise<IExecute> {
    try {
      return { achievement: await this.achievementsRepository.edit(data) };
    } catch (error) {
      const { message, shouldLogout } = error;
      return { error: message, shouldLogout };
    }
  }
}
|
A butterfly soars in the sky
Filling dark space with light
Dancing with the rolling clouds
Creating beauty and delight
The sun smiles down on the meadow
A sense of peace fills the air
Birds soar far from their captivity
To the bountiful everywhere
Blooming flowers line the riverbanks
The rolling waves by the shore
Alluring sights of nature's finesse
Peacefully evermore
The beauty of the fading light
Captures my soul so still
As I take in the shimmering stars
A message of hope I instill. |
// Code generated by 'github.com/containous/yaegi/extract github.com/blushft/strana/event'. DO NOT EDIT.
// +build go1.14,!go1.15
package imports
import (
"github.com/blushft/strana/event"
"reflect"
)
// init registers the exported symbols of github.com/blushft/strana/event in
// the Symbols table so yaegi-interpreted code can import the package.
// NOTE: this file is generated (see header comment) — changes here will be
// overwritten by the next `yaegi extract` run.
func init() {
Symbols["github.com/blushft/strana/event"] = map[string]reflect.Value{
// function, constant and variable definitions
"Anonymous": reflect.ValueOf(event.Anonymous),
"Channel": reflect.ValueOf(event.Channel),
"ContextContains": reflect.ValueOf(event.ContextContains),
"ContextInvalid": reflect.ValueOf(event.ContextInvalid),
"ContextValid": reflect.ValueOf(event.ContextValid),
"DeviceID": reflect.ValueOf(event.DeviceID),
"Empty": reflect.ValueOf(event.Empty),
"GetContextType": reflect.ValueOf(event.GetContextType),
"GroupID": reflect.ValueOf(event.GroupID),
"HasContext": reflect.ValueOf(event.HasContext),
"HasID": reflect.ValueOf(event.HasID),
"Interactive": reflect.ValueOf(event.Interactive),
"New": reflect.ValueOf(event.New),
"NewValidator": reflect.ValueOf(event.NewValidator),
"NonInteractive": reflect.ValueOf(event.NonInteractive),
"Platform": reflect.ValueOf(event.Platform),
"RegisterContext": reflect.ValueOf(event.RegisterContext),
"RegisterType": reflect.ValueOf(event.RegisterType),
"SessionID": reflect.ValueOf(event.SessionID),
"TrackingID": reflect.ValueOf(event.TrackingID),
"UserID": reflect.ValueOf(event.UserID),
"WithContext": reflect.ValueOf(event.WithContext),
"WithContexts": reflect.ValueOf(event.WithContexts),
"WithRule": reflect.ValueOf(event.WithRule),
"WithRules": reflect.ValueOf(event.WithRules),
"WithValidator": reflect.ValueOf(event.WithValidator),
// type definitions
"Context": reflect.ValueOf((*event.Context)(nil)),
"ContextContructor": reflect.ValueOf((*event.ContextContructor)(nil)),
"ContextIterator": reflect.ValueOf((*event.ContextIterator)(nil)),
"ContextRegistry": reflect.ValueOf((*event.ContextRegistry)(nil)),
"ContextType": reflect.ValueOf((*event.ContextType)(nil)),
"Contexts": reflect.ValueOf((*event.Contexts)(nil)),
"Event": reflect.ValueOf((*event.Event)(nil)),
"EventConstructor": reflect.ValueOf((*event.EventConstructor)(nil)),
"EventRegistry": reflect.ValueOf((*event.EventRegistry)(nil)),
"Option": reflect.ValueOf((*event.Option)(nil)),
"Rule": reflect.ValueOf((*event.Rule)(nil)),
"Type": reflect.ValueOf((*event.Type)(nil)),
"Validator": reflect.ValueOf((*event.Validator)(nil)),
"ValidatorOption": reflect.ValueOf((*event.ValidatorOption)(nil)),
// interface wrapper definitions
"_Context": reflect.ValueOf((*_github_com_blushft_strana_event_Context)(nil)),
"_ContextIterator": reflect.ValueOf((*_github_com_blushft_strana_event_ContextIterator)(nil)),
}
}
// _github_com_blushft_strana_event_Context is an interface wrapper for Context type
type _github_com_blushft_strana_event_Context struct {
WInterface func() interface{}
WType func() event.ContextType
WValidate func() bool
WValues func() map[string]interface{}
}
// Each method delegates to the matching W* field, letting interpreted code
// satisfy the event.Context interface with plain functions.
func (W _github_com_blushft_strana_event_Context) Interface() interface{} { return W.WInterface() }
func (W _github_com_blushft_strana_event_Context) Type() event.ContextType { return W.WType() }
func (W _github_com_blushft_strana_event_Context) Validate() bool { return W.WValidate() }
func (W _github_com_blushft_strana_event_Context) Values() map[string]interface{} { return W.WValues() }
// _github_com_blushft_strana_event_ContextIterator is an interface wrapper for ContextIterator type
type _github_com_blushft_strana_event_ContextIterator struct {
WFirst func() event.Context
WNext func() event.Context
}
// Delegating methods; see the _Context wrapper above for the pattern.
func (W _github_com_blushft_strana_event_ContextIterator) First() event.Context { return W.WFirst() }
func (W _github_com_blushft_strana_event_ContextIterator) Next() event.Context { return W.WNext() }
|
#!/usr/bin/env bash
# Provision the second Consul server node (dc-east datacenter): write the
# server configuration, fix ownership, and start the service.
# NOTE(review): the /etc writes below run without sudo, so this script is
# presumably executed as root already — confirm whether the final `sudo`
# on systemctl is actually needed.
cat << EOF > /etc/consul.d/server_node_2_config.json
{
"bind_addr": "172.20.20.12",
"datacenter": "dc-east",
"data_dir": "/opt/consul",
"log_level": "INFO",
"enable_syslog": true,
"enable_debug": true,
"node_name": "dc-east-consul-server-two",
"server": true,
"client_addr": "0.0.0.0",
"bootstrap_expect": 3,
"rejoin_after_leave": true,
"ui": true,
"retry_join": [
"172.20.20.11","172.20.20.13"
]
}
EOF
# Consul should own its configuration files
chown --recursive consul:consul /etc/consul.d
# Starting consul
sudo systemctl start consul
<filename>open-sphere-plugins/mapzen/src/main/java/io/opensphere/search/mapzen/model/geojson/GeoJSONType.java
package io.opensphere.search.mapzen.model.geojson;
/** Type for GeoJSON. */
/** Type for GeoJSON. */
public enum GeoJSONType
{
    /** A collection of features. */
    FeatureCollection("FeatureCollection"),

    /** A feature. */
    Feature("Feature"),

    /** A point. */
    Point("Point"),

    /** Multiple points. */
    MultiPoint("MultiPoint"),

    /** A single-line String. */
    LineString("LineString"),

    /** A multi-line String. */
    MultiLineString("MultiLineString"),

    /** A polygon. */
    Polygon("Polygon"),

    /** Multiple polygons. */
    MultiPolygon("MultiPolygon"),

    /** A collection of geometries. */
    GeometryCollection("GeometryCollection");

    /** The type. Final: enum constants are shared, so the field must be immutable. */
    private final String type;

    /**
     * Constructs a GeoJSONType enum.
     *
     * @param type the type
     */
    private GeoJSONType(String type)
    {
        this.type = type;
    }

    /**
     * Retrieves the type.
     *
     * @return {@link #type}
     */
    public String getType()
    {
        return type;
    }
}
|
# frozen_string_literal: true
require 'graphql'
require 'graphql/kaminari_connection/version'
module GraphQL
# Mixin for GraphQL object types that adds Kaminari-style pagination:
# including it gives the type a `kaminari_connection` class method that
# builds a "<Type>Page" wrapper plus `page`/`per` field arguments.
module KaminariConnection
class << self
def included(klass)
klass.extend ClassMethods
end
# Memoized page-data object type including `total_pages`.
# @return [Class]
def page_data_type
@page_data_type ||= define_page_data_type(without_count: false)
end
# Variant of the page-data type without `total_pages`.
def page_data_without_count_type
@page_data_without_count_type ||= define_page_data_type(without_count: true)
end
# If your schema already has 'PageData' type, you can change its name.
attr_writer :page_data_type_name
attr_writer :base_page_data_class
private
# The name of page data GraphQL type.
#
# @return [String]
def page_data_type_name
@page_data_type_name || 'PageData'
end
# Base class used for the generated page-data object type.
# @return [Class]
def base_page_data_class
@base_page_data_class || GraphQL::Schema::Object
end
# Build the PageData object type; the fields map onto Kaminari's page
# predicates via the `method:` options below.
# @param without_count [Boolean]
# @return [Class]
def define_page_data_type(without_count:)
type_name = without_count ? "#{page_data_type_name}WithoutTotalPages" : page_data_type_name
Class.new(base_page_data_class) do
graphql_name type_name
description 'Information about pagination'
field :current_page, 'Int', null: false
field :is_first_page, 'Boolean', null: false, method: :first_page?
field :is_last_page, 'Boolean', null: false, method: :last_page?
field :is_out_of_range, 'Boolean', null: false, method: :out_of_range?
field :limit_value, 'Int', null: false
field :next_page, 'Int', null: true
field :prev_page, 'Int', null: true
field :total_pages, 'Int', null: false unless without_count
end
end
end
# Methods added to the including type class.
module ClassMethods
# Returns keyword arguments for a paged field definition (type,
# page/per arguments, null: false), merged with any overrides in params.
def kaminari_connection(without_count: false, **params)
{
type: without_count ? page_type_without_count : page_type,
arguments: page_arguments,
null: false
}.merge(params)
end
# Base class used for the generated page wrapper type.
# @return [Class]
def base_page_class
GraphQL::Schema::Object
end
private
# @return [Class]
def page_type_without_count
@page_type_without_count ||= define_page_type(without_count: true)
end
# @return [Class]
def page_type
@page_type ||= define_page_type(without_count: false)
end
# Argument specs added to every paged field.
def page_arguments
[
[:page, type: 'Int', required: false],
[:per, type: 'Int', required: false]
]
end
# Build the "<Type>Page" wrapper exposing `page_data` and `items`;
# both fields resolve against the underlying Kaminari page object.
# @param without_count [Boolean]
# @return [Class]
def define_page_type(without_count:)
type_name = without_count ? "#{graphql_name}PageWithoutTotalPages" : "#{graphql_name}Page"
type_class = self
page_data_type_class =
without_count ? KaminariConnection.page_data_without_count_type : KaminariConnection.page_data_type
Class.new(base_page_class) do
graphql_name type_name
description "Autogenerated page type for #{type_name}"
field :page_data, page_data_type_class, null: false, method: :object
field :items, [type_class], 'A list of items', null: false, method: :object
end
end
end
end
end
|
<filename>src/test/java/fr/insee/rmes/api/operations/OperationsAPITest.java
package fr.insee.rmes.api.operations;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import fr.insee.rmes.api.AbstractApiTest;
import fr.insee.rmes.modeles.operations.CsvIndicateur;
import fr.insee.rmes.modeles.operations.CsvSerie;
import fr.insee.rmes.modeles.operations.documentations.DocumentationSims;
@ExtendWith(MockitoExtension.class)
class OperationsAPITest extends AbstractApiTest {
// System under test; Mockito injects the mocks declared below.
@InjectMocks
private OperationsAPI operationsAPI;
// Service layer mocked out so the API layer is tested in isolation.
@Mock
private OperationsApiService mockOperationApiService;
// Reusable fixtures handed to the mocked utils methods.
private DocumentationSims sims = new DocumentationSims();
private CsvSerie csvSerie = new CsvSerie();
private CsvIndicateur csvIndic = new CsvIndicateur();
// Exclusion map used by the (currently commented-out) tree tests.
Map<String,List<String>> exclusions = new HashMap<>();
/* @Test
void givenGetOperationsTree_whenCorrectRequest_andHeaderContentIsJson_thenResponseIsOk() {
// Mock
list.add(new FamilyToOperation());
when(mockOperationApiService.getListeFamilyToOperation(Mockito.any(),exclusions))
.thenReturn(new HashMap<String, Famille>());
this.mockUtilsMethodsThenReturnListOfPojo(Boolean.TRUE);
// Call method with header content is json
operationsAPI.getOperationsTree("something", MediaType.APPLICATION_JSON);
verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetOperationsTree_whenCorrectRequest_andHeaderContentIsXml_thenResponseIsOk() {
// Mock
list.add(new FamilyToOperation());
when(mockOperationApiService.getListeFamilyToOperation(Mockito.any(),exclusions))
.thenReturn(new HashMap<String, Famille>());
this.mockUtilsMethodsThenReturnListOfPojo(Boolean.TRUE);
// Call method with header content is xml
operationsAPI.getOperationsTree("something", MediaType.APPLICATION_XML);
verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetOperationsTree_whenCorrectRequest_andDiffusuerIsInseeFr_andHeaderContentIsJson_thenResponseIsOk() {
// Mock
list.add(new FamilyToOperation());
when(mockOperationApiService.getListeFamilyToOperation(Mockito.any(),exclusions))
.thenReturn(new HashMap<String, Famille>());
this.mockUtilsMethodsThenReturnListOfPojo(Boolean.TRUE);
// Call method with header content is json
operationsAPI.getOperationsTree("insee.fr", MediaType.APPLICATION_JSON);
verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetOperationsTree_whenCorrectRequest_thenResponseIsNotFound() {
// Mock
this.mockUtilsMethodsThenReturnListOfPojo(Boolean.FALSE);
// Call method with header content is json
Response response = operationsAPI.getOperationsTree("something", MediaType.APPLICATION_JSON);
Assertions.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus());
response = operationsAPI.getOperationsTree("something", MediaType.APPLICATION_XML);
Assertions.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus());
verify(mockResponseUtils, Mockito.never()).produceResponse(Mockito.any(), Mockito.any());
}*/
// JSON happy path: a SIMS documentation is found, so exactly one response
// is produced via responseUtils.
@Test
void givenGetDocumentation_whenCorrectRequest_andHeaderContentIsJson_thenResponseIsOk() {
// Mock
sims.setUri("something");
this.mockUtilsMethodsThenReturnOnePojo(sims, Boolean.TRUE);
// Call method with header content is json
operationsAPI.getDocumentation("something", MediaType.APPLICATION_JSON);
verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
// Same happy path as above, requesting XML instead of JSON.
@Test
void givenGetDocumentation_whenCorrectRequest_andHeaderContentIsXml_thenResponseIsOk() {
// Mock
sims.setUri("something");
this.mockUtilsMethodsThenReturnOnePojo(sims, Boolean.TRUE);
// Call method with header content is xml
operationsAPI.getDocumentation("something", MediaType.APPLICATION_XML);
verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetDocumentation_whenCorrectRequest_thenResponseIsNotFound() {
    // Make the utils mocks report that the resource does not exist.
    this.mockUtilsMethodsThenReturnOnePojo(sims, Boolean.FALSE);
    final int expectedStatus = Status.NOT_FOUND.getStatusCode();
    // Both content types must yield 404 without producing a response body.
    Response jsonResponse = operationsAPI.getDocumentation("something", MediaType.APPLICATION_JSON);
    Assertions.assertEquals(expectedStatus, jsonResponse.getStatus());
    Response xmlResponse = operationsAPI.getDocumentation("something", MediaType.APPLICATION_XML);
    Assertions.assertEquals(expectedStatus, xmlResponse.getStatus());
    verify(mockResponseUtils, Mockito.never()).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetSeries_whenCorrectRequest_andHeaderContentIsJson_thenResponseIsOk() {
    // Arrange: give the CSV series fixture an id and make the utils mocks
    // report that the resource exists (Boolean.TRUE selects the "found" path).
    csvSerie.setSeriesId("something");
    this.mockUtilsMethodsThenReturnOnePojo(csvSerie, Boolean.TRUE);
    // Act: request the series with a JSON Accept header.
    operationsAPI.getSeries("something", MediaType.APPLICATION_JSON);
    // Assert: the response producer was invoked exactly once.
    verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetSeries_whenCorrectRequest_andHeaderContentIsXml_thenResponseIsOk() {
    // Arrange: give the CSV series fixture an id and make the utils mocks
    // report that the resource exists (Boolean.TRUE selects the "found" path).
    csvSerie.setSeriesId("something");
    this.mockUtilsMethodsThenReturnOnePojo(csvSerie, Boolean.TRUE);
    // Act: request the series with an XML Accept header.
    operationsAPI.getSeries("something", MediaType.APPLICATION_XML);
    // Assert: the response producer was invoked exactly once.
    verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetSeries_whenCorrectRequest_thenResponseIsNotFound() {
    // Make the utils mocks report that the series does not exist.
    this.mockUtilsMethodsThenReturnOnePojo(csvSerie, Boolean.FALSE);
    final int expectedStatus = Status.NOT_FOUND.getStatusCode();
    // Both content types must yield 404 without producing a response body.
    Response jsonResponse = operationsAPI.getSeries("something", MediaType.APPLICATION_JSON);
    Assertions.assertEquals(expectedStatus, jsonResponse.getStatus());
    Response xmlResponse = operationsAPI.getSeries("something", MediaType.APPLICATION_XML);
    Assertions.assertEquals(expectedStatus, xmlResponse.getStatus());
    verify(mockResponseUtils, Mockito.never()).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetIndicateur_whenCorrectRequest_andHeaderContentIsJson_thenResponseIsOk() {
    // Arrange: give the CSV indicator fixture an id and make the utils mocks
    // report that the resource exists (Boolean.TRUE selects the "found" path).
    csvIndic.setId("something");
    this.mockUtilsMethodsThenReturnOnePojo(csvIndic, Boolean.TRUE);
    // Act: request the indicator with a JSON Accept header.
    operationsAPI.getIndicateur("something", MediaType.APPLICATION_JSON);
    // Assert: the response producer was invoked exactly once.
    verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetIndicateur_whenCorrectRequest_andHeaderContentIsXml_thenResponseIsOk() {
    // Arrange: give the CSV indicator fixture an id and make the utils mocks
    // report that the resource exists (Boolean.TRUE selects the "found" path).
    csvIndic.setId("something");
    this.mockUtilsMethodsThenReturnOnePojo(csvIndic, Boolean.TRUE);
    // Act: request the indicator with an XML Accept header.
    operationsAPI.getIndicateur("something", MediaType.APPLICATION_XML);
    // Assert: the response producer was invoked exactly once.
    verify(mockResponseUtils, times(1)).produceResponse(Mockito.any(), Mockito.any());
}
@Test
void givenGetIndicateur_whenCorrectRequest_thenResponseIsNotFound() {
    // Make the utils mocks report that the indicator does not exist.
    this.mockUtilsMethodsThenReturnOnePojo(csvIndic, Boolean.FALSE);
    final int expectedStatus = Status.NOT_FOUND.getStatusCode();
    // Both content types must yield 404 without producing a response body.
    Response jsonResponse = operationsAPI.getIndicateur("something", MediaType.APPLICATION_JSON);
    Assertions.assertEquals(expectedStatus, jsonResponse.getStatus());
    Response xmlResponse = operationsAPI.getIndicateur("something", MediaType.APPLICATION_XML);
    Assertions.assertEquals(expectedStatus, xmlResponse.getStatus());
    verify(mockResponseUtils, Mockito.never()).produceResponse(Mockito.any(), Mockito.any());
}
}
|
<filename>RL/src/naf_nets_dm.py
import numpy as np
import tensorflow as tf
flags = tf.app.flags
FLAGS = flags.FLAGS
def theta(dimIn, dimOut, l1, l2, scope):
    """Create the parameters of a 3-layer MLP (dimIn -> l1 -> l2 -> dimOut).

    Weights are initialized from a truncated normal with stddev
    FLAGS.initstd (module-level TF flags); biases start at zero.
    Returns the six variables in the order [w1, b1, w2, b2, w3, b3],
    which is the layout build_NN_two_hidden_layers expects.
    Variables are created (or reused) under `scope`.
    """
    with tf.variable_scope(scope):
        normal_init = tf.truncated_normal_initializer(mean=0.0, stddev=FLAGS.initstd)
        return [tf.get_variable(name='w1', shape=[dimIn, l1], initializer=normal_init),
                tf.get_variable(name='b1', shape=[l1], initializer=tf.constant_initializer(0.0)),
                tf.get_variable(name='w2', shape=[l1, l2], initializer=normal_init),
                tf.get_variable(name='b2', shape=[l2], initializer=tf.constant_initializer(0.0)),
                tf.get_variable(name='w3', shape=[l2, dimOut], initializer=normal_init),
                tf.get_variable(name='b3', shape=[dimOut], initializer=tf.constant_initializer(0.0))]
def build_NN_two_hidden_layers(x, theta):
    """Run `x` through a two-hidden-layer MLP with ReLU activations.

    `theta` is the flat parameter list [w1, b1, w2, b2, w3, b3] produced
    by theta(); the final layer is linear (no activation).
    """
    w1, b1, w2, b2, w3, b3 = theta
    hidden1 = tf.nn.relu(tf.matmul(x, w1) + b1)
    hidden2 = tf.nn.relu(tf.matmul(hidden1, w2) + b2)
    return tf.matmul(hidden2, w3) + b3
def lfunction(obs, theta, scope="lfunction"):
    """Network head producing the flat L-entries vector for `obs`
    (consumed by vec2trimat); runs under variable scope `scope`."""
    with tf.variable_scope(scope):
        return build_NN_two_hidden_layers(obs, theta)
def vec2trimat(vec, dim):
    """Turn a flat batch of dim*dim values into lower-triangular matrices.

    The strictly-lower part is kept as-is; the diagonal is replaced by
    exp(diagonal) so every diagonal entry is strictly positive.
    Uses the pre-1.0 TensorFlow batch_matrix_* API.
    Returns a tensor of shape [batch, dim, dim].
    """
    L = tf.reshape(vec, [-1, dim, dim])
    # keep the lower triangle, subtract the raw diagonal, add back exp(diag)
    L = tf.batch_matrix_band_part(L, -1, 0) - tf.batch_matrix_diag(tf.batch_matrix_diag_part(L)) + \
        tf.batch_matrix_diag(tf.exp(tf.batch_matrix_diag_part(L)))
    return L
def ufunction(obs, theta, scope="ufunction"):
    """Action head: MLP output squashed into (-1, 1) with tanh,
    evaluated under variable scope `scope`."""
    with tf.variable_scope(scope):
        return tf.tanh(build_NN_two_hidden_layers(obs, theta))
def afunction(action, lvalue, uvalue, dimA, scope="afunction"):
    """Advantage term A = -0.5 * ||(action - uvalue)^T L||^2 per batch element.

    `lvalue` is the flat L-entries vector (reshaped to lower-triangular L
    via vec2trimat) and `uvalue` is the policy action; `delta` measures how
    far `action` is from the policy's choice.  Returns a [batch] tensor that
    is always <= 0, maximized (0) when action == uvalue.
    """
    with tf.variable_scope(scope):
        delta = action - uvalue
        L = vec2trimat(lvalue, dimA)
        h1 = tf.reshape(delta, [-1, 1, dimA])
        h1 = tf.batch_matmul(h1, L)  # batch:1:dimA  (delta^T L)
        h1 = tf.squeeze(h1, [1])  # batch:dimA
        h2 = -tf.constant(0.5) * tf.reduce_sum(h1 * h1, 1)  # batch
        return h2
def qfunction(obs, avalue, theta, scope="qfunction"):
    """Q(s, a) = V(s) + A(s, a): the scalar value head (squeezed from
    [batch, 1] to [batch]) plus the advantage term `avalue`."""
    with tf.variable_scope(scope):
        value = build_NN_two_hidden_layers(obs, theta)
        return tf.squeeze(value, [1]) + avalue
|
# Install the Pushbullet CLI globally via npm if it is not already present,
# then remind the user to export their access token.
# Fixes: expansions are now quoted (no word splitting / globbing), and grep
# matches the package name literally (-F --) instead of as a regex.
install-pushbullet() {
    local -r package_name="kkpoon-pushbullet-cli"
    local -r package_version="$(npm list -g | grep -F -- "${package_name}")"
    if [[ -z "$package_version" ]]; then
        npm -g install "${package_name}"
        echo -e "Add the following to your \033[1m$HOME/.local/bashrc\033[0m"
        echo "export PB_ACCESS_TOKEN=<YOUR_PUSHBULLET_ACCESS_TOKEN>"
    fi
}
install-pushbullet
|
/**
 * Collect the placeholder text color declared on a field style object.
 *
 * @param {Object} fieldStyle - Style object that may carry a `selectors`
 *   map keyed by CSS selector (e.g. '&::placeholder').
 * @returns {Array} A one-element list with the placeholder color when the
 *   '&::placeholder' selector is declared, otherwise an empty list.
 */
function extractPlaceholderColors(fieldStyle) {
  const placeholderColors = [];
  // Optional chaining replaces the manual `a && a[b]` existence checks.
  const placeholderStyle = fieldStyle.selectors?.['&::placeholder'];
  if (placeholderStyle) {
    placeholderColors.push(placeholderStyle.color);
  }
  return placeholderColors;
}
<filename>third_parties/cmake-3.10.2/Tests/Server/cmakelib.py
from __future__ import print_function
import sys, subprocess, json
termwidth = 150
print_communication = True
def ordered(obj):
    """Return a canonical, order-independent form of parsed JSON data.

    Dicts become sorted lists of (key, ordered(value)) pairs; lists are
    recursively canonicalized and then sorted; scalars pass through
    unchanged.  Two payloads that differ only in key/element order
    therefore compare equal after canonicalization.
    """
    if isinstance(obj, dict):
        return sorted((key, ordered(value)) for key, value in obj.items())
    if isinstance(obj, list):
        return sorted(ordered(element) for element in obj)
    return obj
def col_print(title, array):
    """Pretty-print `array` under `title`, column-major, padded to fit
    `termwidth` (module global).

    An empty/None array prints "<None>".  Fix: the row count now uses
    ceiling division; previously `len(array) // numCols + 1` produced one
    extra, sparser row whenever the length was an exact multiple of the
    column count.
    """
    print()
    print()
    print(title)
    indentwidth = 4
    indent = " " * indentwidth
    if not array:
        print(indent + "<None>")
        return
    padwidth = 2
    maxitemwidth = len(max(array, key=len))
    numCols = max(1, int((termwidth - indentwidth + padwidth) / (maxitemwidth + padwidth)))
    numRows = -(-len(array) // numCols)  # ceiling division
    pad = " " * padwidth
    for index in range(numRows):
        # Slice stride = numRows gives column-major layout.
        print(indent + pad.join(item.ljust(maxitemwidth) for item in array[index::numRows]))
def waitForRawMessage(cmakeCommand):
    """Read the server's stdout until one framed JSON payload
    ('[== "CMake Server" ==[' ... ']== "CMake Server" ==]') is complete,
    then return it parsed; returns "" (falsy) if the stream ends first.

    NOTE(review): `while not cmakeCommand.poll()` keeps looping both while
    the process runs (poll() -> None) and after a clean exit (poll() -> 0);
    the loop actually terminates via the EOF `break` below.  Confirm before
    relying on poll() here.
    """
    stdoutdata = ""
    payload = ""
    while not cmakeCommand.poll():
        stdoutdataLine = cmakeCommand.stdout.readline()
        if stdoutdataLine:
            stdoutdata += stdoutdataLine.decode('utf-8')
        else:
            break  # EOF: server closed its stdout
        begin = stdoutdata.find('[== "CMake Server" ==[\n')
        end = stdoutdata.find(']== "CMake Server" ==]')
        if (begin != -1 and end != -1):
            begin += len('[== "CMake Server" ==[\n')
            payload = stdoutdata[begin:end]
            if print_communication:
                print("\nSERVER>", json.loads(payload), "\n")
            return json.loads(payload)
def writeRawData(cmakeCommand, content):
    """Wrap `content` in the CMake server frame delimiters and write it to
    the server's stdin.

    Every second call (tracked via the function attribute
    `writeRawData.counter`) converts the payload to CRLF line endings so
    both newline conventions get exercised.
    """
    writeRawData.counter += 1
    payload = """
[== "CMake Server" ==[
%s
]== "CMake Server" ==]
""" % content
    # Alternate between \n and \r\n framing on successive calls.
    rn = ( writeRawData.counter % 2 ) == 0
    if rn:
        payload = payload.replace('\n', '\r\n')
    if print_communication:
        print("\nCLIENT>", content, "(Use \\r\\n:", rn, ")\n")
    cmakeCommand.stdin.write(payload.encode('utf-8'))
    cmakeCommand.stdin.flush()
# Call counter used to alternate line endings; first call (counter 1) uses \n.
writeRawData.counter = 0
def writePayload(cmakeCommand, obj):
    """Serialize `obj` to JSON and send it to the server via writeRawData."""
    writeRawData(cmakeCommand, json.dumps(obj))
def initProc(cmakeCommand):
    """Start the cmake binary at path `cmakeCommand` in experimental server
    mode and consume the initial 'hello' packet.

    Exits the test process with status 1 if no framed message arrives or
    the greeting is not 'hello'.  Returns the Popen handle (the parameter
    is rebound; the original path string is only used to spawn).
    """
    cmakeCommand = subprocess.Popen([cmakeCommand, "-E", "server", "--experimental", "--debug"],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE)
    packet = waitForRawMessage(cmakeCommand)
    if packet == None:
        print("Not in server mode")
        sys.exit(1)
    if packet['type'] != 'hello':
        print("No hello message")
        sys.exit(1)
    return cmakeCommand
def exitProc(cmakeCommand):
    """Shut the server down (closing stdin is the exit signal) and wait for it.

    wait(timeout=5) raises TypeError on Python versions whose wait() has no
    timeout parameter; in that case fall back to an unbounded wait.  Any
    other exception (e.g. TimeoutExpired) terminates the process and is
    re-raised.
    """
    # Tell the server to exit.
    cmakeCommand.stdin.close()
    cmakeCommand.stdout.close()
    # Wait for the server to exit.
    # If this version of python supports it, terminate the server after a timeout.
    try:
        cmakeCommand.wait(timeout=5)
    except TypeError:
        cmakeCommand.wait()
    except:
        cmakeCommand.terminate()
        raise
def waitForMessage(cmakeCommand, expected):
    """Read the next server message and require it to equal `expected`.

    Both sides are canonicalized with ordered() so dict-key and list order
    do not matter.  On mismatch the test process exits with status 1 —
    previously sys.exit(-1), which was inconsistent with every other
    failure path in this file.  Returns the canonicalized packet.
    """
    data = ordered(expected)
    packet = ordered(waitForRawMessage(cmakeCommand))
    if packet != data:
        sys.exit(1)
    return packet
def waitForReply(cmakeCommand, originalType, cookie, skipProgress):
    """Wait for the 'reply' packet answering the request identified by
    (originalType, cookie) and return it.

    'message'/'progress' packets are consumed when `skipProgress` is true;
    a cookie/inReplyTo mismatch or any other unexpected packet aborts the
    test with exit status 1.  (Removed the never-read `gotResult` local.)
    """
    while True:
        packet = waitForRawMessage(cmakeCommand)
        t = packet['type']
        if packet['cookie'] != cookie or packet['inReplyTo'] != originalType:
            sys.exit(1)
        if t == 'message' or t == 'progress':
            if skipProgress:
                continue
        if t == 'reply':
            break
        sys.exit(1)
    return packet
def waitForError(cmakeCommand, originalType, cookie, message):
    """Expect the next packet to be an 'error' reply to (originalType,
    cookie) carrying exactly `message`; exit the test with status 1
    otherwise."""
    packet = waitForRawMessage(cmakeCommand)
    if packet['cookie'] != cookie or packet['type'] != 'error' or packet['inReplyTo'] != originalType or packet['errorMessage'] != message:
        sys.exit(1)
def waitForProgress(cmakeCommand, originalType, cookie, current, message):
    """Expect the next packet to be a 'progress' update for (originalType,
    cookie) with progressCurrent == current and the given progress message;
    exit the test with status 1 otherwise."""
    packet = waitForRawMessage(cmakeCommand)
    if packet['cookie'] != cookie or packet['type'] != 'progress' or packet['inReplyTo'] != originalType or packet['progressCurrent'] != current or packet['progressMessage'] != message:
        sys.exit(1)
def handshake(cmakeCommand, major, minor, source, build, generator, extraGenerator):
    """Perform the server protocol handshake and wait for its reply.

    A negative `minor` omits the 'minor' field from protocolVersion,
    letting the server pick.  Uses the fixed cookie 'TEST_HANDSHAKE'
    to correlate the reply.
    """
    version = { 'major': major }
    if minor >= 0:
        version['minor'] = minor
    writePayload(cmakeCommand, { 'type': 'handshake', 'protocolVersion': version,
        'cookie': 'TEST_HANDSHAKE', 'sourceDirectory': source, 'buildDirectory': build,
        'generator': generator, 'extraGenerator': extraGenerator })
    waitForReply(cmakeCommand, 'handshake', 'TEST_HANDSHAKE', False)
def validateGlobalSettings(cmakeCommand, cmakeCommandPath, data):
    """Validate the server's 'globalSettings' reply.

    Checks that (a) the reported version matches both the capability
    string and `cmake --version` output, (b) every generator listed by
    `cmake --help` appears in the capabilities, and (c) each key in `data`
    matches the reply.  Exits the test process with status 1 on failure.
    """
    packet = waitForReply(cmakeCommand, 'globalSettings', '', False)
    capabilities = packet['capabilities']
    # validate version:
    cmakeoutput = subprocess.check_output([ cmakeCommandPath, "--version" ], universal_newlines=True)
    cmakeVersion = cmakeoutput.splitlines()[0][14:]  # strip "cmake version "
    version = capabilities['version']
    versionString = version['string']
    vs = str(version['major']) + '.' + str(version['minor']) + '.' + str(version['patch'])
    if (versionString != vs and not versionString.startswith(vs + '-')):
        sys.exit(1)
    if (versionString != cmakeVersion):
        sys.exit(1)
    # validate generators:
    generatorObjects = capabilities['generators']
    cmakeoutput = subprocess.check_output([ cmakeCommandPath, "--help" ], universal_newlines=True)
    index = cmakeoutput.index('\nGenerators\n\n')
    cmakeGenerators = []
    for line in cmakeoutput[index + 12:].splitlines():
        # Fix: both prefix checks previously tested a single space, which is
        # contradictory (every line was skipped, leaving the parsing below
        # dead).  Generator entries are indented by exactly two spaces;
        # deeper-indented lines are descriptions/continuations.
        if not line.startswith('  '):
            continue
        if line.startswith('    '):
            continue
        equalPos = line.find('=')
        tmp = ''
        if (equalPos > 0):
            tmp = line[2:equalPos].strip()
        else:
            tmp = line.strip()
        if tmp.endswith(" [arch]"):
            tmp = tmp[0:len(tmp) - 7]
        if (len(tmp) > 0) and (" - " not in tmp) and (tmp != 'KDevelop3'):
            cmakeGenerators.append(tmp)
    generators = []
    for genObj in generatorObjects:
        generators.append(genObj['name'])
    generators.sort()
    cmakeGenerators.sort()
    for gen in cmakeGenerators:
        if (not gen in generators):
            sys.exit(1)
    gen = packet['generator']
    if (gen != '' and not (gen in generators)):
        sys.exit(1)
    for i in data:
        print("Validating", i)
        if (packet[i] != data[i]):
            sys.exit(1)
def validateCache(cmakeCommand, data):
    """Check the server's 'cache' reply against expectations in `data`.

    data['isEmpty'] selects the mode: either the cache must be empty, or it
    must be non-empty and contain a CMAKE_HOME_DIRECTORY entry.  Exits the
    test process with status 1 on any mismatch.
    """
    packet = waitForReply(cmakeCommand, 'cache', '', False)
    cache = packet['cache']
    if (data['isEmpty']):
        if (cache != []):
            print('Expected empty cache, but got data.\n')
            sys.exit(1)
        return;
    if (cache == []):
        print('Expected cache contents, but got none.\n')
        sys.exit(1)
    hadHomeDir = False
    for value in cache:
        if (value['key'] == 'CMAKE_HOME_DIRECTORY'):
            hadHomeDir = True
    if (not hadHomeDir):
        print('No CMAKE_HOME_DIRECTORY found in cache.')
        sys.exit(1)
|
#!/bin/bash
# Abspath
# Returns absolute path of given path.
# Usage: path=$(abspath foo)
#
abspath() {
    # Default to the current directory when no argument is given.
    local relpath="${1:-$(pwd)}"
    local abspath
    # Fix: quote the path so directories containing spaces or glob characters
    # resolve correctly; `--` guards against names that begin with a dash.
    # Declaration and assignment are separated so `local` does not mask the
    # subshell's exit status.
    abspath=$(cd -- "$relpath" && pwd)
    echo "${abspath}"
}
|
<filename>src/main/scala/Algorithms/Sorting/BucketSort.scala
package Algorithms.Sorting
/**
* Created by MikBac on 01.09.2020
*/
object BucketSort {

  /**
   * Sorts a list of integers with bucket sort: each value lands in bucket
   * `value / 10`, every bucket is sorted ascending, and the buckets are
   * concatenated in order.
   *
   * Fix: an empty input previously crashed on `list.max`; it now returns Nil.
   * NOTE(review): values below -9 yield a negative bucket index and throw —
   * the implementation assumes non-negative input; confirm callers.
   */
  def sort(list: List[Int]): List[Int] = {
    if (list.isEmpty) return List.empty[Int]
    val max = list.max
    val buckets = Array.fill((max / 10) + 1) {
      collection.mutable.MutableList[Int]()
    }
    val ans = collection.mutable.MutableList[Int]()
    list.foreach(numb => buckets(numb / 10) += numb)
    // sortWith returns a sorted copy; append each bucket's contents in order.
    buckets.foreach(b => b.sortWith(_.compareTo(_) < 0).foreach(bb => ans += bb))
    ans.toList
  }

  /** Demo entry point: sorts a sample list and prints the result. */
  def main(args: Array[String]): Unit = {
    val list = List(3, 2, 4, 3, 2, 4, 5, 23, 23, 5, 232, 239, 2, 100, 11, 6, 3, 4, 2)
    println(sort(list))
  }
}
|
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stirling.fix.session;
import jdave.junit4.JDaveRunner;
import org.junit.runner.RunWith;
import stirling.fix.messages.fix42.MsgTypeValue;
import stirling.fix.tags.fix42.NewSeqNo;
/**
 * FIX 4.2 conformance spec (Ref ID 11): how the initiator handles a
 * Sequence Reset message in "reset" mode (i.e. without GapFill), for
 * NewSeqNo greater than, equal to, and less than the expected MsgSeqNum.
 */
@RunWith(JDaveRunner.class) public class RecvSequenceResetWithoutGapFillSpec extends InitiatorSpecification {
    public class InitializedSession {
        /* Ref ID 11: a. Receive Sequence Reset (reset) message with NewSeqNo >
         * than expected sequence number */
        public void newSeqNoGreaterThanExpectedMsgSeqNum() throws Exception {
            server.expect(MsgTypeValue.LOGON);
            server.respondLogon();
            // Sequence Reset at seq 2 jumps the incoming sequence to 5.
            server.respond(
                new MessageBuilder(MsgTypeValue.SEQUENCE_RESET)
                    .msgSeqNum(2)
                    .integer(NewSeqNo.Tag(), 5)
                    .build());
            server.respondLogout(5);
            server.expect(MsgTypeValue.LOGOUT);
            runInClient(new Runnable() {
                @Override public void run() {
                    session.logon(connection);
                }
            });
            // After accepting the reset and the logout at seq 5, the next
            // expected incoming sequence number is 6.
            specify(session.getIncomingSeq().peek(), 6);
        }
        /* Ref ID 11: b. Receive Sequence Reset (reset) message with NewSeqNo =
         * to expected sequence number */
        public void newSeqNoEqualToMsgSeqNum() throws Exception {
            server.expect(MsgTypeValue.LOGON);
            server.respondLogon();
            // NewSeqNo equals the expected sequence number: accepted, but a
            // warning must be logged.
            server.respond(
                new MessageBuilder(MsgTypeValue.SEQUENCE_RESET)
                    .msgSeqNum(2)
                    .integer(NewSeqNo.Tag(), 2)
                    .build());
            server.respondLogout(2);
            server.expect(MsgTypeValue.LOGOUT);
            checking(expectLogWarning("NewSeqNo(36)=2 is equal to expected MsgSeqNum(34)=2"));
            runInClient(new Runnable() {
                @Override public void run() {
                    session.logon(connection);
                }
            });
            specify(session.getIncomingSeq().peek(), 3);
        }
        /* Ref ID 11: c. Receive Sequence Reset (reset) message with NewSeqNo <
         * than expected sequence number */
        public void newSeqNoSmallerThanMsgSeqNum() throws Exception {
            server.expect(MsgTypeValue.LOGON);
            server.respondLogon();
            server.respond(
                new MessageBuilder(MsgTypeValue.HEARTBEAT)
                    .msgSeqNum(2)
                    .build());
            // Attempting to rewind the sequence (NewSeqNo=2 when 3 is expected)
            // must be rejected and warned about, leaving the sequence unchanged.
            server.respond(
                new MessageBuilder(MsgTypeValue.SEQUENCE_RESET)
                    .msgSeqNum(3)
                    .integer(NewSeqNo.Tag(), 2)
                    .build());
            server.expect(MsgTypeValue.REJECT);
            checking(expectLogWarning("Value is incorrect (out of range) for this tag, NewSeqNo(36)=2"));
            runInClient(new Runnable() {
                @Override public void run() {
                    session.logon(connection);
                }
            });
            specify(session.getIncomingSeq().peek(), 3);
        }
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
import PurchaseOrderService from './PurchaseOrderService.js';
import ReportTemplate from '../../utils/ReportTemplate.js';
import Table from '../../elements/Table.js';
import Button from '../../elements/Button.js';
import CheckBoxInline from '../../elements/CheckBoxInline.js';
import Select from '../../elements/Select.js';
import SelectLabel from '../../elements/SelectLabel.js';
import ModalForm from '../../elements/ModalForm.js';
import ModalFormCustom from '../../elements/ModalFormCustom.js';
import Label from '../../elements/Label.js';
import Required from '../../elements/Required.js';
import Input from '../../elements/Input.js';
import TextArea from '../../elements/TextArea.js';
class PurchaseOrderController {
static tableAutoUpdate(table, selected_row, table_row_count, time_out_id) {
time_out_id = setInterval(function() {
if ( $('#purchaseorder').hasClass("active") && $('#authorized-purchase-quote-modal').is(':visible') ) {
table.ajax.reload(null, false);
if ( selected_row.selected_authorized_purchase_quote === undefined && table_row_count !== table.data().count() ) {
table.scroller.toPosition(table.data().count(), false);
table_row_count = table.data().count();
}
} else {
clearInterval(time_out_id);
}
}, 4000);
}
static purchaseOrderAutoUpdate(table, selected_row, table_row_count, purchase_order_time_out_id) {
purchase_order_time_out_id = setInterval(() => {
if ( $('#purchaseorder').hasClass("active") ) {
table.ajax.reload(null, false);
if ( selected_row.purchase_order === undefined && table_row_count !== table.data().count() ) {
table.scroller.toPosition(table.data().count(), false);
table_row_count = table.data().count();
}
} else {
clearInterval(purchase_order_time_out_id);
}
}, 4000);
}
// static purchaseOrderAutoUpdate(table, selected_row, purchase_quote_time_out_id) {
//
// purchase_quote_time_out_id = setInterval(function() {
//
// if ( $('#purchaseprocurement').hasClass("active") ) {
//
// if ( selected_row.selected_purchase_request !== undefined && selected_row.selected_purchase_request.purchase_request_status_id > 3 ) {
//
// table.ajax.url("purchasequotes?purchase_request=" + selected_row.selected_purchase_request.purchase_request_id);
// table.ajax.reload(null, false);
//
// }
//
// } else {
// clearInterval(purchase_quote_time_out_id);
// }
//
// }, 4000);
//
// }
//
// static purchaseOrderLinesAutoUpdate(table, selected_row, purchase_quote_lines_time_out_id) {
//
// purchase_quote_lines_time_out_id = setInterval(function() {
//
// if ( $('#purchaseprocurement').hasClass("active") ) {
//
// if ( selected_row.selected_purchase_quote !== undefined ) {
//
// table.ajax.url("purchasequotelines?purchase_quote=" + selected_row.selected_purchase_quote.purchase_quote_id);
// table.ajax.reload(null, false);
//
// }
//
// } else {
// clearInterval(purchase_quote_lines_time_out_id);
// }
//
// }, 4000);
//
// }
/**
 * Opens the "Generate Purchase Order" workflow: blocks the UI with a modal
 * listing authorized purchase quotes, lets the user pick one, collects PO
 * terms (delivery mode/term/date, payment method/term), posts the new
 * purchase order, and prints a report on success.
 *
 * @param {Object} selected_row     Shared selection state; this method reads and
 *                                  writes `selected_row.selected_authorized_purchase_quote`.
 * @param {number} table_row_count  Last known row count used by the auto-refresh.
 * @param {number} time_out_id      Interval id slot passed through to tableAutoUpdate.
 */
static generatePurchaseOrderPopUp(selected_row, table_row_count, time_out_id) {
    // Block the whole page with the quote-selection modal.
    $.blockUI({
        message: new ModalFormCustom("authorized-purchase-quote-modal", "Generate Purchase Order", { form_method: "POST", form_id: "generate-purchase-order", form_enctype: "application/x-www-form-urlencoded" },
            [
                '<div class="row">' +
                '<div class="col">' +
                new Table("authorized-purchase-quote", [ "table", "table-bordered", "table-striped", "table-hover" ]).render() +
                '</div>' +
                '</div>'
            ],
            [
                new Button([ "btn-raised", "btn-warning" ], "discard", "Close").render()
            ]).render()
    });
    // Widen the modal (no attribute setter on ModalFormCustom yet).
    $('#authorized-purchase-quote-modal .modal-dialog').css({
        maxWidth: "90%"
    });
    // DataTable of authorized purchase quotes eligible for PO generation.
    let authorizedpurchasequote = $('#authorized-purchase-quote').DataTable({
        info: false,
        scrollY: "90%",
        ajax: {
            url: "/purchasequotesforpurchaseorders?company=" + window.localStorage.getItem('current_user_company'),
            dataSrc: "purchase_quotes_for_purchase_orders"
        },
        //serverSide: true,
        select: {
            style: "single"
        },
        scroller: true,
        scrollCollapse:true,
        saveState: true,
        rowId: "purchase_quote_id",
        responsive: true,
        columns: [
            { title: "Purchase Quote ID", data: "purchase_quote_id", visible: false },
            { title: "Purchase Quote", data: "purchase_quote_number" },
            { title: "Vendor ID", data: "vendor_id", visible: false },
            { title: "Vendor Code", data: "vendor_code" },
            { title: "Vendor Name", data: "vendor_name" },
            { title: "Vendor Ref", data: "vendor_quote_reference_number" },
            { title: "Purchase Request ID", data: "purchase_request_id", visible: false },
            { title: "Purchase Request", data: "purchase_request_number" },
            { title: "Created By ID", data: "user_id", visible: false },
            { title: "Created By", data: "user_display_name" },
            { title: "Total Expense", data: "grand_total", render: $.fn.dataTable.render.number(",", ".", 4), className: "total" },
            { render: function() {
                    return new Button([ "btn-raised", "btn-success", "generate" ], "generate", "Generate").render();
                }
            }
        ]
    });
    // Remember which quote row the user clicked.
    authorizedpurchasequote.on('click', 'tr', function() {
        selected_row.selected_authorized_purchase_quote = authorizedpurchasequote.row(this).data();
    });
    PurchaseOrderController.tableAutoUpdate(authorizedpurchasequote, selected_row, table_row_count, time_out_id);
    // Per-row "Generate" button: load lookup data, then ask for PO terms.
    // NOTE(review): this Promise chain is not awaited/returned (floating
    // promise) and `rejected` is never populated — .then callbacks receive a
    // single argument; confirm error handling is intentional.
    $(document).on("click", ".generate", async function() {
        Promise.all([ new PurchaseOrderService().getDeliveryModes(), new PurchaseOrderService().getDeliveryTerms(), new PurchaseOrderService().getPaymentTerms(), new PurchaseOrderService().getMethodsOfPayment() ]).then((resolved, rejected) => {
            if ( resolved ) {
                // Nested modal to capture the purchase-order terms.
                $('#authorized-purchase-quote-modal .modal-container').block({
                    message: new ModalFormCustom("purchase-order-terms-modal", `Please fill in Purchase Order Terms for ${selected_row.selected_authorized_purchase_quote.purchase_quote_number}`, { form_method: "POST", form_id: "purchase-order-terms", form_enctype: "application/x-www-form-urlencoded" },
                        [
                            '<div class="row">' +
                            '<div class="col-6">' +
                            '<div class="form-group">' +
                            new Label('delivery-mode', [], [], 'Delivery Mode').render() +
                            new Input('text', [], 'delivery-mode', {}).render() +
                            '</div>' +
                            '</div>' +
                            '<div class="col-6">' +
                            '<div class="form-group">' +
                            new Label('delivery-term', [], [], 'Delivery Term').render() +
                            new Input('text', [], 'delivery-term', {}).render() +
                            '</div>' +
                            '</div>' +
                            '</div>' +
                            '<div class="row">' +
                            '<div class="col-4">' +
                            '<div class="form-group">' +
                            new Label('delivery-date', [], [], 'Delivery Date').render() +
                            new Input('text', [], 'delivery-date', {}).render() +
                            '</div>' +
                            '</div>' +
                            '</div>' +
                            '<div class="row">' +
                            '<div class="col-6">' +
                            '<div class="form-group">' +
                            new Label('method-of-payment', [], [], 'Method of Payment').render() +
                            new Input('text', [], 'method-of-payment', {}).render() +
                            '</div>' +
                            '</div>' +
                            '<div class="col-6">' +
                            '<div class="form-group">' +
                            new Label('payment-term', [], [], 'Payment Term').render() +
                            new Input('text', [], 'payment-term', {}).render() +
                            '</div>' +
                            '</div>' +
                            '</div>'
                        ],
                        [
                            new Button(['btn-raised', 'btn-success'], 'apply', 'Apply').render()
                        ]).render()
                });
                $('#purchase-order-terms-modal .modal-dialog').css({
                    width: '500px'
                });
                // Lookup pickers for each term field, fed by the resolved
                // service calls (0: modes, 1: delivery terms, 2: payment
                // terms, 3: payment methods).
                $('#delivery-mode').inputpicker({
                    data: resolved[0].delivery_modes,
                    fields:[
                        { name: 'delivery_mode_code', text: 'CODE' },
                        { name: 'delivery_mode', text: 'DELIVERY MODE' }
                    ],
                    headShow: true,
                    fieldText : 'delivery_mode_code',
                    fieldValue: 'delivery_mode_id',
                    filterOpen: true,
                    autoOpen: true
                });
                $('#delivery-term').inputpicker({
                    data: resolved[1].delivery_terms,
                    fields:[
                        { name: 'delivery_term_code', text: 'CODE' },
                        { name: 'delivery_term', text: 'DELIVERY TERM' }
                    ],
                    headShow: true,
                    fieldText : 'delivery_term_code',
                    fieldValue: 'delivery_term_id',
                    filterOpen: true,
                    autoOpen: true
                });
                $('#method-of-payment').inputpicker({
                    data: resolved[3].methods_of_payment,
                    fields:[
                        { name: 'method_of_payment_code', text: 'CODE' },
                        { name: 'method_of_payment', text: 'METHOD OF PAYMENT' }
                    ],
                    headShow: true,
                    fieldText : 'method_of_payment_code',
                    fieldValue: 'method_of_payment_id',
                    filterOpen: true,
                    autoOpen: true
                });
                $('#payment-term').inputpicker({
                    data: resolved[2].payment_terms,
                    fields:[
                        { name: 'payment_term_code', text: 'CODE' },
                        { name: 'payment_term', text: 'PAYMENT TERM' }
                    ],
                    headShow: true,
                    fieldText : 'payment_term_code',
                    fieldValue: 'payment_term_id',
                    filterOpen: true,
                    autoOpen: true
                });
                $('#delivery-date').datepicker({
                    dateFormat: "yy-mm-dd",
                    showButtonPanel: true,
                    showOtherMonths: true,
                    selectOtherMonths: true
                });
                // Apply: assemble the PO payload and confirm generation.
                $("#apply").on("click", async function() {
                    // Display values for the printed report (human-readable).
                    const poTerms = {
                        [ $('#delivery-mode').attr("id").replace("-", "_") ]: $('#delivery-mode').val() ? $('#delivery-mode').inputpicker('element', $('#delivery-mode').val())["delivery_mode"] : "",
                        [ $('#delivery-term').attr("id").replace("-", "_") ]: $('#delivery-term').val() ? $('#delivery-term').inputpicker('element', $('#delivery-term').val())["delivery_term_code"] : "",
                        [ $('#delivery-date').attr("id").replace("-", "_") ]: $('#delivery-date').val() || "",
                        [ $('#method-of-payment').attr("id").replace(/-/g, "_") ]: $('#method-of-payment').val() ? $('#method-of-payment').inputpicker('element', $('#method-of-payment').val())["method_of_payment"] : "",
                        [ $('#payment-term').attr("id").replace("-", "_") ]: $('#payment-term').val() ? $('#payment-term').inputpicker('element', $('#payment-term').val())["payment_term"] : ""
                    };
                    // Raw id values for the POST body (null when blank).
                    const poTermsData = [
                        { [ $('#delivery-mode').attr("id").replace("-", "_") ]: $('#delivery-mode').val() || null },
                        { [ $('#delivery-term').attr("id").replace("-", "_") ]: $('#delivery-term').val() || null },
                        { [ $('#delivery-date').attr("id").replace("-", "_") ]: $('#delivery-date').val() || null },
                        { [ $('#method-of-payment').attr("id").replace(/-/g, "_") ]: $('#method-of-payment').val() || null },
                        { [ $('#payment-term').attr("id").replace("-", "_") ]: $('#payment-term').val() || null }
                    ];
                    await new PurchaseOrderService().getAuthorizedPurchaseQuoteLines(selected_row.selected_authorized_purchase_quote.purchase_quote_id).then(async (response) => {
                        let formData = new Array(
                            { name: "purchase_order_header_data", value: JSON.stringify(
                                [
                                    { vendor: selected_row.selected_authorized_purchase_quote.vendor_id },
                                    ...poTermsData,
                                    { created_by: window.localStorage.getItem('current_user_id') },
                                    { purchase_order_status: 1 }
                                ]
                            )
                            },
                            { name: "purchase_order_line_data", value: JSON.stringify(response.authorized_purchase_quote_lines) },
                            { name: "user_id", value: window.localStorage.getItem('current_user_id') },
                            { name: "authorized_purchase_quote", value: selected_row.selected_authorized_purchase_quote.purchase_quote_id },
                            { name: "company", value: window.localStorage.getItem('current_user_company') },
                            { name: "_token", value: $('meta[name="csrf-token"]').attr("content") }
                        );
                        // Confirmation dialog; only POST when the user confirms.
                        swal({
                            icon: "info",
                            title: "Generate PO?",
                            text: `Generate PO for PQ# ${selected_row.selected_authorized_purchase_quote.purchase_quote_number}`,
                            closeOnClickOutside: false,
                            closeOnEsc: false,
                            buttons: {
                                cancel: {
                                    visible: true
                                },
                                confirm: {
                                    text: "Generate"
                                }
                            }
                        }).then(async (generateIt) => {
                            if ( generateIt ) {
                                return await new PurchaseOrderService().newPurchaseOrder(formData);
                            }
                        }).then((result) => {
                            $('#authorized-purchase-quote-modal .modal-container').unblock();
                            if ( result.created ) {
                                // Success: notify, then print the PO report.
                                swal({
                                    icon: "success",
                                    title: "Purchase Order Generated",
                                    text: `Purchase Order ${result.newpurchaseordernumber} generated from PQ# ${selected_row.selected_authorized_purchase_quote.purchase_quote_number}`,
                                    buttons: false,
                                    timer: 3000,
                                    closeOnEsc: false,
                                    closeOnClickOutside: false
                                }).then(() => {
                                    new ReportTemplate(result.newpurchaseordernumber, JSON.parse(window.localStorage.getItem('current_user_company_information')), selected_row.selected_authorized_purchase_quote, response.authorized_purchase_quote_lines, window.localStorage.getItem('current_user'), selected_row.selected_authorized_purchase_quote.authorized_users, selected_row.selected_authorized_purchase_quote.authorized_dates, poTerms).generate();
                                });
                            } else {
                                swal({
                                    icon: "error",
                                    title: "Something went wrong!",
                                    text: "Reload application and try again",
                                    buttons: false,
                                    timer: 3000,
                                    closeOnEsc: false,
                                    closeOnClickOutside: false
                                });
                            }
                        }).catch(err => {
                            // NOTE(review): errors are swallowed here (returns
                            // false); confirm this best-effort behavior.
                            if ( err ) {
                                return false;
                            }
                        });
                    });
                });
            }
        });
    });
}
builder() {
$(document).ready(function() {
$(document).on("click", "#purchaseorder", function(){
let selected_row = {};
let swal_text = document.createElement("div");
swal_text.className = "swal-text text-center";
let table_row_count;
let time_out_id;
let po_row_count;
let po_time_out_id;
PurchaseOrderController.generatePurchaseOrderPopUp(selected_row, table_row_count, time_out_id);
let purchase_order = $('#purchase-order').DataTable({
info: false,
ajax: {
url: "/purchaseorders?company=" + window.localStorage.getItem('current_user_company'),
dataSrc: "purchase_orders"
},
//serverSide: true,
select: {
style: "single"
},
scrollY: 225,
scrollCollapse:true,
scroller: true,
saveState: true,
rowId: "purchase_order_id",
columns: [
{ title: "Purchase Order ID", data: "purchase_order_id", visible: false },
{ title: "Purchase Order", data: "purchase_order_number" },
{ title: "Vendor ID", data: "vendor_id", visible: false },
{ title: "Vendor Code", data: "vendor_code" },
{ title: "Vendor Name", data: "vendor_name" },
{ title: "Delivery Mode ID", data: "delivery_mode_id", visible: false },
{ title: "Delivery Mode", data: "delivery_mode_code" },
{ title: "Delivery Term ID", data: "delivery_term_id", visible: false },
{ title: "Delivery Term", data: "delivery_term_code" },
{ title: "Payment Term ID", data: "payment_term_id", visible: false },
{ title: "Payment Term", data: "payment_term_code" },
{ title: "Method of Payment ID", data: "method_of_payment_id", visible: false },
{ title: "Method of Payment", data: "method_of_payment" },
{ title: "Status ID", data: "purchase_order_status_id", visible: false },
{ title: "Status", data: "purchase_order_status" },
{ title: "User ID", data: "user_id", visible: false },
{ title: "Created By", data: "user_display_name" },
{ title: "Created Date", data: "created_date" },
// { title: "Received Date(s)", data: "received_dates" },
// { title: "Received By", data: "received_by" },
// { title: "Invoiced Date(s)", data: "invoiced_dates" },
// { title: "Invoiced By", data: "invoiced_by" },
{ title: "Authorized By", data: "authorized_users", visible: false },
{ title: "Authorized Date", data: "authorized_dates", visible: false }
],
dom: "Bfrtip",
buttons: [
{
text: "Generate Purchase Order(s)",
className: "btn btn-raised btn-primary waves-effect waves-light",
action: function() {
PurchaseOrderController.generatePurchaseOrderPopUp(selected_row, table_row_count, time_out_id);
}
}
],
initComplete: async function() {
this.api().buttons().container().append(new CheckBoxInline([], {}, "global", "all", "Global").render() +
new CheckBoxInline([], {}, "self", "self", "Self").render() +
new SelectLabel("Status").render() + new Select([ "purchase-order-status" ], "purchase-order-status", { multiple: "multiple" }, {}, 0).render());
$('select').SumoSelect({ selectAll: true });
let purchase_order_statuses = await new PurchaseOrderService().getPurchaseOrderStatuses().then((response) => { return response.purchaseorderstatuses; });
purchase_order_statuses.map((status, index) => {
$('select.purchase-order-status')[0].sumo.add(status.purchase_order_status_id, status.purchase_order_status, index);
});
$('select.purchase-order-status')[0].sumo.selectItem("1");
let column_status = this.api().columns(13);
$(document).on("change", "select.purchase-order-status",function() {
column_status.search($(this).val().map((value) => { return value; }).join("|"), true, false).draw();
});
}
});
//inspired by https://stackoverflow.com/questions/31586354/jquery-datatables-scroll-to-bottom-when-a-row-is-added
let $scrollBody = $(purchase_order.table().node()).parent();
$scrollBody.scrollTop($scrollBody.get(0).scrollHeight);
function floatConvert(num) {
return typeof num === 'string' ? num.replace(/,/g, '') * 1 : typeof num === 'number' ? num : 0;
}
$('#purchase-order-lines').append($("<tfoot/>").attr("id", "purchase-order-lines-footer"));
let total_discount, total_vat, total_svat, total_nbt, total_olt, total;
let purchase_order_lines = $('#purchase-order-lines').DataTable({
paging: false,
info: false,
scrollY: 225,
scrollCollapse:true,
columns: [
{ title: "Line ID", data: "purchase_order_line_id", visible: false },
{ title: "Item", data: "item_code" },
{ title: "Item Description", data: "item_attribute_description", width: "35%", render: function(data, type, row) {
return type === 'display' && data.length > 56 ?
data.substr( 0, 56 ) +'…' :
data;
}
},
{ title: "Product Description", data: "product_description", visible: false },
{ title: "UoM", data: "item_unit_of_measure", width: "5%" },
{ title: "Currency", data: "currency", width: "5%" },
{ title: "Price", data: "item_unit_price", render: $.fn.dataTable.render.number(",", ".", 4), className: "price" },
{ title: "Qty", data: "item_quantity", render: $.fn.dataTable.render.number(",", ".", 4), className: "qty" },
{ title: "Discount", data: "item_line_discount", className: "dis vas", render: $.fn.dataTable.render.number(",", ".", 4) },
{ title: "NBT", data: "item_line_nbt", className: "nbt vas", render: $.fn.dataTable.render.number(",", ".", 4) },
{ title: "OLT", data: "item_line_olt", className: "olt vas", render: $.fn.dataTable.render.number(",", ".", 4) },
{ title: "VAT", data: "item_line_vat", className: "vat vas", render: $.fn.dataTable.render.number(",", ".", 4) },
{ title: "SVAT", data: "item_line_svat", className: "svat vas", render: $.fn.dataTable.render.number(",", ".", 4) },
{ title: "Line Total", className: "total", data: null, render: function(data, type, row) {
return $.fn.dataTable.render.number(",", ".", 4).display(((((row.item_unit_price * row.item_quantity) - row.item_line_discount) + Number(row.item_line_nbt) + Number(row.item_line_olt))+ Number(row.item_line_vat)));
}
}
],
footerCallback: function(row, data, start, end, display) {
this.api().columns('.dis').every(function(){
total_discount = this.data().toArray().reduce((a,b) => Number(a) + Number(b), 0);
});
this.api().columns('.nbt').every(function(){
total_nbt = this.data().toArray().reduce((a,b) => Number(a) + Number(b), 0);
});
this.api().columns('.olt').every(function(){
total_olt = this.data().toArray().reduce((a,b) => Number(a) + Number(b), 0);
});
this.api().columns('.vat').every(function(){
total_vat = this.data().toArray().reduce((a,b) => Number(a) + Number(b), 0);
});
this.api().columns('.svat').every(function(){
total_svat = this.data().toArray().reduce((a,b) => Number(a) + Number(b), 0);
});
total = this.api().cells(null, '.total').render('display').reduce((a, b) => floatConvert(a) + floatConvert(b), 0);
if (data.length) {
$('tfoot#purchase-order-lines-footer').html(
'<tr>' +
'<td class="totalling-tag text-right">SUB TOTAL</td>' +
'<td class="totalling text-right">' + $.fn.dataTable.render.number(",", ".", 4).display(total + total_discount - (total_vat + total_nbt + total_olt)) + '</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right">TOTAL DISCOUNT</td>' +
'<td class="totalling text-right">(' + $.fn.dataTable.render.number(",", ".", 4).display(total_discount) + ')</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right">TOTAL BEFORE TAX</td>' +
'<td class="totalling text-right">' + $.fn.dataTable.render.number(",", ".", 4).display((total - (total_nbt + total_olt)) - total_vat) + '</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right">TOTAL NBT</td>' +
'<td class="totalling text-right">' + $.fn.dataTable.render.number(",", ".", 4).display(total_nbt) + '</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right">TOTAL OLT</td>' +
'<td class="totalling text-right">' + $.fn.dataTable.render.number(",", ".", 4).display(total_olt) + '</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right">TOTAL VAT</td>' +
'<td class="totalling text-right">' + $.fn.dataTable.render.number(",", ".", 4).display(total_vat) + '</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right">TOTAL SVAT</td>' +
'<td class="totalling text-right">' + $.fn.dataTable.render.number(",", ".", 4).display(total_svat) + '</td>' +
'</tr>' +
'<tr>' +
'<td class="totalling-tag text-right"><strong>GRAND TOTAL</strong></td>' +
'<td class="totalling text-right"><strong>' + $.fn.dataTable.render.number(",", ".", 4).display(total) + '</strong></td>' +
'</tr>'
);
}
}
});
purchase_order.on('click', 'tr', function() {
if (!$(this).hasClass("selected")) {
purchase_order.$('tr.selected').removeClass("selected");
$(this).addClass("selected");
selected_row.purchase_order = purchase_order.row(this).data();
if (purchase_order_lines.rows().data()) {
purchase_order_lines.clear();
}
//purchase_order_lines.rows.add(data).draw();
}
});
purchase_order.on('click', 'tr', async function(event) {
selected_row.purchase_order = purchase_order.row(this).data();
//selection issue
if ( purchase_order.row(this, { selected: true }).data() === purchase_order.row(this).data() ) {
event.stopPropagation();
return false;
} else {
await new PurchaseOrderService().getPurchaseOrderLines(selected_row.purchase_order.purchase_order_id).then((response) => {
if ( response.data ) {
purchase_order_lines.clear();
purchase_order_lines.rows.add(response.data).draw();
purchase_order_lines.columns('.vas').every(function() {
if ( this.data().toArray().every(e => Number(e) === 0) ) {
this.visible(false);
} else {
this.visible(true);
}
});
}
});
// if ( selected_row.selected_purchase_quote.purchase_quote_status_id < 4 ) {
// purchaseprocurement_quote.buttons('.extend').enable();
// } else {
// purchaseprocurement_quote.buttons('.extend').disable();
// }
// if ( selected_row.selected_purchase_quote.purchase_quote_status_id < 3 ) {
// purchaseprocurement_quote.buttons('.delete').enable();
// } else {
// purchaseprocurement_quote.buttons('.delete').disable();
// }
}
});
PurchaseOrderController.purchaseOrderAutoUpdate(purchase_order, selected_row, po_row_count, po_time_out_id);
$(document).on('click', '#discard', function() {
$.unblockUI();
document.body.style.overflowY = "auto";
});
//fixing a bug with datatables scrollY when on firefox
if (Browser.name === "firefox") {
$('div.dataTables_scrollBody').css('padding-right', "6px");
}
//fixing bug with datatable search
$('input[type="search"]').addClass("form-control");
//remove the data table button styling
$('.dt-button').removeClass("dt-button");
});
});
}
}
export default new PurchaseOrderController().builder(); |
#!/bin/sh
# OpenClash self-update: compares the recorded latest version against the
# installed one, downloads the matching luci-app-openclash ipk (through the
# running Clash HTTP proxy when available) and reinstalls it via a staged
# helper script.
. /usr/share/openclash/openclash_ps.sh
# Prevent multiple concurrent instances of this script.
status=$(unify_ps_status "openclash_update.sh")
[ "$status" -gt "3" ] && exit 0
# One-key update: enable OpenClash and refresh all three cores in parallel first.
if [ "$1" = "one_key_update" ]; then
uci set openclash.config.enable=1
uci commit openclash
/usr/share/openclash/openclash_core.sh "$1" >/dev/null 2>&1 &
/usr/share/openclash/openclash_core.sh "Tun" "$1" >/dev/null 2>&1 &
/usr/share/openclash/openclash_core.sh "Game" "$1" >/dev/null 2>&1 &
wait
fi
START_LOG="/tmp/openclash_start.log"
LOGTIME=$(date "+%Y-%m-%d %H:%M:%S")
LOG_FILE="/tmp/openclash.log"
LAST_OPVER="/tmp/openclash_last_version"
# Latest released version string, leading "v" stripped.
LAST_VER=$(sed -n 1p "$LAST_OPVER" 2>/dev/null |sed "s/^v//g")
# Current (OP_CV) and latest (OP_LV) versions reduced to their minor+patch
# digits for the numeric comparison below.
OP_CV=$(sed -n 1p /etc/openclash/openclash_version 2>/dev/null |awk -F '-' '{print $1}' |awk -F 'v' '{print $2}' |awk -F '.' '{print $2$3}' 2>/dev/null)
OP_LV=$(sed -n 1p $LAST_OPVER 2>/dev/null |awk -F '-' '{print $1}' |awk -F 'v' '{print $2}' |awk -F '.' '{print $2$3}' 2>/dev/null)
HTTP_PORT=$(uci get openclash.config.http_port 2>/dev/null)
PROXY_ADDR=$(uci get network.lan.ipaddr 2>/dev/null |awk -F '/' '{print $1}' 2>/dev/null)
# Reuse the first configured proxy credential pair, if any.
if [ -s "/tmp/openclash.auth" ]; then
PROXY_AUTH=$(cat /tmp/openclash.auth |awk -F '- ' '{print $2}' |sed -n '1p' 2>/dev/null)
fi
# Proceed only when a newer version was recorded by the version check.
if [ "$(expr "$OP_LV" \> "$OP_CV")" -eq 1 ] && [ -f "$LAST_OPVER" ]; then
echo "开始下载 OpenClash-$LAST_VER ..." >$START_LOG
# When clash is running, download through its local HTTP proxy.
if pidof clash >/dev/null; then
curl -sL -m 30 --retry 5 -x http://$PROXY_ADDR:$HTTP_PORT -U "$PROXY_AUTH" https://github.com/vernesong/OpenClash/releases/download/v"$LAST_VER"/luci-app-openclash_"$LAST_VER"_all.ipk -o /tmp/openclash.ipk >/dev/null 2>&1
else
curl -sL -m 30 --retry 5 https://github.com/vernesong/OpenClash/releases/download/v"$LAST_VER"/luci-app-openclash_"$LAST_VER"_all.ipk -o /tmp/openclash.ipk >/dev/null 2>&1
fi
if [ "$?" -eq "0" ] && [ -s "/tmp/openclash.ipk" ]; then
echo "OpenClash-$LAST_VER 下载成功,开始更新,更新过程请不要刷新页面和进行其他操作..." >$START_LOG
# Stage the reinstall in a detached /tmp helper so it survives the removal
# of the package that contains this script. The quoted heredoc delimiter
# means the content below is written verbatim (no expansion here).
cat > /tmp/openclash_update.sh <<"EOF"
#!/bin/sh
LOGTIME=$(date "+%Y-%m-%d %H:%M:%S")
START_LOG="/tmp/openclash_start.log"
LOG_FILE="/tmp/openclash.log"
echo "正在卸载旧版本,更新过程请不要刷新页面和进行其他操作 ..." >$START_LOG
uci set openclash.config.enable=0
uci commit openclash
opkg remove luci-app-openclash
echo "正在安装新版本,更新过程请不要刷新页面和进行其他操作 ..." >$START_LOG
opkg install /tmp/openclash.ipk
if [ "$?" -eq "0" ]; then
rm -rf /tmp/openclash.ipk >/dev/null 2>&1
echo "OpenClash 更新成功,即将进行重启!" >$START_LOG
echo "${LOGTIME} OpenClash Update Successful" >>$LOG_FILE
sleep 3
uci set openclash.config.enable=1
uci commit openclash
/etc/init.d/openclash restart 2>/dev/null
else
echo "OpenClash 更新失败,文件保存在/tmp/openclash.ipk,请尝试手动更新!" >$START_LOG
echo "${LOGTIME} OpenClash Update Fail" >>$LOG_FILE
sleep 10
echo "" >$START_LOG
fi
EOF
# NOTE(review): 4755 sets the setuid bit, which has no effect on shell
# scripts on most systems — plain 0755 was likely intended; confirm.
chmod 4755 /tmp/openclash_update.sh
nohup /tmp/openclash_update.sh &
wait
rm -rf /tmp/openclash_update.sh
else
echo "OpenClash-$LAST_VER 下载失败,请检查网络或稍后再试!" >$START_LOG
rm -rf /tmp/openclash.ipk >/dev/null 2>&1
echo "${LOGTIME} OpenClash Update Error" >>$LOG_FILE
sleep 5
echo "" >$START_LOG
# Restore the config-reload flag so a later restart reloads normally.
if [ "$(uci get openclash.config.config_reload 2>/dev/null)" -eq 0 ]; then
uci set openclash.config.config_reload=1
uci commit openclash
/etc/init.d/openclash restart 2>/dev/null
fi
fi
else
if [ ! -f "$LAST_OPVER" ]; then
echo "获取版本信息失败,请稍后再试..." >$START_LOG
echo "${LOGTIME} OpenClash Version Check Error, Please Try Again After A few seconds" >>$LOG_FILE
sleep 5
echo "" >$START_LOG
else
echo "OpenClash 没有更新,停止继续操作!" >$START_LOG
echo "${LOGTIME} OpenClash Version No Change, Do Nothing" >>$LOG_FILE
sleep 5
echo "" >$START_LOG
fi
if [ "$(uci get openclash.config.config_reload 2>/dev/null)" -eq 0 ]; then
uci set openclash.config.config_reload=1
uci commit openclash
/etc/init.d/openclash restart 2>/dev/null
fi
fi
|
<reponame>Kalinin-Andrey/go-app
package pg
import (
"redditclone/internal/domain/user"
minipkg_gorm "github.com/minipkg/db/gorm"
"github.com/minipkg/selection_condition"
"github.com/jinzhu/gorm"
"github.com/pkg/errors"
"github.com/minipkg/log"
)
// IRepository is the common (currently empty) marker interface that all
// entity repositories built by this package satisfy; concrete query methods
// live on the per-entity implementations.
type IRepository interface{}
// repository is the shared base embedded by the gorm-backed entity
// repositories in this package. (The previous comment mentioned "albums",
// which does not match this package — it manages user-domain entities.)
type repository struct {
	db         minipkg_gorm.IDB                        // underlying gorm connection wrapper
	logger     log.ILogger                             // structured logger
	Conditions *selection_condition.SelectionCondition // default selection conditions applied by DB()
}
// DefaultLimit is the default maximum number of records per query.
// NOTE(review): not referenced in this file — presumably used by the
// entity repositories; verify.
const DefaultLimit = 1000
// GetRepository returns the concrete repository implementation for the
// named entity, or an error when no repository is registered for it.
func GetRepository(logger log.ILogger, dbase minipkg_gorm.IDB, entity string) (repo IRepository, err error) {
	base := &repository{
		db:     dbase,
		logger: logger,
	}
	if entity == user.EntityName {
		return NewUserRepository(base)
	}
	return nil, errors.Errorf("Repository for entity %q not found", entity)
}
// SetDefaultConditions stores defaultConditions so that they are applied to
// every query handle built via DB().
func (r *repository) SetDefaultConditions(defaultConditions *selection_condition.SelectionCondition) {
	r.Conditions = defaultConditions
}
// DB returns the underlying gorm handle with the repository's default
// selection conditions applied.
// NOTE(review): value receiver here vs pointer receiver on
// SetDefaultConditions — a copy of the struct is made per call; confirm
// this asymmetry is intentional.
func (r repository) DB() *gorm.DB {
	return minipkg_gorm.Conditions(r.db.DB(), r.Conditions)
}
|
import React from 'react';
export interface IconCodesandboxProps extends React.SVGAttributes<SVGElement> {
color?: string;
size?: string | number;
className?: string;
style?: React.CSSProperties;
}
/**
 * CodeSandbox feather icon.
 *
 * `React.SFC` is a deprecated alias (use `React.FC`), and `defaultProps` on
 * function components is deprecated — defaults are supplied via destructuring
 * instead, which yields the same rendered output for `color` and `size`.
 * Remaining SVG props are forwarded so callers can override attributes.
 */
export const IconCodesandbox: React.FC<IconCodesandboxProps> = (
  props: IconCodesandboxProps
): React.ReactElement => {
  const { color = 'currentColor', size = '1em', style, ...restProps } = props;
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width={size}
      height={size}
      viewBox="0 0 24 24"
      fill="none"
      stroke={color}
      className="feather feather-codesandbox"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
      style={{ verticalAlign: 'middle', ...style }}
      {...restProps}
    >
      <path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z" />
      <polyline points="7.5 4.21 12 6.81 16.5 4.21" />
      <polyline points="7.5 19.79 7.5 14.6 3 12" />
      <polyline points="21 12 16.5 14.6 16.5 19.79" />
      <polyline points="3.27 6.96 12 12.01 20.73 6.96" />
      <line x1="12" y1="22.08" x2="12" y2="12" />
    </svg>
  );
};
export default IconCodesandbox;
|
import React from 'react';
export default function CommentIcon(props) {
return (
<svg
aria-hidden="true"
focusable="false"
width="1.28em"
height="1em"
viewBox="0 0 1792 1408"
>
<path d="M1408 512q0 139-94 257t-256.5 186.5T704 1024q-86 0-176-16q-124 88-278 128q-36 9-86 16h-3q-11 0-20.5-8t-11.5-21q-1-3-1-6.5t.5-6.5t2-6l2.5-5l3.5-5.5l4-5l4.5-5l4-4.5q5-6 23-25t26-29.5t22.5-29t25-38.5t20.5-44Q142 841 71 736T0 512q0-139 94-257T350.5 68.5T704 0t353.5 68.5T1314 255t94 257zm384 256q0 120-71 224.5T1526 1169q10 24 20.5 44t25 38.5t22.5 29t26 29.5t23 25q1 1 4 4.5t4.5 5t4 5t3.5 5.5l2.5 5l2 6l.5 6.5l-1 6.5q-3 14-13 22t-22 7q-50-7-86-16q-154-40-278-128q-90 16-176 16q-271 0-472-132q58 4 88 4q161 0 309-45t264-129q125-92 192-212t67-254q0-77-23-152q129 71 204 178t75 230z" />
<rect x="0" y="0" width="1792" height="1408" fill="rgba(0, 0, 0, 0)" />
</svg>
);
}
|
var Election = artifacts.require("./Election.sol");

// Truffle test suite for the Election contract.
// Fixes in this revision:
//  - read the `voters` mapping instead of calling vote() a second time,
//  - `candidates()` accessor name (was `candidate()` in one place),
//  - the first vote in the double-voting test is now returned so it is
//    mined before the tally is read (was a race),
//  - `candidateId` is declared instead of being an implicit global.
contract("Election", function(accounts){
  var electionInstance;
  // Shared across the voting tests below.
  var candidateId;

  it("initializes with two candidates", function(){
    return Election.deployed().then(function(instance){
      return instance.candidateCount();
    }).then(function(count){
      assert.equal(count, 2);
    });
  });

  it("it initialize the candidate with the correct values", function(){
    return Election.deployed().then(function(instance){
      electionInstance = instance;
      return electionInstance.candidates(1);
    }).then(function(candidate){
      assert.equal(candidate[0],1,"Contains correct id");
      assert.equal(candidate[1], "Candidate 1", "Caontains the correct name");
      assert.equal(candidate[2],0,"Correct vote count");
      return electionInstance.candidates(2);
    }).then(function(candidate){
      assert.equal(candidate[0],2,"Contains correct id");
      assert.equal(candidate[1], "Candidate 2", "Caontains the correct name");
      assert.equal(candidate[2],0,"Correct vote count");
    });
  });

  it("Allows a voter to cast a vote", function(){
    return Election.deployed().then(function(instance){
      electionInstance = instance;
      candidateId = 1;
      return electionInstance.vote(candidateId, {from: accounts[0]});
    }).then(function(receipt){
      // Read the voters mapping; calling vote() here would cast a second
      // (reverting) vote rather than report the voted flag.
      return electionInstance.voters(accounts[0]);
    }).then(function(voted){
      assert(voted, "the voter was marked as voted");
      return electionInstance.candidates(candidateId);
    }).then(function(candidate){
      var voteCount = candidate[2];
      assert.equal(voteCount, 1, "increased");
    });
  });

  it("Throws an exception for invalid candidate", function(){
    return Election.deployed().then(function(instance){
      electionInstance = instance;
      return electionInstance.vote(99, {from: accounts[1]});
    }).then(assert.fail).catch(function(error){
      assert(error.message.indexOf('revert')>=0, "error message must contain revert");
      return electionInstance.candidates(1);
    }).then(function(candidate1){
      var voteCount = candidate1[2];
      assert.equal(voteCount, 1, "candidate 1 did not recieve any vote");
      return electionInstance.candidates(2);
    }).then(function(candidate2){
      var voteCount = candidate2[2];
      assert.equal(voteCount,0,"Candidate 2 did not recieve any vote")
    });
  });

  it("throws an exception for double voting", function(){
    return Election.deployed().then(function(instance){
      electionInstance = instance;
      candidateId = 2;
      // Return the first vote so it is mined before the tally is read.
      return electionInstance.vote(candidateId, {from: accounts[1]});
    }).then(function(receipt){
      return electionInstance.candidates(candidateId);
    }).then(function(candidate){
      var voteCount = candidate[2];
      assert.equal(voteCount, 1 , "accepts first vote");
      return electionInstance.vote(candidateId, {from: accounts[1]});
    }).then(assert.fail).catch(function(error){
      assert(error.message.indexOf('revert')>=0, "error message");
      return electionInstance.candidates(1);
    }).then(function(candidate1){
      var voteCount = candidate1[2];
      assert.equal(voteCount, 1, "Candidate 1 did not recieve any votes");
      return electionInstance.candidates(2);
    }).then(function(candidate2){
      var voteCount = candidate2[2];
      assert.equal(voteCount, 1, "Candidate 2 did not receive any votes");
    });
  });
});
import json
class Book:
    """A minimal record of a book: title, author and publication year."""

    def __init__(self, title, author, year):
        self.title = title
        self.author = author
        self.year = year

    def to_json(self):
        """Return this book serialized as a JSON object string."""
        # The instance's attribute dict is exactly the payload we want,
        # so it can be serialized directly.
        return json.dumps(self.__dict__)
# Example usage
book1 = Book("The Catcher in the Rye", "J.D. Salinger", 1951)
json_str = book1.to_json()
print(json_str) # Output: {"title": "The Catcher in the Rye", "author": "J.D. Salinger", "year": 1951} |
/* tslint:disable */
/* eslint-disable */
/**
* WaniKani
* WaniKani: The API
*
* OpenAPI spec version: 20170710.0
*
*
*/
/**
*
* @export
* @interface ImageSvgMetadata
*/
export interface ImageSvgMetadata {
  /**
   * The SVG asset contains built-in CSS styling
   * @type {boolean}
   * @memberof ImageSvgMetadata
   */
  // NOTE(review): documented as boolean but declared as `any` — presumably a
  // generator limitation; confirm against the WaniKani spec before narrowing.
  inlineStyles: any;
}
|
<filename>src/app.js
import React, { Component } from 'react'
import Widget from './widget'
class App extends Component {
render () { return <Widget /> }
}
export default App
|
public class AnimalClassifier {

    /**
     * Classifies an animal from four observed features.
     *
     * @param size  observed size ("small", "large", ...)
     * @param color observed coat color
     * @param ears  observed ear shape
     * @param tail  observed tail length
     * @return "cat" or "dog" for the two known feature combinations,
     *         or {@code null} when the combination matches neither
     */
    public String classify(String size, String color, String ears, String tail) {
        boolean looksLikeCat = size.equals("small") && color.equals("brown")
                && ears.equals("pointy") && tail.equals("long");
        if (looksLikeCat) {
            return "cat";
        }
        boolean looksLikeDog = size.equals("large") && color.equals("brown")
                && ears.equals("floppy") && tail.equals("short");
        if (looksLikeDog) {
            return "dog";
        }
        return null;
    }
}
# Evaluate perplexity of the 512+512+512-shuffled-N-VB model on the
# WikiText-103 validation split. Per the flags: context is augmented with
# remove_all_but_function_words_first_third_sixth and scored with
# last_sixth_eval; batch size 1, last partial batch dropped.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-shuffled-N-VB/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-shuffled-N-VB/512+512+512-FW-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_function_words_first_third_sixth --eval_function last_sixth_eval
/* nano_rest - restful wrapper
Copyright (C) 2018 <NAME>, <NAME>, <NAME>
https://www.joltwallet.com/
*/
#include <stdio.h>
#include <stdbool.h>
#include <string.h>
#include "esp_event_loop.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "freertos/event_groups.h"
#include "esp_log.h"
#include "lwip/err.h"
#include "lwip/sockets.h"
#include "lwip/sys.h"
#include "lwip/netdb.h"
#include "lwip/dns.h"
#include "picohttpparser.h"
#include "nano_rest.h"
char rx_string[RX_BUFFER_BYTES];
static const char *TAG = "network_rest";
// Can be set via the setter functions
static char *remote_domain = NULL;
static uint16_t remote_port = 0;
static char *remote_path = NULL;
static SemaphoreHandle_t http_request_complete = NULL; // Underlying type: (void *)
static const char GET_FORMAT_STR[] = \
"GET %s HTTP/1.0\r\n"
"Host: %s\r\n"
"User-Agent: esp-idf/1.0 esp32\r\n"
"\r\n";
static const char POST_FORMAT_STR[] = \
"POST %s HTTP/1.0\r\n"
"Host: %s\r\n" \
"User-Agent: esp-idf/1.0 esp32\r\n"
"Content-Type: text/plain\r\n"
"Content-Length: %d\r\n"
"\r\n"
"%s";
typedef struct task_args_t {
int get_post;
char *post_data;
char *result_data_buf;
size_t result_data_buf_len;
} task_args_t;
/*
 * Replaces the module-level remote domain with a heap copy of str.
 * Passing NULL clears the current value.
 *
 * Fixes: the pointer is nulled before reallocation and the malloc result is
 * checked, so an allocation failure can no longer leave a dangling pointer
 * or crash in strcpy().
 */
void nano_rest_set_remote_domain(char *str){
    if( NULL != remote_domain ){
        free(remote_domain);
    }
    remote_domain = NULL;
    if( NULL != str ){
        remote_domain = malloc(strlen(str)+1);
        if( NULL != remote_domain ){
            strcpy(remote_domain, str);
        }
    }
}
/* Sets the TCP port used by subsequent HTTP requests. */
void nano_rest_set_remote_port(uint16_t port){
    remote_port = port;
}
/*
 * Replaces the module-level remote URL path with a heap copy of str.
 * Passing NULL clears the current value.
 *
 * Fixes (mirrors nano_rest_set_remote_domain): the pointer is nulled before
 * reallocation and the malloc result is checked, so an allocation failure
 * can no longer leave a dangling pointer or crash in strcpy().
 */
void nano_rest_set_remote_path(char *str){
    if( NULL != remote_path ){
        free(remote_path);
    }
    remote_path = NULL;
    if( NULL != str ){
        remote_path = malloc(strlen(str)+1);
        if( NULL != remote_path ){
            strcpy(remote_path, str);
        }
    }
}
/*
 * Performs one blocking HTTP request against the module-configured
 * remote_domain:remote_port/remote_path.
 *   get_post == 0 -> GET; get_post == 1 -> POST of post_data.
 * The response body (headers stripped) is copied into result_data_buf.
 * Returns result_data_buf on success, NULL on any failure.
 * Must only run after the remote_* setters have been called.
 */
static char *http_request_task(int get_post, char *post_data,
        char *result_data_buf, size_t result_data_buf_len) {
    int s = -1; // socket descriptor
    int r;      // bytes returned by the most recent read()
    char *request_packet = NULL;
    struct addrinfo *addrinfo = NULL;
    char * func_result = NULL;   // stays NULL unless the request fully succeeds
    char *http_response = NULL;  // growing buffer holding the raw response
    char *http_response_new = NULL;
    int http_response_len = 0;
    /* Build the request packet for the chosen verb. */
    if( 0 == get_post) {
        size_t request_packet_len = strlen(GET_FORMAT_STR) +
                strlen(remote_path) + strlen(remote_domain) + 1;
        /* NOTE(review): malloc result unchecked; snprintf would crash on OOM. */
        request_packet = malloc( request_packet_len );
        snprintf(request_packet, request_packet_len, GET_FORMAT_STR,
                remote_path, remote_domain);
    }
    else if ( 1 == get_post ) {
        /* +5 reserves room for the Content-Length %d expansion. (The original
         * note said "uint16 port", but POST_FORMAT_STR has no port field.) */
        size_t request_packet_len = strlen(POST_FORMAT_STR) +
                strlen(remote_path) + strlen(remote_domain) +
                strlen(post_data) + 5 + 1;
        request_packet = malloc( request_packet_len );
        size_t post_data_length = strlen((const char*)post_data);
        // todo: possibility that this could be truncated
        snprintf(request_packet, request_packet_len, POST_FORMAT_STR,
                remote_path, remote_domain, post_data_length, post_data);
        ESP_LOGI(TAG, "POST Request Packet:\n%s", request_packet);
    }
    else {
        ESP_LOGE(TAG, "Error, POST/Get not selected");
        goto exit;
    }
    /* Resolve the remote host (IPv4, TCP). */
    {
        const struct addrinfo hints = {
            .ai_family = AF_INET,
            .ai_socktype = SOCK_STREAM,
        };
        ESP_LOGI(TAG, "Performing DNS lookup");
        ESP_LOGI(TAG, "Remote Domain: %s", remote_domain);
        char port[10];
        snprintf(port, sizeof(port), "%d", remote_port);
        ESP_LOGI(TAG, "Remote Port: %s", port);
        int err = getaddrinfo(remote_domain, port, &hints, &addrinfo);
        ESP_LOGI(TAG, "DNS lookup success");
        if(err != 0 || addrinfo == NULL) {
            ESP_LOGE(TAG, "DNS lookup failed err=%d addrinfo=%p", err, addrinfo);
            goto exit;
        }
        else {
            ESP_LOGI(TAG, "DNS lookup success");
        }
    }
    /* Code to print the resolved IP.
       Note: inet_ntoa is non-reentrant, look at ipaddr_ntoa_r for "real" code */
    {
        struct in_addr *addr;
        addr = &((struct sockaddr_in *)addrinfo->ai_addr)->sin_addr;
        ESP_LOGI(TAG, "DNS lookup succeeded. IP=%s", inet_ntoa(*addr));
    }
    /* Open Socket Connection */
    s = socket(addrinfo->ai_family, addrinfo->ai_socktype, 0);
    if( s < 0 ) {
        ESP_LOGE(TAG, "... Failed to allocate socket.");
        goto exit;
    }
    ESP_LOGI(TAG, "... allocated socket");
    if( 0 != connect(s, addrinfo->ai_addr, addrinfo->ai_addrlen) ) {
        ESP_LOGE(TAG, "... socket connect failed errno=%d", errno);
        goto exit;
    }
    ESP_LOGI(TAG, "... connected");
    freeaddrinfo(addrinfo);
    addrinfo = NULL;
    /* Write Request to Socket */
    if (write(s, request_packet, strlen(request_packet)) < 0) {
        ESP_LOGE(TAG, "... socket send failed");
        goto exit;
    }
    ESP_LOGI(TAG, "... socket send success");
    /* Bound how long each read() below may block. */
    {
        struct timeval receiving_timeout;
        receiving_timeout.tv_sec = CONFIG_NANO_REST_RECEIVE_TIMEOUT;
        receiving_timeout.tv_usec = 0;
        if (setsockopt(s, SOL_SOCKET, SO_RCVTIMEO, &receiving_timeout,
                sizeof(receiving_timeout)) < 0) {
            ESP_LOGE(TAG, "... failed to set socket receiving timeout");
            goto exit;
        }
        ESP_LOGI(TAG, "... set socket receiving timeout success");
    }
    /* Read HTTP response: grow http_response one block at a time, looping
     * while each read() fills the whole block (a short read ends the body).
     * NOTE(review): read() returns -1 on error/timeout; that value would
     * reach memcpy as a huge size_t and corrupt http_response_len — a
     * guard for r < 0 is needed here. */
    do {
        http_response_new = realloc(http_response, http_response_len + CONFIG_NANO_REST_RECEIVE_BLOCK_SIZE);
        if( NULL == http_response_new ) {
            ESP_LOGE(TAG, "Unable to allocate additional memory for http_response");
            goto exit;
        }
        else {
            http_response = http_response_new;
        }
        char recv_buf[CONFIG_NANO_REST_RECEIVE_BLOCK_SIZE] = { 0 };
        r = read(s, recv_buf, sizeof(recv_buf)-1);
        memcpy(&http_response[http_response_len], recv_buf, r);
        http_response_len += r;
    } while( r == CONFIG_NANO_REST_RECEIVE_BLOCK_SIZE - 1 );
    ESP_LOGI(TAG, "... done reading from socket. Last read return=%d errno=%d\r\n", r, errno);
    /* Parse status line + headers; copy only the body to the caller. */
    {
        int ret, minor_version, status;
        struct phr_header headers[100];
        const char* msg;
        size_t msg_len, num_headers;
        num_headers = sizeof(headers) / sizeof(headers[0]);
        /* NOTE(review): strlen() assumes http_response is NUL-terminated, but
         * the read loop never appends a terminator — http_response_len should
         * be passed instead. Also phr_parse_response returns a negative value
         * on failure, which is used unchecked as an offset below. */
        ret = phr_parse_response(http_response, strlen(http_response),
                &minor_version, &status,
                &msg, &msg_len,
                headers, &num_headers, 0);
        int msg_size = http_response_len - ret;
        ESP_LOGI(TAG, "Message Size: %d", msg_size);
        if(result_data_buf_len > msg_size) {
            strncpy((char *)result_data_buf, (char *)&http_response[ret], msg_size);
            result_data_buf[msg_size] = '\0';
        }
        else {
            ESP_LOGE(TAG, "Insufficient result buffer.");
            goto exit;
        }
        ESP_LOGI(TAG, "phr_parse_response:\n%s", (char *) result_data_buf);
    }
    func_result = result_data_buf;
exit:
    /* Common cleanup for every exit path. */
    if( addrinfo ) {
        freeaddrinfo(addrinfo);
    }
    if( request_packet ) {
        free(request_packet);
    }
    if( s >= 0 ) {
        close(s);
    }
    if( http_response ) {
        free(http_response);
    }
    return func_result;
}
/*
 * FreeRTOS task entry point: unpacks task_args_t, runs the blocking HTTP
 * request, signals the caller blocked in network_get_data(), then deletes
 * itself.
 */
static void http_request_task_wrapper(void *args_in) {
    task_args_t *args = args_in;
    http_request_task(args->get_post, args->post_data,
            args->result_data_buf, args->result_data_buf_len);
    xSemaphoreGive(http_request_complete);
    vTaskDelete(NULL);
}
/*
 * Issues an HTTP POST of post_data in a worker task and blocks until the
 * task finishes or CONFIG_NANO_REST_RECEIVE_TIMEOUT seconds elapse.
 * Returns 0 on completion, -1 on timeout (result_data_buf is then emptied).
 *
 * NOTE(review): `t` lives on this function's stack and is read by the worker
 * task — safe only because this function blocks until the task signals or is
 * deleted. On timeout, vTaskDelete() kills the task mid-request, leaking any
 * socket/heap memory it still holds — confirm this trade-off is acceptable.
 */
int network_get_data(char *post_data,
        char *result_data_buf, size_t result_data_buf_len){
    /* Lazily create the completion semaphore on first use. */
    if( NULL == http_request_complete ) {
        ESP_LOGI(TAG, "Creating http_request_complete binary semaphore");
        http_request_complete = xSemaphoreCreateBinary();
    }
    task_args_t t = {
        .get_post = 1, /* this entry point always POSTs */
        .post_data = post_data,
        .result_data_buf = result_data_buf,
        .result_data_buf_len = result_data_buf_len
    };
    TaskHandle_t h;
    xTaskCreate(http_request_task_wrapper,
            "http_rest", 16000, //todo: optimize this number
            (void *)&t, 10, &h);
    if( xSemaphoreTake( http_request_complete,
            pdMS_TO_TICKS(CONFIG_NANO_REST_RECEIVE_TIMEOUT * 1000)) ) {
        return 0;
    }
    else {
        // Timed out
        vTaskDelete(h);
        result_data_buf[0] = '\0';
        ESP_LOGE(TAG, "HTTP Task timed out");
        return -1;
    }
}
|
# Run reconstruction (--rec) evaluation with the seq2seq module on the
# "Lamp" category using checkpoint 1000, emitting voxel-format results for
# whole shapes (--by_part False).
python test.py --rec \
--module seq2seq \
--data_root data \
--category Lamp \
--ckpt 1000 \
--format voxel \
--by_part False
|
<gh_stars>0
package com.twu.biblioteca.handlers.item;
import com.twu.biblioteca.components.Library;
import com.twu.biblioteca.components.item.Movie;
import com.twu.biblioteca.components.item.RentalItemType;
import com.twu.biblioteca.exceptions.RentalItemAlreadyExistError;
import org.json.simple.JSONObject;
public class MovieListHandler extends RentalItemListHandler {

    public MovieListHandler(Library library) {
        super(library, RentalItemType.MOVIE, "movie_data.json");
    }

    /**
     * Builds Movie instances from the loaded JSON records and registers them
     * with the library. Records that duplicate an existing item are logged
     * and skipped, matching the previous behavior.
     */
    @Override
    public void loadItemData() {
        for (Object o : this.getItemData()) {
            JSONObject movie = (JSONObject) o;
            try {
                this.getLibrary().addItem(toMovie(movie));
            } catch (RentalItemAlreadyExistError rentalItemAlreadyExistError) {
                rentalItemAlreadyExistError.printStackTrace();
            }
        }
    }

    /**
     * Maps one JSON record to a Movie. The rating is optional and only
     * selects the constructor overload; the field casts are unchanged from
     * the previous duplicated branches.
     */
    private static Movie toMovie(JSONObject movie) {
        String title = (String) movie.get("movie_title");
        String director = (String) movie.get("director");
        String genre = (String) movie.get("genre");
        int year = Math.toIntExact((Long) movie.get("year"));
        Object rating = movie.get("rating");
        if (rating != null) {
            return new Movie(title, director, genre, year, (Double) rating);
        }
        return new Movie(title, director, genre, year);
    }
}
|
#pragma once
#include <limits>
namespace nifty {
namespace ilp_backend{
    // Solver-agnostic knobs forwarded to an ILP backend implementation.
    struct IlpBackendSettings{

        // Presolve strategy; *_DEFAULT leaves the backend's own default.
        enum PreSolver {
            PRE_SOLVER_AUTO,
            PRE_SOLVER_PRIMAL,
            PRE_SOLVER_DUAL,
            PRE_SOLVER_NONE,
            PRE_SOLVER_DEFAULT
        };

        // LP algorithm choice; *_DEFAULT leaves the backend's own default.
        enum LPSolver {
            LP_SOLVER_PRIMAL_SIMPLEX,
            LP_SOLVER_DUAL_SIMPLEX,
            LP_SOLVER_BARRIER,
            LP_SOLVER_SIFTING,
            LP_SOLVER_DEFAULT
        };

        double memLimit =  {-1.0};       // memory limit; negative presumably means "no limit" — TODO confirm units
        double relativeGap{0.0};         // relative optimality gap tolerance
        double absoluteGap{0.0};         // absolute optimality gap tolerance
        double cutUp{1.0e+75};           // objective cutoff upper bound
        int prePasses{-1};               // number of presolve passes; -1 presumably "automatic" — TODO confirm
        PreSolver preSolver{PRE_SOLVER_DEFAULT};
        LPSolver lpSolver{LP_SOLVER_DEFAULT};
        size_t numberOfThreads{1};       // solver thread count
        size_t verbosity{0};             // 0 = silent
    };
} // namespace ilp_backend
} // namespace nifty
|
<filename>inc/sfml_app.h
#pragma once
#include <SFML/Graphics.hpp>
#include "sfml_raster.h"
#include "renderer.h"
// Thin SFML front end: owns the window, a raster the Renderer draws into,
// and the sprite used to present that raster. (Declarations only — see the
// implementation file for the actual loop behavior.)
class SFMLApp {
public:
    // Creates a width x height window with the given title.
    SFMLApp(unsigned int width, unsigned int height,
            sf::String const& title);
    // Runs the application; returns the process exit code.
    int start();
    // Produces one frame (presumably renders the raster and displays it —
    // confirm in the implementation).
    void draw();
private:
    sf::RenderWindow window;
    sf::Sprite sprite;
    SFMLRaster raster;
    Renderer renderer;
};
|
<reponame>HalZhan/angular2practices
// AMD module: depends on sibling modules './a' and './b' and invokes their
// method() side effects once when this module is loaded. Exports nothing.
define(['./a', './b'], function(moduleA, moduleB) {
    'use strict';
    moduleA.method();
    moduleB.method();
});
// *Getting the Schema class:
const { Schema } = require('mongoose');
// *Getting the common regex:
const COMMON_REGEX = require('../../../tools/common-regex');
// *Exporting this module as a function:
module.exports = () => {
    const Application = new Schema({
        /**
         * The application name in plain text.
         * Restricted to letters, digits, underscore and hyphen.
         */
        name: {
            type: String,
            required: true,
            // NOTE(review): `unique` builds a MongoDB unique index — it is
            // not a Mongoose validator; duplicates surface as write errors.
            unique: true,
            match: [/^[a-zA-Z0-9\_\-]+$/, 'Invalid application name']
        },
        /**
         * A salted and hashed version of the generated token (hex encoded)
         */
        token: {
            type: String,
            required: true
        },
        /**
         * The token hash salt (must be an UUID-V4 string)
         */
        salt: {
            type: String,
            required: true,
            match: [COMMON_REGEX.UUIDV4, 'Invalid salt. It must be an UUID-V4 string.']
        },
        /**
         * The database name this application will store its users info
         */
        database: {
            type: String,
            unique: true,
            required: true
        },
        /**
         * Creation timestamp; defaults to the time the document is built.
         */
        date: {
            type: Date,
            required: true,
            default: Date.now
        }
    });
    // *Returning the built schema (a fresh instance on every call):
    return Application;
};
|
<reponame>51lslcscyber/cpp-awsome-51lslcscyber<filename>beginer/3954380.cpp
// #include <bits/stdc++.h>
//using namespace std;
//class Solution
//{
//public:
// int searchInsert(vector<int> &nums, int target)
// {
// int lo = -1;
// int hi = nums.size();
// while (lo + 1 == hi)
// {
// int mid = lo + (hi - lo) / 2;
// if (target < nums[mid])
// {
// lo = mid;
// }
// else
// {
// hi = mid;
// }
// }
// return hi;
// }
//};
//
// Created by 鹄思鹄想_bit森 on 2022/5/1.
//
|
/*
* Copyright 2014 VMware, Inc. All rights reserved. Licensed under the Apache v2 License.
*/
package errors
import "fmt"
// DatastoreError describes a failed operation against a named datastore.
type DatastoreError struct {
	datastore string
	operation string
	reason    string
}

// NewDatastoreError builds a *DatastoreError for the given datastore,
// operation and failure reason.
func NewDatastoreError(datastore, operation, reason string) error {
	return &DatastoreError{
		datastore: datastore,
		operation: operation,
		reason:    reason,
	}
}

// Error implements the error interface.
func (err *DatastoreError) Error() string {
	return fmt.Sprintf("Unable to %s on datastore %s due to %s", err.operation, err.datastore, err.reason)
}
|
#!/bin/bash
# Run a single dieharder battery test (-d 204) against generator number 23
# (-g) with a fixed seed (-S) so the run is reproducible.
dieharder -d 204 -g 23 -S 3233861039
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.