text stringlengths 1 1.05M |
|---|
<filename>src/directive/longpress/index.js
const longpress = {
  beforeMount: function (el, binding, vNode) {
    if (typeof binding.value !== 'function') {
      // Throw an Error object (not a bare string) so stack traces survive.
      throw new Error('callback must be a function')
    }
    // Timer handle; non-null while a press is pending.
    let pressTimer = null
    // Start the timer; the callback fires after 1 second of sustained press.
    const start = (e) => {
      if (e.type === 'click' && e.button !== 0) {
        return
      }
      if (pressTimer === null) {
        pressTimer = setTimeout(() => {
          // Forward the originating event to the callback (it was dropped before).
          handler(e)
        }, 1000)
      }
    }
    // Cancel a pending timer (release / leave / click all abort the longpress).
    const cancel = (e) => {
      if (pressTimer !== null) {
        clearTimeout(pressTimer)
        pressTimer = null
      }
    }
    // Invoke the bound callback with the triggering event.
    const handler = (e) => {
      binding.value(e)
    }
    // Keep references on the element so unmounted() can remove every listener;
    // the original tried to remove a non-existent `el.handler` and leaked all six.
    el.$longpress = { start, cancel }
    el.addEventListener('mousedown', start)
    el.addEventListener('touchstart', start)
    el.addEventListener('click', cancel)
    el.addEventListener('mouseout', cancel)
    el.addEventListener('touchend', cancel)
    el.addEventListener('touchcancel', cancel)
  },
  // Keep the latest bound value available on the element.
  updated(el, { value }) {
    el.$value = value
  },
  // Remove every listener added in beforeMount when the directive unbinds.
  unmounted(el) {
    if (el.$longpress) {
      const { start, cancel } = el.$longpress
      el.removeEventListener('mousedown', start)
      el.removeEventListener('touchstart', start)
      el.removeEventListener('click', cancel)
      el.removeEventListener('mouseout', cancel)
      el.removeEventListener('touchend', cancel)
      el.removeEventListener('touchcancel', cancel)
      delete el.$longpress
    }
  }
}
export default longpress
|
'use strict';
/*
* Create a `keepFirst` function that takes a string as parameter
* and return the string only keeping the 2 first characters
*
* Create a `keepLast` function that takes a string as parameter
* and return the string only keeping the 2 last characters
*
* Create a `keepFirstLast` function that takes a string as parameter
* and only keep 2 characters from the third character
*
*/
// Your code:
// Return the first two characters of `str`.
// slice() handles strings shorter than 2 gracefully; the original index loop
// concatenated the string "undefined" for 0/1-character inputs.
const keepFirst = (str) => str.slice(0, 2);
// Keep up to two characters starting at index 2 (i.e. the 3rd and 4th
// characters); shorter strings yield fewer (possibly zero) characters,
// exactly like the guarded index loop this replaces.
const keepFirstLast = (str) => str.slice(2, 4);
// Return the last two characters of `str` (the whole string if shorter).
// slice(-2) fixes the original loop, which produced "undefined" + str for
// 1-character inputs (negative start index).
const keepLast = (str) => str.slice(-2);
//* Begin of tests
// Each helper is checked for: being a function, arity 1, and two sample inputs.
const assert = require('assert');
assert.strictEqual(typeof keepFirst, 'function');
assert.strictEqual(keepFirst.length, 1);
assert.strictEqual(keepFirst('lol'), 'lo');
assert.strictEqual(keepFirst('1234'), '12');
assert.strictEqual(typeof keepLast, 'function');
assert.strictEqual(keepLast.length, 1);
assert.strictEqual(keepLast('lol'), 'ol');
assert.strictEqual(keepLast('1234'), '34');
assert.strictEqual(typeof keepFirstLast, 'function');
assert.strictEqual(keepFirstLast.length, 1);
assert.strictEqual(keepFirstLast('lol'), 'l');
assert.strictEqual(keepFirstLast('1234'), '34');
//assert.fail('You must write your own tests');
// End of tests */
|
<reponame>alphagov-mirror/paas-yet-another-cloudwatch-exporter
package main
import (
"fmt"
"github.com/prometheus/client_golang/prometheus"
"regexp"
"strings"
)
var (
	// cloudwatchAPICounter counts requests made against the CloudWatch API.
	cloudwatchAPICounter = prometheus.NewCounter(prometheus.CounterOpts{
		Name: "yace_cloudwatch_requests_total",
		Help: "Help is not implemented yet.",
	})
)

// prometheusData carries one gauge sample: metric name, constant label set
// and value. name/value are pointers and must be non-nil when consumed.
type prometheusData struct {
	name   *string
	labels map[string]string
	value  *float64
}
// createPrometheusMetrics builds a Gauge from p and sets its initial value.
// Panics if p.name or p.value is nil.
// NOTE(review): it returns a pointer to the prometheus.Gauge interface value,
// so callers (see fillRegistry) must dereference it before registering.
func createPrometheusMetrics(p prometheusData) *prometheus.Gauge {
	gauge := prometheus.NewGauge(prometheus.GaugeOpts{
		Name:        *p.name,
		Help:        "Help is not implemented yet.",
		ConstLabels: p.labels,
	})
	gauge.Set(*p.value)
	return &gauge
}
// removePromDouble drops duplicate samples, keeping the first entry seen for
// each combination of metric name and "name" label value.
func removePromDouble(data []*prometheusData) []*prometheusData {
	seen := make(map[string]bool)
	deduped := []*prometheusData{}
	for _, d := range data {
		key := *d.name + d.labels["name"]
		if seen[key] {
			continue
		}
		seen[key] = true
		deduped = append(deduped, d)
	}
	return deduped
}
// fillRegistry creates a fresh Prometheus registry and registers one gauge
// per data point. Duplicate registrations are tolerated (only logged); any
// other registration failure panics.
func fillRegistry(promData []*prometheusData) *prometheus.Registry {
	registry := prometheus.NewRegistry()
	for _, point := range promData {
		gauge := createPrometheusMetrics(*point)
		// createPrometheusMetrics returns *prometheus.Gauge, so dereference
		// to hand the Collector itself to Register.
		if err := registry.Register(*gauge); err != nil {
			if _, ok := err.(prometheus.AlreadyRegisteredError); ok {
				fmt.Println("Already registered")
			} else {
				panic(err)
			}
		}
	}
	return registry
}
func promString(text string) string {
text = splitString(text)
return replaceWithUnderscores(text)
}
func promStringTag(text string) string {
return replaceWithUnderscores(text)
}
// replaceWithUnderscores maps every separator character (space, comma, tab,
// slash, backslash, dot, dash, colon) to an underscore.
// Fix: the replacer listed the "," pair twice; strings.NewReplacer uses the
// first occurrence of a pattern, so the duplicate was dead weight.
func replaceWithUnderscores(text string) string {
	replacer := strings.NewReplacer(
		" ", "_",
		",", "_",
		"\t", "_",
		"/", "_",
		"\\", "_",
		".", "_",
		"-", "_",
		":", "_",
	)
	return replacer.Replace(text)
}
// splitString inserts a "." between a lower-case letter or digit and the
// upper-case letter that follows it (e.g. "requestCount" -> "request.Count").
func splitString(text string) string {
	return regexp.MustCompile(`([a-z0-9])([A-Z])`).ReplaceAllString(text, `$1.$2`)
}
|
(function () {
    // Bouncing-balls demo: three balls of different size/speed animate on the
    // #canvas element via requestAnimationFrame.
    var canvas = document.getElementById('canvas'),
        ctx = canvas.getContext('2d'),
        smallBall,
        mediumBall,
        largeBall,
        Ball,
        balls;
    // Ball is a shared prototype; concrete balls are made with
    // Object.create(Ball).init(...).
    Ball = (function () {
        var ball = {
            // Configure radius, position, per-frame velocity and fill color.
            init: function (radius, x, y, xChange, yChange, color) {
                this.radius = radius;
                this.x = x;
                this.y = y;
                this.xChange = xChange;
                this.yChange = yChange;
                this.color = color;
                return this;
            },
            // Reverse a velocity component when the next step would cross a
            // canvas edge (checked against the ball's outer radius).
            checkDirection: function () {
                if (this.x + this.xChange + this.radius > canvas.width || this.x - this.radius + this.xChange < 0) {
                    this.xChange *= -1;
                }
                if (this.y + this.radius + this.yChange > canvas.height || this.y - this.radius + this.yChange < 0) {
                    this.yChange *= -1;
                }
                return this;
            },
            // Advance one animation step.
            update: function () {
                this.x += this.xChange;
                this.y += this.yChange;
                return this;
            },
            // Draw the ball at its current position.
            print: function () {
                ctx.fillStyle = this.color;
                ctx.beginPath();
                ctx.arc(this.x, this.y, this.radius, 0, 2 * Math.PI);
                ctx.closePath();
                ctx.fill();
                return this;
            }
        };
        return ball;
    }());
    smallBall = Object.create(Ball).init(15, 15, canvas.height / 2, 9, 9, '#FFFFFF');
    mediumBall = Object.create(Ball).init(22, 22, canvas.height / 2, 7, 7, '#00FF00');
    largeBall = Object.create(Ball).init(30, 30, canvas.height / 2, 5, 5, '#FF0000');
    // Largest first so smaller balls render on top.
    balls = [largeBall, mediumBall, smallBall];
    // Clear, move and redraw every ball, then schedule the next frame.
    function animationFrame() {
        ctx.clearRect(0, 0, canvas.width, canvas.height);
        for (var ind = 0; ind < balls.length; ind++) {
            balls[ind].checkDirection().update().print();
        }
        requestAnimationFrame(animationFrame);
    }
    animationFrame();
}());
"""
Variables related to frontend
Should be replaced with those coming from your own application
"""
frontend_address = "http://localhost:3000/" |
<gh_stars>0
package com.leetcode;
public class Solution_1232 {

    /**
     * Returns true when every point in {@code coordinates} lies on one
     * straight line (LeetCode 1232).
     *
     * Uses a single cross-product test against a base direction vector
     * instead of the original three-way branch on the first two points.
     * This also fixes the degenerate case where the first two points
     * coincide (the old code then demanded a vertical line) and uses long
     * arithmetic so the products cannot overflow.
     */
    public boolean checkStraightLine(int[][] coordinates) {
        // Find the first point distinct from coordinates[0] to define the
        // line's direction; if all points are identical they are trivially
        // collinear.
        int baseIdx = -1;
        for (int i = 1; i < coordinates.length; i++) {
            if (coordinates[i][0] != coordinates[0][0] || coordinates[i][1] != coordinates[0][1]) {
                baseIdx = i;
                break;
            }
        }
        if (baseIdx == -1) {
            return true;
        }
        long dx = coordinates[baseIdx][0] - coordinates[0][0];
        long dy = coordinates[baseIdx][1] - coordinates[0][1];
        for (int i = 1; i < coordinates.length; i++) {
            long ex = coordinates[i][0] - coordinates[0][0];
            long ey = coordinates[i][1] - coordinates[0][1];
            // Cross product of (dx,dy) and (ex,ey) must be zero for the
            // point to sit on the line through coordinates[0].
            if (dx * ey - dy * ex != 0) {
                return false;
            }
        }
        return true;
    }
}
|
const HDWalletProvider = require('truffle-hdwallet-provider');
const fs = require('fs');
module.exports = {
networks: {
development: {
host: "localhost",
port: 8545,
network_id: "*",
gas: 5000000
},
abs_msconsortium_mseventadmin_mseventadmin: {
network_id: "*",
gasPrice: 0,
provider: new HDWalletProvider(fs.readFileSync('d:\\Projects\\VSCode\\ProductTraceDemo\\Demo\\memonic2.env', 'utf-8'), "https://mseventadmin.blockchain.azure.com:3200/deM4U1HYptj_x2RgIrEHLBz0")
}
},
compilers: {
solc: {
version: "0.8.4",
settings: {
optimizer: {
enabled: true,
runs: 200
}
}
}
}
};
|
#!/bin/bash
# Pre-process inputs, then run the clustering step across 9 MPI ranks.
# All command-line arguments are forwarded to both stages, resolving the
# previous "pass arguments" TODOs.
set -e  # do not launch the MPI job if pre-build fails
python3 pre-build.py "$@"
mpirun -n 9 python3 cluster.py "$@"
|
package main
import (
"bytes"
"context"
"fmt"
"log"
"net"
"os"
"os/exec"
"regexp"
"sort"
"strings"
"sync"
"syscall"
"time"
pb "github.com/loheagn/wukuard/grpc"
"google.golang.org/grpc"
)
// InterfaceConf holds the values rendered into the [Interface] section of a
// WireGuard configuration file.
type InterfaceConf struct {
	PrivateKey string
	Address    string
	ListenPort int32
	PostUp     string
	PreDown    string
}

// PeerConf holds the values rendered into one [Peer] section.
type PeerConf struct {
	PublicKey           string
	AllowedIPs          string
	Endpoint            string
	PersistentKeepalive int32
}

// WgConf is a complete WireGuard configuration: one interface plus its peers.
type WgConf struct {
	interfaceConf *InterfaceConf
	peerConfList  []*PeerConf
}

var (
	serverIP       string // IP of the wukuard gRPC server
	serverGrpcPort string // port of the wukuard gRPC server
	interfaceName  string // the name of network interface
)

// wgMutex serializes access to the on-disk config file and service restarts.
var wgMutex sync.Mutex
// generateString renders this interface as the [Interface] section of a
// wg-quick configuration file (leading newline included so sections can be
// concatenated directly).
func (interfaceConf InterfaceConf) generateString() string {
	return fmt.Sprintf(`
[Interface]
PrivateKey = %s
Address = %s
ListenPort = %d
PostUp = %s
PreDown = %s
`, interfaceConf.PrivateKey, interfaceConf.Address, interfaceConf.ListenPort, interfaceConf.PostUp, interfaceConf.PreDown)
}
// generateString renders this peer as a [Peer] section of a wg-quick
// configuration file.
// Fix: the key was emitted as "Publickey"; the WireGuard configuration
// format documents the key as "PublicKey", matching the struct field.
func (peerConf PeerConf) generateString() string {
	return fmt.Sprintf(`
[Peer]
PublicKey = %s
AllowedIPs = %s
Endpoint = %s
PersistentKeepalive = %d
`, peerConf.PublicKey, peerConf.AllowedIPs, peerConf.Endpoint, peerConf.PersistentKeepalive)
}
// generateString renders the complete configuration file: the [Interface]
// section followed by every [Peer] section in list order.
func (wgConf WgConf) generateString() string {
	var sb strings.Builder
	sb.WriteString(wgConf.interfaceConf.generateString())
	for _, peer := range wgConf.peerConfList {
		sb.WriteString(peer.generateString())
	}
	return sb.String()
}
const (
	// basePath is the standard WireGuard configuration directory.
	basePath     = "/etc/wireguard/"
	confFilename = "/etc/wireguard/wukuard.conf"
	// serviceName is the systemd unit this client manages.
	// NOTE(review): the literal "<EMAIL>" looks like placeholder residue
	// from a redaction/template step - confirm the real unit name
	// (likely something like "wg-quick@wukuard.service").
	serviceName = "<EMAIL>"
)
// getCurrentConf returns the current contents of the WireGuard config file.
func getCurrentConf() (string, error) {
	return readFile(confFilename)
}
// checkServiceIsRunning reports whether systemd considers the managed
// service active, based on `systemctl status` output.
func checkServiceIsRunning() bool {
	out, err := exec.Command("systemctl", "status", serviceName).Output()
	if err != nil {
		return false
	}
	ok, err := regexp.MatchString("Active: active", string(out))
	return err == nil && ok
}
// prepareConfFile ensures /etc/wireguard exists and that the config file is
// present, creating an empty one if needed.
func prepareConfFile() error {
	// Mask group/other write permission for files created below.
	syscall.Umask(0022)
	if _, err := os.Stat(basePath); os.IsNotExist(err) {
		if err := os.MkdirAll(basePath, os.ModePerm); err != nil {
			return err
		}
	}
	_, err := os.Stat(confFilename)
	if err == nil {
		// Config file already exists - nothing to do.
		return nil
	}
	if !os.IsNotExist(err) {
		// Stat failed for some other reason (e.g. permissions).
		return err
	}
	confFile, err := os.Create(confFilename)
	if err != nil {
		return err
	}
	_ = confFile.Close()
	return nil
}
// getLocalIP returns the local source address the kernel would use to reach
// the server. Dialing UDP sends no packets; it only resolves a route.
func getLocalIP() string {
	conn, err := net.Dial("udp", serverIP+":80")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	return conn.LocalAddr().(*net.UDPAddr).IP.String()
}
// getMacAddress returns the hardware address of the configured network
// interface, or "" when no interface is configured or the lookup fails.
func getMacAddress() string {
	if interfaceName == "" {
		return ""
	}
	ifaces, err := net.Interfaces()
	if err != nil {
		return ""
	}
	for _, iface := range ifaces {
		if iface.Name == interfaceName {
			return iface.HardwareAddr.String()
		}
	}
	return ""
}
// getHostname returns the machine hostname, or "" on error.
func getHostname() string {
	if name, err := os.Hostname(); err == nil {
		return name
	}
	return ""
}

// parseServerAddr splits "host:port" into the package-level serverIP and
// serverGrpcPort variables; it panics (via log.Panic) on a malformed address.
func parseServerAddr(addr string) {
	host, port, err := net.SplitHostPort(addr)
	if err != nil {
		log.Panic(err)
	}
	serverIP, serverGrpcPort = host, port
}
// mapGrpcResponse converts the server's NetWorkResponse into a local WgConf.
// It returns nil when the response carries no interface section; syncWgConf
// treats nil as "tear down the local service".
func mapGrpcResponse(network *pb.NetWorkResponse) *WgConf {
	wgConf := &WgConf{}
	interfaceResponse := network.GetInterfaceResponse()
	if interfaceResponse == nil {
		return nil
	}
	wgConf.interfaceConf = &InterfaceConf{
		PrivateKey: interfaceResponse.PrivateKey,
		Address:    interfaceResponse.Address,
		ListenPort: interfaceResponse.ListenPort,
		PostUp:     interfaceResponse.PostUp,
		PreDown:    interfaceResponse.PreDown,
	}
	var peerConfList []*PeerConf
	localIP := getLocalIP()
	for _, peer := range network.PeerList {
		if strings.HasPrefix(peer.Endpoint, localIP) {
			// this peer describes itself, so skip it
			continue
		}
		peerConfList = append(peerConfList, &PeerConf{
			PublicKey:           peer.PublicKey,
			AllowedIPs:          peer.AllowedIPs,
			Endpoint:            peer.Endpoint,
			PersistentKeepalive: peer.PersistentKeepalive,
		})
	}
	// sort peerConfList by endpoint so the rendered file is deterministic and
	// comparable against the on-disk config in syncWgConf
	sort.Slice(peerConfList, func(i, j int) bool {
		return peerConfList[i].Endpoint < peerConfList[j].Endpoint
	})
	wgConf.peerConfList = peerConfList
	return wgConf
}
// syncWgConf reconciles the on-disk WireGuard config and the systemd service
// with inputConf. A nil conf (or one without an interface section) means
// "tear down": blank the file and stop the service. The service is only
// restarted when the rendered config differs from the file on disk or the
// service is not running.
func syncWgConf(inputConf *WgConf) {
	var err error
	wgMutex.Lock()
	defer wgMutex.Unlock()
	if inputConf == nil || inputConf.interfaceConf == nil {
		err = writeFile(confFilename, "")
		checkErr(err)
		if checkServiceIsRunning() {
			log.Println("INFO: stop wukuard service")
			_ = exec.Command("/bin/bash", "-c", "ip link delete wukuard").Run()
			err = exec.Command("systemctl", "stop", serviceName).Run()
			checkErr(err)
		}
		return
	}
	err = prepareConfFile()
	checkErr(err)
	// NOTE(review): this error is never checked; a read failure yields ""
	// and silently forces a rewrite/restart below - confirm that is intended.
	wholeConfStr, err := getCurrentConf()
	inputConfStr := inputConf.generateString()
	if wholeConfStr != inputConfStr || !checkServiceIsRunning() {
		err = writeFile(confFilename, inputConfStr)
		checkErr(err)
		log.Println("INFO: restart wukuard service")
		err = exec.Command("systemctl", "restart", serviceName).Run()
		if err != nil {
			// stupid but effective: drop the link and try the restart once more
			_ = exec.Command("/bin/bash", "-c", "ip link delete wukuard").Run()
			err = exec.Command("systemctl", "restart", serviceName).Run()
			checkErr(err)
		}
		return
	}
}
// buildPeerRequest assembles the heartbeat payload identifying this node:
// its WireGuard endpoint (fixed port 9619), MAC address and hostname.
func buildPeerRequest() *pb.PeerRequest {
	endpoint := fmt.Sprintf("%s:9619", getLocalIP())
	return &pb.PeerRequest{
		Endpoint:   endpoint,
		MacAddress: getMacAddress(),
		Hostname:   getHostname(),
	}
}
// clientMain connects to the wukuard gRPC server, then every 10 seconds
// sends a heartbeat and syncs the returned network config into WireGuard.
// The deferred cleanup removes the link and stops the service on shutdown.
func clientMain(serverAddr, inputInterfaceName string) {
	parseServerAddr(serverAddr)
	log.Printf("INFO: try to connect to the server(%s:%s)......\n", serverIP, serverGrpcPort)
	interfaceName = inputInterfaceName
	conn, err := grpc.Dial(fmt.Sprintf("%s:%s", serverIP, serverGrpcPort), grpc.WithInsecure(), grpc.WithBlock())
	if err != nil {
		log.Fatalf("ERROR: did not connect: %v", err)
	}
	log.Printf("INFO: connected to the server(%s:%s)......\n", serverIP, serverGrpcPort)
	defer conn.Close()
	c := pb.NewSyncNetClient(conn)
	t := time.NewTicker(10 * time.Second)
	defer t.Stop()
	defer func() {
		_ = exec.Command("/bin/bash", "-c", "ip link delete wukuard").Run()
		err = exec.Command("systemctl", "stop", serviceName).Run()
		checkErr(err)
	}()
	for {
		<-t.C
		resp, err := c.HeartBeat(context.Background(), buildPeerRequest())
		if err != nil {
			log.Printf("ERROR: get error from grpc server: %s\n", err.Error())
			// Fix: skip this tick instead of syncing a nil response, which
			// would tear down the local WireGuard service on any transient
			// RPC failure.
			continue
		}
		syncWgConf(mapGrpcResponse(resp))
	}
}
|
package pojo;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
@XmlRootElement(name="Bidder")
public class Bidder implements Serializable {
@XmlAttribute
private Long Rating;
@XmlAttribute
private String UserID;
private String Location;
@Override
public String toString() {
return "Bidder [Rating=" + Rating + ", UserID=" + UserID + ", Location=" + Location + ", Country=" + Country
+ "]";
}
private String Country;
@XmlAttribute(name="Rating")
public Long getRating() {
return Rating;
}
public void setRating(Long rating) {
Rating = rating;
}
@XmlAttribute(name="UserID")
public String getUserID() {
return UserID;
}
public void setUserID(String userID) {
UserID = userID;
}
@XmlElement(name="Location")
public String getLocation() {
return Location;
}
public void setLocation(String location) {
Location = location;
}
@XmlElement(name="Country")
public String getCountry() {
return Country;
}
public void setCountry(String country) {
Country = country;
}
}
|
#!/bin/sh
# GPII Linux Clean Script
#
# Copyright 2012 OCAD University
#
# Licensed under the New BSD license. You may not use this file except in
# compliance with this License.
#
# You may obtain a copy of the License at
# https://github.com/gpii/universal/LICENSE.txt
# Clean the native add-ons. Each module is handled in a subshell with a
# guarded cd: the original chained bare `cd`s, so one missing directory made
# the following `cd ../../..` climb out of the project tree before the
# `sudo rm -rf` lines below ran.
for module in gsettingsBridge/nodegsettings alsa/nodealsa xrandr/nodexrandr; do
    (
        cd "node_modules/$module" || exit 0
        node-gyp clean
        rm -rf build
    )
done
sudo rm -rf /usr/local/gpii
sudo rm -rf /var/lib/gpii
sudo rm -f /etc/udev/rules.d/80-gpii.rules
# TODO: It seems extremely dangerous to go around deleting the node_modules directory we installed,
# in case the developer has unpushed modifications in there.
# rm -rf ../node_modules
|
<reponame>thpun/stock-dapp
// external imports
import {ApolloClient, ApolloLink, InMemoryCache, Observable} from '@apollo/client';
import {graphql} from 'graphql'
import {print} from 'graphql/language/printer';
// local imports
import schema from './schema'
// A terminating Apollo link that executes every operation against the local
// executable schema instead of a remote HTTP endpoint.
const blockchainLink = new ApolloLink(
  operation => new Observable(observer => {
    graphql(schema, print(operation.query), null, null, operation.variables)
      .then(result => {
        observer.next(result);
        observer.complete();
      })
      // Fix: forward execution failures to the observer; the original left
      // rejections unhandled, so subscribers hung forever on errors.
      .catch(error => observer.error(error));
  })
);

export default new ApolloClient({
  link: blockchainLink,
  cache: new InMemoryCache(),
});
|
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.volatility.surface;
/**
*
*/
/**
 * A parameterisation of a volatility-surface strike. Implementations expose
 * their numeric value and a copy-style mutator.
 */
public interface StrikeType {

  /** @return the numeric value of this strike parameterisation */
  double value();

  /**
   * @param value the new numeric value
   * @return a {@code StrikeType} of the same kind holding {@code value}
   *         (the signature suggests a copy rather than in-place mutation)
   */
  StrikeType with(final double value);
}
|
/* THIS FILE AUTO-GENERATED FROM astra_create_stream_proxy.hpp.lpp. DO NOT EDIT. */
// This file is part of the Orbbec Astra SDK [https://orbbec3d.com]
// Copyright (c) 2015 Orbbec 3D
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Be excellent to each other.
#ifndef ASTRA_CREATE_STREAM_PROXY_H
#define ASTRA_CREATE_STREAM_PROXY_H
#include "astra_context.hpp"
#include "astra_stream_service_delegate.hpp"
#include <astra_core/capi/astra_streamservice_proxy.h>
namespace astra {
    // Builds the C stream-service proxy vtable, wiring every entry point to
    // the corresponding static member of stream_service_delegate and storing
    // the context pointer in streamService.
    // NOTE(review): the proxy is allocated with `new`; callers own it and
    // must delete it. This file is auto-generated - change the .lpp template
    // rather than this file.
    static inline astra_streamservice_proxy_t* create_stream_proxy(context* context)
    {
        astra_streamservice_proxy_t* proxy = new astra_streamservice_proxy_t;
        proxy->streamset_open = &stream_service_delegate::streamset_open;
        proxy->streamset_close = &stream_service_delegate::streamset_close;
        proxy->reader_create = &stream_service_delegate::reader_create;
        proxy->reader_destroy = &stream_service_delegate::reader_destroy;
        proxy->reader_get_stream = &stream_service_delegate::reader_get_stream;
        proxy->stream_get_description = &stream_service_delegate::stream_get_description;
        proxy->stream_start = &stream_service_delegate::stream_start;
        proxy->stream_stop = &stream_service_delegate::stream_stop;
        proxy->reader_open_frame = &stream_service_delegate::reader_open_frame;
        proxy->reader_close_frame = &stream_service_delegate::reader_close_frame;
        proxy->reader_register_frame_ready_callback = &stream_service_delegate::reader_register_frame_ready_callback;
        proxy->reader_unregister_frame_ready_callback = &stream_service_delegate::reader_unregister_frame_ready_callback;
        proxy->reader_get_frame = &stream_service_delegate::reader_get_frame;
        proxy->stream_set_parameter = &stream_service_delegate::stream_set_parameter;
        proxy->stream_get_parameter = &stream_service_delegate::stream_get_parameter;
        proxy->stream_get_result = &stream_service_delegate::stream_get_result;
        proxy->stream_invoke = &stream_service_delegate::stream_invoke;
        proxy->temp_update = &stream_service_delegate::temp_update;
        proxy->streamService = context;
        return proxy;
    }
}
#endif /* ASTRA_CREATE_STREAM_PROXY_H */
|
#!/bin/bash -x
# Creates and pushes manifests for flavors
#
# ./docker_manifest.sh FLAVOR TAG1 ....
#
# Docker password is in a file /tmp/docker.pass
# because this script uses -x for build transparency
# but we don't want to leak passwords
set -e
FLAVOR="$1"
shift
export DOCKER_CLI_EXPERIMENTAL=enabled

# Build the "--amend $DOCKER_ORG/openresty:<prefix><tag>" list for every tag
# passed on the command line, then create and push a manifest named
# "$DOCKER_ORG/openresty:<prefix>$FLAVOR".
# $1 = tag prefix ("" for bare tags, "<base>-" for version-prefixed tags);
# the remaining arguments are the tags to amend.
create_and_push_manifest() {
    local prefix="$1"
    shift
    local amends=""
    local tag
    for tag in "$@"; do
        amends="$amends --amend $DOCKER_ORG/openresty:$prefix$tag"
    done
    docker manifest create $DOCKER_ORG/openresty:$prefix$FLAVOR $amends &&
        docker manifest push $DOCKER_ORG/openresty:$prefix$FLAVOR
}

# Password is piped from a file so it never appears in `set -x` output.
cat /tmp/docker.pass | docker login -u="$DOCKER_USERNAME" --password-stdin

if [[ "$TRAVIS_BRANCH" == "master" ]] ; then
    create_and_push_manifest "" "$@"
fi

if [[ "$TRAVIS_TAG" ]] ; then
    # Strip a trailing "-N" build suffix to obtain the base version tag.
    TRAVIS_TAG_BASE=$(echo -n "$TRAVIS_TAG" | sed 's/-[0-9]$//g')
    if [[ ( "$TRAVIS_TAG_BASE" ) && ( "$TRAVIS_TAG_BASE" != "$TRAVIS_TAG" ) ]] ; then
        create_and_push_manifest "$TRAVIS_TAG_BASE-" "$@"
    fi
    create_and_push_manifest "$TRAVIS_TAG-" "$@"
fi
|
package main
import (
"errors"
"fmt"
"io"
)
// Shifter represents a buffered log pipe.
//
// Shifter will coordinate the configuration, startup and shutdown of an
// Input and Output which pass log message byte arrays over a channel.
//
// Input is backed by inputReader, and Output is backed by outputWriter.
type Shifter struct {
	queueSize       int // capacity of the message channel between Input and Output
	inputBufferSize int // used to configure inputReader
	inputReader     io.Reader
	outputWriter    Writer
	statsChannel    chan Stat // passed to Input and Output
	input           *Input    // constructed by Start
	output          *Output   // constructed by Start
}

// Writer is an interface used by Shifter.
// Writer extends io.Writer to add Init and Close functions.
type Writer interface {
	io.Writer
	// Init sets up the writer and should be called prior to any calls to Write.
	Init() error
	// Close shuts down the writer.
	// After calling Close on a Writer, Init must be called again prior to any
	// further calls to Write.
	Close() error
}

// Stat represents a key-value pair holding a Shifter statistic name and value.
type Stat struct {
	name  string
	value float64
}
// Start starts the log pipe and blocks until both the Input and Output are
// finished according their respective WaitGroups.
//
// An error will be returned if input or output initialization fails.
// Start starts the log pipe and blocks until both the Input and Output are
// finished according their respective WaitGroups.
//
// An error is returned if output initialization fails or the output writer
// fails to close cleanly (the Close error was previously discarded).
func (shifter *Shifter) Start() error {
	queue := make(chan []byte, shifter.queueSize)
	shifter.input = &Input{
		bufferSize:   shifter.inputBufferSize,
		reader:       shifter.inputReader,
		queue:        queue,
		statsChannel: shifter.statsChannel,
	}
	shifter.output = &Output{
		writer:       shifter.outputWriter,
		queue:        queue,
		statsChannel: shifter.statsChannel,
	}
	// start writing before reading: there's still a race here, not worth bothering with yet
	writeGroup, err := shifter.output.Write()
	if err != nil {
		return errors.New(fmt.Sprintf("Failed to initialize output writer: %v", err))
	}
	readGroup := shifter.input.Read()
	// wait for the reader to complete
	readGroup.Wait()
	// shut down the writer by closing the queue
	close(queue)
	writeGroup.Wait()
	// Fix: surface Close failures instead of silently dropping them.
	if err := shifter.outputWriter.Close(); err != nil {
		return fmt.Errorf("failed to close output writer: %v", err)
	}
	return nil
}
|
#!/bin/bash
source wait-for-kong.bash
echo "Starting Flask server"
# Flask runs in the background so postgres below can be the container's
# foreground process.
flask run --host=0.0.0.0 --port=5000 &
echo "Starting postgres server"
# NOTE(review): Flask is launched before postgres; if the app needs the DB at
# startup it must retry its connection - confirm that is the case.
postgres
|
class Point:
    """A 2-D point carrying an error status string."""

    def __init__(self, error, coord):
        # error is the literal string "None" when the point is error-free.
        self.error = error
        # coord is an (x, y) pair.
        self.coord = coord

    def is_valid_point(self):
        """Return True when there is no error and both coordinates lie
        strictly inside the open unit square (0, 1) x (0, 1)."""
        return (self.error == "None"
                and 0 < self.coord[0] < 1
                and 0 < self.coord[1] < 1)
# Example usage: a point inside the unit square with no error is valid.
p1 = Point("None", (0.5, 0.7))
print(p1.is_valid_point())  # Output: True
# Flagged with an error (and x = 1.2 is outside (0, 1)), so invalid.
p2 = Point("Invalid point", (1.2, 0.5))
print(p2.is_valid_point())  # Output: False
<gh_stars>1-10
package com.github.alex1304.ultimategdbot.core.database;
import java.util.List;
import java.util.Optional;
import org.jdbi.v3.sqlobject.customizer.BindPojo;
import org.jdbi.v3.sqlobject.statement.SqlQuery;
import org.jdbi.v3.sqlobject.statement.SqlUpdate;
import com.github.alex1304.ultimategdbot.api.database.guildconfig.GuildConfigDao;
import discord4j.common.util.Snowflake;
/**
 * JDBI SQL-object DAO for the per-guild core configuration table. Implements
 * the generic GuildConfigDao CRUD contract plus bulk queries used at startup.
 */
public interface CoreConfigDao extends GuildConfigDao<CoreConfigData> {

	String TABLE = "core_config";

	/** Inserts a fresh row for the guild, leaving every column at its default. */
	@Override
	@SqlUpdate("INSERT INTO " + TABLE + "(guild_id) VALUES (?)")
	void create(long guildId);

	/** Resets prefix, changelog channel and locale back to the column defaults. */
	@Override
	@SqlUpdate("UPDATE " + TABLE + " SET "
			+ "prefix = DEFAULT(prefix), "
			+ "channel_changelog_id = DEFAULT(channel_changelog_id), "
			+ "locale = DEFAULT(locale) "
			+ "WHERE guild_id = ?")
	void reset(long guildId);

	/** Writes all configurable fields from the bound POJO for its guild. */
	@Override
	@SqlUpdate("UPDATE " + TABLE + " SET "
			+ "prefix = :prefix, "
			+ "channel_changelog_id = :channelChangelogId, "
			+ "locale = :locale "
			+ "WHERE guild_id = :guildId")
	void update(@BindPojo CoreConfigData data);

	/** Fetches the full configuration row for one guild, if present. */
	@Override
	@SqlQuery("SELECT * FROM " + TABLE + " WHERE guild_id = ?")
	Optional<CoreConfigData> get(long guildId);

	/** Lists (guild_id, prefix) for every guild whose prefix differs from the default. */
	@SqlQuery("SELECT guild_id, prefix FROM " + TABLE + " WHERE prefix IS NOT NULL AND prefix != '' AND prefix != ?")
	List<CoreConfigData> getAllNonDefaultPrefixes(String defaultPrefix);

	/** Lists (guild_id, locale) for every guild whose locale differs from the default. */
	@SqlQuery("SELECT guild_id, locale FROM " + TABLE + " WHERE locale IS NOT NULL AND locale != '' AND locale != ?")
	List<CoreConfigData> getAllNonDefaultLocales(String defaultLocale);

	/** Lists every configured changelog channel id. */
	@SqlQuery("SELECT channel_changelog_id FROM " + TABLE + " WHERE channel_changelog_id IS NOT NULL")
	List<Snowflake> getAllChangelogChannels();
}
|
import re
from bs4 import BeautifulSoup
from urllib.request import urlopen
def email_scraper(url):
    """Scrape text nodes of the page at ``url`` that contain an e-mail-shaped
    substring.

    Returns a list of matching text nodes, excluding any found inside
    non-content tags (scripts, styles, head metadata, ...).
    """
    # Fix: close the HTTP response when parsing is done; the original leaked
    # the connection. BeautifulSoup consumes the stream inside the block.
    with urlopen(url) as page:
        soup = BeautifulSoup(page, 'html.parser')
    # Find all text nodes that look like they contain an e-mail address.
    emails = soup.find_all(text = re.compile(r'[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}'))
    # Drop matches that live in markup rather than visible page content.
    emails = [e for e in emails if not e.parent.name in ['style', 'script', 'head', 'title', 'meta', '[document]']]
    return emails
const address = require('@openworklabs/filecoin-address')
const sha256 = require('js-sha256')
const BN = require('bn.js')
// Get n next bits
// Consume and return the next n bits of obj.num (a BN), tracked via obj.left.
function nextBits(obj, n) {
  // if (obj.left < n) throw new Error("out of bits")
  const res = obj.num.shrn(obj.left - n).and(new BN(1).shln(n).sub(new BN(1)))
  obj.left -= n
  return res.toNumber()
}

// Count the set bits of `bitfield` below position `bp`, i.e. the index of the
// entry for bit position bp within the node's compacted pointer array.
function indexForBitPos(bp, bitfield) {
  let acc = bitfield
  let idx = 0
  while (bp > 0) {
    if (acc.and(new BN(1)).eq(new BN(1))) {
      idx++
    }
    bp--
    // Fix: BN#shrn takes a plain number of bits. Passing `new BN(1)` trips
    // bn.js's `typeof bits === 'number'` assertion, so every call with
    // bp > 0 threw.
    acc = acc.shrn(1)
  }
  return idx
}

// Return bit n of BN b as 0 or 1.
function getBit(b, n) {
  return b.shrn(n).and(new BN(0x1)).toNumber()
}
// Look up `key` in the HAMT, descending one node per recursive call.
//   n    - { bitWidth, data } wrapper around a parsed node
//   load - async resolver from CID to raw node data
//   hv   - { num: BN, left } hash-bit consumer (see nextBits)
// Throws when the key is absent at this level or in the leaf bucket.
async function getValue(n, load, hv, key) {
  const idx = nextBits(hv, n.bitWidth)
  if (getBit(n.data.bitfield, idx) === 0) {
    throw new Error('not found in bitfield')
  }
  const cindex = indexForBitPos(idx, n.data.bitfield)
  const c = n.data.pointers[cindex]
  if (c[0]) {
    // Link entry: recurse into the child node.
    const child = await load(c[0]['/'])
    return getValue({ bitWidth: n.bitWidth, data: parseNode(child) }, load, hv, key)
  }
  if (c[1]) {
    // Bucket entry: linear scan of base64-encoded [key, value] pairs.
    for (const [k, v] of c[1]) {
      if (k === key.toString('base64')) return Buffer.from(v, 'base64')
    }
  }
  throw new Error('key not found')
}
// Recursively convert base64 strings (and arrays of them) into Buffers;
// other values pass through unchanged.
function makeBuffers(obj) {
  if (typeof obj === 'string') {
    return Buffer.from(obj, 'base64')
  }
  if (obj instanceof Array) {
    return obj.map(makeBuffers)
  }
  return obj
}

// Sequentially walk the HAMT, calling cb(keyBuffer, value) for every leaf entry.
async function forEachIter(n, load, cb) {
  for (const c of n.data.pointers) {
    if (c[0]) {
      // Internal link: recurse into the child node.
      const child = await load(c[0]['/'])
      await forEachIter({ bitWidth: n.bitWidth, data: parseNode(child) }, load, cb)
    }
    if (c[1]) {
      for (const [k, v] of c[1]) {
        await cb(Buffer.from(k, 'base64'), makeBuffers(v))
      }
    }
  }
}

// Like forEachIter, but visits sibling subtrees concurrently.
// NOTE(review): cb may therefore fire out of key order - confirm callers
// do not rely on ordering.
async function forEachIterParallel(n, load, cb) {
  await Promise.all(n.data.pointers.map(async c => {
    if (c[0]) {
      const child = await load(c[0]['/'])
      await forEachIterParallel({ bitWidth: n.bitWidth, data: parseNode(child) }, load, cb)
    }
    if (c[1]) {
      for (const [k, v] of c[1]) {
        await cb(Buffer.from(k, 'base64'), makeBuffers(v))
      }
    }
  }))
}

// Interpret byte array p as a big-endian unsigned integer (BN).
function bytesToBig(p) {
  let acc = new BN(0)
  for (let i = 0; i < p.length; i++) {
    acc = acc.mul(new BN(256))
    acc = acc.add(new BN(p[i]))
  }
  return acc
}

// Split a raw serialized HAMT node [bitfieldBytes, pointers] into named fields.
function parseNode(data) {
  return {
    pointers: data[1],
    bitfield: bytesToBig(Buffer.from(data[0], 'base64')),
  }
}

// Debug helper: print a key as a t-prefixed filecoin address with its value.
function print(k, v) {
  console.log(address.encode('t', new address.Address(k)), bytesToBig(v))
}
// Walk the HAMT rooted at node `n`, appending [address, value] pairs to
// dataArray. `load` resolves a CID to raw node data.
async function addToArray(n, load, dataArray) {
  for (const c of n.data.pointers) {
    if (c[0]) {
      // Internal link: recurse into the child node.
      const child = await load(c[0]['/'])
      await addToArray({ bitWidth: n.bitWidth, data: parseNode(child) }, load, dataArray)
    }
    if (c[1]) {
      for (const [k, v] of c[1]) {
        // Fix: Array#push is synchronous and returns a number; awaiting it
        // was misleading noise, so the await is dropped.
        dataArray.push([address.encode('t', new address.Address(Buffer.from(k, 'base64'))), bytesToBig(makeBuffers(v))])
      }
    }
  }
}
// Decode an unsigned LEB128 varint from `bytes` starting at `offset`,
// returning the value as a BN.
function readVarIntE(bytes, offset) {
  let res = new BN(0)
  let acc = new BN(1)
  for (let i = offset; i < bytes.length; i++) {
    res = res.add(new BN(bytes[i] & 0x7f).mul(acc))
    // Fix: a varint byte is terminal when its continuation bit (0x80) is
    // clear. The original tested `bytes[i] < 0x7f`, which wrongly treated a
    // terminal 0x7f byte as a continuation.
    if ((bytes[i] & 0x80) === 0) {
      return res
    }
    acc = acc.mul(new BN(128))
  }
  return res
}

// Convenience wrapper: offset defaults to 0.
function readVarInt(bytes, offset) {
  return readVarIntE(bytes, offset || 0)
}
async function buildArrayData(data, load) {
var dataArray = []
await addToArray({ bitWidth: 5, data: parseNode(data) }, async a => {
return load(a)
},
dataArray)
return dataArray
}
async function find(data, load, key) {
const hash = bytesToBig(Buffer.from(sha256(key), 'hex'))
return getValue({ bitWidth: 5, data: parseNode(data) }, load, { num: hash, left: 256 }, key)
}
async function forEach(data, load, cb) {
await forEachIter({ bitWidth: 5, data: parseNode(data) }, async a => {
return load(a)
},
cb)
}
async function forEachParallel(data, load, cb) {
await forEachIterParallel({ bitWidth: 5, data: parseNode(data) }, async a => {
return load(a)
},
cb)
}
// Debug helper: walks the whole HAMT and logs each entry via `print`.
async function printData(data, load) {
  const root = { bitWidth: 5, data: parseNode(data) }
  await forEachIter(root, async cid => load(cid), print)
}
// Public API: varint decoding, HAMT lookup/iteration helpers, and
// byte/number conversion utilities.
module.exports = {
  readVarInt,
  buildArrayData,
  nextBits,
  indexForBitPos,
  parseNode,
  find,
  forEach,
  forEachParallel,
  printData,
  bytesToBig,
  makeBuffers,
}
|
# -----------------------------------------------------------------------------
#
# Package        : fluent-logger
# Version        : 3.4.1
# Source repo    : https://github.com/fluent/fluent-logger-node
# Tested on      : RHEL 8.3
# Script License : Apache License, Version 2 or later
# Maintainer     : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ==========  platform using the mentioned version of the package.
#             It may not work as expected with newer versions of the
#             package and/or distribution. In such case, please
#             contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=fluent-logger
PACKAGE_VERSION=3.4.1
PACKAGE_URL=https://github.com/fluent/fluent-logger-node
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn
OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
    echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
    exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
PACKAGE_VERSION=$(jq -r ".version" package.json)
# run the test command from test.sh
# NOTE: the original wrote `if ! npm install && npm audit fix && ...`, where
# `!` binds only to `npm install`, so the failure branch actually required the
# install to FAIL and the audit steps to succeed. Group the whole install
# sequence so the negation covers all three commands as intended.
if ! (npm install && npm audit fix && npm audit fix --force); then
    echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
    exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
if ! npm test; then
    echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
    exit 0
else
    echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
    echo "$PACKAGE_URL $PACKAGE_NAME"
    echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
    exit 0
fi
# Extract Detectron region features (e2e Faster R-CNN R-101-FPN weights) for
# the VLOG test split on GPU 7, keeping exactly 150 boxes per image (min and
# max both 150) and taking the gpu_0/fc6 layer as the feature. Per-image .npz
# files go to output_dir; all features are also aggregated into a single .th
# file via --one_giant_file.
CUDA_VISIBLE_DEVICES=7 python extract_image_features_kinetics.py \
    --cfg /proj/vondrick/parita/projects/visualbert/model_checkpoints/detectron/35861858/e2e_faster_rcnn_R-101-FPN_2x.yaml \
    --wts /proj/vondrick/parita/projects/visualbert/model_checkpoints/detectron/35861858/model_final.pkl \
    --min_bboxes 150 \
    --max_bboxes 150 \
    --feat_name gpu_0/fc6 \
    --output_dir ../../X_VLOG/features/test/npz_files/ \
    --image-ext jpg \
    --one_giant_file ../../X_VLOG/features/test/features_test_150.th \
    /proj/vondrick/parita/projects/visualbert/X_VLOG/pairs/data/test
|
#!/bin/bash
# Workstation bootstrap: installs base tooling, Python build dependencies,
# desktop extras (only when an X session is present), then per-environment
# packages via helpers from common.sh.
set -eux -o pipefail
# Base software.
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y \
    ack-grep \
    apt-transport-https \
    build-essential \
    fontconfig \
    git-el \
    iproute2 \
    jq \
    meld \
    nmap \
    tmux \
    unzip \
    whois \
    zsh
# Python dependencies (headers needed to compile CPython / native wheels).
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y \
    libbz2-dev \
    libreadline-dev \
    libsqlite3-dev \
    libssl-dev \
    python3-pip \
    zlib1g-dev
# Desktop-only packages: /usr/share/xsessions exists when a display manager
# is installed.
if [ -d /usr/share/xsessions/ ]; then
    if apt list --installed 2>/dev/null | grep gnome-desktop; then
        sudo DEBIAN_FRONTEND=noninteractive apt-get install -y compizconfig-settings-manager
    fi
    sudo DEBIAN_FRONTEND=noninteractive apt-get install -y \
        xfce4-terminal \
        yakuake
fi
# install_pkg_for_env is provided by common.sh.
source ./common.sh
install_pkg_for_env docker ubuntu
install_pkg_for_env gnome ubuntu
install_pkg_for_env openvpn ubuntu
install_pkg_for_env vagrant ubuntu
|
#!/bin/bash
# This script is used to stitch together ADF scans of recto verso papers.
#
# Files made by the first scan should reside in a folder called A. The second side should be called B. It is important that both files should be sorted alphabetically on the scanning order. Make sure that this format is respected by your scanning program.
#
# It depends on the pdfjoin program, which is usually included in any TeX setup.
rm A/.gitkeep
rm B/.gitkeep
# Read the listings one filename per array slot. mapfile splits on newlines
# only, so filenames containing spaces survive intact (the previous
# whole-word splitting of the ls output broke them). B is listed in reverse
# order so the last verso page pairs with the first recto page.
mapfile -t FILESA < <(ls A -1)
mapfile -t FILESB < <(ls B -r -1)
FILECOUNT=${#FILESA[@]}
# Arrays are 0-indexed, so the last valid index is count-1.
let FILECOUNT--
cd result
for index in $(seq 0 $FILECOUNT)
do
    # Merge the matching recto/verso pair; run the joins in parallel.
    pdfjoin "../A/${FILESA[$index]}" "../B/${FILESB[$index]}" &
done
wait
cd ..
|
# Evaluate the 512+512+512-SS-N checkpoint (epoch 13) on the WikiText-103 raw
# validation split: batch size 1, drop the last partial batch, identity_full
# augmentation, scoring only the last element of each sequence.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SS-N/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SS-N/13-0+1024+512-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function identity_full --eval_function last_element_eval
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.logic.jdbc.metadata.info;
import java.util.Map;
import org.apache.torque.engine.database.model.UnifiedSchema;
import org.dbflute.DfBuildProperties;
import org.dbflute.properties.DfLittleAdjustmentProperties;
import org.dbflute.util.DfCollectionUtil;
/**
* @author jflute
*/
public class DfForeignKeyMeta {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    protected String _foreignKeyName;
    protected UnifiedSchema _localSchema;
    protected String _localTablePureName;
    protected UnifiedSchema _foreignSchema;
    protected String _foreignTablePureName;
    // local column name -> foreign column name, in insertion order
    protected final Map<String, String> _columnNameMap = DfCollectionUtil.newLinkedHashMap();

    // ===================================================================================
    //                                                                       Name Building
    //                                                                       =============
    /**
     * Builds an identity name for the foreign table: "schema.table" when a
     * foreign schema is set, otherwise the pure table name alone.
     * @return The identity name of the foreign table. (NotNull when names are set)
     */
    public String getForeignTableIdentityName() {
        if (_foreignSchema == null) {
            return _foreignTablePureName;
        }
        return _foreignSchema.getIdentifiedSchema() + "." + _foreignTablePureName;
    }

    /**
     * Builds the SQL name of the local table, quoting it if the little
     * adjustment properties require direct-use quoting.
     * @return The SQL name of the local table. (NotNull when names are set)
     */
    public String getLocalTableSqlName() {
        if (_localSchema == null) {
            return _localTablePureName;
        }
        final DfLittleAdjustmentProperties prop = DfBuildProperties.getInstance().getLittleAdjustmentProperties();
        final String quotedName = prop.quoteTableNameIfNeedsDirectUse(_localTablePureName);
        return _localSchema.buildSqlName(quotedName); // driven is resolved here so it uses pure name here
    }

    /**
     * Builds the SQL name of the foreign table, quoting it if the little
     * adjustment properties require direct-use quoting.
     * @return The SQL name of the foreign table. (NotNull when names are set)
     */
    public String getForeignTableSqlName() {
        if (_foreignSchema == null) {
            return _foreignTablePureName;
        }
        final DfLittleAdjustmentProperties prop = DfBuildProperties.getInstance().getLittleAdjustmentProperties();
        final String quotedName = prop.quoteTableNameIfNeedsDirectUse(_foreignTablePureName);
        return _foreignSchema.buildSqlName(quotedName); // driven is resolved here so it uses pure name here
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    @Override
    public String toString() {
        return _foreignKeyName + ":{" + _localTablePureName + ":" + _foreignTablePureName + ":" + _columnNameMap + "}";
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    public String getForeignKeyName() {
        return _foreignKeyName;
    }

    public void setForeignKeyName(String foreignKeyName) {
        _foreignKeyName = foreignKeyName;
    }

    public UnifiedSchema getLocalSchema() {
        return _localSchema;
    }

    public void setLocalSchema(UnifiedSchema localSchema) {
        _localSchema = localSchema;
    }

    public String getLocalTablePureName() {
        return _localTablePureName;
    }

    public void setLocalTablePureName(String localTablePureName) {
        _localTablePureName = localTablePureName;
    }

    public UnifiedSchema getForeignSchema() {
        return _foreignSchema;
    }

    public void setForeignSchema(UnifiedSchema foreignSchema) {
        _foreignSchema = foreignSchema;
    }

    public String getForeignTablePureName() {
        return _foreignTablePureName;
    }

    public void setForeignTablePureName(String foreignTablePureName) {
        _foreignTablePureName = foreignTablePureName;
    }

    public Map<String, String> getColumnNameMap() {
        return _columnNameMap;
    }

    /** Registers one local-to-foreign column pairing. */
    public void putColumnName(String localColumnName, String foreignColumnName) {
        _columnNameMap.put(localColumnName, foreignColumnName);
    }

    /** Registers all pairings from the given map, preserving its order. */
    public void putColumnNameAll(Map<String, String> columnNameMap) {
        _columnNameMap.putAll(columnNameMap);
    }
}
|
<reponame>drkitty/cyder<gh_stars>1-10
from cyder.cydns.sshfp.models import SSHFP
from cyder.api.v1.tests.base import APITests
class SSHFPAPI_Test(APITests):
    """API tests for SSHFP (SSH fingerprint) DNS records."""
    __test__ = True
    model = SSHFP

    def create_data(self):
        # One SSHFP record in the base test ctnr/domain; 'key' value is a
        # placeholder ('<KEY>') scrubbed from the original source.
        return SSHFP.objects.create(
            ctnr=self.ctnr, description='SSHFP Record', ttl=420, label='sshfp',
            domain=self.domain, algorithm_number=1, fingerprint_type=1,
            key='<KEY>')
|
#!/bin/sh
# Release script: tags the version from setup.py, builds/pushes the Docker
# image (latest + versioned tags), and uploads the sdist to PyPI.
set -e
DOCKER_REPO=stv0g
DOCKER_IMAGE=${DOCKER_REPO}/withings-sync
# Extract the version number from setup.py's version='x.y.z' line.
VER=$(sed -n -e 's/.*version='\''\([0-9\.]*\)'\''.*/\1/p' < setup.py)
# Tag the release; ignore failure if the tag already exists.
git tag v${VER} || true
git push --tags
docker build \
    -t ${DOCKER_IMAGE} \
    -t ${DOCKER_IMAGE}:${VER} .
docker push ${DOCKER_IMAGE}
docker push ${DOCKER_IMAGE}:${VER}
python3 setup.py sdist
twine upload dist/withings-sync-${VER}.tar.gz
package cm.xxx.minos.leetcode;
import java.util.Stack;
/**
 * Description: Valid-parentheses check using a stack (the original header
 *              said "sliding window algorithm", which does not match the
 *              implementation below).
 * Author: lishangmin
 * Created: 2018-08-23 10:16
 */
public class Solution10 {
    /**
     * Returns true when every '(', '[' and '{' in s is closed by the matching
     * bracket in the correct (LIFO) order.
     */
    public boolean isValid(String s) {
        Stack<Character> stack = new Stack<>();
        for (Character c: s.toCharArray()) {
            if(c == '(' || c == '[' || c == '{'){
                stack.push(c);
            }else{
                // Closing bracket: fail if there is no opener, or the opener
                // on top of the stack does not match.
                if(stack.empty()) return false;
                Character top = stack.pop();
                if(top == '(' && c != ')'){
                    return false;
                }else
                if(top == '[' && c != ']'){
                    return false;
                }else
                if(top == '{' && c != '}'){
                    return false;
                }
            }
        }
        // Any leftover openers mean the input is unbalanced.
        return stack.isEmpty();
    }

    public static void main(String[] args) {
        Solution10 solution = new Solution10();
        // Expected output: false ("{}())" has an unmatched ')').
        System.out.println(solution.isValid("{}())"));
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script builds and pushes docker images when run from a release of Spark
# with Kubernetes support.
# Print all arguments to stderr and abort the script with a failure status.
function error {
  echo "$@" >&2
  exit 1
}
# Default SPARK_HOME to the parent of this script's directory when unset.
if [ -z "${SPARK_HOME}" ]; then
  SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi
. "${SPARK_HOME}/bin/load-spark-env.sh"
# Scratch directory holding the reduced docker build contexts for dev builds.
CTX_DIR="$SPARK_HOME/target/tmp/docker"
# A dev build is any tree without the RELEASE marker file (i.e. a source
# checkout rather than a runnable distribution).
function is_dev_build {
  [ ! -f "$SPARK_HOME/RELEASE" ]
}
# Remove the temporary build-context directory created by dev builds.
function cleanup_ctx_dir {
  if is_dev_build; then
    rm -rf "$CTX_DIR"
  fi
}
# Always clean up the context directory, even on error/interrupt.
trap cleanup_ctx_dir EXIT
# Builds a fully-qualified image reference from a base name, optionally
# prefixing $REPO (unless the second argument is not 1) and suffixing :$TAG.
function image_ref {
  local image="$1"
  local add_repo="${2:-1}"
  # Quote $add_repo: an empty second argument would otherwise make the test
  # expand to `[ = 1 ]`, a syntax error.
  if [ "$add_repo" = 1 ] && [ -n "$REPO" ]; then
    image="$REPO/$image"
  fi
  if [ -n "$TAG" ]; then
    image="$image:$TAG"
  fi
  echo "$image"
}
# Pushes the given image if it exists locally; skips (with a notice) otherwise.
function docker_push {
  local image_name="$1"
  # Quote the command substitution: unquoted, an empty `docker images -q`
  # result collapsed the test to `[ ! -z ]`, which only worked by accident.
  if [ -n "$(docker images -q "$(image_ref "${image_name}")")" ]; then
    docker push "$(image_ref "${image_name}")"
    if [ $? -ne 0 ]; then
      error "Failed to push $image_name Docker image."
    fi
  else
    echo "$(image_ref "${image_name}") image not found. Skipping push for this image."
  fi
}
# Converts a (possibly relative) file path to an absolute one; empty input
# passes through unchanged.
function resolve_file {
  local FILE=$1
  if [ -n "$FILE" ]; then
    # Quote expansions so paths containing spaces resolve correctly.
    local DIR=$(dirname "$FILE")
    DIR=$(cd "$DIR" && pwd)
    FILE="${DIR}/$(basename "$FILE")"
  fi
  echo "$FILE"
}
# Create a smaller build context for docker in dev builds to make the build faster. Docker
# uploads all of the current directory to the daemon, and it can get pretty big with dev
# builds that contain test log files and other artifacts.
#
# Three build contexts are created, one for each image: base, pyspark, and sparkr. For them
# to have the desired effect, the docker command needs to be executed inside the appropriate
# context directory.
#
# Note: docker does not support symlinks in the build context.
# Assembles three minimal docker build contexts (base, pyspark, sparkr) under
# $CTX_DIR so dev builds do not ship the whole source tree to the docker
# daemon. Runs in a subshell so `set -e` aborts only this function.
function create_dev_build_context {(
  set -e
  local BASE_CTX="$CTX_DIR/base"
  mkdir -p "$BASE_CTX/kubernetes"
  cp -r "resource-managers/kubernetes/docker/src/main/dockerfiles" \
    "$BASE_CTX/kubernetes/dockerfiles"
  cp -r "assembly/target/scala-$SPARK_SCALA_VERSION/jars" "$BASE_CTX/jars"
  cp -r "resource-managers/kubernetes/integration-tests/tests" \
    "$BASE_CTX/kubernetes/tests"
  mkdir "$BASE_CTX/examples"
  cp -r "examples/src" "$BASE_CTX/examples/src"
  # Copy just needed examples jars instead of everything.
  mkdir "$BASE_CTX/examples/jars"
  for i in examples/target/scala-$SPARK_SCALA_VERSION/jars/*; do
    # Skip jars already present in the main jars directory.
    if [ ! -f "$BASE_CTX/jars/$(basename $i)" ]; then
      cp $i "$BASE_CTX/examples/jars"
    fi
  done
  for other in bin sbin data; do
    cp -r "$other" "$BASE_CTX/$other"
  done
  # PySpark context: dockerfiles plus the Python library sources.
  local PYSPARK_CTX="$CTX_DIR/pyspark"
  mkdir -p "$PYSPARK_CTX/kubernetes"
  cp -r "resource-managers/kubernetes/docker/src/main/dockerfiles" \
    "$PYSPARK_CTX/kubernetes/dockerfiles"
  mkdir "$PYSPARK_CTX/python"
  cp -r "python/lib" "$PYSPARK_CTX/python/lib"
  cp -r "python/pyspark" "$PYSPARK_CTX/python/pyspark"
  # SparkR context: dockerfiles plus the R sources.
  local R_CTX="$CTX_DIR/sparkr"
  mkdir -p "$R_CTX/kubernetes"
  cp -r "resource-managers/kubernetes/docker/src/main/dockerfiles" \
    "$R_CTX/kubernetes/dockerfiles"
  cp -r "R" "$R_CTX/R"
)}
# Echoes the docker build-context directory for the named image: the per-image
# dev context in dev builds, otherwise the distribution root.
function img_ctx_dir {
  local dir="$SPARK_HOME"
  if is_dev_build; then
    dir="$CTX_DIR/$1"
  fi
  echo "$dir"
}
# Builds the JVM Spark image, plus the PySpark/SparkR images when their
# Dockerfiles were supplied via -p / -R.
function build {
  # NOTE(review): BUILD_ARGS is declared `local` twice (here and below where
  # it is assigned from BUILD_PARAMS); the first declaration is redundant.
  local BUILD_ARGS
  local SPARK_ROOT="$SPARK_HOME"
  if is_dev_build; then
    create_dev_build_context || error "Failed to create docker build context."
    SPARK_ROOT="$CTX_DIR/base"
  fi
  # Verify that the Docker image content directory is present
  if [ ! -d "$SPARK_ROOT/kubernetes/dockerfiles" ]; then
    error "Cannot find docker image. This script must be run from a runnable distribution of Apache Spark."
  fi
  # Verify that Spark has actually been built/is a runnable distribution
  # i.e. the Spark JARs that the Docker files will place into the image are present
  local TOTAL_JARS=$(ls $SPARK_ROOT/jars/spark-* | wc -l)
  TOTAL_JARS=$(( $TOTAL_JARS ))
  if [ "${TOTAL_JARS}" -eq 0 ]; then
    error "Cannot find Spark JARs. This script assumes that Apache Spark has first been built locally or this is a runnable distribution."
  fi
  # Word-split the accumulated -b options into an array of --build-arg flags.
  local BUILD_ARGS=(${BUILD_PARAMS})
  # If a custom SPARK_UID was set add it to build arguments
  if [ -n "$SPARK_UID" ]; then
    BUILD_ARGS+=(--build-arg spark_uid=$SPARK_UID)
  fi
  # Binding images (pyspark/sparkr) are layered on top of the base image.
  local BINDING_BUILD_ARGS=(
    ${BUILD_ARGS[@]}
    --build-arg
    base_img=$(image_ref spark)
  )
  local BASEDOCKERFILE=${BASEDOCKERFILE:-"kubernetes/dockerfiles/spark/Dockerfile"}
  local PYDOCKERFILE=${PYDOCKERFILE:-false}
  local RDOCKERFILE=${RDOCKERFILE:-false}
  # Each build runs in a subshell cd'd into the matching context directory.
  (cd $(img_ctx_dir base) && docker build $NOCACHEARG "${BUILD_ARGS[@]}" \
    -t $(image_ref spark) \
    -f "$BASEDOCKERFILE" .)
  if [ $? -ne 0 ]; then
    error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
  fi
  if [ "${PYDOCKERFILE}" != "false" ]; then
    (cd $(img_ctx_dir pyspark) && docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
      -t $(image_ref spark-py) \
      -f "$PYDOCKERFILE" .)
    if [ $? -ne 0 ]; then
      error "Failed to build PySpark Docker image, please refer to Docker build output for details."
    fi
  fi
  if [ "${RDOCKERFILE}" != "false" ]; then
    (cd $(img_ctx_dir sparkr) && docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
      -t $(image_ref spark-r) \
      -f "$RDOCKERFILE" .)
    if [ $? -ne 0 ]; then
      error "Failed to build SparkR Docker image, please refer to Docker build output for details."
    fi
  fi
}
# Pushes every built-in image variant; docker_push skips ones not built locally.
function push {
  local image
  for image in spark spark-py spark-r; do
    docker_push "$image"
  done
}
# Prints the CLI help text to stdout (heredoc content is user-facing output
# and must not be altered).
function usage {
  cat <<EOF
Usage: $0 [options] [command]
Builds or pushes the built-in Spark Docker image.
Commands:
  build       Build image. Requires a repository address to be provided if the image will be
              pushed to a different registry.
  push        Push a pre-built image to a registry. Requires a repository address to be provided.
Options:
  -f file               Dockerfile to build for JVM based Jobs. By default builds the Dockerfile shipped with Spark.
  -p file               (Optional) Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
                        Skips building PySpark docker image if not specified.
  -R file               (Optional) Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
                        Skips building SparkR docker image if not specified.
  -r repo               Repository address.
  -t tag                Tag to apply to the built image, or to identify the image to be pushed.
  -m                    Use minikube's Docker daemon.
  -n                    Build docker image with --no-cache
  -u uid                UID to use in the USER directive to set the user the main Spark process runs as inside the
                        resulting container
  -b arg                Build arg to build or push the image. For multiple build args, this option needs to
                        be used separately for each build arg.
Using minikube when building images will do so directly into minikube's Docker daemon.
There is no need to push the images into minikube in that case, they'll be automatically
available when running applications inside the minikube cluster.
Check the following documentation for more information on using the minikube Docker daemon:
  https://kubernetes.io/docs/getting-started-guides/minikube/#reusing-the-docker-daemon
Examples:
  - Build image in minikube with tag "testing"
    $0 -m -t testing build
  - Build PySpark docker image
    $0 -r docker.io/myrepo -t v2.3.0 -p kubernetes/dockerfiles/spark/bindings/python/Dockerfile build
  - Build and push image with tag "v2.3.0" to docker.io/myrepo
    $0 -r docker.io/myrepo -t v2.3.0 build
    $0 -r docker.io/myrepo -t v2.3.0 push
EOF
}
# Long-form help flags are handled before getopts (which only knows short options).
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  usage
  exit 0
fi
# Defaults; each may be overridden by a command-line option below.
REPO=
TAG=
BASEDOCKERFILE=
PYDOCKERFILE=
RDOCKERFILE=
NOCACHEARG=
BUILD_PARAMS=
SPARK_UID=
while getopts f:p:R:mr:t:nb:u: option
do
 case "${option}"
 in
 f) BASEDOCKERFILE=$(resolve_file ${OPTARG});;
 p) PYDOCKERFILE=$(resolve_file ${OPTARG});;
 R) RDOCKERFILE=$(resolve_file ${OPTARG});;
 r) REPO=${OPTARG};;
 t) TAG=${OPTARG};;
 n) NOCACHEARG="--no-cache";;
 b) BUILD_PARAMS=${BUILD_PARAMS}" --build-arg "${OPTARG};;
 m)
   # Point the docker CLI at minikube's daemon so images land inside minikube.
   if ! which minikube 1>/dev/null; then
     error "Cannot find minikube."
   fi
   if ! minikube status 1>/dev/null; then
     error "Cannot contact minikube. Make sure it's running."
   fi
   eval $(minikube docker-env)
   ;;
 u) SPARK_UID=${OPTARG};;
 esac
done
# Dispatch on the last positional argument (the command).
case "${@: -1}" in
  build)
    build
    ;;
  push)
    # Pushing requires an explicit repository address.
    if [ -z "$REPO" ]; then
      usage
      exit 1
    fi
    push
    ;;
  *)
    usage
    exit 1
    ;;
esac
|
# Build the image locally, re-tag it for the lex13 Docker Hub namespace, push.
docker build -t transfer-service-cplus .
docker tag transfer-service-cplus lex13/transfer-service-cplus
docker push lex13/transfer-service-cplus
<reponame>GeoscienceAustralia/igssitelog-java-bindings<filename>src/main/java/au/gov/ga/geodesy/igssitelog/support/marshalling/moxy/MultipathSourceAdapter.java
package au.gov.ga.geodesy.igssitelog.support.marshalling.moxy;
import au.gov.ga.geodesy.igssitelog.domain.model.MultipathSource;
/**
 * MOXy marshalling adapter for optional {@link MultipathSource} elements,
 * inheriting all behaviour from {@code OptionalCompositeAdapter}.
 * NOTE(review): the type parameter {@code <T>} is never used in this class —
 * presumably required by the marshaller's reflection; confirm before removing.
 */
public class MultipathSourceAdapter<T> extends OptionalCompositeAdapter<MultipathSource> {
}
|
//------------------------------------------------------------------------
// Flags : clang-format SMTGSequencer
// Project : VST SDK
//
// Category : Helpers
// Filename : public.sdk/source/vst/aaxwrapper/aaxentry.h
// Created by : Steinberg, 08/2017
// Description : VST 3 -> AAX Wrapper
//
//-----------------------------------------------------------------------------
// LICENSE
// (c) 2020, Steinberg Media Technologies GmbH, All Rights Reserved
//-----------------------------------------------------------------------------
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of the Steinberg Media Technologies nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
// OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
//-----------------------------------------------------------------------------
/// \cond ignore
/**
* plugin entry from AAX_Exports.cpp
*/
//-----------------------------------------------------------------------------
#include "pluginterfaces/base/fplatform.h"
// change names to avoid different linkage
#define ACFRegisterPlugin ACFRegisterPlugin_
#define ACFRegisterComponent ACFRegisterComponent_
#define ACFGetClassFactory ACFGetClassFactory_
#define ACFCanUnloadNow ACFCanUnloadNow_
#define ACFStartup ACFStartup_
#define ACFShutdown ACFShutdown_
//#define INITACFIDS // Make sure all of the AVX2 uids are defined.
#include "AAX.h"
#include "AAX_Init.h"
#include "acfresult.h"
#include "acfunknown.h"
#undef ACFRegisterPlugin
#undef ACFRegisterComponent
#undef ACFGetClassFactory
#undef ACFCanUnloadNow
#undef ACFStartup
#undef ACFShutdown
#if SMTG_OS_MACOS
#include <CoreFoundation/CoreFoundation.h>
#include <dlfcn.h>
#endif
extern "C" {
#if SMTG_OS_MACOS
bool bundleEntry (CFBundleRef);
bool bundleExit (void);
#else
bool InitDll (); // { return true; }
bool ExitDll (); // { return true; }
#endif
}
int AAXWrapper_linkAnchor; // reference this in the plugin to force inclusion of the wrapper in the
// link
//------------------------------------------------------------------------
#if defined(__GNUC__)
#define AAX_EXPORT extern "C" __attribute__ ((visibility ("default"))) ACFRESULT
#else
#define AAX_EXPORT extern "C" __declspec (dllexport) ACFRESULT __stdcall
#endif
AAX_EXPORT ACFRegisterPlugin (IACFUnknown* pUnkHost, IACFPluginDefinition** ppPluginDefinition);
AAX_EXPORT ACFRegisterComponent (IACFUnknown* pUnkHost, acfUInt32 index,
IACFComponentDefinition** ppComponentDefinition);
AAX_EXPORT ACFGetClassFactory (IACFUnknown* pUnkHost, const acfCLSID& clsid, const acfIID& iid,
void** ppOut);
AAX_EXPORT ACFCanUnloadNow (IACFUnknown* pUnkHost);
AAX_EXPORT ACFStartup (IACFUnknown* pUnkHost);
AAX_EXPORT ACFShutdown (IACFUnknown* pUnkHost);
AAX_EXPORT ACFGetSDKVersion (acfUInt64* oSDKVersion);
//------------------------------------------------------------------------
// \func ACFRegisterPlugin
// \brief Determines the number of components defined in the dll.
//
// Delegates plugin registration to the AAX wrapper, converting any C++
// exception into ACF_E_UNEXPECTED so no exception crosses the ABI boundary.
ACFAPI ACFRegisterPlugin (IACFUnknown* pUnkHostVoid, IACFPluginDefinition** ppPluginDefinitionVoid)
{
	try
	{
		return AAXRegisterPlugin (pUnkHostVoid, ppPluginDefinitionVoid);
	}
	catch (...)
	{
		return ACF_E_UNEXPECTED;
	}
}
//------------------------------------------------------------------------
// \func ACFRegisterComponent
// \brief Registers a specific component in the DLL.
//
// Delegates component registration to the AAX wrapper; any thrown exception
// is mapped to ACF_E_UNEXPECTED instead of escaping the DLL boundary.
ACFAPI ACFRegisterComponent (IACFUnknown* pUnkHost, acfUInt32 index,
                             IACFComponentDefinition** ppComponentDefinition)
{
	try
	{
		return AAXRegisterComponent (pUnkHost, index, ppComponentDefinition);
	}
	catch (...)
	{
		return ACF_E_UNEXPECTED;
	}
}
//------------------------------------------------------------------------
// \func ACFGetClassFactory
// \brief Gets the factory for a given class ID.
//
// Delegates class-factory lookup to the AAX wrapper; exceptions become
// ACF_E_UNEXPECTED rather than crossing the ABI boundary.
ACFAPI ACFGetClassFactory (IACFUnknown* pUnkHost, const acfCLSID& clsid, const acfIID& iid,
                           void** ppOut)
{
	try
	{
		return AAXGetClassFactory (pUnkHost, clsid, iid, ppOut);
	}
	catch (...)
	{
		return ACF_E_UNEXPECTED;
	}
}
//------------------------------------------------------------------------
// \func ACFCanUnloadNow
// \brief Figures out if all objects are released so we can unload.
//
// Asks the AAX wrapper whether all objects are released so the DLL may be
// unloaded; exceptions are converted to ACF_E_UNEXPECTED.
ACFAPI ACFCanUnloadNow (IACFUnknown* pUnkHost)
{
	try
	{
		return AAXCanUnloadNow (pUnkHost);
	}
	catch (...)
	{
		return ACF_E_UNEXPECTED;
	}
}
#if SMTG_OS_MACOS
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Resolves the bundle that contains the given executable path by stripping
// three path components (Contents/MacOS/<binary>) from the end.
// Returns 0 when the path has fewer than three separators.
// The returned CFBundleRef is owned by the caller.
static CFBundleRef GetBundleFromExecutable (const char* filepath)
{
	char* fname = strdup (filepath);
	int pos = strlen (fname);
	int level = 3;
	while (level > 0 && --pos >= 0)
	{
		if (fname[pos] == '/')
			level--;
	}
	if (level > 0)
	{
		free (fname); // the strdup'd buffer was previously leaked on this path
		return 0;
	}
	fname[pos] = 0;
	CFURLRef url = CFURLCreateFromFileSystemRepresentation (0, (const UInt8*)fname, pos, true);
	free (fname); // no longer needed once the URL is created (was leaked before)
	CFBundleRef bundle = CFBundleCreate (0, url);
	if (url)
		CFRelease (url); // CFBundleCreate retains what it needs; release our reference
	return bundle;
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Locates the bundle containing this code by resolving the address of this
// very function back to its image path via dladdr.
static CFBundleRef GetCurrentBundle ()
{
	Dl_info info;
	if (dladdr ((const void*)GetCurrentBundle, &info) && info.dli_fname)
		return GetBundleFromExecutable (info.dli_fname);
	return 0;
}
#endif
//------------------------------------------------------------------------
// \func ACFStartup
// \brief Called once at init time.
//
// One-time initialization: starts the AAX wrapper, then runs the module's
// own entry hook (bundleEntry on macOS, InitDll elsewhere). If the hook
// fails, the wrapper is shut down again and an error is returned.
ACFAPI ACFStartup (IACFUnknown* pUnkHost)
{
	ACFRESULT result = ACF_OK;
	try
	{
		result = AAXStartup (pUnkHost);
		if (result == ACF_OK)
		{
#if SMTG_OS_MACOS
			bool rc = bundleEntry (GetCurrentBundle ());
#else
			bool rc = InitDll ();
#endif
			if (!rc)
			{
				// Entry hook failed: undo the wrapper startup before reporting.
				AAXShutdown (pUnkHost);
				result = ACF_E_UNEXPECTED;
			}
		}
	}
	catch (...)
	{
		result = ACF_E_UNEXPECTED;
	}
	return result;
}
//------------------------------------------------------------------------
// \func ACFShutdown
// \brief Called once at termination of dll.
//
// One-time termination: runs the module's exit hook (bundleExit on macOS,
// ExitDll elsewhere) and then shuts down the AAX wrapper. Exceptions are
// mapped to ACF_E_UNEXPECTED.
ACFAPI ACFShutdown (IACFUnknown* pUnkHost)
{
	ACFRESULT result = ACF_OK;
	try
	{
#if SMTG_OS_MACOS
		bundleExit ();
#else
		ExitDll ();
#endif
		result = AAXShutdown (pUnkHost);
	}
	catch (...)
	{
		result = ACF_E_UNEXPECTED;
	}
	return result;
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Reports the SDK version by delegating directly to the AAX wrapper.
ACFAPI ACFGetSDKVersion (acfUInt64* oSDKVersion)
{
	return AAXGetSDKVersion (oSDKVersion);
}
/// \endcond
|
"use strict";
// Platform flag consumed by callers that branch on the runtime environment.
// NOTE(review): presumably a browser-specific substitute selected at bundle
// time (e.g. via a package "browser" field) — confirm against the build config.
export const platform = "browser";
|
#!/bin/bash
source ../utils.sh
# Contiki directory
CONTIKI=$1
# Test basename
BASENAME=06-oscore-interops-test
IPADDR=fd00::302:304:506:708
# Build the Contiki-NG native OSCORE example.
make -C $CONTIKI/examples/oscore clean >/dev/null
make -C $CONTIKI/examples/oscore > make.log 2> make.err
sleep 10
echo "Downloading leshan"
CALIFORNIUM_JAR=californium-oscore-interops-server.jar
#wget -nc https://joakimeriksson.github.io/resources/$LESHAN_JAR
#Do not run tests 5 through 7, Observe is not implemented yet.
#Skip test 2 since it uses a different context.
#Test 12 seems californium can't pair response without token with request
#Test 16 on Coap-server
# Track the overall outcome explicitly. The original grepped californium.log
# AFTER the loop, but that file is removed at the end of every successful
# iteration — so a fully passing run was reported as a FAIL.
TESTS_OK=1
for i in {0..1} {3..4} {8..11} {12..15} 17
do
    # Start the native node (OSCORE plugtest server).
    sudo $CONTIKI/examples/oscore/oscore-plugtest-server.native > node.log 2> node.err &
    CPID=$!
    # Run one Californium interop test against it.
    java -jar $CALIFORNIUM_JAR $i >californium.log 2>californium.err &
    CALID=$!
    sleep 5
    if grep -q 'TEST OK' californium.log ; then
        echo "Test $i OK"
    else
        echo "Test $i FAIL!"
        TESTS_OK=0
        echo "Closing Californium"
        sleep 1
        kill_bg $CALID
        echo "Closing native node"
        sleep 1
        kill_bg $CPID
        # Leave the log files in place so the failure branch below can dump them.
        break
    fi
    sleep 1
    kill_bg $CPID
    rm node.log
    rm node.err
    rm californium.log
    rm californium.err
done
if [ $TESTS_OK -eq 1 ] ; then
    printf "%-32s TEST OK\n" "$BASENAME" | tee $BASENAME.testlog;
else
    echo "==== make.log ====" ; cat make.log;
    echo "==== make.err ====" ; cat make.err;
    echo "==== node.log ====" ; cat node.log;
    echo "==== node.err ====" ; cat node.err;
    echo "==== leshan.log ====" ; cat californium.log;
    echo "==== leshan.err ====" ; cat californium.err;
    echo "==== $BASENAME.log ====" ; cat $BASENAME.log;
    printf "%-32s TEST FAIL\n" "$BASENAME" | tee $BASENAME.testlog;
fi
# -f: on the success path the per-iteration cleanup already removed these.
rm -f make.log
rm -f make.err
rm -f node.log
rm -f node.err
rm -f californium.log
rm -f californium.err
# We do not want Make to stop -> Return 0
# The Makefile will check if a log contains FAIL at the end
exit 0
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 <NAME>. All rights reserved.
#
"""Floating point modulo

Prints a table comparing Python's ``%`` operator with ``math.fmod`` for a
few sign combinations.

NOTE: uses Python 2 print statements; this script will not run under Python 3.
"""
#end_pymotw_header
import math

# Table header: input x, input y, then the two modulo results side by side.
print '{:^4} {:^4} {:^5} {:^5}'.format('x', 'y', '%', 'fmod')
print '---- ---- ----- -----'
for x, y in [ (5, 2),
              (5, -2),
              (-5, 2),
              ]:
    print '{:4.1f} {:4.1f} {:5.2f} {:5.2f}'.format(
        x,
        y,
        x % y,
        math.fmod(x, y),
        )
|
#!/bin/bash
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Generate test keys for use by the tests.
# Load common constants and variables.
# NOTE(review): common.sh is assumed to define TESTKEY_DIR, UTIL_DIR and
# key_lengths (used below) — confirm.
. "$(dirname "$0")/common.sh"
set -e
sha_types=( 1 256 512 )
# Generate RSA test keys of various lengths, then wrap each one for every
# SHA variant. Algorithm IDs are laid out as key_index*3 + sha_index.
function generate_keys {
  key_index=0
  key_name_base="${TESTKEY_DIR}/key_rsa"
  for i in ${key_lengths[@]}
  do
    key_base="${key_name_base}${i}"
    # Skip lengths whose pre-processed key already exists.
    if [ -f "${key_base}.keyb" ]; then
      continue
    fi
    openssl genrsa -F4 -out ${key_base}.pem $i
    # Generate self-signed certificate from key.
    openssl req -batch -new -x509 -key ${key_base}.pem \
      -out ${key_base}.crt
    # Generate pre-processed key for use by RSA signature verification code.
    ${UTIL_DIR}/dumpRSAPublicKey -cert ${key_base}.crt \
      > ${key_base}.keyb
    alg_index=0
    for sha_type in ${sha_types[@]}
    do
      alg=$((${key_index} * 3 + ${alg_index}))
      # wrap the public key
      ${UTIL_DIR}/vbutil_key \
        --pack "${key_base}.sha${sha_type}.vbpubk" \
        --key "${key_base}.keyb" \
        --version 1 \
        --algorithm ${alg}
      # wrap the private key
      ${UTIL_DIR}/vbutil_key \
        --pack "${key_base}.sha${sha_type}.vbprivk" \
        --key "${key_base}.pem" \
        --algorithm ${alg}
      alg_index=$((${alg_index} + 1))
    done
    key_index=$((${key_index} + 1))
  done
}
mkdir -p ${TESTKEY_DIR}
generate_keys
|
// Function that calculates the discrete Fourier algorithm of the given array
function DiscreteFourierAlgorithm(arr){
// Create an empty array to store the Fourier coefficients
let fourierCoeff = [];
// Loop through the array and calculate the Fourier coefficients
for(let i = 0; i < arr.length; i++){
let sum = 0;
for(let j = 0; j < arr.length; j++){
let exponent = ( -2 * Math.PI * i * j ) / arr.length;
sum += arr[j] * Math.cos(exponent);
}
// Append the coefficient to the Fourier coefficients array
fourierCoeff.push(sum);
}
return fourierCoeff;
}
// Demo entry point.  The original used Python syntax
// (`if __name__=="__main__":` / `print(...)`), which is a syntax error in
// a JavaScript file.  Run the demo only when executed directly.
if (typeof require !== 'undefined' && require.main === module) {
  const result = DiscreteFourierAlgorithm([1, 2, 3, 4]);
  console.log(result);
}
<reponame>georgettica/mattermost-server
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
package api4
import (
"encoding/json"
"net/http"
"github.com/mattermost/mattermost-server/v6/model"
)
// InitUsage registers the read-only /usage REST endpoints on the API
// router.  All three routes require an authenticated session.
func (api *API) InitUsage() {
	// GET /api/v4/usage/posts
	api.BaseRoutes.Usage.Handle("/posts", api.APISessionRequired(getPostsUsage)).Methods("GET")
	// GET /api/v4/usage/teams
	api.BaseRoutes.Usage.Handle("/teams", api.APISessionRequired(getTeamsUsage)).Methods("GET")
	// GET /api/v4/usage/integrations
	api.BaseRoutes.Usage.Handle("/integrations", api.APISessionRequired(getIntegrationsUsage)).Methods("GET")
}
// getPostsUsage handles GET /api/v4/usage/posts and responds with the
// total post count as a JSON model.PostsUsage payload.
func getPostsUsage(c *Context, w http.ResponseWriter, r *http.Request) {
	count, appErr := c.App.GetPostsUsage()
	if appErr != nil {
		c.Err = model.NewAppError("Api4.getPostsUsage", "app.post.analytics_posts_count.app_error", nil, appErr.Error(), http.StatusInternalServerError)
		return
	}
	// Use a name that does not shadow the encoding/json package.
	payload := &model.PostsUsage{Count: count}
	data, err := json.Marshal(payload)
	if err != nil {
		c.Err = model.NewAppError("Api4.getPostsUsage", "api.marshal_error", nil, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Write(data)
}
// getTeamsUsage handles GET /api/v4/usage/teams and responds with the
// teams usage as JSON.  Fixes over the original:
//   - the nil-payload branch called appErr.Error() when appErr is
//     guaranteed nil on that path (nil-pointer panic);
//   - missing `return` statements after setting c.Err allowed a nil or
//     failed payload to still be written to the response.
func getTeamsUsage(c *Context, w http.ResponseWriter, r *http.Request) {
	teamsUsage, appErr := c.App.GetTeamsUsage()
	if appErr != nil {
		c.Err = model.NewAppError("Api4.getTeamsUsage", "app.teams.analytics_teams_count.app_error", nil, appErr.Error(), http.StatusInternalServerError)
		return
	}
	if teamsUsage == nil {
		// No detail message available here: appErr is nil on this path.
		c.Err = model.NewAppError("Api4.getTeamsUsage", "app.teams.analytics_teams_count.app_error", nil, "", http.StatusInternalServerError)
		return
	}
	data, err := json.Marshal(teamsUsage)
	if err != nil {
		c.Err = model.NewAppError("Api4.getTeamsUsage", "api.marshal_error", nil, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Write(data)
}
// getIntegrationsUsage handles GET /api/v4/usage/integrations and
// responds with the integrations usage as JSON.  When the plugin system
// is disabled, an empty IntegrationsUsage payload is returned instead of
// querying the app layer.
func getIntegrationsUsage(c *Context, w http.ResponseWriter, r *http.Request) {
	if !*c.App.Config().PluginSettings.Enable {
		// Plugins disabled: report zero usage rather than an error.
		json, err := json.Marshal(&model.IntegrationsUsage{})
		if err != nil {
			c.Err = model.NewAppError("Api4.getIntegrationsUsage", "api.marshal_error", nil, err.Error(), http.StatusInternalServerError)
			return
		}
		w.Write(json)
		return
	}
	usage, appErr := c.App.GetIntegrationsUsage()
	if appErr != nil {
		c.Err = appErr
		return
	}
	json, err := json.Marshal(usage)
	if err != nil {
		c.Err = model.NewAppError("Api4.getIntegrationsUsage", "api.marshal_error", nil, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Write(json)
}
|
/**
* @fileoverview Check element match selector
*
*/
'use strict';

var inArray = require('../array/inArray');
var toArray = require('../collection/toArray');

var elProto = Element.prototype;
// Pick the native Element#matches implementation, falling back to the
// vendor-prefixed variants for older browsers, and finally to a manual
// scan: the element matches if it appears in querySelectorAll(selector)
// on its owning document.
var matchSelector = elProto.matches ||
    elProto.webkitMatchesSelector ||
    elProto.mozMatchesSelector ||
    elProto.msMatchesSelector ||
    function(selector) {
        var doc = this.document || this.ownerDocument;
        return inArray(this, toArray(doc.querySelectorAll(selector))) > -1;
    };
/**
 * Check element match selector
 * @param {HTMLElement} element - element to check
 * @param {string} selector - selector to check
 * @returns {boolean} is selector matched to element?
 * @memberof module:domUtil
 */
function matches(element, selector) {
    return matchSelector.call(element, selector);
}
module.exports = matches;
|
using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using Xunit;
/// <summary>
/// Produces lists of distinct pseudo-random seed values.
/// </summary>
public class SeedGenerator
{
    /// <summary>
    /// Generates <paramref name="count"/> distinct pseudo-random seeds.
    /// </summary>
    /// <param name="count">Number of seeds to generate; must be positive.</param>
    /// <returns>A list of <paramref name="count"/> unique non-negative ints.</returns>
    /// <exception cref="ArgumentException">Thrown when count is zero or negative.</exception>
    public List<int> GenerateSeeds(int count)
    {
        if (count <= 0)
        {
            throw new ArgumentException("Count should be greater than zero");
        }
        var random = new Random();
        // HashSet gives O(1) duplicate checks; the original List.Contains
        // inside the loop made generation accidentally O(n^2).
        var seen = new HashSet<int>();
        var seeds = new List<int>();
        while (seeds.Count < count)
        {
            int seed = random.Next();
            // Add returns false for duplicates, so each value is kept once.
            if (seen.Add(seed))
            {
                seeds.Add(seed);
            }
        }
        return seeds;
    }
}
/// <summary>
/// Behavioral tests for <see cref="SeedGenerator"/>.
/// </summary>
public class SeedGeneratorTests
{
    [Fact]
    public void GenerateSeeds_ShouldReturnNonEmptyList()
    {
        // Arrange: a fresh generator is the system under test.
        var sut = new SeedGenerator();

        // Act: ask for a handful of seeds.
        var result = sut.GenerateSeeds(5);

        // Assert: something must come back.
        result.Should().NotBeEmpty();
    }

    [Fact]
    public void GenerateSeeds_ShouldReturnUniqueSeeds()
    {
        // Arrange: a fresh generator is the system under test.
        var sut = new SeedGenerator();

        // Act: ask for ten seeds.
        var result = sut.GenerateSeeds(10);

        // Assert: no duplicates are allowed among the generated seeds.
        result.Should().OnlyHaveUniqueItems();
    }
}
package com.littlejenny.gulimall.ware.service;
import java.util.ArrayList;
import java.util.List;
/**
 * Tiny demo: skipping one element while iterating a list, then printing 8.
 * (The original used {@code return} inside a {@code forEach} lambda, which
 * only skips the current element -- equivalent to {@code continue} here.)
 */
public class Test {
    public static void main(String[] args) {
        // Build the sample list.
        List<Integer> values = new ArrayList<>();
        values.add(5);
        values.add(6);
        values.add(7);
        // Iterate, skipping the value 6; nothing else happens per element.
        for (Integer value : values) {
            if (value == 6) {
                continue;
            }
        }
        System.out.println(8);
    }
}
|
package com.ghn.android.news;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AnticipateInterpolator;
import android.view.animation.OvershootInterpolator;
import android.webkit.WebView;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.ghn.android.R;
import com.ghn.android.news.favorite.DatabaseHandler;
import com.ghn.android.news.favorite.Pojo;
import com.ghn.android.news.imageloader.ImageLoader;
import com.ghn.android.news.util.AnimatorUtils;
import com.ghn.android.news.util.Constant;
import com.ogaclejapan.arclayout.ArcLayout;
import java.util.ArrayList;
import java.util.List;
/**
 * Detail screen for a news item: shows the selected article in a swipeable
 * ViewPager, with an arc-shaped fab menu for share / favorite / rate
 * actions and prev/next toolbar navigation.
 *
 * All article data arrives as parallel String arrays via Intent extras;
 * favorites are persisted through {@link DatabaseHandler}.
 */
public class NewsDetail_Activity extends AppCompatActivity implements View.OnClickListener{
    // Index of the article to show first (from the "POSITION" extra).
    int position;
    String[] allArraynews,allArraynewsCatName;
    // Parallel arrays: element i of each array describes the same article.
    String[] allArrayNewsCId,allArrayNewsCatId,allArrayNewsCatImage,allArrayNewsCatName,allArrayNewsHeading,allArrayNewsImage,allArrayNewsDes,allArrayNewsDate;
    ImageView vp_imageview;
    ViewPager viewpager;
    public ImageLoader imageLoader;
    // NOTE(review): never assigned (the initialiser below is commented
    // out), so this stays 0 -- verify the menu_next clamp actually works.
    int TOTAL_IMAGE;
    public DatabaseHandler db;
    private Menu menu;
    // Fields for the article currently being favorited/unfavorited.
    String newscid,newscat_id,newscatimage,newscatname,newsheading,newsimage,newsdes,newsdate;
    Toolbar toolbar;
    private static final String KEY_DEMO = "demo";
    Toast toast = null;
    View fab;
    View menuLayout;
    ArcLayout arcLayout;

    /**
     * Reads the article arrays from the Intent, wires up the arc menu and
     * fab, and positions the pager on the requested article.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // TODO Auto-generated method stub
        super.onCreate(savedInstanceState);
        setContentView(R.layout.newsdetail);
        db = new DatabaseHandler(this);
        setTitle("");
        Intent i=getIntent();
        fab = findViewById(R.id.fab);
        menuLayout = findViewById(R.id.menu_layout);
        arcLayout = (ArcLayout) findViewById(R.id.arc_layout);
        // Every child of the arc menu routes through onClick below.
        for (int k = 0, size = arcLayout.getChildCount(); k < size; k++) {
            arcLayout.getChildAt(k).setOnClickListener(this);
        }
        fab.setOnClickListener(this);
        // Unpack the parallel article arrays from the launching Intent.
        position=i.getIntExtra("POSITION", 0);
        allArrayNewsCId=i.getStringArrayExtra("CATEGORY_ITEM_CID");
        allArrayNewsCatName=i.getStringArrayExtra("CATEGORY_ITEM_NAME");
        allArrayNewsCatImage=i.getStringArrayExtra("CATEGORY_ITEM_IMAGE");
        allArrayNewsCatId=i.getStringArrayExtra("CATEGORY_ITEM_CAT_ID");
        allArrayNewsImage=i.getStringArrayExtra("CATEGORY_ITEM_NEWSIMAGE");
        allArrayNewsHeading=i.getStringArrayExtra("CATEGORY_ITEM_NEWSHEADING");
        allArrayNewsDes=i.getStringArrayExtra("CATEGORY_ITEM_NEWSDESCRI");
        allArrayNewsDate=i.getStringArrayExtra("CATEGORY_ITEM_NEWSDATE");
        //TOTAL_IMAGE=allArraynews.length-1;
        viewpager=(ViewPager)findViewById(R.id.news_slider);
        imageLoader=new ImageLoader(getApplicationContext());
        ImagePagerAdapter adapter = new ImagePagerAdapter();
        viewpager.setAdapter(adapter);
        // Find the page whose category id matches the requested position.
        // NOTE(review): contains(String.valueOf(position)) is a substring
        // match, so e.g. id "12" matches position 1 -- confirm intended.
        boolean found = false;
        int j1=0;
        for(int i1=0;i1<allArrayNewsCatId.length;i1++)
        {
            if(allArrayNewsCatId[i1].contains(String.valueOf(position)))
            {
                found=true;
                j1=i1;
                break;
            }
        }
        if(found)
        {
            viewpager.setCurrentItem(j1);
        }
        // Keep the favorite button state in sync with the visible page.
        viewpager.setOnPageChangeListener(new OnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                // TODO Auto-generated method stub
                // NOTE(review): the parameter is immediately overwritten
                // with the pager's current item; the outer field keeps
                // its old value here.
                position=viewpager.getCurrentItem();
                newscat_id=allArrayNewsCatId[position];
                List<Pojo> pojolist=db.getFavRow(newscat_id);
                if(pojolist.size()==0)
                {
                    // Not in the favorites table: show the inactive icon.
                    findViewById(R.id.btnb).setBackgroundResource(R.drawable.fav);
                }
                else
                {
                    if(pojolist.get(0).getCatId().equals(newscat_id))
                    {
                        findViewById(R.id.btnb).setBackgroundResource(R.drawable.fav_active);
                    }
                }
            }
            @Override
            public void onPageScrolled(int arg0, float arg1, int position) {
                // TODO Auto-generated method stub
            }
            @Override
            public void onPageScrollStateChanged(int position) {
                // TODO Auto-generated method stub
            }
        });
    }

    /** Inflates the prev/next toolbar menu and syncs the favorite icon. */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.news_menu, menu);
        this.menu = menu;
        FirstFav();
        return super.onCreateOptionsMenu(menu);
    }

    /** Handles toolbar navigation: home (back), previous and next page. */
    @Override
    public boolean onOptionsItemSelected(MenuItem menuItem)
    {
        switch (menuItem.getItemId())
        {
            case android.R.id.home:
                onBackPressed();
                return true;
            case R.id.menu_back:
                // Step one page back, clamping at the first page.
                position=viewpager.getCurrentItem();
                position--;
                if (position < 0) {
                    position = 0;
                }
                viewpager.setCurrentItem(position);
                return true;
            case R.id.menu_next:
                // Step one page forward.  NOTE(review): TOTAL_IMAGE is
                // never set (stays 0), so this clamp is a no-op; the
                // ViewPager itself clamps out-of-range items.
                position=viewpager.getCurrentItem();
                position++;
                if (position == TOTAL_IMAGE) {
                    position = TOTAL_IMAGE;
                }
                viewpager.setCurrentItem(position);
                return true;
            default:
                return super.onOptionsItemSelected(menuItem);
        }
    }

    /** Strips literal "<br/>" tags from an HTML snippet. */
    public String RemoveTag(String html){
        html=html.replaceAll("<br/>","");
        return html;
    }

    /**
     * Persists the article at {@code position} into the favorites table
     * and switches the favorite button to its active icon.
     */
    public void AddtoFav(int position)
    {
        newscat_id=allArrayNewsCatId[position];
        newscid=allArrayNewsCId[position];
        newscatname=allArrayNewsCatName[position];
        //newscatimage=allArrayNewsCatImage[position];
        newsheading=allArrayNewsHeading[position];
        newsimage=allArrayNewsImage[position];
        newsdes=allArrayNewsDes[position];
        newsdate=allArrayNewsDate[position];
        db.AddtoFavorite(new Pojo(newscat_id,newscid,newscatname,newsheading,newsimage,newsdes,newsdate));
        Toast.makeText(getApplicationContext(), "Added to Favorite", Toast.LENGTH_SHORT).show();
        findViewById(R.id.btnb).setBackgroundResource(R.drawable.fav_active);
    }

    //remove from favorite
    /**
     * Removes the article at {@code position} from the favorites table
     * and switches the favorite button back to its inactive icon.
     */
    public void RemoveFav(int position)
    {
        newscat_id=allArrayNewsCatId[position];
        db.RemoveFav(new Pojo(newscat_id));
        Toast.makeText(getApplicationContext(), "Removed from Favorite", Toast.LENGTH_SHORT).show();
        findViewById(R.id.btnb).setBackgroundResource(R.drawable.fav);
    }

    /** Sets the initial favorite-icon state for the first visible page. */
    public void FirstFav()
    {
        int first=viewpager.getCurrentItem();
        String Image_id=allArrayNewsCatId[first];
        List<Pojo> pojolist=db.getFavRow(Image_id);
        if(pojolist.size()==0)
        {
            findViewById(R.id.btnb).setBackgroundResource(R.drawable.fav);
        }
        else
        {
            if(pojolist.get(0).getCatId().equals(Image_id))
            {
                findViewById(R.id.btnb).setBackgroundResource(R.drawable.fav_active);
            }
        }
    }

    /**
     * Pager adapter rendering one article per page: header image, title,
     * date, and the article body in a WebView styled as justified HTML.
     */
    private class ImagePagerAdapter extends PagerAdapter {
        private LayoutInflater inflater;
        public ImagePagerAdapter() {
            // TODO Auto-generated constructor stub
            inflater = getLayoutInflater();
        }
        @Override
        public int getCount() {
            return allArrayNewsCatId.length;
        }
        @Override
        public boolean isViewFromObject(View view, Object object) {
            // instantiateItem returns the page view itself as the key.
            return view.equals(object);
        }
        @Override
        public Object instantiateItem(ViewGroup container, final int position) {
            View imageLayout = inflater.inflate(R.layout.newpager_item, container, false);
            assert imageLayout != null;
            ImageView news_imageview=(ImageView)imageLayout.findViewById(R.id.image_news);
            TextView txt_newstitle=(TextView)imageLayout.findViewById(R.id.text_newstitle);
            TextView txt_newsdate=(TextView)imageLayout.findViewById(R.id.text_newsdate);
            //TextView txt_newsdes=(TextView)imageLayout.findViewById(R.id.text_newsdes);
            WebView webnewsdes=(WebView)imageLayout.findViewById(R.id.webView_newsdes);
            // Header image is loaded asynchronously from the server.
            imageLoader.DisplayImage(Constant.SERVER_IMAGE_NEWSLISTDETAILS+allArrayNewsImage[position], news_imageview);
            txt_newstitle.setText(allArrayNewsHeading[position]);
            txt_newsdate.setText(allArrayNewsDate[position]);
            //txt_newsdes.setText(allArrayNewsDes[position]);
            // Render the body in a non-focusable WebView so the pager
            // keeps handling touch gestures.
            webnewsdes.setBackgroundColor(Color.parseColor(getString(R.color.background_color)));
            webnewsdes.setFocusableInTouchMode(false);
            webnewsdes.setFocusable(false);
            webnewsdes.getSettings().setDefaultTextEncodingName("UTF-8");
            String mimeType = "text/html;charset=UTF-8";
            String encoding = "utf-8";
            String htmlText = allArrayNewsDes[position];
            // Wrap the raw article HTML in a minimal justified-text page.
            String text = "<html><head>"
                + "<style type=\"text/css\">body{color: #525252;text-align:justify}"
                + "</style></head>"
                + "<body>"
                + htmlText
                + "</body></html>";
            webnewsdes.loadData(text, mimeType, encoding);
            container.addView(imageLayout, 0);
            return imageLayout;
        }
        @Override
        public void destroyItem(ViewGroup container, int position, Object object) {
            ((ViewPager) container).removeView((View) object);
        }
    }

    @Override
    protected void onPause() {
        //mAdView.pause();
        super.onPause();
    }
    @Override
    protected void onResume() {
        super.onResume();
        //mAdView.resume();
    }
    @Override
    protected void onDestroy() {
        //mAdView.destroy();
        super.onDestroy();
    }

    /** Routes clicks: the fab toggles the arc menu, arc buttons act. */
    @Override
    public void onClick(View v) {
        if (v.getId() == R.id.fab) {
            onFabClick(v);
            return;
        }
        if (v instanceof Button) {
            showToast((Button) v);
        }
    }

    /**
     * Dispatches the arc-menu buttons: btna shares the article, btnb
     * toggles favorite, btnc shares the app, btnd opens the Play Store.
     */
    private void showToast(Button btn) {
        if (toast != null) {
            toast.cancel();
        }
        if (btn.getId() == R.id.btna) {
            // Share the current article's heading and plain-text body.
            position=viewpager.getCurrentItem();
            newsheading=allArrayNewsHeading[position];
            newsdes=allArrayNewsDes[position];
            String formattedString=android.text.Html.fromHtml(newsdes).toString();
            Intent sendIntent = new Intent();
            sendIntent.setAction(Intent.ACTION_SEND);
            sendIntent.putExtra(Intent.EXTRA_TEXT, newsheading+"\n"+formattedString+"\n"+" I Would like to share this with you. Here You Can Download This Application from PlayStore "+"https://play.google.com/store/apps/details?id="+getPackageName());
            sendIntent.setType("text/plain");
            startActivity(sendIntent);
            hideMenu();
        }
        if (btn.getId() == R.id.btnb) {
            // Toggle favorite state for the current article.
            position=viewpager.getCurrentItem();
            newscat_id=allArrayNewsCatId[position];
            List<Pojo> pojolist=db.getFavRow(newscat_id);
            if(pojolist.size()==0)
            {
                AddtoFav(position);//if size is zero i.e means that record not in database show add to favorite
            }
            else
            {
                if(pojolist.get(0).getCatId().equals(newscat_id))
                {
                    RemoveFav(position);
                }
            }
            hideMenu();
        }
        if (btn.getId() == R.id.btnc)
        {
            // Share a link to the app itself.
            Intent sendIntent = new Intent();
            sendIntent.setAction(Intent.ACTION_SEND);
            sendIntent.putExtra(Intent.EXTRA_TEXT," I Would like to share this with you. Here You Can Download This Application from PlayStore "+"https://play.google.com/store/apps/details?id="+getPackageName());
            sendIntent.setType("text/plain");
            startActivity(sendIntent);
            hideMenu();
        }
        if (btn.getId() == R.id.btnd)
        {
            // Open the app's Play Store page, falling back to the web URL
            // when no market app is installed.
            final String appName = getPackageName();//your application package name i.e play store application url
            try {
                startActivity(new Intent(Intent.ACTION_VIEW,
                    Uri.parse("market://details?id="
                        + appName)));
            } catch (android.content.ActivityNotFoundException anfe) {
                startActivity(new Intent(
                    Intent.ACTION_VIEW,
                    Uri.parse("http://play.google.com/store/apps/details?id="
                        + appName)));
            }
        }
        hideMenu();
    }

    /** Toggles the arc menu; the fab's selected state tracks visibility. */
    private void onFabClick(View v) {
        if (v.isSelected()) {
            hideMenu();
        } else {
            showMenu();
        }
        v.setSelected(!v.isSelected());
    }

    /** Animates all arc-menu items out from the fab position. */
    @SuppressWarnings("NewApi")
    private void showMenu() {
        menuLayout.setVisibility(View.VISIBLE);
        List<Animator> animList = new ArrayList<Animator>();
        for (int i = 0, len = arcLayout.getChildCount(); i < len; i++) {
            animList.add(createShowItemAnimator(arcLayout.getChildAt(i)));
        }
        AnimatorSet animSet = new AnimatorSet();
        animSet.setDuration(400);
        animSet.setInterpolator(new OvershootInterpolator());
        animSet.playTogether(animList);
        animSet.start();
    }

    /** Animates the arc-menu items back into the fab, then hides the menu. */
    @SuppressWarnings("NewApi")
    private void hideMenu() {
        List<Animator> animList = new ArrayList<Animator>();
        // Reverse order so the items collapse in the opposite sequence.
        for (int i = arcLayout.getChildCount() - 1; i >= 0; i--) {
            animList.add(createHideItemAnimator(arcLayout.getChildAt(i)));
        }
        AnimatorSet animSet = new AnimatorSet();
        animSet.setDuration(400);
        animSet.setInterpolator(new AnticipateInterpolator());
        animSet.playTogether(animList);
        animSet.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                super.onAnimationEnd(animation);
                menuLayout.setVisibility(View.INVISIBLE);
            }
        });
        animSet.start();
    }

    /** Builds the spin-and-slide animator moving an item from the fab out. */
    private Animator createShowItemAnimator(View item) {
        float dx = fab.getX() - item.getX();
        float dy = fab.getY() - item.getY();
        // Start the item at the fab's position, then animate to rest.
        item.setRotation(0f);
        item.setTranslationX(dx);
        item.setTranslationY(dy);
        Animator anim = ObjectAnimator.ofPropertyValuesHolder(
            item,
            AnimatorUtils.rotation(0f, 720f),
            AnimatorUtils.translationX(dx, 0f),
            AnimatorUtils.translationY(dy, 0f)
        );
        return anim;
    }

    /** Builds the reverse animator collapsing an item back into the fab. */
    private Animator createHideItemAnimator(final View item) {
        float dx = fab.getX() - item.getX();
        float dy = fab.getY() - item.getY();
        Animator anim = ObjectAnimator.ofPropertyValuesHolder(
            item,
            AnimatorUtils.rotation(720f, 0f),
            AnimatorUtils.translationX(0f, dx),
            AnimatorUtils.translationY(0f, dy)
        );
        anim.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                super.onAnimationEnd(animation);
                // Reset translation so the item is laid out normally next time.
                item.setTranslationX(0f);
                item.setTranslationY(0f);
            }
        });
        return anim;
    }
}
|
import { defineComponent, h, computed, ref } from '@vue/runtime-core'
import Circle from './component/Circle'
import StartPage from './page/StartPage'
import GamePage from './page/GamePage'
export default defineComponent({
  setup(props, ctx) {
    // Name of the page currently shown; a ref so the computed below
    // re-evaluates whenever it changes.
    const currentPageName = ref('StartPage')
    // Resolve the reactive name to the actual page component.
    const currentPage = computed(() =>
      currentPageName.value === 'StartPage' ? StartPage : GamePage
    )
    return {
      currentPageName,
      currentPage,
    }
  },
  render(ctx) {
    // Wrap the active page in a Container vnode.  The page switches
    // itself by emitting 'changePage' with the next page's name.
    return h('Container', [
      h(ctx.currentPage, {
        onChangePage(page) {
          ctx.currentPageName = page
        },
      }),
    ])
  },
})
|
package weixin.tenant.service;
import weixin.tenant.entity.TFavoMenuEntity;
import org.jeecgframework.core.common.service.CommonService;
import java.io.Serializable;
/**
 * Service interface for favorite-menu entities, extending the generic
 * CRUD operations of {@link CommonService} with SQL-enhancement hooks
 * for the default buttons.
 */
public interface TFavoMenuServiceI extends CommonService{
    public <T> void delete(T entity);
    public <T> Serializable save(T entity);
    public <T> void saveOrUpdate(T entity);
    /**
     * Default button - SQL enhancement - insert operation.
     * @param t the favorite-menu entity being inserted
     * @return whether the enhancement succeeded
     */
    public boolean doAddSql(TFavoMenuEntity t);
    /**
     * Default button - SQL enhancement - update operation.
     * @param t the favorite-menu entity being updated
     * @return whether the enhancement succeeded
     */
    public boolean doUpdateSql(TFavoMenuEntity t);
    /**
     * Default button - SQL enhancement - delete operation.
     * @param t the favorite-menu entity being deleted
     * @return whether the enhancement succeeded
     */
    public boolean doDelSql(TFavoMenuEntity t);
}
|
<reponame>xiaomaigou/xiaomaigou_code_generator
package com.xiaomaigou.code.controller;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.xiaomaigou.code.dto.Result;
import com.xiaomaigou.code.entity.TableEntity;
import com.xiaomaigou.code.service.TableService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
* 表信息
*
* @author xiaomaiyun
* @version 1.2.3
* @date 2020/5/30 17:48
*/
@Api(tags = "表信息", value = "表信息")
@RestController
@RequestMapping("codeGenerator/table")
public class TableController {
    private static final Logger logger = LoggerFactory.getLogger(TableController.class);
    @Autowired
    private TableService tableService;

    /**
     * Paginated table search, optionally filtered by table name.
     * Delegates straight to {@link TableService#listTable} and wraps the
     * page in the standard {@code Result} envelope.
     */
    @ApiOperation(value = "搜索表(分页)", notes = "搜索表(分页)")
    @ApiImplicitParams({
        @ApiImplicitParam(name = "pageNo", value = "当前页,默认1", paramType = "query", required = false, dataType = "int", defaultValue = "1"),
        @ApiImplicitParam(name = "pageSize", value = "每页显示条数,默认10", paramType = "query", required = false, dataType = "int", defaultValue = "10"),
        @ApiImplicitParam(name = "tableName", value = "表名", paramType = "query", required = false, dataType = "String")
    })
    @GetMapping("listTable")
    public Result<Page<TableEntity>> listTable(@RequestParam(value = "pageNo", required = false, defaultValue = "1") Integer pageNo,
                                               @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer pageSize,
                                               @RequestParam(value = "tableName", required = false) String tableName) {
        Page<TableEntity> tableEntityList = tableService.listTable(pageNo, pageSize, tableName);
        return new Result<Page<TableEntity>>().success(tableEntityList);
    }
}
|
<reponame>elliotsegler/altimeter
from typing import Dict, List, Union
from rdflib import Graph, Literal, Namespace, RDF, URIRef
from altimeter.core.base_model import BaseImmutableModel
from altimeter.core.graph.links import LinkCollection
from altimeter.core.graph.node_cache import NodeCache
class Resource(BaseImmutableModel):
    """A single scanned resource, directly translatable to a graph node.

    Args:
        resource_id: id of this resource
        type: type name of this resource
        link_collection: a LinkCollection representing links from this resource
    """

    resource_id: str
    type: str
    link_collection: LinkCollection

    def to_rdf(self, namespace: Namespace, graph: Graph, node_cache: NodeCache) -> None:
        """Add this Resource to an RDF graph as a URIRef node.

        Args:
            namespace: RDF namespace used for predicates and objects when
                graphing this resource's links
            graph: target RDF graph
            node_cache: NodeCache used for cached URIRef lookups
        """
        subject = node_cache.setdefault(self.resource_id, URIRef(self.resource_id))
        graph.add((subject, RDF.type, getattr(namespace, self.type)))
        graph.add((subject, getattr(namespace, "id"), Literal(self.resource_id)))
        self.link_collection.to_rdf(
            subj=subject, namespace=namespace, graph=graph, node_cache=node_cache
        )

    def to_lpg(self, vertices: List[Dict], edges: List[Dict]) -> None:
        """Append this Resource to labelled-property-graph vertex/edge lists.

        Args:
            vertices: accumulator list of vertex dictionaries (mutated in place)
            edges: accumulator list of edge dictionaries (mutated in place)
        """
        vertex = {
            "~id": self.resource_id,
            "~label": self.type,
            "arn": self.resource_id,
        }
        # Links may enrich the vertex dict (and append edges) before the
        # vertex itself is recorded -- keep that ordering.
        self.link_collection.to_lpg(vertex, vertices, edges)
        vertices.append(vertex)
|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.db import IntegrityError
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from django.utils.html import strip_tags
from django.conf import settings
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives
from optparse import make_option
from eventbrite import EventbriteClient
from thewall.models import Participant, Unconference
User = get_user_model()
class Command(BaseCommand):
    """Pull all the attendees from Eventbrite"""
    help = "Pull all attendees from Eventbrite"
    # Adds a --list/-l flag on top of the default management-command options.
    option_list = BaseCommand.option_list + (
        make_option('--list',
            '-l',
            action='store_true',
            dest='list',
            default=False,
            help='List all attendees'),
    )

    def handle(self, *args, **options):
        # Try to start a connection with eventbrite, this will work with both the EventBrite OAuth2 method and a app/user key
        # We test on App/User method though
        events = Unconference.objects.all()
        if hasattr(settings, "EB_USERKEY") and hasattr(settings, "EB_APPKEY"):
            eb = EventbriteClient({'app_key': settings.EB_APPKEY, 'user_key': settings.EB_USERKEY})
        elif hasattr(settings, "EB_OAUTHKEY"):
            eb = EventbriteClient({'access_code': settings.EB_OAUTHKEY})
        else:
            # No credentials configured: the returned string is emitted by
            # the management framework as the command's output.
            return "Sorry, No Eventbrite Access Methods Found\nBe sure to add EB_APPKEY and EB_USERKEY or EB_OAUTHKEY to your environment\n"
        results = []
        # Sync attendees for every known unconference and collect the
        # per-event status strings into one report.
        for event in events:
            results.append(pull_attendees_for_event(eb, event, options))
        return "\n".join(results)
def pull_attendees_for_event(eb, event, options={}):
    """Sync all Eventbrite attendees for one event into the local database.

    NOTE(review): Python 2 code (print statements); the nesting below was
    reconstructed during review because the original indentation was lost --
    verify against version control.

    Args:
        eb: an authenticated EventbriteClient.
        event: an Unconference that carries an eventbrite_page_id.
        options: command options; 'list' prints one line per attendee.
            (Mutable default argument -- tolerable only because it is never
            mutated here.)

    Returns:
        A human-readable summary string with the handled/created counts.
    """
    if not event.eventbrite_page_id:
        return "Sorry, there is no eventbrite event id for this event."
    # Grab all of our attendees
    print "Downloading Attendees from EventBrite for ", event.name
    attendees = eb.event_list_attendees({'id': event.eventbrite_page_id})['attendees']
    # Setup our counters
    handled = 0
    numcreated = 0
    # Loop over all our attendees
    print "Adding/Updating Attendee Database"
    for person in attendees:
        # EB returns a dictionary with all attendee data under 'attendee', lets just assign the value of that dictionary to a variable
        a = person['attendee']
        # Lets assemble our participant's name. We'll ignore unicode characters.
        first_name = a['first_name'].encode('ascii', 'ignore')
        last_name = a['last_name'].encode('ascii', 'ignore')
        number = str(a['id'])
        email = a['email'].encode('ascii', 'ignore')
        # User must have email address
        if not email:
            continue
        # Check to see if there is a 'company' field returned, if so, save assign it to org
        org = a.get('company', 'Independent').encode('ascii', 'ignore')
        # Check to see if the 'company' field we got back was blank. If so, set the participant's organization to 'Independent'
        if not org:
            org = 'Independent'
        # First, find user
        user, user_created = User.objects.get_or_create(email=email)
        password = None
        # Only fill names that are missing; never overwrite existing values.
        if not user.first_name:
            user.first_name = first_name
        if not user.last_name:
            user.last_name = last_name
        print "Password: ", user.password
        if not user.has_usable_password():
            # Default password is first initial + lowercased last name.
            # NOTE(review): predictable password -- acceptable only if users
            # are forced to reset it; flagging for confirmation.
            password = '{0}{1}'.format(first_name[0].lower(), last_name.lower())
            user.set_password(password)
        user.save()
        if not user_created:
            print "User: {0} {1} exists.".format(first_name, last_name)
        participant, participant_created = Participant.objects.get_or_create(user=user)
        participant.attendeenumber = number
        if not participant.organization:
            participant.organization = org
        participant.save()
        if not event.participants.filter(pk=participant.pk).exists():
            print "Adding user {0} to event {1}".format(user, event)
            event.participants.add(participant)
            # Email participant info about the event
            to = user.email
            from_email = 'Rootscamp Moderator <<EMAIL>>'
            domain = settings.DOMAIN
            subject = u"Submit session ideas for {0}".format(event)
            if user_created:
                html = render_to_string("session/email/new_attendee_added.html", {"user": user, "domain": domain, "event": event, "password": password})
            else:
                html = render_to_string("session/email/existing_attendee_added.html", {"user": user, "domain": domain, "event": event})
            text = strip_tags(html)
            msg = EmailMultiAlternatives(subject, text, from_email, [to])
            msg.attach_alternative(html, "text/html")
            try:
                msg.send()
            except:
                # Best-effort email: a mail failure must not abort the sync.
                # NOTE(review): the bare except also hides config errors.
                pass
        # As get_or_create returns a touple, lets test to see if a new object is created and increase our counter
        if user_created == True:
            numcreated += 1
        # Lets print out some basic information for the individual running the sync
        if options.get('list', False):
            print '%s %s %s %s %s' % (first_name, last_name, email, org, number)
        handled += 1
    event.save()
    # Some final stats
    return("\n%i Attendees\n%i Added to Database\n" % (handled, numcreated))
#!/bin/bash

# Kaldi 'chain' TDNN-LSTM recipe for Babel languages: builds a chain-topology
# lang dir, alignment lattices and a tree, generates the xconfig network,
# trains with steps/nnet3/chain/train.py, and compiles the decoding graph.
# by default, with cleanup
# please note that the language(s) was not selected for any particular reason (other to represent the various sizes of babel datasets)
# 304-lithuanian | %WER 40.8 | 20041 61492 | 61.9 28.4 9.8 2.7 40.8 28.7 | -0.335 | exp/chain_cleaned/tdnn_lstm_bab6_sp/decode_dev10h.pem/score_10/dev10h.pem.ctm.sys
# num-iters=48 nj=2..12 num-params=13.8M dim=43+100->3273 combine=-0.189->-0.174
# xent:train/valid[31,47,final]=(-2.02,-1.74,-1.72/-2.29,-2.17,-2.17)
# logprob:train/valid[31,47,final]=(-0.187,-0.155,-0.152/-0.246,-0.240,-0.240)
# 206-zulu | %WER 53.0 | 22805 52162 | 50.5 38.2 11.3 3.5 53.0 31.0 | -0.564 | exp/chain_cleaned/tdnn_lstm_bab6_sp/decode_dev10h.pem/score_12/dev10h.pem.ctm.sys
# num-iters=66 nj=2..12 num-params=13.8M dim=43+100->3274 combine=-0.220->-0.209
# xent:train/valid[43,65,final]=(-2.06,-1.81,-1.80/-2.33,-2.26,-2.25)
# logprob:train/valid[43,65,final]=(-0.222,-0.186,-0.183/-0.283,-0.285,-0.283)
# 104-pashto | %WER 41.4 | 21825 101803 | 62.9 27.0 10.1 4.3 41.4 30.1 | -0.494 | exp/chain_cleaned/tdnn_lstm_bab6_sp/decode_dev10h.pem/score_10/dev10h.pem.ctm.sys
# num-iters=85 nj=2..12 num-params=13.8M dim=43+100->3328 combine=-0.192->-0.186
# xent:train/valid[55,84,final]=(-1.93,-1.69,-1.69/-2.25,-2.17,-2.16)
# logprob:train/valid[55,84,final]=(-0.197,-0.167,-0.165/-0.270,-0.267,-0.266)

set -e -o pipefail

# First the options that are passed through to run_ivector_common.sh
# (some of which are also used in this script directly).
# NOTE(review): the default stage=17 skips stages 14-16 below and is also
# forwarded to run_ivector_common.sh; pass --stage to re-run earlier stages.
stage=17
nj=30
train_set=train_cleaned
gmm=tri5_cleaned # the gmm for the target data
langdir=data/langp/tri5_ali
num_threads_ubm=12
nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned

# The rest are configs specific to this script. Most of the parameters
# are just hardcoded at this level, in the commands below.
train_stage=-10
tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration.
tdnn_affix=_bab6 #affix for TDNN directory, e.g. "a" or "b", in case we change the configuration.
# NOTE(review): this path hardcodes the _cleaned affix and the tdnn_lstm_sp
# dir instead of being built from $nnet3_affix/$tdnn_affix -- confirm intended.
common_egs_dir=exp/chain_cleaned/tdnn_lstm_sp/egs # you can set this to use previously dumped egs.
chunk_width=150,120,90,75
# End configuration section.
echo "$0 $@" # Print the command line for logging

. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh

# Training requires a CUDA-enabled Kaldi build; fail early otherwise.
if ! cuda-compiled; then
  cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi

# Prepare hires features, speed perturbation and i-vectors (stage-gated inside).
local/chain/run_ivector_common.sh --stage $stage \
  --nj $nj \
  --train-set $train_set \
  --gmm $gmm \
  --num-threads-ubm $num_threads_ubm \
  --nnet3-affix "$nnet3_affix"

# Derived directory layout used by all the stages below.
gmm_dir=exp/$gmm
ali_dir=exp/${gmm}_ali_${train_set}_sp
tree_dir=exp/chain${nnet3_affix}/tree${tree_affix}
lat_dir=exp/chain${nnet3_affix}/${gmm}_${train_set}_sp_lats
dir=exp/chain${nnet3_affix}/tdnn_lstm${tdnn_affix}_sp
train_data_dir=data/${train_set}_sp_hires
lores_train_data_dir=data/${train_set}_sp
train_ivector_dir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires

# Sanity-check that all upstream artifacts exist before doing any work.
for f in $gmm_dir/final.mdl $train_data_dir/feats.scp $train_ivector_dir/ivector_online.scp \
    $lores_train_data_dir/feats.scp $ali_dir/ali.1.gz $gmm_dir/final.mdl; do
  [ ! -f $f ] && echo "$0: expected file $f to exist" && exit 1
done

if [ $stage -le 14 ]; then
  echo "$0: creating lang directory with one state per phone."
  # Create a version of the lang/ directory that has one state per phone in the
  # topo file. [note, it really has two states.. the first one is only repeated
  # once, the second one has zero or more repeats.]
  if [ -d data/lang_chain ]; then
    if [ data/lang_chain/L.fst -nt data/lang/L.fst ]; then
      echo "$0: data/lang_chain already exists, not overwriting it; continuing"
    else
      echo "$0: data/lang_chain already exists and seems to be older than data/lang..."
      echo " ... not sure what to do. Exiting."
      exit 1;
    fi
  else
    cp -r $langdir data/lang_chain
    silphonelist=$(cat data/lang_chain/phones/silence.csl) || exit 1;
    nonsilphonelist=$(cat data/lang_chain/phones/nonsilence.csl) || exit 1;
    # Use our special topology... note that later on may have to tune this
    # topology.
    steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >data/lang_chain/topo
  fi
fi

if [ $stage -le 15 ]; then
  # Get the alignments as lattices (gives the chain training more freedom).
  # use the same num-jobs as the alignments
  steps/align_fmllr_lats.sh --nj 100 --cmd "$train_cmd" ${lores_train_data_dir} \
    $langdir $gmm_dir $lat_dir
  rm $lat_dir/fsts.*.gz # save space
fi

if [ $stage -le 16 ]; then
  # Build a tree using our new topology. We know we have alignments for the
  # speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use
  # those.
  if [ -f $tree_dir/final.mdl ]; then
    echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it."
    exit 1;
  fi
  steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
    --context-opts "--context-width=2 --central-position=1" \
    --leftmost-questions-truncate -1 \
    --cmd "$train_cmd" 4000 ${lores_train_data_dir} data/lang_chain $ali_dir $tree_dir
fi

xent_regularize=0.25

if [ $stage -le 17 ]; then
  mkdir -p $dir
  echo "$0: creating neural net configs using the xconfig parser";
  num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}')
  [ -z $num_targets ] && { echo "$0: error getting num-targets"; exit 1; }
  learning_rate_factor=$(echo "print (0.5/$xent_regularize)" | python)
  lstm_opts="decay-time=20"
  label_delay=5
  mkdir -p $dir/configs
  # The heredoc below IS the network definition; do not edit casually.
  cat <<EOF > $dir/configs/network.xconfig
input dim=100 name=ivector
input dim=43 name=input
# please note that it is important to have input layer with the name=input
# as the layer immediately preceding the fixed-affine-layer to enable
# the use of short notation for the descriptor
fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat
# the first splicing is moved before the lda layer, so no splicing here
relu-batchnorm-layer name=tdnn1 dim=512
relu-batchnorm-layer name=tdnn2 input=Append(-1,0,1) dim=512
relu-batchnorm-layer name=tdnn3 input=Append(-1,0,1) dim=512
# check steps/libs/nnet3/xconfig/lstm.py for the other options and defaults
fast-lstmp-layer name=fastlstm1 cell-dim=512 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3 $lstm_opts
relu-batchnorm-layer name=tdnn4 input=Append(-3,0,3) dim=512
relu-batchnorm-layer name=tdnn5 input=Append(-3,0,3) dim=512
fast-lstmp-layer name=fastlstm2 cell-dim=512 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3 $lstm_opts
relu-batchnorm-layer name=tdnn6 input=Append(-3,0,3) dim=512
relu-batchnorm-layer name=tdnn7 input=Append(-3,0,3) dim=512
fast-lstmp-layer name=fastlstm3 cell-dim=512 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3 $lstm_opts
## adding the layers for chain branch
output-layer name=output input=fastlstm3 output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5
# adding the layers for xent branch
# This block prints the configs for a separate output that will be
# trained with a cross-entropy objective in the 'chain' models... this
# has the effect of regularizing the hidden parts of the model. we use
# 0.5 / args.xent_regularize as the learning rate factor- the factor of
# 0.5 / args.xent_regularize is suitable as it means the xent
# final-layer learns at a rate independent of the regularization
# constant; and the 0.5 was tuned so as to make the relative progress
# similar in the xent and regular final layers.
output-layer name=output-xent input=fastlstm3 output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5
EOF
  steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/
fi

if [ $stage -le 18 ]; then
  # On the CLSP grid, spread egs over several disks via create_split_dir.pl.
  if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
    utils/create_split_dir.pl \
      /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage
  fi
  [ ! -d $dir/egs ] && mkdir -p $dir/egs/
  touch $dir/egs/.nodelete # keep egs around when that run dies.
  steps/nnet3/chain/train.py --stage $train_stage \
    --cmd "$decode_cmd" \
    --feat.online-ivector-dir $train_ivector_dir \
    --feat.cmvn-opts "--norm-means=false --norm-vars=false" \
    --chain.xent-regularize $xent_regularize \
    --chain.leaky-hmm-coefficient 0.1 \
    --chain.l2-regularize 0.00005 \
    --chain.apply-deriv-weights false \
    --chain.lm-opts="--num-extra-lm-states=2000" \
    --egs.dir "$common_egs_dir" \
    --egs.opts "--frames-overlap-per-eg 0" \
    --egs.chunk-width $chunk_width \
    --trainer.num-chunk-per-minibatch 128 \
    --trainer.frames-per-iter 1500000 \
    --trainer.num-epochs 4 \
    --trainer.optimization.num-jobs-initial 2 \
    --trainer.optimization.num-jobs-final 12 \
    --trainer.optimization.initial-effective-lrate 0.001 \
    --trainer.optimization.final-effective-lrate 0.0001 \
    --trainer.max-param-change 2.0 \
    --cleanup.remove-egs true \
    --feat-dir $train_data_dir \
    --tree-dir $tree_dir \
    --lat-dir $lat_dir \
    --dir $dir
fi

if [ $stage -le 19 ]; then
  # Note: it might appear that this data/lang_chain directory is mismatched, and it is as
  # far as the 'topo' is concerned, but this script doesn't read the 'topo' from
  # the lang directory.
  utils/mkgraph.sh --self-loop-scale 1.0 data/langp_test $dir $dir/graph
fi

exit 0
|
#!/usr/bin/env bash
set -e

# Travis release driver: decides which sbt project to build, and -- when the
# build is for a version tag -- signs and publishes it to Sonatype staging.
# Builds of tagged revisions are published to sonatype staging.
# Travis runs a build on new revisions and on new tags, so a tagged revision is built twice.
# Builds for a tag have TRAVIS_TAG defined, which we use for identifying tagged builds.
# Checking the local git clone would not work because git on travis does not fetch tags.
# The version number to be published is extracted from the tag, e.g., v1.2.3 publishes
# version 1.2.3 on all combinations of the travis matrix where `[ "$RELEASE_COMBO" = "true" ]`.
# In order to build a previously released version against a new (binary incompatible) Scala release,
# a new commit that modifies (and prunes) the Scala versions in .travis.yml needs to be added on top
# of the existing tag. Then a new tag can be created for that commit, e.g., `v1.2.3#2.13.0-M5`.
# Everything after the `#` in the tag name is ignored.

RELEASE_COMBO=true

# Pick the sbt project prefix from the build-matrix flags: Scala.js builds use
# compatJS, scalafix builds use the rules project, everything else uses compat.
if [ "$SCALAJS_VERSION" = "" ]; then
  if [[ "$TEST_SCALAFIX" == "true" ]]; then
    projectPrefix="scalafixRules"
  else
    projectPrefix="compat"
  fi
else
  projectPrefix="compatJS"
fi

# Scalafix tests live in their own project and are not cross-built, so the
# cross-version switch becomes a no-op for them.
if [[ "$TEST_SCALAFIX" == "true" ]]; then
  crossScalaVersion="noop"
  testProjectPrefix="scalafixTests"
else
  crossScalaVersion="++$TRAVIS_SCALA_VERSION"
  testProjectPrefix="$projectPrefix"
fi

# Semantic-version pattern; release tags look like v1.2.3 or v1.2.3#<suffix>.
verPat="[0-9]+\.[0-9]+\.[0-9]+(-[A-Za-z0-9-]+)?"
tagPat="^v$verPat(#.*)?$"

# 'noop' is a do-nothing sbt command, so untagged builds can reuse the single
# sbt invocation at the bottom unchanged.
publishVersion="noop"
publishTask="noop"

if [[ "$TRAVIS_TAG" =~ $tagPat ]]; then
  # Strip the optional '#...' suffix and the leading 'v' to get the version.
  tagVer=$(echo $TRAVIS_TAG | sed s/#.*// | sed s/^v//)
  publishVersion='set every version := "'$tagVer'"'
  if [ "$RELEASE_COMBO" = "true" ]; then
    currentJvmVer=$(java -version 2>&1 | awk -F '"' '/version/ {print $2}' | sed 's/^1\.//' | sed 's/[^0-9].*//')
    echo "Releasing $tagVer with Scala $TRAVIS_SCALA_VERSION on Java version $currentJvmVer."
    publishTask="$projectPrefix/publish-signed"
    # Enable GPG signing and publish settings only for release builds.
    cat admin/gpg.sbt >> project/plugins.sbt
    cp admin/publish-settings.sbt .
    # Copied from the output of genKeyPair.sh
    K=$encrypted_8c7005201bb0_key
    IV=$encrypted_8c7005201bb0_iv
    # Decrypt the signing key that is stored encrypted in the repository.
    openssl aes-256-cbc -K $K -iv $IV -in admin/secring.asc.enc -out admin/secring.asc -d
  fi
fi

# One sbt session: select Scala version, pin version, clean, test, publish
# locally, then (for tagged release combos only) publish-signed.
sbt ";$crossScalaVersion ;$publishVersion ;$projectPrefix/clean ;$testProjectPrefix/test ;$projectPrefix/publishLocal ;$publishTask"
|
<filename>example/md/list.ts<gh_stars>0
// `md` exists only as a carrier for the block comment below: the comment text
// is the markdown DATA of this module, recovered at runtime from the
// function's source text. Do NOT edit, translate, or reformat its content.
let md = function() {
/* [TOC]
* [ ] sdfsdf
* [x] asddsfdfs
* 无序列表
* * 给岁月以文明,
* 而不是给文明以岁月。
发士大夫十分
* * 无序列表
* * > 123
> 456
789
*/
}
// String-converting a function yields its full source text (Function.toString).
let lines = new String(md);
// Keep only the comment body: +3 skips "/*" plus the character after it,
// and lastIndexOf("*/") stops before the closing delimiter.
lines = lines.substring(lines.indexOf("/*") + 3, lines.lastIndexOf("*/"));
/** Delete the last character of every line (\n) */
// NOTE(review): split('\n') already removes the newline, so this strips the
// last *visible* character of each line -- presumably a trailing '\r' when
// the source file is CRLF-encoded. Confirm the file's line endings before
// changing this.
let arr = lines.split('\n')
for (let i = 0; i < arr.length; i++) {
    arr[i] = arr[i].substring(0, arr[i].length - 1)
}
lines = arr.join('\n');
// The exported value is the joined string produced above.
export default lines;
package commands_test
import (
"cf/api"
. "cf/commands"
"cf/configuration"
"cf/models"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
testapi "testhelpers/api"
testassert "testhelpers/assert"
testcmd "testhelpers/commands"
testconfig "testhelpers/configuration"
testreq "testhelpers/requirements"
testterm "testhelpers/terminal"
)
// Ginkgo spec for the 'target' CLI command: exercises usage validation,
// login requirements, and org/space targeting against fake repositories.
var _ = Describe("target command", func() {
    var (
        orgRepo    *testapi.FakeOrgRepository
        spaceRepo  *testapi.FakeSpaceRepository
        config     configuration.ReadWriter
        reqFactory *testreq.FakeReqFactory
    )

    // Fresh fakes before every spec so no state leaks between tests.
    BeforeEach(func() {
        orgRepo, spaceRepo, config, reqFactory = getTargetDependencies()
    })

    It("TestTargetFailsWithUsage", func() {
        // A bare positional argument is invalid; the command takes only flags.
        ui := callTarget([]string{"bad-foo"}, reqFactory, config, orgRepo, spaceRepo)
        Expect(ui.FailedWithUsage).To(BeTrue())
    })

    It("fails requirements when targeting a space or org", func() {
        // Without a logged-in user (LoginSuccess not set), -o and -s must be rejected.
        callTarget([]string{"-o", "some-crazy-org-im-not-in"}, reqFactory, config, orgRepo, spaceRepo)
        Expect(testcmd.CommandDidPassRequirements).To(BeFalse())

        callTarget([]string{"-s", "i-love-space"}, reqFactory, config, orgRepo, spaceRepo)
        Expect(testcmd.CommandDidPassRequirements).To(BeFalse())
    })

    It("passes requirements when not attempting to target a space or org", func() {
        callTarget([]string{}, reqFactory, config, orgRepo, spaceRepo)
        Expect(testcmd.CommandDidPassRequirements).To(BeTrue())
    })

    Context("when the user logs in successfully", func() {
        BeforeEach(func() {
            reqFactory.LoginSuccess = true
        })

        It("passes requirements when targeting a space or org", func() {
            callTarget([]string{"-s", "i-love-space"}, reqFactory, config, orgRepo, spaceRepo)
            Expect(testcmd.CommandDidPassRequirements).To(BeTrue())

            callTarget([]string{"-o", "orgs-are-delightful"}, reqFactory, config, orgRepo, spaceRepo)
            Expect(testcmd.CommandDidPassRequirements).To(BeTrue())
        })

        It("TestTargetOrganizationWhenUserHasAccess", func() {
            org := models.Organization{}
            org.Name = "my-organization"
            org.Guid = "my-organization-guid"
            orgRepo.Organizations = []models.Organization{org}
            orgRepo.FindByNameOrganization = org

            ui := callTarget([]string{"-o", "my-organization"}, reqFactory, config, orgRepo, spaceRepo)

            // Targeting an accessible org updates config and shows the new target.
            Expect(orgRepo.FindByNameName).To(Equal("my-organization"))
            Expect(ui.ShowConfigurationCalled).To(BeTrue())
            Expect(config.OrganizationFields().Guid).To(Equal("my-organization-guid"))
        })

        It("TestTargetOrganizationWhenUserDoesNotHaveAccess", func() {
            orgs := []models.Organization{}
            orgRepo.Organizations = orgs
            orgRepo.FindByNameErr = true

            ui := callTarget([]string{"-o", "my-organization"}, reqFactory, config, orgRepo, spaceRepo)

            testassert.SliceContains(ui.Outputs, testassert.Lines{{"FAILED"}})
        })

        It("TestTargetOrganizationWhenOrgNotFound", func() {
            orgRepo.FindByNameNotFound = true

            ui := callTarget([]string{"-o", "my-organization"}, reqFactory, config, orgRepo, spaceRepo)

            testassert.SliceContains(ui.Outputs, testassert.Lines{
                {"FAILED"},
                {"my-organization", "not found"},
            })
        })

        It("TestTargetSpaceWhenNoOrganizationIsSelected", func() {
            // Clear any org target; a space cannot be targeted without one.
            config.SetOrganizationFields(models.OrganizationFields{})

            ui := callTarget([]string{"-s", "my-space"}, reqFactory, config, orgRepo, spaceRepo)

            testassert.SliceContains(ui.Outputs, testassert.Lines{
                {"FAILED"},
                {"An org must be targeted before targeting a space"},
            })
            Expect(config.OrganizationFields().Guid).To(Equal(""))
        })

        It("TestTargetSpaceWhenUserHasAccess", func() {
            space := models.Space{}
            space.Name = "my-space"
            space.Guid = "my-space-guid"
            spaceRepo.Spaces = []models.Space{space}
            spaceRepo.FindByNameSpace = space

            ui := callTarget([]string{"-s", "my-space"}, reqFactory, config, orgRepo, spaceRepo)

            Expect(spaceRepo.FindByNameName).To(Equal("my-space"))
            Expect(config.SpaceFields().Guid).To(Equal("my-space-guid"))
            Expect(ui.ShowConfigurationCalled).To(BeTrue())
        })

        It("TestTargetSpaceWhenUserDoesNotHaveAccess", func() {
            config.SetSpaceFields(models.SpaceFields{})
            spaceRepo.FindByNameErr = true

            ui := callTarget([]string{"-s", "my-space"}, reqFactory, config, orgRepo, spaceRepo)

            testassert.SliceContains(ui.Outputs, testassert.Lines{
                {"FAILED"},
                {"Unable to access space", "my-space"},
            })
            // A failed lookup must leave the space target unset and skip the summary.
            Expect(config.SpaceFields().Guid).To(Equal(""))
            Expect(ui.ShowConfigurationCalled).To(BeFalse())
        })

        It("TestTargetSpaceWhenSpaceNotFound", func() {
            spaceRepo.FindByNameNotFound = true

            ui := callTarget([]string{"-s", "my-space"}, reqFactory, config, orgRepo, spaceRepo)

            testassert.SliceContains(ui.Outputs, testassert.Lines{
                {"FAILED"},
                {"my-space", "not found"},
            })
        })

        It("TestTargetOrganizationAndSpace", func() {
            org := models.Organization{}
            org.Name = "my-organization"
            org.Guid = "my-organization-guid"
            orgRepo.Organizations = []models.Organization{org}

            space := models.Space{}
            space.Name = "my-space"
            space.Guid = "my-space-guid"
            spaceRepo.Spaces = []models.Space{space}

            ui := callTarget([]string{"-o", "my-organization", "-s", "my-space"}, reqFactory, config, orgRepo, spaceRepo)

            // Both targets succeed: org and space are looked up and stored.
            Expect(ui.ShowConfigurationCalled).To(BeTrue())
            Expect(orgRepo.FindByNameName).To(Equal("my-organization"))
            Expect(config.OrganizationFields().Guid).To(Equal("my-organization-guid"))
            Expect(spaceRepo.FindByNameName).To(Equal("my-space"))
            Expect(config.SpaceFields().Guid).To(Equal("my-space-guid"))
        })

        It("TestTargetOrganizationAndSpaceWhenSpaceFails", func() {
            config.SetSpaceFields(models.SpaceFields{})

            org := models.Organization{}
            org.Name = "my-organization"
            org.Guid = "my-organization-guid"
            orgRepo.Organizations = []models.Organization{org}

            spaceRepo.FindByNameErr = true

            ui := callTarget([]string{"-o", "my-organization", "-s", "my-space"}, reqFactory, config, orgRepo, spaceRepo)

            // The org target sticks even though the space lookup fails.
            Expect(ui.ShowConfigurationCalled).To(BeFalse())
            Expect(orgRepo.FindByNameName).To(Equal("my-organization"))
            Expect(config.OrganizationFields().Guid).To(Equal("my-organization-guid"))
            Expect(spaceRepo.FindByNameName).To(Equal("my-space"))
            Expect(config.SpaceFields().Guid).To(Equal(""))
            testassert.SliceContains(ui.Outputs, testassert.Lines{
                {"FAILED"},
                {"Unable to access space", "my-space"},
            })
        })
    })
})
// getTargetDependencies builds the set of fake collaborators shared by the
// target-command specs: fake org/space repositories, a config repository
// seeded with defaults, and a fake requirements factory.
func getTargetDependencies() (*testapi.FakeOrgRepository, *testapi.FakeSpaceRepository, configuration.ReadWriter, *testreq.FakeReqFactory) {
	return &testapi.FakeOrgRepository{},
		&testapi.FakeSpaceRepository{},
		testconfig.NewRepositoryWithDefaults(),
		&testreq.FakeReqFactory{}
}
// callTarget runs the target command with the given args against the supplied
// fakes and returns the FakeUI that captured all terminal output.
func callTarget(args []string,
	reqFactory *testreq.FakeReqFactory,
	config configuration.ReadWriter,
	orgRepo api.OrganizationRepository,
	spaceRepo api.SpaceRepository) *testterm.FakeUI {
	fakeUI := &testterm.FakeUI{}
	command := NewTarget(fakeUI, config, orgRepo, spaceRepo)
	context := testcmd.NewContext("target", args)
	testcmd.RunCommand(command, context, reqFactory)
	return fakeUI
}
|
# Finding the average of 3 numbers

# The three observations to average.
Num_vec <- c(3, 5, 7)

# Average = sum of the observations divided by their count
# (identical to mean(Num_vec) for a numeric vector with no NA values).
Average <- sum(Num_vec) / length(Num_vec)

# Display the result on the console.
print(Average)
def sort(arr):
    """Sort `arr` in place in ascending order using bubble sort.

    Args:
        arr: a mutable sequence of mutually comparable items. It is
            modified in place; nothing is returned.
    """
    n = len(arr)
    for i in range(n - 1):
        # Track whether this pass moved anything: a swap-free pass means
        # the sequence is already sorted and we can stop early (free
        # best-case O(n) instead of always O(n^2)).
        swapped = False
        for j in range(0, n - i - 1):
            if arr[j] > arr[j + 1]:
                arr[j], arr[j + 1] = arr[j + 1], arr[j]
                swapped = True
        if not swapped:
            break
# test
# Smoke test: sort() mutates the list in place (returns None), so the
# sorted result is observed by printing the same list afterwards.
arr = [2, 7, 4, 88, 12]
sort(arr)
print(arr)
#!/bin/bash
set -e

# Assemble the webgetpics static website into TMPDIR: raw pages, a patched
# Bootstrap build, the jQuery dist files, and ImageMagick-processed photos.
# NOTE(review): assumes $TMPDIR already exists -- confirm the caller creates it.
TMPDIR=/var/tmp/webgetpics-www

# main static content
cp /webgetpics-www/www/* $TMPDIR

# bootstrap
# Rebuild Bootstrap from a clean checkout with the local patch applied.
cd /webgetpics-www/bootstrap.git
git checkout .
git apply /webgetpics-www/setup/bootstrap/bootstrap.patch
grunt dist
cp -r /webgetpics-www/bootstrap.git/dist/* $TMPDIR

# jquery
cp -r /webgetpics-www/jquery/node_modules/jquery/dist/cdn/* $TMPDIR

# pics
# Crop and downscale the Raspberry Pi photo to an 800x600 thumbnail.
convert $TMPDIR/rpi.jpg -strip \
    -crop 2000x1500+1600+1300 \
    -resize 800x600 $TMPDIR/rpi-sm.jpg
# Crop/brighten the screen photo and blend the pop-art overlay into it at
# 50/50 opacity before downscaling to 800x600.
convert $TMPDIR/screen.jpg -strip \
    -crop 4000x3000+500+650 \
    -brightness-contrast 20% \
    '(' '(' $TMPDIR/pop_art_by_purpledragongirl.jpg -resize 320x240 \
    ')' -resize 3085x1800 \
    ')' -geometry +600+600 -compose blend \
    -define compose:args=50,50 -composite \
    -resize 800x600 $TMPDIR/screen-sm.jpg
|
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.garnish;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Consumer;
import org.dbflute.cbean.ConditionBean;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.cbean.sqlclause.select.SelectedRelationColumn;
/**
* @author jflute
* @since 1.1.2 (2016/12/31 Saturday)
*/
public class SpecifyColumnRequiredChecker {

    /**
     * Collect every selected table (base point and selected relations) that has no
     * SpecifyColumn declaration, and hand the set of their display names to the
     * thrower callback when at least one such table exists.
     * @param cb The condition bean whose SQL clause is inspected. (NotNull)
     * @param thrower Callback that raises the application exception with the
     *                non-specified alias names. (NotNull)
     */
    public void checkSpecifyColumnRequiredIfNeeds(ConditionBean cb, Consumer<Set<String>> thrower) {
        // cannot embed this to SQL clause because of too complex
        // so simple implementation like this:
        final SqlClause clause = cb.getSqlClause();
        final Set<String> nonSpecifiedAliasSet = new LinkedHashSet<>();
        final String baseAlias = clause.getBasePointAliasName();
        if (!clause.hasSpecifiedSelectColumn(baseAlias)) { // local table without SpecifyColumn
            nonSpecifiedAliasSet.add(cb.asDBMeta().getTableDispName() + " (" + baseAlias + ")");
        }
        for (Entry<String, Map<String, SelectedRelationColumn>> entry : clause.getSelectedRelationColumnMap().entrySet()) {
            final String relationAlias = entry.getKey();
            if (clause.hasSpecifiedSelectColumn(relationAlias)) {
                continue; // relation table has SpecifyColumn, nothing to report
            }
            final Collection<SelectedRelationColumn> relationColumns = entry.getValue().values();
            final String dispName;
            if (relationColumns.isEmpty()) { // no way, just in case
                dispName = "*no name";
            } else {
                // resolve a display name from the first column's relation path
                final SelectedRelationColumn firstColumn = relationColumns.iterator().next();
                dispName = clause.translateSelectedRelationPathToPropName(firstColumn.getRelationNoSuffix());
            }
            nonSpecifiedAliasSet.add(dispName + " (" + relationAlias + ")");
        }
        if (!nonSpecifiedAliasSet.isEmpty()) {
            thrower.accept(nonSpecifiedAliasSet);
        }
    }
}
|
package com.gzwl.demo.pojo;
import java.util.ArrayList;
import java.util.List;
public class DictionaryExample {
protected String orderByClause;
protected boolean distinct;
protected List<Criteria> oredCriteria;
public DictionaryExample() {
oredCriteria = new ArrayList<Criteria>();
}
public void setOrderByClause(String orderByClause) {
this.orderByClause = orderByClause;
}
public String getOrderByClause() {
return orderByClause;
}
public void setDistinct(boolean distinct) {
this.distinct = distinct;
}
public boolean isDistinct() {
return distinct;
}
public List<Criteria> getOredCriteria() {
return oredCriteria;
}
public void or(Criteria criteria) {
oredCriteria.add(criteria);
}
public Criteria or() {
Criteria criteria = createCriteriaInternal();
oredCriteria.add(criteria);
return criteria;
}
public Criteria createCriteria() {
Criteria criteria = createCriteriaInternal();
if (oredCriteria.size() == 0) {
oredCriteria.add(criteria);
}
return criteria;
}
protected Criteria createCriteriaInternal() {
Criteria criteria = new Criteria();
return criteria;
}
public void clear() {
oredCriteria.clear();
orderByClause = null;
distinct = false;
}
protected abstract static class GeneratedCriteria {
protected List<Criterion> criteria;
protected GeneratedCriteria() {
super();
criteria = new ArrayList<Criterion>();
}
public boolean isValid() {
return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
return criteria;
}
public List<Criterion> getCriteria() {
return criteria;
}
protected void addCriterion(String condition) {
if (condition == null) {
throw new RuntimeException("Value for condition cannot be null");
}
criteria.add(new Criterion(condition));
}
protected void addCriterion(String condition, Object value, String property) {
if (value == null) {
throw new RuntimeException("Value for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value));
}
protected void addCriterion(String condition, Object value1, Object value2, String property) {
if (value1 == null || value2 == null) {
throw new RuntimeException("Between values for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value1, value2));
}
public Criteria andDictionaryIdIsNull() {
addCriterion("dictionary_id is null");
return (Criteria) this;
}
public Criteria andDictionaryIdIsNotNull() {
addCriterion("dictionary_id is not null");
return (Criteria) this;
}
public Criteria andDictionaryIdEqualTo(Integer value) {
addCriterion("dictionary_id =", value, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdNotEqualTo(Integer value) {
addCriterion("dictionary_id <>", value, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdGreaterThan(Integer value) {
addCriterion("dictionary_id >", value, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdGreaterThanOrEqualTo(Integer value) {
addCriterion("dictionary_id >=", value, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdLessThan(Integer value) {
addCriterion("dictionary_id <", value, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdLessThanOrEqualTo(Integer value) {
addCriterion("dictionary_id <=", value, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdIn(List<Integer> values) {
addCriterion("dictionary_id in", values, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdNotIn(List<Integer> values) {
addCriterion("dictionary_id not in", values, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdBetween(Integer value1, Integer value2) {
addCriterion("dictionary_id between", value1, value2, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryIdNotBetween(Integer value1, Integer value2) {
addCriterion("dictionary_id not between", value1, value2, "dictionaryId");
return (Criteria) this;
}
public Criteria andDictionaryTableIsNull() {
addCriterion("dictionary_table is null");
return (Criteria) this;
}
public Criteria andDictionaryTableIsNotNull() {
addCriterion("dictionary_table is not null");
return (Criteria) this;
}
public Criteria andDictionaryTableEqualTo(String value) {
addCriterion("dictionary_table =", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableNotEqualTo(String value) {
addCriterion("dictionary_table <>", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableGreaterThan(String value) {
addCriterion("dictionary_table >", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableGreaterThanOrEqualTo(String value) {
addCriterion("dictionary_table >=", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableLessThan(String value) {
addCriterion("dictionary_table <", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableLessThanOrEqualTo(String value) {
addCriterion("dictionary_table <=", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableLike(String value) {
addCriterion("dictionary_table like", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableNotLike(String value) {
addCriterion("dictionary_table not like", value, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableIn(List<String> values) {
addCriterion("dictionary_table in", values, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableNotIn(List<String> values) {
addCriterion("dictionary_table not in", values, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableBetween(String value1, String value2) {
addCriterion("dictionary_table between", value1, value2, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryTableNotBetween(String value1, String value2) {
addCriterion("dictionary_table not between", value1, value2, "dictionaryTable");
return (Criteria) this;
}
public Criteria andDictionaryColumIsNull() {
addCriterion("dictionary_colum is null");
return (Criteria) this;
}
public Criteria andDictionaryColumIsNotNull() {
addCriterion("dictionary_colum is not null");
return (Criteria) this;
}
public Criteria andDictionaryColumEqualTo(String value) {
addCriterion("dictionary_colum =", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumNotEqualTo(String value) {
addCriterion("dictionary_colum <>", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumGreaterThan(String value) {
addCriterion("dictionary_colum >", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumGreaterThanOrEqualTo(String value) {
addCriterion("dictionary_colum >=", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumLessThan(String value) {
addCriterion("dictionary_colum <", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumLessThanOrEqualTo(String value) {
addCriterion("dictionary_colum <=", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumLike(String value) {
addCriterion("dictionary_colum like", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumNotLike(String value) {
addCriterion("dictionary_colum not like", value, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumIn(List<String> values) {
addCriterion("dictionary_colum in", values, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumNotIn(List<String> values) {
addCriterion("dictionary_colum not in", values, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumBetween(String value1, String value2) {
addCriterion("dictionary_colum between", value1, value2, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryColumNotBetween(String value1, String value2) {
addCriterion("dictionary_colum not between", value1, value2, "dictionaryColum");
return (Criteria) this;
}
public Criteria andDictionaryValueIsNull() {
addCriterion("dictionary_value is null");
return (Criteria) this;
}
public Criteria andDictionaryValueIsNotNull() {
addCriterion("dictionary_value is not null");
return (Criteria) this;
}
public Criteria andDictionaryValueEqualTo(String value) {
addCriterion("dictionary_value =", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueNotEqualTo(String value) {
addCriterion("dictionary_value <>", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueGreaterThan(String value) {
addCriterion("dictionary_value >", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueGreaterThanOrEqualTo(String value) {
addCriterion("dictionary_value >=", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueLessThan(String value) {
addCriterion("dictionary_value <", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueLessThanOrEqualTo(String value) {
addCriterion("dictionary_value <=", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueLike(String value) {
addCriterion("dictionary_value like", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueNotLike(String value) {
addCriterion("dictionary_value not like", value, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueIn(List<String> values) {
addCriterion("dictionary_value in", values, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueNotIn(List<String> values) {
addCriterion("dictionary_value not in", values, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueBetween(String value1, String value2) {
addCriterion("dictionary_value between", value1, value2, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionaryValueNotBetween(String value1, String value2) {
addCriterion("dictionary_value not between", value1, value2, "dictionaryValue");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberIsNull() {
addCriterion("dictionary_serial_number is null");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberIsNotNull() {
addCriterion("dictionary_serial_number is not null");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberEqualTo(Integer value) {
addCriterion("dictionary_serial_number =", value, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberNotEqualTo(Integer value) {
addCriterion("dictionary_serial_number <>", value, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberGreaterThan(Integer value) {
addCriterion("dictionary_serial_number >", value, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberGreaterThanOrEqualTo(Integer value) {
addCriterion("dictionary_serial_number >=", value, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberLessThan(Integer value) {
addCriterion("dictionary_serial_number <", value, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberLessThanOrEqualTo(Integer value) {
addCriterion("dictionary_serial_number <=", value, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberIn(List<Integer> values) {
addCriterion("dictionary_serial_number in", values, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberNotIn(List<Integer> values) {
addCriterion("dictionary_serial_number not in", values, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberBetween(Integer value1, Integer value2) {
addCriterion("dictionary_serial_number between", value1, value2, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionarySerialNumberNotBetween(Integer value1, Integer value2) {
addCriterion("dictionary_serial_number not between", value1, value2, "dictionarySerialNumber");
return (Criteria) this;
}
public Criteria andDictionaryStatusIsNull() {
addCriterion("dictionary_status is null");
return (Criteria) this;
}
public Criteria andDictionaryStatusIsNotNull() {
addCriterion("dictionary_status is not null");
return (Criteria) this;
}
public Criteria andDictionaryStatusEqualTo(Integer value) {
addCriterion("dictionary_status =", value, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusNotEqualTo(Integer value) {
addCriterion("dictionary_status <>", value, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusGreaterThan(Integer value) {
addCriterion("dictionary_status >", value, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusGreaterThanOrEqualTo(Integer value) {
addCriterion("dictionary_status >=", value, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusLessThan(Integer value) {
addCriterion("dictionary_status <", value, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusLessThanOrEqualTo(Integer value) {
addCriterion("dictionary_status <=", value, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusIn(List<Integer> values) {
addCriterion("dictionary_status in", values, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusNotIn(List<Integer> values) {
addCriterion("dictionary_status not in", values, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusBetween(Integer value1, Integer value2) {
addCriterion("dictionary_status between", value1, value2, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryStatusNotBetween(Integer value1, Integer value2) {
addCriterion("dictionary_status not between", value1, value2, "dictionaryStatus");
return (Criteria) this;
}
public Criteria andDictionaryRemarksIsNull() {
addCriterion("dictionary_remarks is null");
return (Criteria) this;
}
public Criteria andDictionaryRemarksIsNotNull() {
addCriterion("dictionary_remarks is not null");
return (Criteria) this;
}
public Criteria andDictionaryRemarksEqualTo(String value) {
addCriterion("dictionary_remarks =", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksNotEqualTo(String value) {
addCriterion("dictionary_remarks <>", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksGreaterThan(String value) {
addCriterion("dictionary_remarks >", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksGreaterThanOrEqualTo(String value) {
addCriterion("dictionary_remarks >=", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksLessThan(String value) {
addCriterion("dictionary_remarks <", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksLessThanOrEqualTo(String value) {
addCriterion("dictionary_remarks <=", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksLike(String value) {
addCriterion("dictionary_remarks like", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksNotLike(String value) {
addCriterion("dictionary_remarks not like", value, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksIn(List<String> values) {
addCriterion("dictionary_remarks in", values, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksNotIn(List<String> values) {
addCriterion("dictionary_remarks not in", values, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksBetween(String value1, String value2) {
addCriterion("dictionary_remarks between", value1, value2, "dictionaryRemarks");
return (Criteria) this;
}
public Criteria andDictionaryRemarksNotBetween(String value1, String value2) {
addCriterion("dictionary_remarks not between", value1, value2, "dictionaryRemarks");
return (Criteria) this;
}
}
/**
 * Concrete criteria type returned by the fluent and...() builder methods.
 * MyBatis Generator emits this empty subclass so that user code can extend
 * GeneratedCriteria without touching the generated base class.
 */
public static class Criteria extends GeneratedCriteria {

    protected Criteria() {
        super();
    }
}
/**
 * One atomic SQL predicate collected by a Criteria instance
 * (MyBatis Generator boilerplate). Exactly one of the noValue /
 * singleValue / betweenValue / listValue flags is set, telling the
 * mapper XML how many values to bind for this condition.
 */
public static class Criterion {
    // Raw SQL fragment, e.g. "dictionary_table =" or "dictionary_status is null".
    private String condition;

    // First (or only) bound value; holds a List for IN / NOT IN conditions.
    private Object value;

    // Second bound value; used only by BETWEEN / NOT BETWEEN conditions.
    private Object secondValue;

    // True for conditions that bind no value (IS NULL / IS NOT NULL).
    private boolean noValue;

    // True when exactly one scalar value is bound.
    private boolean singleValue;

    // True when two values are bound (BETWEEN-style conditions).
    private boolean betweenValue;

    // True when the bound value is a List (IN-style conditions).
    private boolean listValue;

    // Optional MyBatis type handler name; null means use the default handler.
    private String typeHandler;

    public String getCondition() {
        return condition;
    }

    public Object getValue() {
        return value;
    }

    public Object getSecondValue() {
        return secondValue;
    }

    public boolean isNoValue() {
        return noValue;
    }

    public boolean isSingleValue() {
        return singleValue;
    }

    public boolean isBetweenValue() {
        return betweenValue;
    }

    public boolean isListValue() {
        return listValue;
    }

    public String getTypeHandler() {
        return typeHandler;
    }

    /** Value-less condition, e.g. "col is null". */
    protected Criterion(String condition) {
        super();
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }

    /** Single-value or list condition; the flag is chosen from the value's runtime type. */
    protected Criterion(String condition, Object value, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        if (value instanceof List<?>) {
            this.listValue = true;
        } else {
            this.singleValue = true;
        }
    }

    /** Single-value or list condition with the default type handler. */
    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }

    /** Two-value (BETWEEN-style) condition. */
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }

    /** Two-value (BETWEEN-style) condition with the default type handler. */
    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }
}
} |
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# Flutter build environment exported for the iOS (Xcode) build phase.
# NOTE(review): the paths below use Windows-style backslashes (D:\flutter)
# inside an sh script sourced on macOS — presumably the file was generated
# on a Windows host. TODO: confirm these values are regenerated on the
# machine that actually runs the iOS build before relying on them.
export "FLUTTER_ROOT=D:\flutter"
export "FLUTTER_APPLICATION_PATH=D:\github.com\abserari\HistoryOfEverything\app"
export "FLUTTER_TARGET=lib\main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build\ios"
export "OTHER_LDFLAGS=$(inherited) -framework Flutter"
export "FLUTTER_FRAMEWORK_DIR=D:\flutter\bin\cache\artifacts\engine\ios"
export "FLUTTER_BUILD_NAME=1.0.1"
export "FLUTTER_BUILD_NUMBER=19"
export "DART_OBFUSCATION=false"
export "TRACK_WIDGET_CREATION=false"
export "TREE_SHAKE_ICONS=false"
export "PACKAGE_CONFIG=.packages"
#!/usr/bin/env bash
# Run the path-creation helper. Fail fast (and propagate a non-zero exit
# status) if the interpreter is missing or the script errors, instead of
# silently succeeding as before.
set -euo pipefail

python create-path.py
package softuni.exam.domain.dtos.exportdtos;
import java.math.BigDecimal;
/**
 * Export DTO for the "players whose salary is bigger than X" query.
 * Plain data carrier: player name, shirt number, salary and the name of
 * the player's team. All fields are mutable bean properties.
 */
public class PlayerWhereSalaryBiggerThanDto {
    // Player's name as stored in the source table.
    private String name;
    // Player's shirt number.
    private Integer number;
    // Salary; BigDecimal to preserve exact monetary precision.
    private BigDecimal salary;
    // Name of the team the player belongs to.
    private String teamName;

    /** No-args constructor (bean-style instantiation). */
    public PlayerWhereSalaryBiggerThanDto() {
    }

    /** All-args constructor used when projecting query rows directly. */
    public PlayerWhereSalaryBiggerThanDto(String name, Integer number, BigDecimal salary, String teamName) {
        this.name = name;
        this.number = number;
        this.salary = salary;
        this.teamName = teamName;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getNumber() {
        return number;
    }

    public void setNumber(Integer number) {
        this.number = number;
    }

    public BigDecimal getSalary() {
        return salary;
    }

    public void setSalary(BigDecimal salary) {
        this.salary = salary;
    }

    public String getTeamName() {
        return teamName;
    }

    public void setTeamName(String teamName) {
        this.teamName = teamName;
    }
}
|
/*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if !defined( _TS_NATIVE_INCLUDED_ )
#define _TS_NATIVE_INCLUDED_
#if defined(__APPLE__) || defined(__linux__)
#include <stdbool.h>
#else
#define bool int
#define true 1
#define false 0
#endif
#define _MIN(x,y) ((x)>(y)? (y):(x))
#define _MAX(x,y) ((x)>(y)? (x):(y))
#ifdef WIN32
//*********************** WINDOWS section *************************
#if( _MSC_VER <= 800 )
#pragma pack(1)
#else
#include <pshpack1.h>
#endif
//*********************** WINDOWS section *************************
#else
#ifdef MAC
//*********************** MAC section *************************
#include <sys/cdefs.h>
#ifndef _inline
#define _inline inline
#endif
#define min(x,y) ((x)>(y)? (y):(x)) //replace with _MIN
#define max(x,y) ((x)>(y)? (x):(y)) //replace with _MAX
#define stricmp strcasecmp
#define strnicmp strncasecmp
//*********************** MAC section *************************
#else
//*********************** LINUX section *************************
//typedef _int64 long long
#ifndef _inline
#define _inline inline
#endif
#define min(x,y) ((x)>(y)? (y):(x)) //replace with _MIN
#define max(x,y) ((x)>(y)? (x):(y)) //replace with _MAX
//LINUX OS API conversion
#define stricmp strcasecmp
#define strnicmp strncasecmp
//*********************** LINUX section *************************
#endif
#endif
#ifdef __cplusplus
extern "C" {
#endif
/* put memory alloc under control (leak check) */
_inline void* sagetv_malloc( int size );
_inline void sagetv_free( void* );
//for cross-platform API stricmp
int sage_stricmp ( const char * dst, const char * src );
#define TS_PACKET_LENGTH 188
#define MAX_PROGRAM 64
#define MAX_ES 64
#define MAX_SERVICE_NUM 32
#define SYNC 0x47
typedef long (*LPFNParserDump)(void* handle, short, void*);
typedef long (*LPFNBuilderDump)(void* handle, short, void*);
typedef long (*LPFNRawDump)(void* handle, void*, long );
typedef long (*LPFNMesgDump)(void* handle, short, void*);
#ifdef WIN32 //windows defined
typedef _int64 LONGLONG;
typedef unsigned _int64 ULONGLONG;
typedef LONGLONG REFERENCE_TIME;
#else
typedef long long LONGLONG;
typedef unsigned long long ULONGLONG;
typedef LONGLONG REFERENCE_TIME;
#endif
#ifndef STREAM_TYPE_ENUM
#define STREAM_TYPE_ENUM
typedef enum
{
UNKNOWN_STREAM_TYPE = 0,
ATSC_STREAM_TYPE = 1,
DVB_STREAM_TYPE = 2,
} STREAM_TYPE;
#endif
typedef struct
{
char sync;
bool error;
bool start;
bool priority;
unsigned short pid;
char scrambling_ctr;
char adaption_ctr;
char continuity_ct;
char pcr_flag;
} TS_HEADER;
typedef struct
{
unsigned char apapt_len;
bool discontinute;
bool random_acceess;
bool elem_stream_prio;
char flags;
} TS_ADAPTION;
typedef struct
{
//unsigned char PCR[6];
//unsigned char OPCR[6];
unsigned char splice;
} TS_ADAPTION_OPTION;
typedef struct
{
int bytes;
int total_bytes;
int data_size;
char* data; //maxium is 1K bytes
char left_over[200];
bool left_over_size;
} TS_SECTION;
typedef struct
{
unsigned short SectionLength;
unsigned short TSID;
unsigned char VersionNumber;
bool CurrentNextIndicator;
unsigned char SectionNumber;
unsigned char LastSectionNumber;
unsigned short NumPrograms;
unsigned short ProgramNumber[MAX_PROGRAM];
unsigned short ProgramPID[MAX_PROGRAM];
unsigned char counter; //used by TS_HEADER.continuity_ct 4 bits for PAT
TS_SECTION section;
} TS_PAT;
/* Program Map data*/
typedef struct
{
unsigned char TableId;
unsigned short SectionLength;
unsigned short ProgramNumber;
unsigned char VersionNumber;
bool CurrentNextIndicator;
unsigned char SectionNum;
unsigned char LastSectionNum;
unsigned short PCRPID;
unsigned short ProgramInfoLength;
unsigned char* ProgramInfo;
unsigned short NumStreams;
unsigned char StreamType[MAX_ES];
unsigned short ElementaryPID[MAX_ES];
unsigned short ESInfoLength[MAX_ES];
unsigned char* ESInfo[MAX_ES];
unsigned char EScounter[MAX_PROGRAM]; //used by TS_HEADER.continuity_ct 4 bits for every ES;
unsigned char counter; //used by TS_HEADER.continuity_ct 4 bits for PMt
TS_SECTION* section;
} TS_PMT;
typedef struct
{
unsigned short DescLength;
unsigned char* DescInfo;
} DESC;
typedef struct
{
short channelID;
short programID;
short type;
short pid;
} PROGRAM_INFO;
typedef struct
{
unsigned short channelID;
unsigned short programID;
unsigned short streamIndex; //streamIndex:-1 indicates PMT, orthers are ES
unsigned short streamType;
unsigned short pid;
char Desc[180];
} AVSTREAM_INFO;
typedef struct
{
unsigned char type;
short sub_channel;
bool start_group_flag;
unsigned continuity;
short pid;
short bytes;
char* data;
unsigned short header_bytes;
LONGLONG PCR;
} TS_AV_INFO;
typedef struct
{
unsigned char stream_id;
unsigned char stream_type;
bool start_group_flag;
bool has_PTS;
bool has_DTS;
short bytes;
char* data;
short header_bytes;
LONGLONG PCR; //PCR for TS, SCR for PS
LONGLONG PTS;
LONGLONG DTS;
} PES_AV_INFO;
typedef struct
{
char type;
short pid;
void* data;
short size;
} SIGNAL_INFO;
typedef struct
{
short pid;
short service;
short channel;
void* parser;
void* tbl;
char* data;
short length;
} SECTION_INFO;
#ifndef SI_DATA_STRUCT
#define SI_DATA_STRUCT
#define MAX_CHANNEL_NAME 32
typedef struct
{
unsigned short a; //ATSC:major; DVB: ONID
unsigned short b; //ATSC:mainor; DVB: TSID
unsigned short c; //ATSC:program_id; DVB: program_id
} CHANNEL;
typedef struct
{
CHANNEL channel;
char name[MAX_CHANNEL_NAME];
unsigned short type;
} CHANNEL_NAME;
#endif
typedef int (*SI_PARSER_DUMPER)( void* context, unsigned short pid, TS_HEADER* ts, char* data, int bytes );
#define STREAM_START 0x01
#define STREAM_READY 0x02
#define STREAM_CLOSE 0x03
#define STREAM_VIDEO_START 0x04
#define STREAM_AUDIO_START 0x05
#define PCR_READY 0x06
#define PES_SYSTEM_PACK 0x10
#define PES_PACKET_PACK 0x11
#define PES_PADDING_PACK 0x12
#define PES_SAGE_PRIVATE_INF 0x13
void StartSection(TS_SECTION* pSection, int nSectionLength );
bool PushSectionData( TS_SECTION* pSection, char* pData, int Bytes );
bool SectionCrcCheck( TS_SECTION* pSection );
bool BuildSectionData( TS_SECTION* pSection, int nSectionLength, char* pData );
int PopSectionData( TS_SECTION* pSection, char* pData, int Bytes );
#ifdef __cplusplus
}
#endif
#endif
|
<filename>internal/storage/config.go
package storage
// Config defines path for store data
// Config holds the Badger datastore settings, populated from environment
// variables by an env-tag decoder.
type Config struct {
	// Path is the on-disk directory for Badger data files.
	Path string `env:"BADGER_PATH,default=/tmp/badger"`
	// EncryptionKey is the Badger at-rest encryption key.
	// NOTE(review): the default value "test" is not a safe production key —
	// confirm deployments always override BADGER_ENCRYPTIONKEY.
	EncryptionKey string `env:"BADGER_ENCRYPTIONKEY,default=test"`
}
|
export * from './healths'
|
import './index';
describe('jest-chain', () => {
it('chains top level matchers', () => {
expect(1)
.toBe(1)
.toBeGreaterThan(0)
.toBeLessThan(2);
});
it('chains nested level matchers', () => {
expect(1)
.toBe(1)
.not.toBe(0)
.toBeGreaterThan(0)
.not.toBeGreaterThan(1)
.toBeLessThan(2)
.not.toBeLessThan(1);
});
it('chains custom matchers from jest-extended', () => {
expect(1)
.toBe(1)
.toBeGreaterThan(0)
.toBeLessThan(2)
.toBePositive()
.not.toBeNegative();
});
describe('fails fast', () => {
it('throws error from first matcher: toBe when 1 != 2', () => {
expect(() =>
expect(1)
.toBe(2)
.toBeGreaterThan(1)
.toBeLessThan(1)
).toThrowErrorMatchingSnapshot();
});
it('throws error from second matcher: toBeGreaterThan when 1 !> 1', () => {
expect(() =>
expect(1)
.toBe(1)
.toBeGreaterThan(1)
.toBeLessThan(1)
).toThrowErrorMatchingSnapshot();
});
it('throws error from second matcher: toBeLessThan when 1 !< 1', () => {
expect(() =>
expect(1)
.toBe(1)
.toBeGreaterThan(0)
.toBeLessThan(1)
).toThrowErrorMatchingSnapshot();
});
});
describe('supports custom matchers registered after jest-chain', () => {
expect.extend({
toBeDivisibleBy(received, argument) {
const pass = received % argument == 0;
const message = pass
? () => `expected ${received} not to be divisible by ${argument}`
: () => `expected ${received} to be divisible by ${argument}`;
return { message, pass };
}
});
it('chains new custom matchers with existing ones', () => {
expect(100).toBeDivisibleBy(2);
expect(101).not.toBeDivisibleBy(2);
expect(100)
.toBeDivisibleBy(2)
.toBePositive()
.not.toBeNegative()
.toBe(100);
});
it('supports custom asymmetric matchers', () => {
expect({ apples: 6, bananas: 3 }).toEqual({
apples: expect.toBeDivisibleBy(2),
bananas: expect.not.toBeDivisibleBy(2)
});
});
});
});
|
#!/bin/bash
sudo apt-get install -y git
git clone -b monolith https://github.com/express42/reddit.git
cd reddit && sudo bundle install
|
package util;
import com.alibaba.fastjson.JSONObject;
import com.jcraft.jsch.*;
import core.checker.checker.Operation;
import core.db.Node;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import static util.Constant.SSH_PORT;
@Slf4j
public class Support {
private static JSch jsch = new JSch();
public static String TxtToString(String filePath) {
try {
Path path = Paths.get(filePath);
List<String> lines = Files.readAllLines(path);
StringBuilder result = new StringBuilder();
for(String line: lines) {
if(!line.equals(""))
result.append(line).append("\n");
}
return result.toString();
} catch (Exception e) {
log.error(e.getMessage());
return "";
}
}
// 用户自定义的Operation的request类型传进来
public static ArrayList<Operation> TxtToOperations(String filePath) {
// public static ArrayList<Operation> TxtToOperations(String filePath, Class requestClass) {
ArrayList<Operation> operations = new ArrayList<>();
String content = TxtToString(filePath);
if(content.equals(""))
return operations;
String[] strings = content.split("\n");
for(String str: strings) {
Operation operation = JSONObject.parseObject(str, Operation.class); // 在这里data会被反序列化成JsonObject类型
// TODO 不知道会不会有问题
// if(operation.getAction() == ActionEnum.InvokeOperation)
// operation.setData(JSONObject.parseObject(JSON.toJSONString(operation.getData()), requestClass));
// else
// operation.setData(JSONObject.parseObject(JSON.toJSONString(operation.getData()), ClientInvokeResponse.class));
operations.add(operation);
}
return operations;
}
public static ArrayList<Integer> ShuffleByCount(int length) {
// shuffle indices
ArrayList<Integer> indices = new ArrayList<>();
for(int i = 0; i < length; i++)
indices.add(i);
Collections.shuffle(indices); // 从后往前用一个随机数做index进行swap
return indices;
}
// executeQuery用于select
public static <T> T JDBCQueryWithNode(Node node, String sql, Function<ResultSet, T> handle) {
Connection connection = null;
Statement statement = null;
try {
connection = DriverManager.getConnection(node.getOceanBaseURL(), node.getUsername(), node.getPassword());
statement = connection.createStatement();
ResultSet rs = statement.executeQuery(sql);
return handle.apply(rs);
} catch(Exception e) {
log.error(e.getMessage());
return null;
} finally {
try {
if (statement != null)
statement.close();
if(connection != null)
connection.close();
} catch (Exception e) {
log.error(e.getMessage());
}
}
}
// TODO 合并一下
// executeQuery用于select
// Attention: maybe return is null!!!
public static <T> T JDBCQueryWithClient(Connection connection, String selectSQL, Function<ResultSet, T> handle) {
Statement statement = null;
try {
statement = connection.createStatement();
ResultSet rs = statement.executeQuery(selectSQL);
return handle.apply(rs);
} catch(Exception e) {
log.error(e.getMessage());
return null;
} finally {
try {
if (statement != null)
statement.close();
} catch (Exception e) {
log.error(e.getMessage());
}
}
}
// executeUpdate用于create, insert, delete, update
public static Exception JDBCUpdate(Connection connection, String allSQL) { ;
Statement statement = null;
try {
statement = connection.createStatement();
for(String sql: allSQL.split(";"))
statement.executeUpdate(sql + ";");
return null;
} catch(Exception e) {
log.error(e.getMessage());
return e;
} finally {
try {
if (statement != null)
statement.close();
} catch (Exception e) {
log.error(e.getMessage());
}
}
}
public static String ShellCommand(String shell_path, String args) {
return "chmod u+x " + shell_path + "\n" + shell_path + " " + args;
}
public static Exception ExecuteCommand(Node node, String command) {
try {
Session session = jsch.getSession(node.getUsername(), node.getIp(), SSH_PORT);
session.setPassword(<PASSWORD>());
session.setConfig("StrictHostKeyChecking","no");
session.setTimeout(6000);
session.connect();
ChannelExec exec = (ChannelExec) session.openChannel("exec");
InputStream in = exec.getInputStream();
exec.setCommand(command); // 默认位置是/$username
exec.connect();
String s = IOUtils.toString(in, "UTF-8");
log.info("执行命令:" + command + " 结果:"+s);
in.close();
exec.disconnect();
session.disconnect();
return null;
} catch (Exception e) {
log.error(e.getMessage());
return e;
}
}
public static Exception SendFile(Node node, String srcPath, String destPath) {
try {
Session session = jsch.getSession(node.getUsername(), node.getIp(), SSH_PORT);
session.setPassword(<PASSWORD>());
session.setConfig("StrictHostKeyChecking","no");
session.setTimeout(6000);
session.connect();
ChannelSftp sftp = (ChannelSftp) session.openChannel("sftp");
sftp.connect();
sftp.put(srcPath, destPath, new MySftpProgressMonitor(), ChannelSftp.OVERWRITE);
sftp.disconnect();
session.disconnect();
return null;
} catch (Exception e) {
log.error(e.getMessage());
return e;
}
}
private static class MySftpProgressMonitor implements SftpProgressMonitor {
private long transferred;
@Override
public void init(int op, String src, String dest, long max) {
log.info("Transferring begin.");
}
@Override
public boolean count(long count) {
transferred = transferred + count;
log.info("Currently transferred total size: " + transferred + " bytes");
return true;
}
@Override
public void end() {
log.info("Transferring done.");
}
}
}
|
<gh_stars>0
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_sports_basketball_outline = void 0;
var ic_sports_basketball_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M12,2C6.48,2,2,6.48,2,12c0,5.52,4.48,10,10,10s10-4.48,10-10C22,6.48,17.52,2,12,2z M5.23,7.75 C6.1,8.62,6.7,9.74,6.91,11H4.07C4.22,9.82,4.63,8.72,5.23,7.75z M4.07,13h2.84c-0.21,1.26-0.81,2.38-1.68,3.25 C4.63,15.28,4.22,14.18,4.07,13z M11,19.93c-1.73-0.22-3.29-1-4.49-2.14c1.3-1.24,2.19-2.91,2.42-4.79H11V19.93z M11,11H8.93 C8.69,9.12,7.81,7.44,6.5,6.2C7.71,5.06,9.27,4.29,11,4.07V11z M19.93,11h-2.84c0.21-1.26,0.81-2.38,1.68-3.25 C19.37,8.72,19.78,9.82,19.93,11z M13,4.07c1.73,0.22,3.29,0.99,4.5,2.13c-1.31,1.24-2.19,2.92-2.43,4.8H13V4.07z M13,19.93V13 h2.07c0.24,1.88,1.12,3.55,2.42,4.79C16.29,18.93,14.73,19.71,13,19.93z M18.77,16.25c-0.87-0.86-1.46-1.99-1.68-3.25h2.84 C19.78,14.18,19.37,15.28,18.77,16.25z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M12,2C6.48,2,2,6.48,2,12c0,5.52,4.48,10,10,10s10-4.48,10-10C22,6.48,17.52,2,12,2z M5.23,7.75 C6.1,8.62,6.7,9.74,6.91,11H4.07C4.22,9.82,4.63,8.72,5.23,7.75z M4.07,13h2.84c-0.21,1.26-0.81,2.38-1.68,3.25 C4.63,15.28,4.22,14.18,4.07,13z M11,19.93c-1.73-0.22-3.29-1-4.49-2.14c1.3-1.24,2.19-2.91,2.42-4.79H11V19.93z M11,11H8.93 C8.69,9.12,7.81,7.44,6.5,6.2C7.71,5.06,9.27,4.29,11,4.07V11z M19.93,11h-2.84c0.21-1.26,0.81-2.38,1.68-3.25 C19.37,8.72,19.78,9.82,19.93,11z M13,4.07c1.73,0.22,3.29,0.99,4.5,2.13c-1.31,1.24-2.19,2.92-2.43,4.8H13V4.07z M13,19.93V13 h2.07c0.24,1.88,1.12,3.55,2.42,4.79C16.29,18.93,14.73,19.71,13,19.93z M18.77,16.25c-0.87-0.86-1.46-1.99-1.68-3.25h2.84 C19.78,14.18,19.37,15.28,18.77,16.25z"
},
"children": []
}]
}]
}]
}]
}]
};
exports.ic_sports_basketball_outline = ic_sports_basketball_outline; |
<gh_stars>0
#include "catch.hpp"
#include "test_helpers.hpp"
using namespace duckdb;
using namespace std;
// Verifies every scalar aggregate (COUNT, SUM, MIN, MAX, FIRST, AVG,
// STRING_AGG) on constant arguments of many types, including NULL handling
// and rejection of wrong arities / unsupported argument types.
TEST_CASE("Test scalar aggregates with many different types", "[aggregate]") {
	unique_ptr<QueryResult> result;
	DuckDB db(nullptr);
	Connection con(db);

	// count: counts non-NULL arguments; COUNT(NULL) is 0; exactly 0 or 1 argument allowed
	result = con.Query("SELECT COUNT(), COUNT(1), COUNT(*), COUNT(NULL), COUNT('hello'), COUNT(DATE '1992-02-02')");
	REQUIRE(CHECK_COLUMN(result, 0, {1}));
	REQUIRE(CHECK_COLUMN(result, 1, {1}));
	REQUIRE(CHECK_COLUMN(result, 2, {1}));
	REQUIRE(CHECK_COLUMN(result, 3, {0}));
	REQUIRE(CHECK_COLUMN(result, 4, {1}));
	REQUIRE(CHECK_COLUMN(result, 5, {1}));
	REQUIRE_FAIL(con.Query("SELECT COUNT(1, 2)"));

	// sum: numeric arguments only; SUM(NULL) yields NULL
	result = con.Query("SELECT SUM(1), SUM(NULL), SUM(33.3)");
	REQUIRE(CHECK_COLUMN(result, 0, {1}));
	REQUIRE(CHECK_COLUMN(result, 1, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 2, {33.3}));
	REQUIRE_FAIL(con.Query("SELECT SUM(True)"));
	REQUIRE_FAIL(con.Query("SELECT SUM('hello')"));
	REQUIRE_FAIL(con.Query("SELECT SUM(DATE '1992-02-02')"));
	REQUIRE_FAIL(con.Query("SELECT SUM()"));
	REQUIRE_FAIL(con.Query("SELECT SUM(1, 2)"));

	// min: accepts any orderable type; exactly one argument
	result = con.Query("SELECT MIN(1), MIN(NULL), MIN(33.3), MIN('hello'), MIN(True), MIN(DATE '1992-02-02'), "
	                   "MIN(TIMESTAMP '2008-01-01 00:00:01')");
	REQUIRE(CHECK_COLUMN(result, 0, {1}));
	REQUIRE(CHECK_COLUMN(result, 1, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 2, {33.3}));
	REQUIRE(CHECK_COLUMN(result, 3, {"hello"}));
	REQUIRE(CHECK_COLUMN(result, 4, {Value::BOOLEAN(true)}));
	REQUIRE(CHECK_COLUMN(result, 5, {Value::DATE(1992, 2, 2)}));
	REQUIRE(CHECK_COLUMN(result, 6, {Value::TIMESTAMP(2008, 1, 1, 0, 0, 1, 0)}));
	REQUIRE_FAIL(con.Query("SELECT MIN()"));
	REQUIRE_FAIL(con.Query("SELECT MIN(1, 2)"));

	// max: same type coverage as MIN
	result = con.Query("SELECT MAX(1), MAX(NULL), MAX(33.3), MAX('hello'), MAX(True), MAX(DATE '1992-02-02'), "
	                   "MAX(TIMESTAMP '2008-01-01 00:00:01')");
	REQUIRE(CHECK_COLUMN(result, 0, {1}));
	REQUIRE(CHECK_COLUMN(result, 1, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 2, {33.3}));
	REQUIRE(CHECK_COLUMN(result, 3, {"hello"}));
	REQUIRE(CHECK_COLUMN(result, 4, {Value::BOOLEAN(true)}));
	REQUIRE(CHECK_COLUMN(result, 5, {Value::DATE(1992, 2, 2)}));
	REQUIRE(CHECK_COLUMN(result, 6, {Value::TIMESTAMP(2008, 1, 1, 0, 0, 1, 0)}));
	REQUIRE_FAIL(con.Query("SELECT MAX()"));
	REQUIRE_FAIL(con.Query("SELECT MAX(1, 2)"));

	// first: returns its (single) argument for a one-row input
	result = con.Query("SELECT FIRST(1), FIRST(NULL), FIRST(33.3), FIRST('hello'), FIRST(True), FIRST(DATE "
	                   "'1992-02-02'), FIRST(TIMESTAMP '2008-01-01 00:00:01')");
	REQUIRE(CHECK_COLUMN(result, 0, {1}));
	REQUIRE(CHECK_COLUMN(result, 1, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 2, {33.3}));
	REQUIRE(CHECK_COLUMN(result, 3, {"hello"}));
	REQUIRE(CHECK_COLUMN(result, 4, {Value::BOOLEAN(true)}));
	REQUIRE(CHECK_COLUMN(result, 5, {Value::DATE(1992, 2, 2)}));
	REQUIRE(CHECK_COLUMN(result, 6, {Value::TIMESTAMP(2008, 1, 1, 0, 0, 1, 0)}));
	REQUIRE_FAIL(con.Query("SELECT FIRST()"));
	REQUIRE_FAIL(con.Query("SELECT FIRST(1, 2)"));

	// avg: numeric arguments only, like SUM
	result = con.Query("SELECT AVG(1), AVG(NULL), AVG(33.3)");
	REQUIRE(CHECK_COLUMN(result, 0, {1}));
	REQUIRE(CHECK_COLUMN(result, 1, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 2, {33.3}));
	REQUIRE_FAIL(con.Query("SELECT AVG(True)"));
	REQUIRE_FAIL(con.Query("SELECT AVG('hello')"));
	REQUIRE_FAIL(con.Query("SELECT AVG(DATE '1992-02-02')"));
	REQUIRE_FAIL(con.Query("SELECT AVG()"));
	REQUIRE_FAIL(con.Query("SELECT AVG(1, 2)"));

	// string agg: NULL in either the value or the separator yields NULL; exactly two arguments
	result = con.Query(
	    "SELECT STRING_AGG('hello', ' '), STRING_AGG('hello', NULL), STRING_AGG(NULL, ' '), STRING_AGG(NULL, NULL)");
	REQUIRE(CHECK_COLUMN(result, 0, {"hello"}));
	REQUIRE(CHECK_COLUMN(result, 1, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 2, {Value()}));
	REQUIRE(CHECK_COLUMN(result, 3, {Value()}));
	REQUIRE_FAIL(con.Query("SELECT STRING_AGG()"));
	REQUIRE_FAIL(con.Query("SELECT STRING_AGG('hello')"));
	REQUIRE_FAIL(con.Query("SELECT STRING_AGG(1, 2, 3)"));
}
// Verifies COUNT/MIN/MAX/SUM/AVG/STRING_AGG behavior per column type
// (VARCHAR, BOOLEAN, INTEGER): ungrouped aggregates, all-NULL inputs,
// GROUP BY with empty groups, and rejection of unsupported type/aggregate
// combinations (e.g. SUM over strings or booleans).
TEST_CASE("Test aggregates with many different types", "[aggregate]") {
unique_ptr<QueryResult> result;
DuckDB db(nullptr);
Connection con(db);
// strings
REQUIRE_NO_FAIL(con.Query("CREATE TABLE strings(s STRING, g INTEGER)"));
REQUIRE_NO_FAIL(con.Query("INSERT INTO strings VALUES ('hello', 0), ('world', 1), (NULL, 0), ('r', 1)"));
// simple aggregates only
result = con.Query("SELECT COUNT(*), COUNT(s), MIN(s), MAX(s) FROM strings");
REQUIRE(CHECK_COLUMN(result, 0, {4}));
REQUIRE(CHECK_COLUMN(result, 1, {3}));
REQUIRE(CHECK_COLUMN(result, 2, {"hello"}));
REQUIRE(CHECK_COLUMN(result, 3, {"world"}));
// simple aggr with only NULL values: COUNT(col) is 0, MIN/MAX are NULL
result = con.Query("SELECT COUNT(*), COUNT(s), MIN(s), MAX(s) FROM strings WHERE s IS NULL");
REQUIRE(CHECK_COLUMN(result, 0, {1}));
REQUIRE(CHECK_COLUMN(result, 1, {0}));
REQUIRE(CHECK_COLUMN(result, 2, {Value()}));
REQUIRE(CHECK_COLUMN(result, 3, {Value()}));
// add string_agg: NULL elements are skipped, not emitted as separators
result = con.Query("SELECT STRING_AGG(s, ' ') FROM strings");
REQUIRE(CHECK_COLUMN(result, 0, {"hello world r"}));
// more complex agg (groups)
result = con.Query(
"SELECT g, COUNT(*), COUNT(s), MIN(s), MAX(s), STRING_AGG(s, ' ') FROM strings GROUP BY g ORDER BY g");
REQUIRE(CHECK_COLUMN(result, 0, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 1, {2, 2}));
REQUIRE(CHECK_COLUMN(result, 2, {1, 2}));
REQUIRE(CHECK_COLUMN(result, 3, {"hello", "r"}));
REQUIRE(CHECK_COLUMN(result, 4, {"hello", "world"}));
REQUIRE(CHECK_COLUMN(result, 5, {"hello", "world r"}));
// empty group: group 0 keeps only its NULL row after the filter
result = con.Query("SELECT g, COUNT(*), COUNT(s), MIN(s), MAX(s), STRING_AGG(s, ' ') FROM strings WHERE s IS NULL "
"OR s <> 'hello' GROUP BY g ORDER BY g");
REQUIRE(CHECK_COLUMN(result, 0, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 1, {1, 2}));
REQUIRE(CHECK_COLUMN(result, 2, {0, 2}));
REQUIRE(CHECK_COLUMN(result, 3, {Value(), "r"}));
REQUIRE(CHECK_COLUMN(result, 4, {Value(), "world"}));
REQUIRE(CHECK_COLUMN(result, 5, {Value(), "world r"}));
// unsupported aggregates
REQUIRE_FAIL(con.Query("SELECT SUM(s) FROM strings GROUP BY g ORDER BY g"));
REQUIRE_FAIL(con.Query("SELECT AVG(s) FROM strings GROUP BY g ORDER BY g"));
// booleans
REQUIRE_NO_FAIL(con.Query("CREATE TABLE booleans(b BOOLEAN, g INTEGER)"));
REQUIRE_NO_FAIL(con.Query("INSERT INTO booleans VALUES (false, 0), (true, 1), (NULL, 0), (false, 1)"));
// simple agg (no grouping)
result = con.Query("SELECT COUNT(*), COUNT(b), MIN(b), MAX(b) FROM booleans");
REQUIRE(CHECK_COLUMN(result, 0, {4}));
REQUIRE(CHECK_COLUMN(result, 1, {3}));
REQUIRE(CHECK_COLUMN(result, 2, {false}));
REQUIRE(CHECK_COLUMN(result, 3, {true}));
// simple agg with only null values
result = con.Query("SELECT COUNT(*), COUNT(b), MIN(b), MAX(b) FROM booleans WHERE b IS NULL");
REQUIRE(CHECK_COLUMN(result, 0, {1}));
REQUIRE(CHECK_COLUMN(result, 1, {0}));
REQUIRE(CHECK_COLUMN(result, 2, {Value()}));
REQUIRE(CHECK_COLUMN(result, 3, {Value()}));
// more complex agg (groups)
result = con.Query("SELECT g, COUNT(*), COUNT(b), MIN(b), MAX(b) FROM booleans GROUP BY g ORDER BY g");
REQUIRE(CHECK_COLUMN(result, 0, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 1, {2, 2}));
REQUIRE(CHECK_COLUMN(result, 2, {1, 2}));
REQUIRE(CHECK_COLUMN(result, 3, {false, false}));
REQUIRE(CHECK_COLUMN(result, 4, {false, true}));
// more complex agg with empty groups
result = con.Query(
"SELECT g, COUNT(*), COUNT(b), MIN(b), MAX(b) FROM booleans WHERE b IS NULL OR b=true GROUP BY g ORDER BY g");
REQUIRE(CHECK_COLUMN(result, 0, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 1, {1, 1}));
REQUIRE(CHECK_COLUMN(result, 2, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 3, {Value(), true}));
REQUIRE(CHECK_COLUMN(result, 4, {Value(), true}));
// unsupported aggregates
REQUIRE_FAIL(con.Query("SELECT SUM(b) FROM booleans GROUP BY g ORDER BY g"));
REQUIRE_FAIL(con.Query("SELECT AVG(b) FROM booleans GROUP BY g ORDER BY g"));
// integers
REQUIRE_NO_FAIL(con.Query("CREATE TABLE integers(i INTEGER, g INTEGER)"));
REQUIRE_NO_FAIL(con.Query("INSERT INTO integers VALUES (12, 0), (22, 1), (NULL, 0), (14, 1)"));
// simple agg (no grouping)
result = con.Query("SELECT COUNT(*), COUNT(i), MIN(i), MAX(i), SUM(i) FROM integers");
REQUIRE(CHECK_COLUMN(result, 0, {4}));
REQUIRE(CHECK_COLUMN(result, 1, {3}));
REQUIRE(CHECK_COLUMN(result, 2, {12}));
REQUIRE(CHECK_COLUMN(result, 3, {22}));
REQUIRE(CHECK_COLUMN(result, 4, {48}));
// simple agg with only null values
// (table identifiers are case-insensitive, hence "INTEGERS" here)
result = con.Query("SELECT COUNT(*), COUNT(i), MIN(i), MAX(i), SUM(i) FROM INTEGERS WHERE i IS NULL");
REQUIRE(CHECK_COLUMN(result, 0, {1}));
REQUIRE(CHECK_COLUMN(result, 1, {0}));
REQUIRE(CHECK_COLUMN(result, 2, {Value()}));
REQUIRE(CHECK_COLUMN(result, 3, {Value()}));
REQUIRE(CHECK_COLUMN(result, 4, {Value()}));
// more complex agg (groups)
result = con.Query("SELECT g, COUNT(*), COUNT(i), MIN(i), MAX(i), SUM(i) FROM integers GROUP BY g ORDER BY g");
REQUIRE(CHECK_COLUMN(result, 0, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 1, {2, 2}));
REQUIRE(CHECK_COLUMN(result, 2, {1, 2}));
REQUIRE(CHECK_COLUMN(result, 3, {12, 14}));
REQUIRE(CHECK_COLUMN(result, 4, {12, 22}));
REQUIRE(CHECK_COLUMN(result, 5, {12, 36}));
// more complex agg with empty groups
result = con.Query("SELECT g, COUNT(*), COUNT(i), MIN(i), MAX(i), SUM(i) FROM integers WHERE i IS NULL OR i > 15 "
"GROUP BY g ORDER BY g");
REQUIRE(CHECK_COLUMN(result, 0, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 1, {1, 1}));
REQUIRE(CHECK_COLUMN(result, 2, {0, 1}));
REQUIRE(CHECK_COLUMN(result, 3, {Value(), 22}));
REQUIRE(CHECK_COLUMN(result, 4, {Value(), 22}));
REQUIRE(CHECK_COLUMN(result, 5, {Value(), 22}));
}
|
# /usr/bin/env python
# -*- coding:utf8 -*-
from .base import BaseModule
class Category(BaseModule):
    """Item-category endpoints: thin wrappers around the shared API client."""

    def get_categories(self, **kwargs):
        """
        Use this call to get categories of product item.

        :param kwargs: request parameters forwarded unchanged to the API
        :return: the client's decoded API response
        """
        endpoint = "item/categories/get"
        return self.client.execute(endpoint, "POST", kwargs)

    def get_attributes(self, **kwargs):
        """
        Use this call to get attributes of product item.

        :param kwargs: request parameters forwarded unchanged to the API
        :return: the client's decoded API response
        """
        endpoint = "item/attributes/get"
        return self.client.execute(endpoint, "POST", kwargs)
|
#!/bin/bash
# Recursively remove macOS Finder metadata files (.DS_Store) below the
# current directory, printing each path as it is deleted.
find . -type f -name '.DS_Store' -print -delete
|
<gh_stars>0
/**
 * Convert an ISO 3166-1 alpha-2 country code (e.g. "us", "DE") into its
 * emoji flag by shifting each letter into the Unicode regional-indicator
 * symbol range. On engines without String.fromCodePoint the code is
 * returned unchanged as a fallback.
 *
 * @param {string} isoCode - two-letter country code, any case
 * @returns {string} the emoji flag, or the original code on old engines
 */
export function countryToFlag(isoCode) {
  if (typeof String.fromCodePoint === "undefined") {
    return isoCode;
  }
  // 127397 = 0x1F1E6 ("REGIONAL INDICATOR SYMBOL LETTER A") - 0x41 ("A")
  const REGIONAL_INDICATOR_OFFSET = 127397;
  return isoCode
    .toUpperCase()
    .replace(/./g, (letter) =>
      String.fromCodePoint(letter.charCodeAt(0) + REGIONAL_INDICATOR_OFFSET)
    );
}
// export const listOffices = [
// { label: "office 1" },
// { label: "office 2" },
// { label: "office 3" },
// ];
// ISO 3166-1 alpha-2 country codes with English display labels, sorted by
// code; pair each entry's `code` with countryToFlag() to render a flag.
// NOTE(review): the "<NAME>" labels below (codes BL and SJ) look like
// redaction artifacts in the source data (BL = Saint Barthelemy,
// SJ = Svalbard and Jan Mayen) — confirm against the upstream list.
export const countries = [
{ code: "AD", label: "Andorra" },
{ code: "AE", label: "United Arab Emirates" },
{ code: "AF", label: "Afghanistan" },
{ code: "AG", label: "Antigua and Barbuda" },
{ code: "AI", label: "Anguilla" },
{ code: "AL", label: "Albania" },
{ code: "AM", label: "Armenia" },
{ code: "AO", label: "Angola" },
{ code: "AQ", label: "Antarctica" },
{ code: "AR", label: "Argentina" },
{ code: "AS", label: "American Samoa" },
{ code: "AT", label: "Austria" },
{ code: "AU", label: "Australia" },
{ code: "AW", label: "Aruba" },
{ code: "AX", label: "Alland Islands" },
{ code: "AZ", label: "Azerbaijan" },
{ code: "BA", label: "Bosnia and Herzegovina" },
{ code: "BB", label: "Barbados" },
{ code: "BD", label: "Bangladesh" },
{ code: "BE", label: "Belgium" },
{ code: "BF", label: "Burkina Faso" },
{ code: "BG", label: "Bulgaria" },
{ code: "BH", label: "Bahrain" },
{ code: "BI", label: "Burundi" },
{ code: "BJ", label: "Benin" },
{ code: "BL", label: "<NAME>" },
{ code: "BM", label: "Bermuda" },
{ code: "BN", label: "Brunei Darussalam" },
{ code: "BO", label: "Bolivia" },
{ code: "BR", label: "Brazil" },
{ code: "BS", label: "Bahamas" },
{ code: "BT", label: "Bhutan" },
{ code: "BV", label: "Bouvet Island" },
{ code: "BW", label: "Botswana" },
{ code: "BY", label: "Belarus" },
{ code: "BZ", label: "Belize" },
{ code: "CA", label: "Canada" },
{ code: "CC", label: "Cocos (Keeling) Islands" },
{ code: "CD", label: "Congo, Democratic Republic of the" },
{ code: "CF", label: "Central African Republic" },
{ code: "CG", label: "Congo, Republic of the" },
{ code: "CH", label: "Switzerland" },
{ code: "CI", label: "Cote d'Ivoire" },
{ code: "CK", label: "Cook Islands" },
{ code: "CL", label: "Chile" },
{ code: "CM", label: "Cameroon" },
{ code: "CN", label: "China" },
{ code: "CO", label: "Colombia" },
{ code: "CR", label: "Costa Rica" },
{ code: "CU", label: "Cuba" },
{ code: "CV", label: "Cape Verde" },
{ code: "CW", label: "Curacao" },
{ code: "CX", label: "Christmas Island" },
{ code: "CY", label: "Cyprus" },
{ code: "CZ", label: "Czech Republic" },
{ code: "DE", label: "Germany" },
{ code: "DJ", label: "Djibouti" },
{ code: "DK", label: "Denmark" },
{ code: "DM", label: "Dominica" },
{ code: "DO", label: "Dominican Republic" },
{ code: "DZ", label: "Algeria" },
{ code: "EC", label: "Ecuador" },
{ code: "EE", label: "Estonia" },
{ code: "EG", label: "Egypt" },
{ code: "EH", label: "Western Sahara" },
{ code: "ER", label: "Eritrea" },
{ code: "ES", label: "Spain" },
{ code: "ET", label: "Ethiopia" },
{ code: "FI", label: "Finland" },
{ code: "FJ", label: "Fiji" },
{ code: "FK", label: "Falkland Islands (Malvinas)" },
{ code: "FM", label: "Micronesia, Federated States of" },
{ code: "FO", label: "Faroe Islands" },
{ code: "FR", label: "France" },
{ code: "GA", label: "Gabon" },
{ code: "GB", label: "United Kingdom" },
{ code: "GD", label: "Grenada" },
{ code: "GE", label: "Georgia" },
{ code: "GF", label: "French Guiana" },
{ code: "GG", label: "Guernsey" },
{ code: "GH", label: "Ghana" },
{ code: "GI", label: "Gibraltar" },
{ code: "GL", label: "Greenland" },
{ code: "GM", label: "Gambia" },
{ code: "GN", label: "Guinea" },
{ code: "GP", label: "Guadeloupe" },
{ code: "GQ", label: "Equatorial Guinea" },
{ code: "GR", label: "Greece" },
{
code: "GS",
label: "South Georgia and the South Sandwich Islands",
},
{ code: "GT", label: "Guatemala" },
{ code: "GU", label: "Guam" },
{ code: "GW", label: "Guinea-Bissau" },
{ code: "GY", label: "Guyana" },
{ code: "HK", label: "Hong Kong" },
{ code: "HM", label: "Heard Island and McDonald Islands" },
{ code: "HN", label: "Honduras" },
{ code: "HR", label: "Croatia" },
{ code: "HT", label: "Haiti" },
{ code: "HU", label: "Hungary" },
{ code: "ID", label: "Indonesia" },
{ code: "IE", label: "Ireland" },
{ code: "IL", label: "Israel" },
{ code: "IM", label: "Isle of Man" },
{ code: "IN", label: "India" },
{ code: "IO", label: "British Indian Ocean Territory" },
{ code: "IQ", label: "Iraq" },
{ code: "IR", label: "Iran, Islamic Republic of" },
{ code: "IS", label: "Iceland" },
{ code: "IT", label: "Italy" },
{ code: "JE", label: "Jersey" },
{ code: "JM", label: "Jamaica" },
{ code: "JO", label: "Jordan" },
{ code: "JP", label: "Japan" },
{ code: "KE", label: "Kenya" },
{ code: "KG", label: "Kyrgyzstan" },
{ code: "KH", label: "Cambodia" },
{ code: "KI", label: "Kiribati" },
{ code: "KM", label: "Comoros" },
{ code: "KN", label: "Saint Kitts and Nevis" },
{ code: "KP", label: "Korea, Democratic People's Republic of" },
{ code: "KR", label: "Korea, Republic of" },
{ code: "KW", label: "Kuwait" },
{ code: "KY", label: "Cayman Islands" },
{ code: "KZ", label: "Kazakhstan" },
{ code: "LA", label: "Lao People's Democratic Republic" },
{ code: "LB", label: "Lebanon" },
{ code: "LC", label: "Saint Lucia" },
{ code: "LI", label: "Liechtenstein" },
{ code: "LK", label: "Sri Lanka" },
{ code: "LR", label: "Liberia" },
{ code: "LS", label: "Lesotho" },
{ code: "LT", label: "Lithuania" },
{ code: "LU", label: "Luxembourg" },
{ code: "LV", label: "Latvia" },
{ code: "LY", label: "Libya" },
{ code: "MA", label: "Morocco" },
{ code: "MC", label: "Monaco" },
{ code: "MD", label: "Moldova, Republic of" },
{ code: "ME", label: "Montenegro" },
{ code: "MF", label: "Saint Martin (French part)" },
{ code: "MG", label: "Madagascar" },
{ code: "MH", label: "Marshall Islands" },
{
code: "MK",
label: "Macedonia, the Former Yugoslav Republic of",
},
{ code: "ML", label: "Mali" },
{ code: "MM", label: "Myanmar" },
{ code: "MN", label: "Mongolia" },
{ code: "MO", label: "Macao" },
{ code: "MP", label: "Northern Mariana Islands" },
{ code: "MQ", label: "Martinique" },
{ code: "MR", label: "Mauritania" },
{ code: "MS", label: "Montserrat" },
{ code: "MT", label: "Malta" },
{ code: "MU", label: "Mauritius" },
{ code: "MV", label: "Maldives" },
{ code: "MW", label: "Malawi" },
{ code: "MX", label: "Mexico" },
{ code: "MY", label: "Malaysia" },
{ code: "MZ", label: "Mozambique" },
{ code: "NA", label: "Namibia" },
{ code: "NC", label: "New Caledonia" },
{ code: "NE", label: "Niger" },
{ code: "NF", label: "Norfolk Island" },
{ code: "NG", label: "Nigeria" },
{ code: "NI", label: "Nicaragua" },
{ code: "NL", label: "Netherlands" },
{ code: "NO", label: "Norway" },
{ code: "NP", label: "Nepal" },
{ code: "NR", label: "Nauru" },
{ code: "NU", label: "Niue" },
{ code: "NZ", label: "New Zealand" },
{ code: "OM", label: "Oman" },
{ code: "PA", label: "Panama" },
{ code: "PE", label: "Peru" },
{ code: "PF", label: "French Polynesia" },
{ code: "PG", label: "Papua New Guinea" },
{ code: "PH", label: "Philippines" },
{ code: "PK", label: "Pakistan" },
{ code: "PL", label: "Poland" },
{ code: "PM", label: "Saint Pierre and Miquelon" },
{ code: "PN", label: "Pitcairn" },
{ code: "PR", label: "Puerto Rico" },
{ code: "PS", label: "Palestine, State of" },
{ code: "PT", label: "Portugal" },
{ code: "PW", label: "Palau" },
{ code: "PY", label: "Paraguay" },
{ code: "QA", label: "Qatar" },
{ code: "RE", label: "Reunion" },
{ code: "RO", label: "Romania" },
{ code: "RS", label: "Serbia" },
{ code: "RU", label: "Russian Federation" },
{ code: "RW", label: "Rwanda" },
{ code: "SA", label: "Saudi Arabia" },
{ code: "SB", label: "Solomon Islands" },
{ code: "SC", label: "Seychelles" },
{ code: "SD", label: "Sudan" },
{ code: "SE", label: "Sweden" },
{ code: "SG", label: "Singapore" },
{ code: "SH", label: "Saint Helena" },
{ code: "SI", label: "Slovenia" },
{ code: "SJ", label: "Svalbard and <NAME>" },
{ code: "SK", label: "Slovakia" },
{ code: "SL", label: "Sierra Leone" },
{ code: "SM", label: "San Marino" },
{ code: "SN", label: "Senegal" },
{ code: "SO", label: "Somalia" },
{ code: "SR", label: "Suriname" },
{ code: "SS", label: "South Sudan" },
{ code: "ST", label: "Sao Tome and Principe" },
{ code: "SV", label: "El Salvador" },
{ code: "SX", label: "Sint Maarten (Dutch part)" },
{ code: "SY", label: "Syrian Arab Republic" },
{ code: "SZ", label: "Swaziland" },
{ code: "TC", label: "Turks and Caicos Islands" },
{ code: "TD", label: "Chad" },
{ code: "TF", label: "French Southern Territories" },
{ code: "TG", label: "Togo" },
{ code: "TH", label: "Thailand" },
{ code: "TJ", label: "Tajikistan" },
{ code: "TK", label: "Tokelau" },
{ code: "TL", label: "Timor-Leste" },
{ code: "TM", label: "Turkmenistan" },
{ code: "TN", label: "Tunisia" },
{ code: "TO", label: "Tonga" },
{ code: "TR", label: "Turkey" },
{ code: "TT", label: "Trinidad and Tobago" },
{ code: "TV", label: "Tuvalu" },
{ code: "TW", label: "Taiwan, Province of China" },
{ code: "TZ", label: "United Republic of Tanzania" },
{ code: "UA", label: "Ukraine" },
{ code: "UG", label: "Uganda" },
{ code: "US", label: "United States" },
{ code: "UY", label: "Uruguay" },
{ code: "UZ", label: "Uzbekistan" },
{ code: "VA", label: "Holy See (Vatican City State)" },
{ code: "VC", label: "Saint Vincent and the Grenadines" },
{ code: "VE", label: "Venezuela" },
{ code: "VG", label: "British Virgin Islands" },
{ code: "VI", label: "US Virgin Islands" },
{ code: "VN", label: "Vietnam" },
{ code: "VU", label: "Vanuatu" },
{ code: "WF", label: "Wallis and Futuna" },
{ code: "WS", label: "Samoa" },
{ code: "XK", label: "Kosovo" },
{ code: "YE", label: "Yemen" },
{ code: "YT", label: "Mayotte" },
{ code: "ZA", label: "South Africa" },
{ code: "ZM", label: "Zambia" },
{ code: "ZW", label: "Zimbabwe" },
];
|
/**
* Copyright 2015 Autodesk Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
/**
* @ngdoc service
* @name wetLabAccelerator.dyeOptions
* @description
* # dyeOptions
* Constant in the wetLabAccelerator.
*/
// Dye name options grouped by detection channel (channel1..channel5);
// each value is the list of dye identifiers selectable for that channel.
// NOTE(review): these look like fluorescence dyes per optical channel of a
// thermocycler/qPCR instrument — confirm against instrument documentation.
angular.module('wetLabAccelerator').constant('DyeOptions', {
"channel1" : ["FAM","SYBR"],
"channel2" : ["VIC","HEX","TET","CALGOLD540"],
"channel3" : ["ROX","TXR","CALRED610"],
"channel4" : ["CY5","QUASAR670"],
"channel5" : ["QUASAR705"]
});
|
<reponame>mjochab/Inzynierski_projekt_zespolowy_2018_gr3<filename>src/main/java/patron/mains/guis/main/controllerExtends/_10_ContainerPaneControllerExtend.java
package patron.mains.guis.main.controllerExtends;
import patron.tickets.enums.TicketType;
import patron.tickets.factories.TicketViewFactory;
import com.appscharles.libs.fxer.abstracts.AbstractStageControllerFxExtend;
import com.appscharles.libs.fxer.switchers.PaneViewSwitcher;
import javafx.fxml.FXML;
import javafx.scene.control.SplitPane;
import javafx.scene.layout.AnchorPane;
/**
* The type 10 container pane controller extend.
*/
public class _10_ContainerPaneControllerExtend extends AbstractStageControllerFxExtend {
/**
* The Container pane.
*/
@FXML
protected AnchorPane containerPane;
/**
* The Split pane.
*/
@FXML
protected SplitPane splitPane;
/**
* Instantiates a new 10 container pane controller extend.
*/
protected _10_ContainerPaneControllerExtend(){
// On FXML initialization, mount the default ticket view (flight tickets,
// Polish label "Bilety na loty") into the container pane via a switcher.
// NOTE(review): containerPane is injected by FXML; this hook presumably
// runs after injection — confirm AbstractStageControllerFxExtend ordering.
this.addOnInitializeWithSneakyThrow((resourceBundle)->{
PaneViewSwitcher paneViewSwitcher = new PaneViewSwitcher(this.containerPane);
paneViewSwitcher.switchTo(new TicketViewFactory("Bilety na loty", TicketType.FLIGHT).create());
});
}
}
|
package com.galfins.gnss_compare.Constellations;
import android.location.GnssClock;
import android.location.GnssMeasurement;
import android.location.GnssMeasurementsEvent;
import android.location.GnssStatus;
import android.location.Location;
import android.util.Log;
import java.util.ArrayList;
import java.util.List;
import com.galfins.gnss_compare.Corrections.Correction;
import com.galfins.gnss_compare.MainActivity;
import com.galfins.gogpsextracts.Constants;
import com.galfins.gogpsextracts.Coordinates;
import com.galfins.gogpsextracts.NavigationProducer;
import com.galfins.gogpsextracts.RinexNavigationGps;
import com.galfins.gogpsextracts.SatellitePosition;
import com.galfins.gogpsextracts.Time;
import com.galfins.gogpsextracts.TopocentricCoordinates;
/**
* Created by <NAME> on 17/02/2018.
* This class is for...
* <p>
* GPS Pseudorange computation algorithm by: <NAME>
* - variable name changes and comments were added
* to fit the description in the GSA white paper
* by: <NAME>
*/
// Computes GPS pseudoranges from Android raw GNSS measurements and resolves
// satellite positions from broadcast (RINEX) ephemerides. All public state
// accessors synchronize on `this` because updateMeasurements() runs on the
// GNSS callback thread while consumers read from other threads.
public class GpsConstellation extends Constellation {
private final static char satType = 'G';
private static final String NAME = "GPS";
private static final String TAG = "GpsConstellation";
private boolean fullBiasNanosInitialized = false;
private long FullBiasNanos;
private Coordinates rxPos;
protected double tRxGPS;
protected double weekNumberNanos;
private static final int constellationId = GnssStatus.CONSTELLATION_GPS;
private static double MASK_ELEVATION = 20; // degrees
// NOTE(review): MASK_CN0 is declared but never applied anywhere in this
// class — CN0-based satellite masking appears unimplemented.
private static double MASK_CN0 = 10; // dB-Hz
/**
* Time of the measurement
*/
private Time timeRefMsec;
protected int visibleButNotUsed = 0;
// Condition for the pseudoranges that takes into account a maximum uncertainty for the TOW
// (as done in gps-measurement-tools MATLAB code)
private static final int MAX_TOW_UNC_NS = 50; // [nanoseconds]
private NavigationProducer rinexNavGps = null;
/**
* List holding observed satellites
*/
protected List<SatelliteParameters> observedSatellites = new ArrayList<>();
/**
* Corrections which are to be applied to received pseudoranges
*/
private ArrayList<Correction> corrections;
public GpsConstellation() {
// URL template from where the GPS ephemerides should be downloaded
String IGN_NAVIGATION_HOURLY_ZIM2 = "ftp://igs.ensg.ign.fr/pub/igs/data/hourly/${yyyy}/${ddd}/zim2${ddd}${h}.${yy}n.Z";
String NASA_NAVIGATION_HOURLY = "ftp://cddis.gsfc.nasa.gov/pub/gps/data/hourly/${yyyy}/${ddd}/hour${ddd}0.${yy}n.Z";
String GARNER_NAVIGATION_AUTO_HTTP = "http://garner.ucsd.edu/pub/rinex/${yyyy}/${ddd}/auto${ddd}0.${yy}n.Z";
String BKG_HOURLY_SUPER_SEVER = "ftp://igs.bkg.bund.de/IGS/BRDC/${yyyy}/${ddd}/brdc${ddd}0.${yy}n.Z";
// Declare a RinexNavigation type object
// (the null check is always true here since rinexNavGps is only assigned below;
// only the BKG mirror is used — the other URL templates are kept as alternatives)
if(rinexNavGps == null)
rinexNavGps = new RinexNavigationGps(BKG_HOURLY_SUPER_SEVER);
}
@Override
public void addCorrections(ArrayList<Correction> corrections) {
synchronized (this) {
this.corrections = corrections;
}
}
@Override
public Time getTime() {
synchronized (this) {
return timeRefMsec;
}
}
@Override
public String getName() {
synchronized (this) {
return NAME;
}
}
@Override
public void updateMeasurements(GnssMeasurementsEvent event) {
synchronized (this) {
visibleButNotUsed = 0;
observedSatellites.clear();
GnssClock gnssClock = event.getClock();
long TimeNanos = gnssClock.getTimeNanos();
timeRefMsec = new Time(System.currentTimeMillis());
double BiasNanos = gnssClock.getBiasNanos();
double gpsTime, pseudorange;
// Use only the first instance of the FullBiasNanos (as done in gps-measurement-tools)
if (!fullBiasNanosInitialized) {
FullBiasNanos = gnssClock.getFullBiasNanos();
fullBiasNanosInitialized = true;
}
// Start computing the pseudoranges using the raw data from the phone's GNSS receiver
for (GnssMeasurement measurement : event.getMeasurements()) {
// Only GPS measurements are processed by this constellation
if (measurement.getConstellationType() != constellationId)
continue;
long ReceivedSvTimeNanos = measurement.getReceivedSvTimeNanos();
double TimeOffsetNanos = measurement.getTimeOffsetNanos();
// GPS Time generation (GSA White Paper - page 20)
gpsTime =
TimeNanos - (FullBiasNanos + BiasNanos); // TODO intersystem bias?
// Measurement time in full GPS time without taking into account weekNumberNanos(the number of
// nanoseconds that have occurred from the beginning of GPS time to the current
// week number)
tRxGPS =
gpsTime + TimeOffsetNanos;
weekNumberNanos =
Math.floor((-1. * FullBiasNanos) / Constants.NUMBER_NANO_SECONDS_PER_WEEK)
* Constants.NUMBER_NANO_SECONDS_PER_WEEK;
// GPS pseudorange computation
pseudorange =
(tRxGPS - weekNumberNanos - ReceivedSvTimeNanos) / 1.0E9
* Constants.SPEED_OF_LIGHT;
// TODO Check that the measurement have a valid state such that valid pseudoranges are used in the PVT algorithm
/*
According to https://developer.android.com/ the GnssMeasurements States required
for GPS valid pseudoranges are:
int STATE_CODE_LOCK = 1 (1 << 0)
int int STATE_TOW_DECODED = 8 (1 << 3)
*/
int measState = measurement.getState();
// Bitwise AND to identify the states
boolean codeLock = (measState & GnssMeasurement.STATE_CODE_LOCK) != 0;
boolean towDecoded = (measState & GnssMeasurement.STATE_TOW_DECODED) != 0;
boolean towUncertainty = measurement.getReceivedSvTimeUncertaintyNanos() < MAX_TOW_UNC_NS;
// Accept only locked, TOW-decoded measurements with bounded TOW
// uncertainty and a physically plausible pseudorange (< 1e9 m)
if (codeLock && towDecoded && towUncertainty && pseudorange < 1e9) {
SatelliteParameters satelliteParameters = new SatelliteParameters(
measurement.getSvid(),
new Pseudorange(pseudorange, 0.0));
satelliteParameters.setUniqueSatId("G" + satelliteParameters.getSatId());
satelliteParameters.setSignalStrength(measurement.getCn0DbHz());
satelliteParameters.setConstellationType(measurement.getConstellationType());
if(measurement.hasCarrierFrequencyHz())
satelliteParameters.setCarrierFrequency(measurement.getCarrierFrequencyHz());
observedSatellites.add(satelliteParameters);
Log.d(TAG, "updateConstellations(" + measurement.getSvid() + "): " + weekNumberNanos + ", " + tRxGPS + ", " + pseudorange);
Log.d(TAG, "updateConstellations: Passed with measurement state: " + measState);
} else {
visibleButNotUsed++;
}
}
}
}
@Override
public double getSatelliteSignalStrength(int index) {
synchronized (this) {
return observedSatellites.get(index).getSignalStrength();
}
}
@Override
public int getConstellationId() {
synchronized (this) {
return constellationId;
}
}
@Override
public void calculateSatPosition(Location initialLocation, Coordinates position) {
// Make a list to hold the satellites that are to be excluded based on elevation/CN0 masking criteria
List<SatelliteParameters> excludedSatellites = new ArrayList<>();
synchronized (this) {
rxPos = Coordinates.globalXYZInstance(position.getX(), position.getY(), position.getZ());
for (SatelliteParameters observedSatellite : observedSatellites) {
// Computation of the GPS satellite coordinates in ECEF frame
// Determine the current GPS week number
int gpsWeek = (int) (weekNumberNanos / Constants.NUMBER_NANO_SECONDS_PER_WEEK);
// Time of signal reception in GPS Seconds of the Week (SoW)
double gpsSow = (tRxGPS - weekNumberNanos) * 1e-9;
Time tGPS = new Time(gpsWeek, gpsSow);
// Convert the time of reception from GPS SoW to UNIX time (milliseconds)
long timeRx = tGPS.getMsec();
SatellitePosition rnp = ((RinexNavigationGps) rinexNavGps).getSatPositionAndVelocities(
timeRx,
observedSatellite.getPseudorange(),
observedSatellite.getSatId(),
satType,
0.0,
initialLocation);
// No ephemeris available for this satellite: drop it and notify the UI
if (rnp == null) {
excludedSatellites.add(observedSatellite);
MainActivity.makeRnpFailedNotification();
continue;
}
observedSatellite.setSatellitePosition(rnp);
observedSatellite.setRxTopo(
new TopocentricCoordinates(
rxPos,
observedSatellite.getSatellitePosition()));
// Add to the exclusion list the satellites that do not pass the masking criteria
if(observedSatellite.getRxTopo().getElevation() < MASK_ELEVATION){
excludedSatellites.add(observedSatellite);
}
// NOTE(review): `corrections` is only assigned via addCorrections();
// if that is never called before this method, the loop below throws
// a NullPointerException — confirm callers always register corrections.
double accumulatedCorrection = 0;
for (Correction correction : corrections) {
correction.calculateCorrection(
new Time(timeRx),
rxPos,
observedSatellite.getSatellitePosition(),
rinexNavGps,
initialLocation);
accumulatedCorrection += correction.getCorrection();
}
observedSatellite.setAccumulatedCorrection(accumulatedCorrection);
}
// Remove from the list all the satellites that did not pass the masking criteria
observedSatellites.removeAll(excludedSatellites);
}
}
public static void registerClass() {
register(
NAME,
GpsConstellation.class);
}
@Override
public Coordinates getRxPos() {
synchronized (this) {
return rxPos;
}
}
@Override
public void setRxPos(Coordinates rxPos) {
synchronized (this) {
this.rxPos = rxPos;
}
}
@Override
public SatelliteParameters getSatellite(int index) {
synchronized (this) {
return observedSatellites.get(index);
}
}
@Override
public List<SatelliteParameters> getSatellites() {
synchronized (this) {
return observedSatellites;
}
}
@Override
public int getVisibleConstellationSize() {
synchronized (this) {
return getUsedConstellationSize() + visibleButNotUsed;
}
}
@Override
public int getUsedConstellationSize() {
synchronized (this) {
return observedSatellites.size();
}
}
}
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 2 11:19:50 2018
@author: mayank
"""
import numpy as np
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics.pairwise import linear_kernel,rbf_kernel,manhattan_distances,polynomial_kernel,sigmoid_kernel,cosine_similarity,laplacian_kernel,paired_euclidean_distances,pairwise_distances
from sklearn.kernel_approximation import RBFSampler, Nystroem
from sklearn.utils import resample
from numpy.matlib import repmat
from sklearn.neighbors import NearestNeighbors
from sklearn.cluster import MiniBatchKMeans
from sklearn.decomposition import IncrementalPCA
from numpy.linalg import eigh
from sklearn.preprocessing import OneHotEncoder
from sparse import COO
from scipy.sparse import csr_matrix, lil_matrix
from scipy.sparse import issparse
from scipy.sparse import hstack
#%%
class utils:
# def __init__(self):
# return None
def add_bias(self,xTrain):
"""
Adds bias to the data
Parameters:
-----------
xTrain: 2D numpy ndarray/csr_matrix of shape (n_samples, n_features)
Returns:
--------
xTrain: 2D numpy ndarray/csr_matrix of shape (n_samples, n_features + 1)
"""
N = xTrain.shape[0]
if(xTrain.size!=0):
if(issparse(xTrain)==True):
xTrain = csr_matrix(hstack([xTrain,np.ones((N,1))]))
else:
xTrain=np.hstack((xTrain,np.ones((N,1))))
return xTrain
def logsig(self,x):
return 1 / (1 + np.exp(-x))
def saturate_fcn1(self,x,a = 2):
y = np.zeros(x.shape)
idx1 = (x <= a)*(x >=-a)
idx2 = x > a
idx3 = x < -a
y[idx1] = x[idx1]/(2*a) + 1.0/2.0
y[idx2] = 1
y[idx3] = 0
return y
def standardize(self,xTrain,centering):
"""
Transform the data so that each column has zero mean and unit standard deviation
Parameters:
-----------
xTrain: 2D numpy ndarray of shape (n_samples, n_features)
centering: bool,
whether to perform standardization,
if False, it returns me = np.zeros((xTrain.shape[1],))
and std_dev = np.ones((xTrain.shape[1],))
Returns:
--------
xTrain: 2D numpy ndarray of shape (n_samples, n_features)
me: mean of the columns
std_dev: standard deviation of the columns
"""
if(centering == True):
me=np.mean(xTrain,axis=0)
std_dev=np.std(xTrain,axis=0)
else:
me = np.zeros((xTrain.shape[1],))
std_dev = np.ones((xTrain.shape[1],))
#remove columns with zero std
idx=(std_dev!=0.0)
# print(idx.shape)
xTrain[:,idx]=(xTrain[:,idx]-me[idx])/std_dev[idx]
return xTrain,me,std_dev
def divide_into_batches_stratified(self,yTrain,batch_sz):
"""
Divides the data into batches such that each batch contains similar proportion of labels in it
Parameters:
----------
yTrain: np.ndarray labels for the datset of shape (n_samples, )
Returns:
--------
idx_batches: list
index of yTrain in each batch
sample_weights: np.ndarray of size (n_samples,)
weights for each sample in batch = 1/#class_j
num_batches: int
number of batches formed
"""
#data should be of the form samples X features
N=yTrain.shape[0]
num_batches=int(np.ceil(N/batch_sz))
sample_weights=list()
numClasses=np.unique(yTrain).size
idx_batches=list()
skf=StratifiedKFold(n_splits=num_batches, random_state=1, shuffle=True)
j=0
for train_index, test_index in skf.split(np.zeros(N), yTrain):
idx_batches.append(test_index)
class_weights=np.zeros((numClasses,))
sample_weights1=np.zeros((test_index.shape[0],))
temp=yTrain[test_index,]
for i in range(numClasses):
idx1=(temp==i)
class_weights[i]=1.0/(np.sum(idx1)+1e-09)#/idx.shape[0]
sample_weights1[idx1]=class_weights[i]
sample_weights.append(sample_weights1)
j+=1
return idx_batches,sample_weights,num_batches
def margin_kernel(self, X1, kernel_type='linear', gamma=1.0):
    """
    Forms the square kernel matrix of the samples X1 with themselves.

    Parameters:
    ----------
    X1: np.ndarray
        data (n_samples, n_features) to form a kernel of shape (n_samples, n_samples)
    kernel_type: str
        one of 'linear', 'rbf', 'tanh', 'sin', 'TL1'
    gamma: float
        kernel parameter

    Returns:
    -------
    np.ndarray of shape (n_samples, n_samples), or None for an unknown kernel_type
    """
    if kernel_type == 'linear':
        return linear_kernel(X1, X1)
    if kernel_type == 'rbf':
        return rbf_kernel(X1, X1, gamma)
    if kernel_type == 'tanh':
        return sigmoid_kernel(X1, X1, -gamma)
    if kernel_type == 'sin':
        # X = np.sin(gamma*manhattan_distances(X1,X1))
        return np.sin(gamma * pairwise_distances(X1, X1) ** 2)
    if kernel_type == 'TL1':
        return np.maximum(0, gamma - manhattan_distances(X1, X1))
    print('no kernel_type, returning None')
    return None
def kernel_transform(self, X1, X2=None, kernel_type='linear_primal', n_components=100, gamma=1.0):
    """
    Forms the kernel (or primal feature) matrix from the samples X1 (and X2).

    Parameters:
    ----------
    X1: np.ndarray
        data (n_samples1, n_features)
    X2: np.ndarray or None
        data (n_samples2, n_features); used by the pairwise kernels
    kernel_type: str
        'linear', 'rbf', 'tanh', 'sin', 'TL1', 'rff_primal',
        'nystrom_primal' or 'linear_primal'
    n_components: int
        number of components for the approximate primal feature maps
    gamma: float
        kernel parameter

    Returns:
    -------
    np.ndarray — kernel matrix (n_samples1, n_samples2) for the pairwise kernels,
    an (n_samples1, n_components) feature map for the *_primal approximations,
    or X1 unchanged for 'linear_primal' / unknown types
    """
    if kernel_type == 'linear':
        return linear_kernel(X1, X2)
    if kernel_type == 'rbf':
        return rbf_kernel(X1, X2, gamma)
    if kernel_type == 'tanh':
        return sigmoid_kernel(X1, X2, -gamma)
    if kernel_type == 'sin':
        # X = np.sin(gamma*manhattan_distances(X1,X2))
        return np.sin(gamma * pairwise_distances(X1, X2) ** 2)
    if kernel_type == 'TL1':
        return np.maximum(0, gamma - manhattan_distances(X1, X2))
    if kernel_type == 'rff_primal':
        sampler = RBFSampler(gamma=gamma, random_state=1, n_components=n_components)
        return sampler.fit_transform(X1)
    if kernel_type == 'nystrom_primal':
        # Nystroem cannot draw more landmark points than there are samples
        if n_components > X1.shape[0]:
            raise ValueError('n_samples should be greater than n_components')
        sampler = Nystroem(gamma=gamma, random_state=1, n_components=n_components)
        return sampler.fit_transform(X1)
    if kernel_type == 'linear_primal':
        return X1
    print('No kernel_type passed: using linear primal solver')
    return X1
def generate_samples(self,X_orig,old_imbalance_ratio,new_imbalance_ratio):
    """
    Generates synthetic samples so that the new (higher) imbalance ratio is achieved
    Parameters:
    ----------
    X_orig: np.array (n_samples , n_features)
        data matrix
    old_imbalance_ratio: float
        old imbalance ratio in the samples
    new_imbalance_ratio: float
        new imbalance ratio in the samples; must be >= old_imbalance_ratio
    Returns:
    -------
    X_orig: np.array (n_samples , n_features)
        original data matrix, unchanged
    X1: 2D np.array
        newly generated samples of shape (int((new_imbalance_ratio/old_imbalance_ratio)*n_samples - n_samples), n_features )
    """
    N=X_orig.shape[0]
    M=X_orig.shape[1]
    # cap on how many nearest neighbors model the local distribution of a point
    neighbors_thresh=10
    if (new_imbalance_ratio < old_imbalance_ratio):
        raise ValueError('new ratio should be greater than old ratio')
    # total number of synthetic samples needed to reach the new ratio
    new_samples=int((new_imbalance_ratio/old_imbalance_ratio)*N - N)
    #each point must generate these many samples
    new_samples_per_point_orig=new_imbalance_ratio/old_imbalance_ratio - 1
    new_samples_per_point=int(new_imbalance_ratio/old_imbalance_ratio - 1)
    #check if the number of samples each point has to generate is > 1
    X1=np.zeros((0,M))
    # fractional per-point rate in (0,1]: only a deterministic random subset
    # of the points generates one synthetic sample each
    if(new_samples_per_point_orig>0 and new_samples_per_point_orig<=1):
        idx_samples=resample(np.arange(0,N), n_samples=int(N*new_samples_per_point_orig), random_state=1,replace=False)
        X=X_orig[idx_samples,]
        new_samples_per_point=1
        N=X.shape[0]
    else:
        X=X_orig
    if(N==1):
        # single point: no neighborhood to model, just replicate the point
        X1=repmat(X,new_samples,1)
    elif(N>1):
        # with few points, use half of them as the neighborhood
        if(N<=neighbors_thresh):
            n_neighbors=int(N/2)
        else:
            n_neighbors=neighbors_thresh
        nbrs = NearestNeighbors(n_neighbors=n_neighbors, algorithm='ball_tree').fit(X)
        for i in range(N):
            #for each point find its n_neighbors nearest neighbors
            inds=nbrs.kneighbors(X[i,:].reshape(1,-1), n_neighbors, return_distance=False)
            temp_data=X[inds[0],:]
            # model the neighborhood as an axis-aligned Gaussian and sample from it
            std=np.std(temp_data,axis=0)
            me=np.mean(temp_data,axis=0)
            # per-point seed keeps generation reproducible
            np.random.seed(i)
            x_temp=me + std*np.random.randn(new_samples_per_point,M)
            X1=np.append(X1,x_temp,axis=0)
    return X_orig, X1
def upsample(self, X, Y, new_imbalance_ratio):
    """
    Upsamples the data based on the label array (classification only):
    classes whose size ratio to the largest class is <= 0.5 are augmented
    with synthetic samples up to new_imbalance_ratio.

    Parameters:
    ----------
    X: np.array (n_samples, n_features)
        2D data matrix
    Y: np.array (n_samples,)
        label array, values in [0, numClasses-1]
    new_imbalance_ratio: float
        target imbalance ratio, typically in [0.5, 1]

    Returns:
    -------
    X3: np.array (n_samples1, n_features)
        new balanced 2D data matrix (originals followed by synthetic samples, per class)
    Y3: np.array (n_samples1,) of int32
        new balanced label array
    """
    numClasses = np.unique(Y).size
    class_samples = np.zeros((numClasses,))
    # samples per class
    for i in range(numClasses):
        class_samples[i] = np.sum(Y == i)
    max_samples = np.max(class_samples)
    # only classes at most half the size of the largest class get augmented
    old_imbalance_ratio_thresh = 0.5
    # Collect per-class pieces and concatenate once at the end: the original
    # code called np.append per class, which re-copies the whole accumulated
    # array each time (quadratic in the number of classes/samples).
    X_parts = []
    Y_parts = []
    for i in range(numClasses):
        idx1 = (Y == i)
        old_imbalance_ratio = class_samples[i] / max_samples
        X1 = X[idx1, :]
        Y1 = Y[idx1,]
        if old_imbalance_ratio <= old_imbalance_ratio_thresh and class_samples[i] != 0:
            X1, X2 = self.generate_samples(X1, old_imbalance_ratio, new_imbalance_ratio)
            # synthetic samples inherit the class label
            Y2 = np.ones((X2.shape[0],)) * Y1[0,]
            X_parts.extend([X1, X2])
            Y_parts.extend([Y1, Y2])
        else:
            X_parts.append(X1)
            Y_parts.append(Y1)
    X3 = np.concatenate(X_parts, axis=0) if X_parts else np.zeros((0, X.shape[1]))
    Y3 = np.concatenate(Y_parts, axis=0) if Y_parts else np.zeros((0,))
    Y3 = np.array(Y3, dtype=np.int32)
    return X3, Y3
def kmeans_select(self, X, represent_points, do_pca=False):
    """
    Select `represent_points` prototype-vector indices from X: cluster the
    data with MiniBatchKMeans and, from each cluster, pick the sample
    farthest from its center.

    Parameters
    ----------
    X: np.ndarray
        shape = (n_samples, n_features)
    represent_points: int
        number of prototype vectors to return (= number of clusters)
    do_pca: boolean
        reduce to 50 dims with IncrementalPCA first (only when n_features > 50)

    Returns
    -------
    sv: list
        list of the prototype vector indices into X
    """
    n_samples = X.shape[0]
    batch = np.min([128, n_samples])
    if do_pca:
        if X.shape[1] > 50:
            X = IncrementalPCA(n_components=50, batch_size=batch).fit_transform(X)
    kmeans = MiniBatchKMeans(n_clusters=represent_points, batch_size=batch, random_state=0).fit(X)
    labels = kmeans.labels_
    centers = kmeans.cluster_centers_
    all_ind = np.arange(n_samples)
    sv = []
    for label in range(np.unique(labels).size):
        members = (labels == label)
        cluster_rows = X[members, :]
        center = np.reshape(centers[label, :], (1, cluster_rows.shape[1]))
        # squared distances of cluster members to their center
        dist2 = pairwise_distances(cluster_rows, center) ** 2
        farthest = np.argmax(dist2, axis=0)
        sv.append(all_ind[members][farthest[0]])
    return sv
def renyi_select(self,X,represent_points,do_pca=False):
    """
    Takes in data and number of prototype vectors and returns the indices of the prototype vectors.
    The prototype vectors are selected based on maximization of quadratic renyi entropy, which can be
    written in terms of log sum exp which is a tightly bounded by max operator. Now for rbf kernel,
    the max_{ij}(-\|x_i-x_j\|^2) is equivalent to min_{ij}(\|x_i-x_j\|^2).
    Parameters
    ----------
    X: np.ndarray
        shape = n_samples, n_features
    represent_points: int
        number of prototype vectors to return
    do_pca: boolean
        whether to perform incremental pca for dimensionality reduction before selecting prototype vectors
    Returns
    -------
    sv: list
        list of the prototype vector indices from the data array given by X
    """
    # do_pca = self.do_pca_in_selection
    N= X.shape[0]
    capacity=represent_points
    selectionset=set([])
    set_full=set(list(range(N)))
    np.random.seed(1)
    # selectionset is created empty just above, so the else branch below is
    # dead code. NOTE(review): if it were ever reached, set.append() would
    # raise AttributeError -- presumably set.update()/list was intended.
    if(len(selectionset)==0):
        # initialize with the first `capacity` indices of a random permutation
        selectionset = np.random.permutation(N)
        sv = list(selectionset)[0:capacity]
    else:
        extrainputs = represent_points - len(selectionset)
        leftindices =list(set_full.difference(selectionset))
        info = np.random.permutation(len(leftindices))
        info = info[1:extrainputs]
        sv = selectionset.append(leftindices[info])
    if(do_pca == True):
        if(X.shape[1]>50): #takes more time
            n_components = 50
            ipca = IncrementalPCA(n_components=n_components, batch_size=np.min([128,X.shape[0]]))
            X = ipca.fit_transform(X)
    svX = X[sv,:]
    # min_info[:,0] = slot id, min_info[:,1] = min squared distance from that
    # prototype to the other current prototypes
    min_info = np.zeros((capacity,2))
    KsV = pairwise_distances(svX,svX)**2 #this is fast
    # mask self-distances so the row-wise minimum ignores them
    KsV[KsV==0] = np.inf
    min_info[:,1] = np.min(KsV,axis=1)
    min_info[:,0] = np.arange(capacity)
    minimum = np.min(min_info[:,1])
    counter = 0
    # single greedy pass over all samples: try swapping each sample into the
    # slot with the smallest min-distance; keep the swap if it improves the
    # overall (max-min distance) criterion
    for i in range(N):
        # find for which data the value is minimum
        replace = np.argmin(min_info[:,1])
        ids = int(min_info[min_info[:,0]==replace,0])
        #Subtract from totalcrit once for row
        tempminimum = minimum - min_info[ids,1]
        #Try to evaluate kernel function
        tempsvX = np.zeros(svX.shape)
        tempsvX[:] = svX[:]
        inputX = X[i,:]
        tempsvX[replace,:] = inputX
        tempK = pairwise_distances(tempsvX,np.reshape(inputX,(1,X.shape[1])))**2 #this is fast
        tempK[tempK==0] = np.inf
        distance_eval = np.min(tempK)
        tempminimum = tempminimum + distance_eval
        if (minimum < tempminimum):
            # the swap raises the criterion: commit it
            minimum = tempminimum
            min_info[ids,1] = distance_eval
            svX[:] = tempsvX[:]
            sv[ids] = i
            counter +=1
    return sv
def subset_selection(self, X, Y, n_components, PV_scheme, problem_type, do_pca=False):
    """
    Select a subset (list of indices) of about n_components prototype vectors.
    For regression the whole dataset is used at once; for classification the
    budget is split across classes proportionally to class frequency.

    Parameters:
    ----------
    X: np.array (n_samples, n_features)
        data matrix
    Y: np.array (n_samples,)
        label array (continuous or discrete)
    n_components: int
        total number of prototype vectors requested
    PV_scheme: str
        prototype vector selection scheme ('renyi' or 'kmeans')
    problem_type: str
        'classification' or 'regression'
    do_pca: bool
        forwarded to the selection scheme

    Returns:
    --------
    subset: list
        indices of the selected prototype vectors
    """
    def _pick(data, count):
        # Dispatch to the configured prototype-selection scheme.
        if PV_scheme == 'renyi':
            return self.renyi_select(data, count, do_pca)
        if PV_scheme == 'kmeans':
            return self.kmeans_select(data, count, do_pca)
        raise ValueError('Select PV_scheme between renyi and kmeans')

    N = X.shape[0]
    if problem_type == 'regression':
        return _pick(X, n_components)
    numClasses = np.unique(Y).size
    all_samples = np.arange(N)
    subset = []
    for i in range(numClasses):
        # proportional (rounded-up) share of the budget for this class
        class_count = np.sum(Y == i)
        per_class = int(np.ceil((class_count / N) * n_components))
        in_class = all_samples[Y == i]
        chosen = _pick(X[Y == i,], per_class)
        subset.extend(list(in_class[chosen]))
    return subset
def matrix_decomposition(self, X):
    """
    Split a symmetric kernel matrix into its positive- and negative-eigenvalue parts.

    Parameters:
    ----------
    X: np.ndarray (n_samples, n_samples), symmetric

    Returns:
    --------
    K_plus: kernel built from the strictly positive eigenvalues (PSD)
    K_minus: minus the kernel built from the non-positive eigenvalues (PSD),
        so that X == K_plus - K_minus
    """
    eigvals, eigvecs = eigh(X)
    pos = eigvals > 0.0
    neg = ~pos
    # U * d broadcasts d over the columns, equivalent to U @ diag(d)
    K_plus = np.dot(eigvecs[:, pos] * eigvals[pos], eigvecs[:, pos].T)
    K_minus = -np.dot(eigvecs[:, neg] * eigvals[neg], eigvecs[:, neg].T)
    return K_plus, K_minus
def zero_one_normalization(self, x):
    """
    Perform column-wise 0-1 (min-max) normalization of x.

    Parameters:
    ----------
    x: 2d np.array (n_samples, n_features)
        data matrix

    Returns:
    --------
    2d np.array (n_samples, n_features), each column rescaled to [0, 1]
    (columns with a constant value produce division by zero)
    """
    col_min = np.min(x, axis=0)
    col_max = np.max(x, axis=0)
    return (x - col_min) / (col_max - col_min)
def quantize(self, x, levels):
    """
    Quantize a matrix into `levels` integer buckets.
    The data must be zero-one normalized beforehand.

    Parameters:
    ----------
    x: np.ndarray
        data matrix with values in [0, 1]
    levels: int
        number of quantization levels

    Returns:
    -------
    q: np.ndarray of int8
        quantized data matrix with values in [0, levels-1]

    Raises:
    -------
    ValueError if any entry of x is negative
    """
    if np.sum(x < 0) > 0:
        raise ValueError('data is not zero-one normalized')
    q = np.zeros(x.shape, dtype=np.int8)
    # each threshold i/levels crossed bumps the bucket index by one
    for level in range(1, levels):
        q += (x > 1.0 * level / levels).astype(np.int8)
    return q
def dequantize(self, x, levels):
    """
    Map quantized integer values back to the [0, 1] range.
    The input should have been quantized with the same `levels`.

    Parameters:
    ----------
    x: np.ndarray
        quantized data matrix with values in [0, levels-1]
    levels: int
        number of quantization levels (must be > 1)

    Returns:
    -------
    np.ndarray
        dequantized data matrix in [0, 1]

    Raises:
    -------
    ValueError if levels == 1 (would divide by zero)
    """
    if levels == 1:
        raise ValueError('levels should be greater than 1!')
    return x / (levels - 1)
def labels2onehot(self,labels):
    """
    performs one hot encoding on labels in range (0,levels)
    Parameters:
    -----------
    labels: np.array (n_samples,)
        labels for each sample; assumed to take integer values 0..levels-1
    Returns:
    -------
    onehotvec: CSR matrix (n_samples,levels)
        one hot vector of labels
    """
    # width of the one-hot encoding = number of distinct label values
    levels = np.unique(labels).size
    N = labels.shape[0]
    # OneHotEncoder expects a 2D column of labels
    labels = labels.reshape(N,-1)
    # NOTE(review): `n_values` was removed in scikit-learn 0.22; this code
    # requires an older sklearn (newer versions use `categories` instead).
    enc = OneHotEncoder(n_values= levels)
    onehotvec = enc.fit_transform(labels)
    return onehotvec
def onehot(self,arr,levels,issparse=False):
    """
    performs one hot encoding of the quantized 2D data matrix into levels specified by user
    Parameters:
    ----------
    arr: np.array (n_samples,n_features)
        data matrix; assumed quantized to integer values 0..levels-1
    levels: int
        number of one hot levels
    issparse: bool
        whether to output a sparse COO matrix, requires 'sparse' package
    Returns:
    -------
    arr: np.ndarray (n_samples, n_features, levels) or sparse.COO matrix
        one hot encoded matrix
    """
    N,M = arr.shape
    arr = arr.reshape(N,-1)
    # NOTE(review): `n_values` was removed and `sparse` renamed in newer
    # scikit-learn; this code requires an older sklearn version.
    enc = OneHotEncoder(n_values=levels,sparse=False,dtype = np.int8)
    arr = enc.fit_transform(arr)
    # regroup the flat (N, M*levels) encoding into a 3D tensor
    arr = arr.reshape(N,M,levels)
    if(issparse ==True):
        arr = COO.from_numpy(arr)
    return arr
def onehot_minibatch(self,arr,levels):
    """
    performs one hot encoding of the quantized 2D data matrix into levels specified by user,
    processing the rows in mini-batches to bound peak memory
    Parameters:
    ----------
    arr: np.array (n_samples,n_features)
        data matrix; assumed quantized to integer values 0..levels-1
    levels: int
        number of one hot levels
    Returns:
    -------
    arr2: csr matrix (n_samples, n_features*levels)
        one hot encoded matrix
    """
    N,M = arr.shape
    arr = arr.reshape(N,-1)
    # NOTE(review): `n_values` was removed in scikit-learn 0.22; this code
    # requires an older sklearn (newer versions use `categories` instead).
    enc = OneHotEncoder(n_values=levels,sparse=False,dtype = np.int8)
    # lil_matrix supports cheap row-block assignment while building
    arr2 = lil_matrix((N,M*levels),dtype = np.int8)
    batch_sz = np.min([10000,N])
    num_batches=int(np.ceil(N/batch_sz))
    for j in range(num_batches):
        if(j==num_batches-1):
            # the last batch may be smaller than batch_sz
            remainder= N-batch_sz*(num_batches-1)
            test_idx=np.array(range(0,remainder),dtype = np.int32)+ j*batch_sz
        else:
            test_idx=np.array(range(0,batch_sz),dtype = np.int32)+ j*batch_sz
        arr1 = enc.fit_transform(arr[test_idx,])
        arr2[test_idx,:] = arr1
    # convert to CSR for efficient downstream slicing/arithmetic
    arr2 = csr_matrix(arr2)
    return arr2
def tempcode_minibatch(self,arr,levels):
    """
    performs thermometer encoding of the one hot encoded 2D data matrix into levels specified by user,
    processing the rows in mini-batches to bound peak memory
    Parameters:
    ----------
    arr: sparse matrix (n_samples, n_features*levels)
        one hot encoded data matrix (e.g. from onehot_minibatch)
    levels: int
        number of thermometer encoding levels
        levels should be equal to the levels of arr
    Returns:
    -------
    tempcode1: csr matrix (n_samples, n_features*levels)
        thermometer encoded matrix
    """
    N, M1 =arr.shape
    tempcode1 = lil_matrix((N,M1),dtype = np.int8)
    batch_sz = np.min([10000,N])
    num_batches=int(np.ceil(N/batch_sz))
    for j in range(num_batches):
        if(j==num_batches-1):
            # the last batch may be smaller than batch_sz
            remainder= N-batch_sz*(num_batches-1)
            test_idx=np.array(range(0,remainder),dtype = np.int32)+ j*batch_sz
        else:
            test_idx=np.array(range(0,batch_sz),dtype = np.int32)+ j*batch_sz
        arr1 = arr[test_idx,:].toarray()
        N1 = arr1.shape[0]
        # view the flat rows as (samples, features, levels) to work per level
        arr1 = np.reshape(arr1,(N1,int(M1/levels),levels))
        tempcode = np.zeros(arr1.shape,dtype =np.int8)
        # slot i+1 receives the running sum of the first i+1 one-hot slices
        # (slot 0 stays zero)
        for i in range(levels-1):
            tempcode[:,:,i+1] = np.sum(arr1[:,:,:i+1],axis=2)
        # NOTE(review): this 0<->1 swap complements the running-sum code, so
        # the output is the bitwise NOT of a plain thermometer code -- confirm
        # this matches what downstream consumers expect.
        idx_zero = tempcode ==0
        idx_one = tempcode ==1
        tempcode[idx_zero ] =1
        tempcode[idx_one] = 0
        tempcode = np.reshape(tempcode,(N1,M1))
        tempcode1[test_idx,] = tempcode
    tempcode1 = csr_matrix(tempcode1)
    return tempcode1
def tempcode_ICLR2018(self, arr, levels, issparse=False):
    """
    Thermometer-encode a one-hot encoded 3D data matrix, as in the ICLR 2018
    scheme: one-hot [0,1,0] becomes thermometer [0,1,1].

    Parameters:
    ----------
    arr: np.array (n_samples, n_features, levels)
        one hot encoded data matrix
    levels: int
        number of thermometer encoding levels;
        must equal arr.shape[2]
    issparse: bool
        whether to output a sparse COO matrix, requires 'sparse' package

    Returns:
    -------
    tempcode: np.ndarray of int8 (n_samples, n_features, levels) or sparse.COO

    Raises:
    -------
    ValueError if levels does not match arr.shape[2]
    """
    if levels != arr.shape[2]:
        raise ValueError('Levels specified by the user does not match the one hot encoded input')
    # A cumulative sum along the level axis replaces the original per-level
    # loop of partial sums (O(levels^2) work) with a single O(levels) pass;
    # dtype=np.int8 preserves the original output dtype.
    tempcode = np.cumsum(arr, axis=2, dtype=np.int8)
    if issparse:
        tempcode = COO.from_numpy(tempcode)
    return tempcode
def select_(self, xTest, xTrain, kernel_type, subset, idx_features, idx_samples):
    """
    Select samples and features from the train/test data by index.

    Parameters:
    ----------
    xTest: np.array (n_samples, n_features)
        test data
    xTrain: np.array (n_samples, n_features)
        train data
    kernel_type: str
        one of linear, rbf, sin, tanh, TL1, linear_primal, rff_primal, nystrom_primal
    subset: list
        prototype-vector indices into the (sample- and feature-filtered) train data
    idx_features: np.array
        indices of features to keep
    idx_samples: np.array
        indices of train samples to keep

    Returns:
    -------
    X1: np.array
        feature subset of xTest
    X2: np.array or None
        sample/feature/prototype subset of xTrain
        (None unless kernel_type is one of linear, rbf, sin, tanh, TL1)
    """
    X1 = xTest[:, idx_features]
    if kernel_type in ('linear', 'rbf', 'sin', 'tanh', 'TL1'):
        # non-linear kernels need the prototype rows of the train data too
        if len(subset) == 0:
            raise ValueError('Subset cannot be of zero length!')
        X2 = xTrain[idx_samples, :][:, idx_features][subset,]
    else:
        X2 = None
    return X1, X2
def normalize_(self, xTrain, me, std):
    """
    Normalize the data (in place) with the given per-column mean and std.

    Parameters:
    -----------
    xTrain: 2D np.array (n_samples, n_features)
        data matrix, modified in place
    me: np.array (n_features,)
        per-column means to subtract
    std: np.array (n_features,)
        per-column standard deviations to divide by

    Returns:
    -------
    xTrain: 2D np.array (n_samples, n_features)
        the normalized (and modified) input matrix
    """
    # skip columns whose std is exactly zero to avoid division by zero
    scaled = (std != 0.0)
    xTrain[:, scaled] = (xTrain[:, scaled] - me[scaled]) / std[scaled]
    return xTrain
|
// Copyright 2019 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package mesh
import (
"io/ioutil"
"path/filepath"
"testing"
"istio.io/operator/pkg/object"
)
// testGroup describes a table of manifest-generate test cases. Each entry
// names a testdata file and optional diff filters for comparing generated
// output against the golden file.
type testGroup []struct {
	desc       string // base name of the input/output testdata YAML files
	flags      string // extra command-line flags passed to "manifest generate"
	diffSelect string // objects to include in the diff; "*:*:*" (all) when empty
	diffIgnore string // objects to exclude from the diff
}
// TestManifestGenerateFlags exercises manifest generation with everything
// disabled, everything enabled, and values set via the -s flag.
func TestManifestGenerateFlags(t *testing.T) {
	runTestGroup(t, testGroup{
		{
			desc: "all_off",
		},
		{
			desc:       "all_on",
			diffIgnore: "ConfigMap:*:istio",
		},
		{
			desc:       "flag_set_values",
			diffIgnore: "ConfigMap:*:istio",
			flags:      "-s values.global.proxy.image=myproxy",
		},
		// TODO: test output flag
	})
}
// TestManifestGeneratePilot exercises manifest generation for the pilot
// component with default, k8s-settings and override configurations.
func TestManifestGeneratePilot(t *testing.T) {
	runTestGroup(t, testGroup{
		{
			desc: "pilot_default",
			// TODO: remove istio ConfigMap
			diffIgnore: "CustomResourceDefinition:*:*,ConfigMap:*:istio",
		},
		{
			desc:       "pilot_k8s_settings",
			diffIgnore: "CustomResourceDefinition:*:*,ConfigMap:*:istio",
		},
		{
			desc:       "pilot_override_values",
			diffSelect: "Deployment:*:istio-pilot",
		},
		{
			desc:       "pilot_override_kubernetes",
			diffSelect: "Deployment:*:istio-pilot, Service:*:istio-pilot",
		},
	})
}
// TestManifestGenerateTelemetry exercises manifest generation for the
// telemetry component with default, k8s-settings and override configurations.
func TestManifestGenerateTelemetry(t *testing.T) {
	runTestGroup(t, testGroup{
		{
			desc: "all_off",
		},
		{
			desc:       "telemetry_default",
			diffIgnore: "",
		},
		{
			desc:       "telemetry_k8s_settings",
			diffSelect: "Deployment:*:istio-telemetry, HorizontalPodAutoscaler:*:istio-telemetry",
		},
		{
			desc:       "telemetry_override_values",
			diffSelect: "handler:*:prometheus",
		},
		{
			desc:       "telemetry_override_kubernetes",
			diffSelect: "Deployment:*:istio-telemetry, handler:*:prometheus",
		},
	})
}
// TestManifestGenerateOrdered verifies that manifest generation yields
// byte-identical output when run twice on the same input.
func TestManifestGenerateOrdered(t *testing.T) {
	// Since this is testing the special case of stable YAML output order, it
	// does not use the established test group pattern
	t.Run("stable_manifest", func(t *testing.T) {
		inPath := filepath.Join(testDataDir, "input", "all_on.yaml")
		first, err := runManifestGenerate(inPath, "")
		if err != nil {
			t.Fatal(err)
		}
		second, err := runManifestGenerate(inPath, "")
		if err != nil {
			t.Fatal(err)
		}
		if first != second {
			t.Errorf("stable_manifest: Manifest generation is not producing stable text output.")
		}
	})
}
// runTestGroup runs each case in tests: it generates a manifest from the
// case's input YAML (with any extra flags), optionally refreshes the golden
// output file, and diffs the result against the golden file using the case's
// select/ignore filters.
func runTestGroup(t *testing.T, tests testGroup) {
	testDataDir = filepath.Join(repoRootDir, "cmd/mesh/testdata/manifest-generate")
	for _, tt := range tests {
		t.Run(tt.desc, func(t *testing.T) {
			inPath := filepath.Join(testDataDir, "input", tt.desc+".yaml")
			outPath := filepath.Join(testDataDir, "output", tt.desc+".yaml")
			got, err := runManifestGenerate(inPath, tt.flags)
			if err != nil {
				t.Fatal(err)
			}
			// In refresh mode, overwrite the golden file with the freshly
			// generated output before comparing (the diff below then passes).
			if refreshGoldenFiles() {
				t.Logf("Refreshing golden file for %s", outPath)
				if err := ioutil.WriteFile(outPath, []byte(got), 0644); err != nil {
					t.Error(err)
				}
			}
			want, err := readFile(outPath)
			if err != nil {
				t.Fatal(err)
			}
			// Default to diffing every object unless the case narrows it.
			diffSelect := "*:*:*"
			if tt.diffSelect != "" {
				diffSelect = tt.diffSelect
			}
			// Run the diff with both values of the final bool argument.
			// NOTE(review): presumably this toggles verbose diff output —
			// confirm against object.ManifestDiffWithSelectAndIgnore.
			for _, v := range []bool{true, false} {
				diff, err := object.ManifestDiffWithSelectAndIgnore(got, want, diffSelect, tt.diffIgnore, v)
				if err != nil {
					t.Fatal(err)
				}
				if diff != "" {
					t.Errorf("%s: got:\n%s\nwant:\n%s\n(-got, +want)\n%s\n", tt.desc, "", "", diff)
				}
			}
		})
	}
}
// runManifestGenerate runs the manifest generate command. If flags is not set, passes the given path as a -f flag,
// otherwise flags is passed to the command verbatim. Both path and flags should not be simultaneously set.
func runManifestGenerate(path, flags string) (string, error) {
	args := "manifest generate "
	if flags == "" {
		args += "-f " + path
	} else {
		args += flags
	}
	return runCommand(args)
}
|
#!/bin/bash
#
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# This script is designed to be run in the CSP3cli container as the
# first step of the EYFN tutorial. It creates and submits a
# configuration transaction to add CSP3 to the cloud network
#
# Positional parameters (all optional; defaults applied below)
CHANNEL_NAME="$1"
DELAY="$2"
TIMEOUT="$3"
VERBOSE="$4"
# ":" is a no-op; the parameter expansions assign defaults when unset or empty
: ${CHANNEL_NAME:="cloudchannel"}
: ${DELAY:="3"}
: ${TIMEOUT:="10"}
: ${VERBOSE:="false"}
COUNTER=1
MAX_RETRY=5
# import environment variables
. scripts/CSP3-scripts/envVarCLI.sh
# fetchChannelConfig <org> <channel_id> <output_json>
# Writes the current channel config for a given channel to a JSON file
fetchChannelConfig() {
  # Args: $1 = org number, $2 = channel name, $3 = output path for config JSON
  ORG=$1
  CHANNEL=$2
  OUTPUT=$3
  # point the CLI at the orderer and the requesting org's peer identity
  setOrdererGlobals
  setGlobals $ORG
  echo "Fetching the most recent configuration block for the channel"
  set -x
  peer channel fetch config config_block.pb -o orderer.cloud.com:7050 --ordererTLSHostnameOverride orderer.cloud.com -c $CHANNEL --tls --cafile $ORDERER_CA
  # stop tracing without echoing the "set +x" itself
  { set +x; } 2>/dev/null
  echo "Decoding config block to JSON and isolating config to ${OUTPUT}"
  set -x
  configtxlator proto_decode --input config_block.pb --type common.Block | jq .data.data[0].payload.data.config >"${OUTPUT}"
  { set +x; } 2>/dev/null
}
# createConfigUpdate <channel_id> <original_config.json> <modified_config.json> <output.pb>
# Takes an original and modified config, and produces the config update tx
# which transitions between the two
createConfigUpdate() {
  # Args: $1 = channel id, $2 = original config JSON, $3 = modified config JSON,
  #       $4 = output path for the envelope protobuf
  CHANNEL=$1
  ORIGINAL=$2
  MODIFIED=$3
  OUTPUT=$4
  set -x
  # encode both configs, compute the delta, then wrap it in a signed-envelope
  # structure for submission
  configtxlator proto_encode --input "${ORIGINAL}" --type common.Config >original_config.pb
  configtxlator proto_encode --input "${MODIFIED}" --type common.Config >modified_config.pb
  configtxlator compute_update --channel_id "${CHANNEL}" --original original_config.pb --updated modified_config.pb >config_update.pb
  configtxlator proto_decode --input config_update.pb --type common.ConfigUpdate >config_update.json
  echo '{"payload":{"header":{"channel_header":{"channel_id":"'$CHANNEL'", "type":2}},"data":{"config_update":'$(cat config_update.json)'}}}' | jq . >config_update_in_envelope.json
  configtxlator proto_encode --input config_update_in_envelope.json --type common.Envelope >"${OUTPUT}"
  { set +x; } 2>/dev/null
}
# signConfigtxAsPeerOrg <org> <configtx.pb>
# Set the peerOrg admin of an org and signing the config update
signConfigtxAsPeerOrg() {
  # Args: $1 = org number whose admin signs, $2 = config transaction file
  PEERORG=$1
  TX=$2
  setGlobals $PEERORG
  set -x
  peer channel signconfigtx -f "${TX}"
  { set +x; } 2>/dev/null
}
echo
echo "========= Creating config transaction to add CSP3 to network =========== "
echo
# Fetch the config for the channel, writing it to config.json
fetchChannelConfig 1 ${CHANNEL_NAME} config.json
# Modify the configuration to append the new org
set -x
jq -s '.[0] * {"channel_group":{"groups":{"Application":{"groups": {"CSP3MSP":.[1]}}}}}' config.json ./organizations/peerOrganizations/CSP3.cloud.com/CSP3.json > modified_config.json
{ set +x; } 2>/dev/null
# Compute a config update, based on the differences between config.json and modified_config.json, write it as a transaction to CSP3_update_in_envelope.pb
createConfigUpdate ${CHANNEL_NAME} config.json modified_config.json CSP3_update_in_envelope.pb
echo
echo "========= Config transaction to add CSP3 to network created ===== "
echo
echo "Signing config transaction"
echo
# sign as org 1's admin
signConfigtxAsPeerOrg 1 CSP3_update_in_envelope.pb
echo
echo "========= Submitting transaction from a different peer (peer0.CSP2) which also signs it ========= "
echo
# submitting as org 2 implicitly adds org 2's signature
setGlobals 2
set -x
peer channel update -f CSP3_update_in_envelope.pb -c ${CHANNEL_NAME} -o orderer.cloud.com:7050 --ordererTLSHostnameOverride orderer.cloud.com --tls --cafile ${ORDERER_CA}
{ set +x; } 2>/dev/null
echo
echo "========= Config transaction to add CSP3 to network submitted! =========== "
echo
exit 0
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 <NAME> All rights reserved.
#
"""Initializing a defaultdict.
"""
__version__ = "$Id$"
#end_pymotw_header
import collections
def default_factory():
    """Value produced by the defaultdict for any missing key."""
    return 'default value'

# Seed the defaultdict with one known key; missing keys invoke default_factory.
d = collections.defaultdict(default_factory, foo='bar')
# Bug fix: Python 2 print statements fail under Python 3; use print() calls.
print('d:', d)
print('foo =>', d['foo'])
# accessing a missing key inserts and returns the factory's default
print('bar =>', d['bar'])
|
# Copyright 2019 <NAME>
# Pycoin
# 11 February 2019
# transaction.py
from hashlib import sha256
from tools import tobytes, getbytes
class Transaction:
    """
    A transaction: lists of origins (inputs), destinations (outputs) and
    signatures, with fixed-width byte (de)serialization helpers.
    """

    def __init__(self, origins=None, destinations=None):
        # Bug fix: the originals were mutable default arguments ([]), which
        # are shared across all instances, so every Transaction built with
        # defaults would accumulate each other's origins/destinations.
        # origins are tuples of (tx hashes, and addresses)
        self.origins = origins if origins is not None else []
        # destinations are tuples of (address, and amounts)
        self.destinations = destinations if destinations is not None else []
        # Signatures are (public x, public y, r, q)
        self.signatures = []
        self.hash = None

    def serialize_unsigned(self):
        """Serialize the unsigned portion: counts followed by fixed 32-byte fields."""
        # start a byte sequence
        # NOTE(review): tobytes(0, 0) looks like a zero-width placeholder that
        # from_serial() never consumes — confirm against tools.tobytes.
        serial = tobytes(0, 0)
        serial += tobytes(len(self.origins), 1)       # 1 byte: number of inputs
        serial += tobytes(len(self.destinations), 1)  # 1 byte: number of outputs
        for origin in self.origins:
            # 64 bytes for each input
            serial += tobytes(origin[0], 32)  # tx hash of origin
            serial += tobytes(origin[1], 32)  # public x of origin
        for destination in self.destinations:
            # 64 bytes for each output
            serial += tobytes(destination[0], 32)  # public x of destination
            serial += tobytes(destination[1], 32)  # amount in units
        return serial

    def serialize(self):
        """Serialize the full transaction including its signatures."""
        serial = self.serialize_unsigned()
        # 1 bytes for num of signatures
        serial += tobytes(len(self.signatures), 1)
        for signature in self.signatures:
            serial += tobytes(signature[0], 32)  # Public x
            serial += tobytes(signature[1], 32)  # Public y
            # Signatures part
            serial += tobytes(signature[2], 32)  # r
            serial += tobytes(signature[3], 32)  # q
        return serial

    def from_serial(self, message):
        """Populate origins, destinations and signatures from a serialized message."""
        (nInputs, message) = getbytes(1, message)
        (nOutputs, message) = getbytes(1, message)
        for i in range(nInputs):
            (txid, message) = getbytes(32, message)
            (address, message) = getbytes(32, message)
            self.origins.append((txid, address))
        for i in range(nOutputs):
            (address, message) = getbytes(32, message)
            (value, message) = getbytes(32, message)
            self.destinations.append((address, value))
        (nSignatures, message) = getbytes(1, message)
        for i in range(nSignatures):
            # Bug fix: serialize() writes each signature field as 32 bytes,
            # so read 32 here (was 4) to keep the formats symmetric.
            (px, message) = getbytes(32, message)
            (py, message) = getbytes(32, message)
            (r, message) = getbytes(32, message)
            (q, message) = getbytes(32, message)
            # Bug fix: was self.signatures((px, py, r, q)), which raises
            # TypeError ('list' object is not callable); append was intended.
            self.signatures.append((px, py, r, q))

    # Signatures
    def get_unsigned_hash(self):
        """Return the hex SHA-256 digest of the unsigned serialization."""
        digest = sha256()
        digest.update(self.serialize_unsigned())
        return digest.hexdigest()
|
#!/usr/bin/env bash
#
# MIT License
#
# Copyright (c) 2017-2018 Vernier Software & Technology
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
set -e
# enable shell tracing when DEBUG is set in the environment
test -n "$DEBUG" && set -x
# Static config
readonly REPO="git@github.org:sample/projectcheckout.git"
readonly CLONE_DIR="$CIRCLE_WORKING_DIR/../project"
readonly BUILD_INFO_BRANCH="build-info"
readonly BUILD_BUMP_SCRIPT_CMD="./build/official/bump-build-numbers.py"
readonly BUILD_BUMP_SCRIPT_LOG="/tmp/bump-build-numbers-LOG.txt"
# SSH key used to push back to the main repo (decrypted at runtime)
readonly PRIV_SSH_KEY_ENC="$CIRCLE_WORKING_DIR/creds/encrypted"
readonly PRIV_SSH_KEY_DEC="$CIRCLE_WORKING_DIR/creds/decrypted"
# Note: these cannot be be changed without also changing the official build
# logic in the build/official scripts in the main repo, as well.
readonly BUILDER_GIT_USER="Builder"
readonly BUILDER_GIT_EMAIL="builds@example.com"
readonly KICKOFF_BUILD_MARKER_FILE="KICKOFF_OFFICIAL_BUILD.txt"
function strip_string() {
  # Trim leading and trailing whitespace from $1 and print the result.
  echo "$1" | sed -e 's:^[[:space:]]*::' -e 's:[[:space:]]*$::'
}
# THIS IS IMPORTANT:
#
# We need this so the git commands below use the ssh key in the
# official-build repo (which has permissions to commit and push back
# to sample), NOT the CircleCI deploy key that's associated with this
# repository.
#
# The important part is that we export this so that the python script below
# (which calls git itself) uses the correct ssh key. Otherwise, when it pushes,
# it will fail.
export GIT_SSH_COMMAND="ssh -i $PRIV_SSH_KEY_DEC"
# Runtime config
# Branch the official build should be produced from; filled in below.
build_info_branch="UNKNOWN"
# Is this an official build request?
official_build_request=0
# Should we kick off a build; this is different than the above variable in that
# there are various reasons we'd kick off a build, though not all of them are
# official build "requests"
kickoff_official_build=0
# Additional context for the commit message; this _should_ ALWAYS get filled in
# below
build_reason="UNKNOWN"
# main(), as it were
# Determine if we even need to kick off a build; converted to pseudo-code,
# the logic below roughly is:
#
# if (this is a nightly build job) {
#   kickoff_build_flag = true
# } else {
#   if (list of modified files is ONLY the kickoff build marker) {
#     kickoff_build_flag = true
#     official_build_request_flag = true
#   } else {
#     if (the commit message has "[do kickoff]" in it somewhere) {
#       kickoff_build_flag = true
#       official_build_request_flag = true
#     }
#   }
# }
#
# Nightly jobs are identified by their CircleCI stage name.
if test -n "$(echo "$CIRCLE_STAGE" | grep '^nightly_build')"; then
  kickoff_official_build=1
  build_reason="Request source: scheduled nightly build"
  if test -z "$BUILD_BRANCH"; then
    echo 'Mis-configured nightly official build circle.yml (missing branch); bailing.' >&2
    exit 1
  fi
  build_info_branch="$BUILD_BRANCH"
else
  # This isn't an official build run triggered by the nightly automated setup.
  # So, check to see if it's a forced-build request.
  # NOTE(review): these three lines use $CIRCLE_WORKING_DIRECTORY while the
  # rest of the script (CLONE_DIR, key paths, and the git log call further
  # down) uses $CIRCLE_WORKING_DIR -- one of the two is almost certainly
  # undefined in this environment; confirm which variable CircleCI actually
  # provides and unify.
  last_commit_modified_files="$(cd $CIRCLE_WORKING_DIRECTORY && git log --format=short --pretty="format:" --name-only -n 1 HEAD)"
  last_commit_author="$(cd $CIRCLE_WORKING_DIRECTORY && git log --format='%ce' -n 1 HEAD)"
  last_commit_subject="$(cd $CIRCLE_WORKING_DIRECTORY && git log --format='%s' -n 1 HEAD)"
  build_reason="Request source: ${last_commit_author}; reason: $last_commit_subject"
  echo "Detected last-commit file list: $last_commit_modified_files"
  if [[ "$last_commit_modified_files" == "$KICKOFF_BUILD_MARKER_FILE" ]]; then
    # If this is a build request, then kick off a build
    kickoff_official_build=1
    official_build_request=1
    # The last non-commented line of the kickoff build marker is what we
    # consider "the branch to build"; see request-official-build.sh for more
    # info
    build_info_branch="$(cat $KICKOFF_BUILD_MARKER_FILE | grep -v '^#' | tail -n 1)"
    build_info_branch="$(strip_string "$build_info_branch")"
    # We only support official builds on master and release/ branches right
    # now, so validate that
    if [[ "$build_info_branch" != "master" ]] && [[ -z "$(echo "$build_info_branch" | grep '^release\/.')" ]]; then
      echo 'Official builds may only be requested on the master and release/ branches.' >&2
      exit 1
    fi
  else
    last_commit_full_commit_message="$(cd $CIRCLE_WORKING_DIR && git log --format='%s %b' -n 1 HEAD)"
    if test -n "$(echo "$last_commit_full_commit_message" | grep -i '\[do kickoff\]')"; then
      # We're not an automated build kickoff, nor are we an official build
      # request, but we've been told to kick off a build via a commit.
      #
      # This is mostly to be used in cases where we want a merge of an
      # MR to _also_ kick off an official build (which we won't do by
      # default.)
      kickoff_official_build=1
      official_build_request=1
      # For now, in these cases, always build master; we may decide to
      # change this in the future...
      build_info_branch="master"
    fi
  fi
fi
# Bail out (successfully) when nothing above asked for a build.
if [[ "$kickoff_official_build" -eq 0 ]]; then
  echo "Neither nightly automated build job, official build request marker change, or merge request test detected." >&2
  echo "NOT SPAWNING OFFICIAL BUILD." >&2
  exit 0
fi
echo "Attempting to build branch: $build_info_branch"
# -x needs to be turned off here to protect the key
set +x
openssl aes-256-cbc -d -in $PRIV_SSH_KEY_ENC -out $PRIV_SSH_KEY_DEC -k "$PASSWORD"
set -x
# Set permissions, as required by ssh
chmod 0600 $PRIV_SSH_KEY_DEC
# After decrypting the key, because this script is basically the entire
# official build job, set command tracing on, so the log shows what was done.
#
# (This is similar to what we do with circle.yml in the main repo, since all the
# real job logic is in the scripts, not the yml directly.)
set -x
# Show the version of git we're using
git --version
# We check out the specific branch, with a limited depth for performance
# reasons. However, because we manipulate the build-info branch in this clone,
# we also need to fetch it separately; we do that a couple of lines down.
git clone --depth 5 -b $build_info_branch $REPO $CLONE_DIR
cd $CLONE_DIR
# See the comment above the git clone for an explanation of this.
git remote set-branches --add origin $BUILD_INFO_BRANCH
git fetch origin $BUILD_INFO_BRANCH
# Commits made by the bump script are attributed to the builder identity.
git config user.name "$BUILDER_GIT_USER"
git config user.email "$BUILDER_GIT_EMAIL"
# This script can return an error to indicate that an official build should not
# be performed. It originally did this to halt the build (so a superfluous build
# was not performed.
#
# So, turn off exit-on-error. But also, capture the output; and if it fails for
# that specific reason, print that to the log, but also return success, so the
# job isn't red.
#
# Of course, the script can fail for other (legit) reasons, and if that's the
# case, then mark the job red.
set +e
# Also, disable execution tracing here, mostly because everything below is
# status reporting; with -x set, we get the output strings in the log multiple
# times, which is hard to read.
set +x
# If this build was the result of a build request, then pass --force to the
# version bump script, so it will ignore whether or not there were previous
# commits and will just kickoff a new official build.
if [[ "$official_build_request" -eq 1 ]]; then
  additional_build_bump_script_args="--force"
else
  additional_build_bump_script_args=""
fi
# tee keeps the bump output in the job log AND in a file we can grep below;
# PIPESTATUS[0] preserves the script's own exit code (not tee's).
$BUILD_BUMP_SCRIPT_CMD -m "$build_reason" $additional_build_bump_script_args 2>&1 | tee $BUILD_BUMP_SCRIPT_LOG
bump_rv=${PIPESTATUS[0]}
echo "$BUILD_BUMP_SCRIPT_CMD rv: $bump_rv"
set -e
# Only perform this check if this _wasn't_ a manually-requested official build
if [[ "$official_build_request" -eq 0 ]]; then
  no_checkins_mesg="$(cat $BUILD_BUMP_SCRIPT_LOG | grep 'No checkins since last')"
else
  no_checkins_mesg=""
fi
# "No checkins" is an expected, benign outcome -- report success in that case.
if test -n "$no_checkins_mesg"; then
  exit 0
else
  exit $bump_rv
fi
|
<filename>node_modules/applicationinsights/out/AutoCollection/NetworkStatsbeat.d.ts
/** Aggregated network telemetry counters tracked per statsbeat endpoint/host pair. */
export declare class NetworkStatsbeat {
    /** Timestamp (ms) when the current collection interval started. */
    time: number;
    /** Timestamp (ms) of the previous collection. */
    lastTime: number;
    /** Numeric identifier of the ingestion endpoint these counters belong to. */
    endpoint: number;
    /** Host name these counters belong to. */
    host: string;
    totalRequestCount: number;
    lastRequestCount: number;
    /** NOTE(review): "Succesful" spelling matches the published library API; do not "fix" it. */
    totalSuccesfulRequestCount: number;
    totalFailedRequestCount: number;
    retryCount: number;
    exceptionCount: number;
    throttleCount: number;
    /** Total request execution time (ms) accumulated over the current interval. */
    intervalRequestExecutionTime: number;
    lastIntervalRequestExecutionTime: number;
    constructor(endpoint: number, host: string);
}
|
# List (for plotting with kuvei) only the finished cases: keep status lines
# marked VALMIS ("DONE") and emit each case id as "id/id".
a="";for i in $(${SCRIPT}/check_emulaattorisetin_status.bash /lustre/tmp/aholaj/UCLALES-SALSA/case_emulator_DESIGN_v3.3.1_LES_ECLAIR_Jaakko.ECLAIRv2.0.cray.fast_LVL4 | grep VALMIS | cut -c1-7 ); do a="$a ${i}/${i}"; done; echo $a
'use strict'

const { test } = require('ava')
const fs = require('fs')
const path = require('path')

const exiftool = require('.')

// Directory holding the media fixtures that get snapshotted.
const fixturesPath = path.join(__dirname, `media`)

// Metadata keys that vary per machine/checkout (paths, mtimes, permissions),
// so they must be excluded before snapshot comparison.
const keyBlacklist = new Set([
  'directory',
  'sourceFile',
  'exifToolVersion',
  'fileModifyDate',
  'fileAccessDate',
  'fileInodeChangeDate',
  'filePermissions',
  'megapixels'
])
// Return a copy of `result` without the environment-dependent keys in
// `keyBlacklist`, preserving the original key order.
function filter (result) {
  const cleaned = {}
  for (const [key, value] of Object.entries(result)) {
    if (!keyBlacklist.has(key)) {
      cleaned[key] = value
    }
  }
  return cleaned
}
fs.readdirSync(fixturesPath)
  .filter((filename) => {
    // Only entries that look like "name.ext" files are fixtures.
    return /[^.]+\.[^.]+$/.test(filename)
  })
  .forEach((filename) => {
    const input = path.join(fixturesPath, filename)
    // One snapshot test per fixture, titled after the file it covers.
    // Fix: the title used the literal text `$(unknown)` (shell-style, not a
    // JS template placeholder), so every test shared one meaningless name.
    test(`${filename} metadata`, async (t) => {
      const metadata = await exiftool(input)
      const filtered = filter(metadata)
      console.log(JSON.stringify(filtered, null, 2))
      t.snapshot(filtered)
    })
  })
|
-- Count payments per customer, most active customers first.
SELECT customer_id,
       COUNT(*) AS total_payments
  FROM payments
 GROUP BY customer_id
 ORDER BY total_payments DESC
import { Shield } from "../abstracts";
import { RouteHandler } from "../handlers";
import { GenericShield } from "../generics";
// tslint:disable-next-line
export const Shields = (...shields: Array<typeof Shield>): ClassDecorator => {
return (target: any) => {
const className = target.name;
RouteHandler.addShields(shields as Array<typeof GenericShield>, className);
};
}; |
// PropertyKeys<T>: the union of T's string keys whose values are NOT
// functions. The mapped type rewrites each function-valued key to `never`,
// then indexing by all string keys collapses the union (never drops out).
// NOTE(review): the matcher is `(...args: readonly unknown[]) => void`,
// whereas the conventional catch-all is `(...args: any[]) => any`; confirm
// this actually excludes all intended method signatures.
export type PropertyKeys<T> = {
  [K in Extract<keyof T, string>]: T[K] extends (
    ...args: readonly unknown[]
  ) => void
    ? never
    : K;
}[Extract<keyof T, string>];
|
package com.wish.jsoup;
import com.wish.entity.Flower;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Basic Jsoup crawling exercise: scrapes the succulent category listing from
 * aihuhua.com, collects one {@link Flower} per list entry, and follows the
 * pagination links to cover every page of the category.
 *
 * @author handx <EMAIL>
 * @date 2017-05-11 14:48:42
 */
public class MyJsoup2 {
    public static void main(String[] args) throws InterruptedException {
        List<Flower> flowers = new ArrayList<Flower>();
        try {
            // Fetch the first page of the category and locate the listing container.
            Document doc = Jsoup.connect("http://www.aihuhua.com/baike/sugen/").get();
            Elements elementsByClass = doc.getElementsByClass("cate_list");
            // TODO: update FlowerCategory to carry this description text as well.
            String cateName = elementsByClass.select("h1").text();
            // System.out.println("简介:" +
            // elementsByClass.select(".cont").text());
            Elements firstList = elementsByClass.select(".list");
            Elements elements = firstList.select("li");
            // One <li> per flower: anchor title/href, last <label> nickname, <img> thumbnail.
            for (Element element : elements) {
                Flower flower = new Flower();
                flower.setName(element.select("a").attr("title"));
                flower.setNick(element.select("label").last().text());
                flower.setUrl(element.select("a").attr("href"));
                flower.setImgPath(element.select("img").attr("src"));
                flowers.add(flower);
            }
            System.out.println("获取成功,正在解析类别为【" + cateName + "】");
            // If the listing is paginated, parse every remaining page too.
            int isPagination = elementsByClass.select(".pagination a").size();
            if (isPagination > 0) {
                // The next-to-last pagination link carries the total page count.
                String paginationText = elementsByClass.select(".pagination a")
                        .eq(elementsByClass.select(".pagination a").size() - 2).text();
                int paginationTotal;
                if (isPagination > 7) {
                    // Long pagination bars prefix the count; strip the first 2 chars.
                    paginationTotal = Integer.parseInt(paginationText.substring(2, paginationText.length()));
                } else {
                    paginationTotal = Integer.parseInt(paginationText);
                }
                // Category slug taken from the second pagination href, e.g. /baike/<slug>/...
                String urlCate = elementsByClass.select(".pagination a").eq(1).attr("href").split("/")[2];
                List<String> urls = new ArrayList<String>();
                System.out.println("开始解析分页路径....");
                for (int i = 1; i < paginationTotal; i++) {
                    String url = "http://www.aihuhua.com/baike/" + urlCate + "/page-" + i + ".html";
                    urls.add(url);
                }
                System.out.println("路径解析完毕,共" + paginationTotal + "页,开始爬取...");
                for (String url : urls) {
                    doc = Jsoup.connect(url).get();
                    elementsByClass = doc.getElementsByClass("cate_list");
                    firstList = elementsByClass.select(".list");
                    elements = firstList.select("li");
                    for (Element element : elements) {
                        Flower flower = new Flower();
                        flower.setName(element.select("a").attr("title"));
                        flower.setNick(element.select("label").last().text());
                        flower.setUrl(element.select("a").attr("href"));
                        flower.setImgPath(element.select("img").attr("src"));
                        flowers.add(flower);
                    }
                }
                System.out.println(cateName + "解析完毕!");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("解析完毕,共解析数据:" + flowers.size() + "条。");
    }
}
|
def map_write(stream_in, stream_out, function, *args):
    """Pump data from stream_in through function and into stream_out.

    Repeatedly reads from ``stream_in``, applies ``function(data, *args)``
    to each chunk, and writes/flushes the result to ``stream_out``.  Stops
    at EOF (a falsy read).  On IOError, both streams are closed on a
    best-effort basis and the pump stops.

    :param stream_in: readable stream (``read()``)
    :param stream_out: writable stream (``write()`` / ``flush()``)
    :param function: callable applied to each chunk before writing
    :param args: extra positional arguments forwarded to ``function``
    """
    while True:
        try:
            data = stream_in.read()
            if not data:
                break
            result = function(data, *args)
            stream_out.write(result)
            stream_out.flush()
        except IOError:
            try:
                stream_in.close()
            except IOError:
                pass
            try:
                stream_out.close()
            except IOError:
                pass
            # Fix: without this break the loop iterated again and read the
            # now-closed stream, dying with an unrelated ValueError (or
            # spinning forever if that read raised IOError again).
            break
/**
 * Minimal doubly linked list of ints supporting insertion at both ends.
 */
public class DoublyLinkedList {
    Node head;

    // Node of the list; next and prev default to null.
    class Node {
        int data;
        Node next;
        Node prev;

        Node(int d) { data = d; }
    }

    /**
     * Insert a new node holding new_data at the front of the list.
     */
    public void push(int new_data)
    {
        // 1 & 2: Allocate the Node & put in the data
        Node new_node = new Node(new_data);

        // 3. New node precedes the current head
        new_node.next = head;
        new_node.prev = null;

        // 4. Back-link the old head (if any) to the new node
        if (head != null)
            head.prev = new_node;

        // 5. The new node becomes the head
        head = new_node;
    }

    /**
     * Append a new node holding new_data at the end of the list.
     *
     * Fix: this method was missing entirely even though main() calls
     * dll.append(4), so the class did not compile.
     */
    public void append(int new_data)
    {
        Node new_node = new Node(new_data);
        new_node.next = null;

        // Empty list: the new node is the head.
        if (head == null) {
            new_node.prev = null;
            head = new_node;
            return;
        }

        // Walk to the last node and link the new node after it.
        Node last = head;
        while (last.next != null)
            last = last.next;
        last.next = new_node;
        new_node.prev = last;
    }

    /**
     * Print the list from the given node forward, space-separated.
     */
    public void printList(Node node)
    {
        while (node != null) {
            System.out.print(node.data + " ");
            node = node.next;
        }
    }

    // Driver program of the above program
    public static void main(String[] args)
    {
        DoublyLinkedList dll = new DoublyLinkedList();

        // Insert 6. So linked list becomes 6->NULL
        dll.push(6);

        // Insert 7 at the beginning. So linked list becomes 7->6->NULL
        dll.push(7);

        // Insert 1 at the beginning. So linked list becomes 1->7->6->NULL
        dll.push(1);

        // Insert 4 at the end. So linked list becomes 1->7->6->4->NULL
        dll.append(4);

        System.out.println("Created DLL is: ");
        dll.printList(dll.head);
    }
}
def perform_image_cropping(img, top_left_pix, rc, raw_width, raw_height):
    """Crop ``img`` between ``top_left_pix`` and a derived bottom-right corner.

    The bottom-right pixel comes from the relative coordinates ``rc[1]``
    scaled by the raw image dimensions. ``img`` is indexed ``[row, col]``
    (numpy/OpenCV convention), so rows use the y components and columns the
    x components.
    """
    x_stop = int(rc[1][0] * raw_width)
    y_stop = int(rc[1][1] * raw_height)
    x_start, y_start = top_left_pix[0], top_left_pix[1]
    return img[y_start:y_stop, x_start:x_stop]
#!/usr/bin/env bash
# Fail fast: exit on error, on unset variables, and on pipeline failures.
set -euo pipefail

### Functions

# Interrupt the graphql-engine under test ($PID) and kill the webhook
# helper ($WH_PID); installed as the ERR/INT trap handler below.
stop_services() {
   kill -INT $PID
   kill $WH_PID
}
# Poll localhost:$1 until it accepts TCP connections; give up (and exit the
# whole script) after 240 attempts at 0.25s intervals (~60s).
wait_for_port() {
    local PORT=$1
    echo "waiting for $PORT"
    local attempt=0
    while [ "$attempt" -lt 240 ]; do
        if nc -z localhost $PORT; then
            echo "port $PORT is ready"
            return
        fi
        echo -n .
        sleep 0.25
        attempt=$((attempt + 1))
    done
    echo "Failed waiting for $PORT" && exit 1
}
# Generate the RSA keypair used for the JWT test sections under
# $OUTPUT_FOLDER/ssl, restoring the caller's working directory afterwards.
init_jwt() {
  CUR_DIR="$PWD"
  mkdir -p "$OUTPUT_FOLDER/ssl"
  cd "$OUTPUT_FOLDER/ssl"
  openssl genrsa -out jwt_private.key 2048
  # Derive the public key handed to graphql-engine via HASURA_GRAPHQL_JWT_SECRET.
  openssl rsa -pubout -in jwt_private.key -out jwt_public.key
  cd "$CUR_DIR"
}
# Create a throwaway CA plus a webhook server certificate (CN=hge-webhook,
# SAN localhost/127.0.0.1) for the HTTPS webhook tests, and install the CA
# into the system trust store. Requires root (update-ca-certificates).
init_ssl() {
  CUR_DIR="$PWD"
  mkdir -p "$OUTPUT_FOLDER/ssl"
  cd "$OUTPUT_FOLDER/ssl"
  # Minimal openssl req config: v3 extensions with the SANs the tests dial.
  CNF_TEMPLATE='[req]
req_extensions = v3_req
distinguished_name = req_distinguished_name

[req_distinguished_name]

[ v3_req ]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
subjectAltName = @alt_names

[alt_names]
DNS.1 = localhost
IP.1 = 127.0.0.1'

  echo "$CNF_TEMPLATE" > webhook-req.cnf

  # Self-signed CA (10 days), then a CSR signed by it for the webhook.
  openssl genrsa -out ca-key.pem 2048
  openssl req -x509 -new -nodes -key ca-key.pem -days 10 -out ca.pem -subj "/CN=webhook-ca"
  openssl genrsa -out webhook-key.pem 2048
  openssl req -new -key webhook-key.pem -out webhook.csr -subj "/CN=hge-webhook" -config webhook-req.cnf
  openssl x509 -req -in webhook.csr -CA ca.pem -CAkey ca-key.pem -CAcreateserial -out webhook.pem -days 10 -extensions v3_req -extfile webhook-req.cnf

  # Trust the throwaway CA system-wide so graphql-engine accepts the webhook.
  cp ca.pem /etc/ssl/certs/webhook.crt
  update-ca-certificates
  cd "$CUR_DIR"
}
# Merge the coverage from the last run (graphql-engine.tix) into the running
# union (graphql-engine-combined.tix). Best-effort: failures are ignored so
# a missing .tix never fails the job.
combine_hpc_reports() {
	(stack --allow-different-user exec -- hpc combine graphql-engine.tix graphql-engine-combined.tix --union > graphql-engine-combined.tix2 && mv graphql-engine-combined.tix2 graphql-engine-combined.tix ) || true
	rm graphql-engine.tix || true
}
# Gracefully interrupt the engine under test (so it flushes its .tix file),
# wait for it to exit, then fold its coverage into the combined report.
kill_hge_and_combine_hpc_reports() {
	kill -INT $PID
	wait $PID || true
	combine_hpc_reports
}
### Sanity checks: required env, hpc availability, helper scripts.
if [ -z "${HASURA_GRAPHQL_DATABASE_URL:-}" ] ; then
	echo "Env var HASURA_GRAPHQL_DATABASE_URL is not set"
	exit 1
fi

if ! stack --allow-different-user exec which hpc ; then
	echo "hpc not found; Install it with 'stack install hpc'"
	exit 1
fi

# Resolve this script's directory to an absolute path.
CIRCLECI_FOLDER="${BASH_SOURCE[0]%/*}"
cd $CIRCLECI_FOLDER
CIRCLECI_FOLDER="$PWD"

if ! $CIRCLECI_FOLDER/test-server-flags.sh ; then
	echo "Testing GraphQL server flags failed"
	exit 1
fi

if ! $CIRCLECI_FOLDER/test-deprecated-server-flags.sh ; then
	echo "Testing GraphQL deprecated server flags failed"
	exit 1
fi

PYTEST_ROOT="$CIRCLECI_FOLDER/../server/tests-py"

OUTPUT_FOLDER=${OUTPUT_FOLDER:-"$CIRCLECI_FOLDER/test-server-output"}
mkdir -p "$OUTPUT_FOLDER"

cd $PYTEST_ROOT

# Locate the graphql-engine binary: either from the stack build or from the
# GRAPHQL_ENGINE override.
if ! stack --allow-different-user exec -- which graphql-engine > /dev/null && [ -z "${GRAPHQL_ENGINE:-}" ] ; then
	echo "Do 'stack build' before tests, or export the location of executable in the GRAPHQL_ENGINE envirnoment variable"
	exit 1
fi

GRAPHQL_ENGINE=${GRAPHQL_ENGINE:-"$(stack --allow-different-user exec -- which graphql-engine)"}
if ! [ -x "$GRAPHQL_ENGINE" ] ; then
	echo "$GRAPHQL_ENGINE is not present or is not an executable"
	exit 1
fi

RUN_WEBHOOK_TESTS=true

echo -e "\nINFO: GraphQL Executable : $GRAPHQL_ENGINE"
echo -e "INFO: Logs Folder : $OUTPUT_FOLDER\n"

pip3 install -r requirements.txt

mkdir -p "$OUTPUT_FOLDER"

export EVENT_WEBHOOK_HEADER="MyEnvValue"
export HGE_URL="http://localhost:8080"
export WEBHOOK_FROM_ENV="http://127.0.0.1:5592"
export HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES=true

PID=""
WH_PID=""
HS_PID=""

# Clean up background processes on error or Ctrl-C.
# NOTE(review): stop_services only kills $PID and $WH_PID; the cookie-webhook
# helper started later as $WHC_PID is not covered by this trap -- confirm
# whether it can leak on early failure.
trap stop_services ERR
trap stop_services INT

echo -e "\n<########## TEST GRAPHQL-ENGINE WITHOUT ADMIN SECRET ###########################################>\n"

# Pattern used throughout: start the engine in the background, record $PID,
# wait for its port, run pytest, then stop it and collect coverage.
"$GRAPHQL_ENGINE" serve > "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL"

kill -INT $PID
sleep 4
mv graphql-engine.tix graphql-engine-combined.tix || true

##########
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET #####################################>\n"

export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM"

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET"

kill_hge_and_combine_hpc_reports

##########
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT #####################################>\n"

init_jwt

export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key }')"

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!

# NOTE(review): unlike every other section, there is no wait_for_port 8080
# here (nor in the stringified-JWT section below) before pytest runs --
# possible startup race; confirm whether this is intentional.
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET"

kill_hge_and_combine_hpc_reports

unset HASURA_GRAPHQL_JWT_SECRET

echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET AND JWT (in stringified mode) #####################################>\n"

export HASURA_GRAPHQL_JWT_SECRET="$(jq -n --arg key "$(cat $OUTPUT_FOLDER/ssl/jwt_public.key)" '{ type: "RS512", key: $key , claims_format: "stringified_json"}')"

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" & PID=$!

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-jwt-key-file="$OUTPUT_FOLDER/ssl/jwt_private.key" --hge-jwt-conf="$HASURA_GRAPHQL_JWT_SECRET" test_jwt.py

kill_hge_and_combine_hpc_reports

unset HASURA_GRAPHQL_JWT_SECRET

# test with CORS modes
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH CORS DOMAINS ########>\n"
export HASURA_GRAPHQL_CORS_DOMAIN="http://*.localhost, http://localhost:3000, https://*.foo.bar.com"

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-cors test_cors.py

kill_hge_and_combine_hpc_reports

unset HASURA_GRAPHQL_CORS_DOMAIN

# test websocket transport with initial cookie header
echo -e "\n<########## TEST GRAPHQL-ENGINE WITH COOKIE IN WEBSOCKET INIT ########>\n"
export HASURA_GRAPHQL_AUTH_HOOK="http://localhost:9876/auth"
export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"

# Auxiliary webhook that validates the cookie carried in the ws-init payload.
python3 test_cookie_webhook.py > "$OUTPUT_FOLDER/cookie_webhook.log" 2>&1 & WHC_PID=$!

wait_for_port 9876

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

echo "testcase 1: read cookie, cors enabled"
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py

kill -INT $PID
sleep 1

echo "testcase 2: no read cookie, cors disabled"
"$GRAPHQL_ENGINE" serve --disable-cors >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=noread test_websocket_init_cookie.py

kill -INT $PID
sleep 1

echo "testcase 3: read cookie, cors disabled and ws-read-cookie"
export HASURA_GRAPHQL_WS_READ_COOKIE="true"
"$GRAPHQL_ENGINE" serve --disable-cors >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-ws-init-cookie=read test_websocket_init_cookie.py

kill -INT $PID
kill -INT $WHC_PID
unset HASURA_GRAPHQL_WS_READ_COOKIE
unset HASURA_GRAPHQL_AUTH_HOOK
unset HASURA_GRAPHQL_AUTH_HOOK_MODE
sleep 4
combine_hpc_reports

echo -e "\n<########## TEST GRAPHQL-ENGINE WITH GRAPHQL DISABLED ########>\n"

# Each API-disabling mode is tested twice: once via env var, once via CLI flag.
export HASURA_GRAPHQL_ENABLED_APIS="metadata"

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py

kill_hge_and_combine_hpc_reports

unset HASURA_GRAPHQL_ENABLED_APIS

"$GRAPHQL_ENGINE" serve --enabled-apis metadata >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-graphql-disabled test_apis_disabled.py

kill_hge_and_combine_hpc_reports

echo -e "\n<########## TEST GRAPHQL-ENGINE WITH METADATA DISABLED ########>\n"

export HASURA_GRAPHQL_ENABLED_APIS="graphql"

"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py

kill_hge_and_combine_hpc_reports

unset HASURA_GRAPHQL_ENABLED_APIS

"$GRAPHQL_ENGINE" serve --enabled-apis graphql >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

wait_for_port 8080

pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-metadata-disabled test_apis_disabled.py

kill_hge_and_combine_hpc_reports

# webhook tests
if [ $EUID != 0 ] ; then
	echo -e "SKIPPING webhook based tests, as \nroot permission is required for running webhook tests (inorder to trust certificate authority)."
	RUN_WEBHOOK_TESTS=false
fi

if [ "$RUN_WEBHOOK_TESTS" == "true" ] ; then

	echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (GET) #########################>\n"

	export HASURA_GRAPHQL_AUTH_HOOK="https://localhost:9090/"
	init_ssl

	"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

	python3 webhook.py 9090 "$OUTPUT_FOLDER/ssl/webhook-key.pem" "$OUTPUT_FOLDER/ssl/webhook.pem" > "$OUTPUT_FOLDER/webhook.log" 2>&1 & WH_PID=$!

	wait_for_port 8080

	wait_for_port 9090

	pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK"

	kill_hge_and_combine_hpc_reports

	echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & WEBHOOK (POST) #########################>\n"
	export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"

	"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

	wait_for_port 8080

	pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK"

	# Drop the trusted CA so the insecure-webhook tests below actually see an
	# untrusted certificate.
	rm /etc/ssl/certs/webhook.crt
	update-ca-certificates

	kill_hge_and_combine_hpc_reports

	echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN SECRET & HTTPS INSECURE WEBHOOK (GET) ########>\n"
	export HASURA_GRAPHQL_AUTH_HOOK_MODE="GET"

	"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

	wait_for_port 8080

	pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py

	kill_hge_and_combine_hpc_reports

	echo -e "\n<########## TEST GRAPHQL-ENGINE WITH ADMIN_SECRET & HTTPS INSECURE WEBHOOK (POST) ########>\n"
	export HASURA_GRAPHQL_AUTH_HOOK_MODE="POST"

	"$GRAPHQL_ENGINE" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!

	wait_for_port 8080

	pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --hge-webhook="$HASURA_GRAPHQL_AUTH_HOOK" --test-webhook-insecure test_webhook_insecure.py

	kill_hge_and_combine_hpc_reports

	kill $WH_PID

fi

# horizontal scale test
unset HASURA_GRAPHQL_AUTH_HOOK
unset HASURA_GRAPHQL_AUTH_HOOK_MODE
unset HASURA_GRAPHQL_ADMIN_SECRET

echo -e "\n<########## TEST GRAPHQL-ENGINE WITH HORIZONTAL SCALING ########>\n"

# Two engines share one database through pgbouncer on port 6543.
HASURA_HS_TEST_DB='postgres://postgres:postgres@localhost:6543/hs_hge_test'
psql "$HASURA_GRAPHQL_DATABASE_URL" -c "create database hs_hge_test;"

# create pgbouncer user
useradd pgbouncer
cd $CIRCLECI_FOLDER
chown -R pgbouncer:pgbouncer pgbouncer

# start pgbouncer
pgbouncer -u pgbouncer -d pgbouncer/pgbouncer.ini

cd $PYTEST_ROOT

# start 1st server
"$GRAPHQL_ENGINE" --database-url "$HASURA_HS_TEST_DB" serve >> "$OUTPUT_FOLDER/graphql-engine.log" 2>&1 & PID=$!
wait_for_port 8080

# start 2nd server
"$GRAPHQL_ENGINE" --database-url "$HASURA_HS_TEST_DB" serve \
                  --server-port 8081 \
                  >> "$OUTPUT_FOLDER/hs-graphql-engine.log" 2>&1 & HS_PID=$!
wait_for_port 8081

# run test
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py

# Shutdown pgbouncer
psql "postgres://postgres:postgres@localhost:6543/pgbouncer" -c "SHUTDOWN;" || true

cd $CIRCLECI_FOLDER

# start pgbouncer again
pgbouncer -u pgbouncer -d pgbouncer/pgbouncer.ini

cd $PYTEST_ROOT

# sleep for 30 seconds
sleep 30

# run test
pytest -vv --hge-url="$HGE_URL" --pg-url="$HASURA_GRAPHQL_DATABASE_URL" --test-hge-scale-url="http://localhost:8081" test_horizontal_scale.py

# Shutdown pgbouncer
psql "postgres://postgres:postgres@localhost:6543/pgbouncer" -c "SHUTDOWN;" || true

kill $PID
kill $HS_PID
psql "$HASURA_GRAPHQL_DATABASE_URL" -c "drop database hs_hge_test;"
sleep 4
combine_hpc_reports
unset HASURA_HS_TEST_DB

# end horizontal scale test

# Preserve the final merged coverage report with the other job artifacts.
mv graphql-engine-combined.tix "$OUTPUT_FOLDER/graphql-engine.tix" || true
|
// model
// Mongoose schema for a social-graph node; `friends` is a self-referencing
// list of other documents in the same 'nodes' collection.
// Fix: `var` replaced with `const` -- neither binding is ever reassigned.
const mongoose = require('mongoose');

const NodeSchema = new mongoose.Schema({
  name: String,
  gender: { type: String, default: 'f' },
  created_at: Date,
  friends: [{
    type: mongoose.Schema.Types.ObjectId, ref: 'nodes'
  }],
});

// Register the model so other modules can retrieve it via mongoose.model('nodes').
mongoose.model('nodes', NodeSchema);
#!/bin/bash
# Copyright 2016 - 2021 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Shared example helpers (expects CCPROOT to point at the examples checkout).
source ${CCPROOT}/examples/common.sh

# Absolute directory of this script, so the JSON template resolves regardless
# of the caller's working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# This var lets us change the image to gis by setting
# CCP_PG_IMAGE='-gis'.
export CCP_PG_IMAGE=${CCP_PG_IMAGE:-}

# Substitute env vars into the template and create the resource; CCP_CLI and
# CCP_NAMESPACE are required (the ? makes an unset value fail loudly).
cat $DIR/backrest-restored.json | envsubst | ${CCP_CLI?} create --namespace=${CCP_NAMESPACE?} -f -
|
// SPDX-License-Identifier: MIT
package syntax
import (
"context"
"net/http"
"strconv"
"sync"
"github.com/issue9/mux/v5/params"
)
// Initial capacity allocated for Params.Params on each pool allocation.
const defaultParamsCap = 5

var paramsPool = &sync.Pool{
	New: func() any { return &Params{Params: make([]Param, 0, defaultParamsCap)} },
}

const contextKeyParams contextKey = 0

type contextKey int

// Params holds the route parameters collected while matching a request path.
//
// It implements the params.Params interface.
type Params struct {
	Path   string  // path information used by Segment.Match
	Params []Param // the parameters to be passed along
}

type Param struct {
	K, V string // an empty K marks the entry as deleted
}
// NewParams fetches a Params instance from the pool, resets its parameter
// list, and primes it with the given path.
func NewParams(path string) *Params {
	p := paramsPool.Get().(*Params)
	p.Path = path
	p.Params = p.Params[:0]
	return p
}
// Destroy returns the instance to the pool for reuse. Safe on a nil receiver.
func (p *Params) Destroy() {
	if p != nil {
		paramsPool.Put(p)
	}
}

// GetParams returns the parameter list attached to the request, or nil if none.
func GetParams(r *http.Request) *Params {
	if ps := r.Context().Value(contextKeyParams); ps != nil {
		return ps.(*Params)
	}
	return nil
}

// WithValue attaches the parameters ps to r.
//
// Behaves like context.WithValue, but also handles WithValue being called
// more than once on the same request: any previously attached parameters are
// merged into ps before it replaces them.
func WithValue(r *http.Request, ps *Params) *http.Request {
	if ps == nil || len(ps.Params) == 0 {
		return r
	}

	if ps2 := GetParams(r); ps2 != nil && len(ps2.Params) > 0 {
		for _, p := range ps2.Params {
			ps.Set(p.K, p.V)
		}
	}

	return r.WithContext(context.WithValue(r.Context(), contextKeyParams, ps))
}
// Exists reports whether the parameter key is present.
func (p *Params) Exists(key string) bool {
	_, found := p.Get(key)
	return found
}

// String returns the value of key, or params.ErrParamNotExists if absent.
func (p *Params) String(key string) (string, error) {
	v, found := p.Get(key)
	if !found {
		return "", params.ErrParamNotExists
	}
	return v, nil
}

// MustString returns the value of key, falling back to def when absent.
func (p *Params) MustString(key, def string) string {
	v, found := p.Get(key)
	if !found {
		return def
	}
	return v
}

// Int parses the value of key as an int64.
func (p *Params) Int(key string) (int64, error) {
	str, found := p.Get(key)
	if !found {
		return 0, params.ErrParamNotExists
	}
	return strconv.ParseInt(str, 10, 64)
}

// MustInt parses the value of key as an int64, falling back to def when the
// key is absent or the value does not parse.
func (p *Params) MustInt(key string, def int64) int64 {
	str, found := p.Get(key)
	if !found {
		return def
	}
	val, err := strconv.ParseInt(str, 10, 64)
	if err != nil {
		return def
	}
	return val
}

// Uint parses the value of key as a uint64.
func (p *Params) Uint(key string) (uint64, error) {
	str, found := p.Get(key)
	if !found {
		return 0, params.ErrParamNotExists
	}
	return strconv.ParseUint(str, 10, 64)
}

// MustUint parses the value of key as a uint64, falling back to def when the
// key is absent or the value does not parse.
func (p *Params) MustUint(key string, def uint64) uint64 {
	str, found := p.Get(key)
	if !found {
		return def
	}
	val, err := strconv.ParseUint(str, 10, 64)
	if err != nil {
		return def
	}
	return val
}

// Bool parses the value of key as a bool.
func (p *Params) Bool(key string) (bool, error) {
	str, found := p.Get(key)
	if !found {
		return false, params.ErrParamNotExists
	}
	return strconv.ParseBool(str)
}

// MustBool parses the value of key as a bool, falling back to def when the
// key is absent or the value does not parse.
func (p *Params) MustBool(key string, def bool) bool {
	str, found := p.Get(key)
	if !found {
		return def
	}
	val, err := strconv.ParseBool(str)
	if err != nil {
		return def
	}
	return val
}

// Float parses the value of key as a float64.
func (p *Params) Float(key string) (float64, error) {
	str, found := p.Get(key)
	if !found {
		return 0, params.ErrParamNotExists
	}
	return strconv.ParseFloat(str, 64)
}

// MustFloat parses the value of key as a float64, falling back to def when
// the key is absent or the value does not parse.
func (p *Params) MustFloat(key string, def float64) float64 {
	str, found := p.Get(key)
	if !found {
		return def
	}
	val, err := strconv.ParseFloat(str, 64)
	if err != nil {
		return def
	}
	return val
}
// Get returns the value stored under key and whether it was found.
// Safe on a nil receiver.
func (p *Params) Get(key string) (string, bool) {
	if p == nil {
		return "", false
	}

	for i := range p.Params {
		if p.Params[i].K == key {
			return p.Params[i].V, true
		}
	}
	return "", false
}

// Clone returns a deep copy of the parameter list (nil for a nil receiver).
func (p *Params) Clone() params.Params {
	if p == nil {
		return nil
	}

	dup := make([]Param, len(p.Params))
	copy(dup, p.Params)
	return &Params{Path: p.Path, Params: dup}
}

// Count returns the number of live (non-deleted) parameters.
func (p *Params) Count() (cnt int) {
	if p == nil {
		return 0
	}

	for i := range p.Params {
		if p.Params[i].K != "" {
			cnt++
		}
	}
	return cnt
}

// Map returns the live parameters as a map, or nil when there are none.
func (p *Params) Map() map[string]string {
	if p == nil || len(p.Params) == 0 {
		return nil
	}

	m := make(map[string]string, len(p.Params))
	for _, kv := range p.Params {
		if kv.K == "" {
			continue
		}
		m[kv.K] = kv.V
	}
	return m
}
// Set stores v under k. An existing entry with the same key is updated in
// place; otherwise the first deleted slot (K == "") is recycled before the
// slice is grown, so Set never leaves duplicate keys behind.
func (p *Params) Set(k, v string) {
	deletedIndex := -1
	for i, param := range p.Params {
		if param.K == k {
			p.Params[i].V = v
			return
		}
		// Remember the first tombstone so we can reuse it below.
		if param.K == "" && deletedIndex == -1 {
			deletedIndex = i
		}
	}

	if deletedIndex != -1 { // reuse a deleted slot instead of growing
		p.Params[deletedIndex].K = k
		p.Params[deletedIndex].V = v
	} else {
		p.Params = append(p.Params, Param{K: k, V: v})
	}
}

// Delete marks the entry with key k as deleted by clearing its key; the slot
// stays in the slice and may be recycled by a later Set. Safe on nil.
func (p *Params) Delete(k string) {
	if p == nil {
		return
	}

	for i, pp := range p.Params {
		if pp.K == k {
			p.Params[i].K = ""
			return
		}
	}
}
|
<reponame>jaidis/yay
import React, { Component } from "react";
import { View } from "react-native";
import NetInfo from "@react-native-community/netinfo";
import { connect } from "react-redux";
import { addUser, deleteUser, addBookings } from "../../store/actions/index";
import * as AppConsts from "../../../config/app_consts";
import * as YAY_Api from "../../functions/YAY_Api_helper";
// FUNCTIONS OR HELPERS
import ResponsiveImage from "react-native-responsive-image";
/**
 * @description AuthCheck component: validates the stored session token against
 * the backend and routes the user to Home or SignIn accordingly.
 */
class AuthCheck extends Component {
static navigationOptions = {
title: "AuthCheck"
};
async componentDidMount() {
if (this.props.appJson != null) {
NetInfo.isConnected
.fetch()
.then(async isConnected => {
if (isConnected) {
try {
AuthCheckJSON = {
token: this.props.appJson.userdata.token
};
let response = await YAY_Api.fetchInternetDataAsync(
AppConsts.URL_USER_REFRESH,
await YAY_Api.getRequestPostAsync(AuthCheckJSON)
);
if (response.status === "success") {
this.props.c_addUser(response);
this.props.navigation.navigate("Home");
}
bookingJSON = { token: this.props.appJson.userdata.token };
response = await YAY_Api.fetchInternetDataAsync(
AppConsts.URL_BOOKINGS_SEARCH,
await YAY_Api.getRequestPostAsync(bookingJSON)
);
if (response.status === "success") {
this.props.c_addBookings(response);
}
} catch (error) {
console.log(error);
this.props.appJson != null
? this.props.navigation.navigate("Home")
: this.props.navigation.navigate("SignIn");
}
} else {
this.props.appJson != null
? this.props.navigation.navigate("Home")
: this.props.navigation.navigate("SignIn");
}
})
.catch(error => {
console.log(error);
});
} else {
this.props.navigation.navigate("SignIn");
}
}
render() {
return (
<View
style={{
flex: 1,
backgroundColor: "rgba(46, 50, 72, 1)",
justifyContent: "center",
alignItems: "center"
}}
>
<ResponsiveImage
source={require("../../../assets/img/yay-logo-rounded.png")}
initWidth="200"
initHeight="200"
/>
</View>
);
}
}
/**
 * Redux state → props: exposes the persisted session object and the
 * loading flag from the main reducer.
 */
const mapStateToProps = ({ mainReducer }) => ({
  appJson: mainReducer.appJson,
  loading_bar: mainReducer.loading
});
/**
 * Redux dispatch → props: wraps the user/booking action creators so the
 * component can dispatch them directly.
 */
const mapDispatchToProps = (dispatch) => ({
  c_addUser: (userJSON) => dispatch(addUser(userJSON)),
  c_deleteUser: () => dispatch(deleteUser()),
  c_addBookings: (reservaJSON) => dispatch(addBookings(reservaJSON))
});
// Wire AuthCheck into the Redux store.
export default connect(mapStateToProps, mapDispatchToProps)(AuthCheck);
|
#!/bin/bash
#
# Bootstraps a JIRA installation. First run (bootstrap.jar absent):
# drives the interactive installer via expect, then copies the extras,
# MySQL connector and language-pack jars into place. Subsequent runs:
# ensure the jira user/group exist and start the service.
fileName="/opt/atlassian/jira/bin/bootstrap.jar"
# -----------------------------------------------------------------------------
# 判断文件是否存在
# -----------------------------------------------------------------------------
if [ ! -f "$fileName" ]
then
    # -----------------------------------------------------------------------------
    # 初始化 JIRA 的安装
    # -----------------------------------------------------------------------------
    expect /home/work/script/jira-install.sh \
    && cd /home/work/temp \
    && echo "copy atlassian-extras-3.2.jar to /opt/atlassian/jira/atlassian-jira/WEB-INF/lib/atlassian-extras-3.2.jar" \
    && \cp -f /home/work/temp/jira-extras/atlassian-extras-3.2.jar /opt/atlassian/jira/atlassian-jira/WEB-INF/lib/atlassian-extras-3.2.jar \
    && echo "copy mysql-connector-java-5.1.39-bin.jar to /opt/atlassian/jira/atlassian-jira/WEB-INF/lib/mysql-connector-java-5.1.39-bin.jar" \
    && \cp -f /home/work/temp/jira-extras/mysql-connector-java-5.1.39-bin.jar /opt/atlassian/jira/atlassian-jira/WEB-INF/lib/mysql-connector-java-5.1.39-bin.jar \
    && echo "copy JIRA Core-7.3.3-language-pack-zh_CN.jar to /opt/atlassian/jira/atlassian-jira/WEB-INF/lib/JIRA Core-7.3.3-language-pack-zh_CN.jar" \
    && \cp -f /home/work/temp/jira-extras/language/* /opt/atlassian/jira/atlassian-jira/WEB-INF/lib/
else
    # -----------------------------------------------------------------------------
    # 如果 JIRA 用户和用户组不存在则创建
    # -----------------------------------------------------------------------------
    user=jira
    group=jira
    # Fix: anchor the match with ":" so a name that merely starts with
    # "jira" (e.g. "jiraadmin") is not mistaken for the jira group/user,
    # use grep -E (egrep is deprecated), and test the command directly
    # instead of inspecting $?.
    if ! grep -E "^${group}:" /etc/group > /dev/null 2>&1
    then
        groupadd "$group"
    fi
    if ! grep -E "^${user}:" /etc/passwd > /dev/null 2>&1
    then
        useradd -g "$group" "$user"
    fi
    # -----------------------------------------------------------------------------
    # 停止服务,并重新启动
    # -----------------------------------------------------------------------------
    # service jira stop
    service jira start
fi
<reponame>HaBaLeS/recommendation-bay
package net.projektfriedhof.de.habales.datagen.services;
import net.projektfriedhof.recbay.model.RecType;
/**
 * Empty marker interface for recommendation services in the data
 * generator.
 *
 * NOTE(review): no methods are declared and the {@code RecType} import
 * is unused here — presumably a placeholder for an API still to be
 * defined; confirm intent before removing the import. The name keeps
 * the existing spelling ("Recomendation") because renaming would break
 * implementors elsewhere.
 *
 * Created by falko on 4/13/15.
 */
public interface RecomendationService {
}
|
<gh_stars>0
// Barrel file: re-exports every layout component from this directory
// so consumers can import them from a single path.
export { default as LayoutById } from './LayoutById';
export { default as LayoutRoute } from './LayoutRoute';
export { default as LayoutRoutePrivate } from './LayoutRoutePrivate';
export { default as LayoutRouteReset } from './LayoutRouteReset';
export { default as LayoutRouteSearch } from './LayoutRouteSearch';
export { default as LayoutSingle } from './LayoutSingle';
export { default as LayoutSinglePrivate } from './LayoutSinglePrivate';
export { default as MainLayout } from './MainLayout';
package com.redhat.developers.demos.quarkus.service;
import com.redhat.developers.demos.quarkus.model.RandomResponse;
import java.util.UUID;
/**
 * Produces response messages tagged with a fixed per-JVM identifier.
 */
public class RandomGeneratorService {
    // Generated exactly once at class-load time, so every call to
    // createRandomMessage() within this JVM returns the SAME id (the
    // method name notwithstanding). NOTE(review): presumably intentional,
    // letting callers identify which service instance answered — confirm
    // against the demo's purpose before "fixing" it to be per-call.
    private static final UUID id = UUID.randomUUID();
    /**
     * Wraps this JVM's fixed id (as a string) in a RandomResponse.
     */
    public RandomResponse createRandomMessage() {
        return new RandomResponse(id.toString());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.