text stringlengths 1 1.05M |
|---|
<filename>packages/ds-csv/test/csv.test.ts
import { generate, SkimahConfig } from "@skimah/api";
import { graphql } from "graphql";
import CSVSource from "../src/csv";
// GraphQL SDL for the test schema: two CSV-backed types linked by a
// one-to-many Artist -> Album relation. @named maps fields to CSV columns.
const typeDefs = `
type Album @datasource(name: "albums") {
id: ID @named(as: "AlbumId")
title: String @named(as: "Title")
artist: Artist @relation @named(as: "ArtistId")
}
type Artist @datasource(name: "artists") {
id: ID @named(as: "ArtistId")
name: String @named(as: "Name")
albums: [Album] @relation
}
`;
// Integration tests for the CSV datasource: a Skimah schema is generated
// once from two CSV fixtures and then queried/mutated through GraphQL.
// NOTE(review): the Mutation tests appear to write to the fixture CSVs and
// the Delete test removes the album read by "One-to-One relationship" --
// tests seem order/state dependent; confirm fixtures are reset per run.
describe("Datasource CSV", () => {
let schema;
// Build the executable schema once for all tests in this suite.
beforeAll(async () => {
const albums = new CSVSource({
filepath: "fixtures/Album.csv"
});
const artists = new CSVSource({
filepath: "fixtures/Artist.csv"
});
const sources = { albums, artists };
const config: SkimahConfig = { typeDefs, sources };
const scaffoldResult = await generate(config);
schema = scaffoldResult.schema;
});
describe("Selection", () => {
// Pagination: skip the first 2 rows, return the next 2.
test("Skip and Limit", async () => {
const query = `
query {
findAlbums(skip: 2, limit: 2) {
title
}
}
`;
const result = await graphql(schema, query);
expect(result.errors).toBeUndefined();
expect(result.data).toMatchInlineSnapshot(`
Object {
"findAlbums": Array [
Object {
"title": "Restless and Wild",
},
Object {
"title": "Let There Be Rock",
},
],
}
`);
});
// Filter by artist name and traverse the artist -> albums relation.
test("One-to-Many relationship", async () => {
const query = `
query {
findArtists(where: { name: { eq: "U2" } }) {
id
name
albums {
title
}
}
}
`;
const result = await graphql(schema, query);
expect(result.errors).toBeUndefined();
expect(result.data).toMatchInlineSnapshot(`
Object {
"findArtists": Array [
Object {
"albums": Array [
Object {
"title": "Achtung Baby",
},
Object {
"title": "All That You Can't Leave Behind",
},
Object {
"title": "B-Sides 1980-1990",
},
Object {
"title": "How To Dismantle An Atomic Bomb",
},
Object {
"title": "Pop",
},
Object {
"title": "Rattle And Hum",
},
Object {
"title": "The Best Of 1980-1990",
},
Object {
"title": "War",
},
Object {
"title": "Zooropa",
},
Object {
"title": "Instant Karma: The Amnesty International Campaign to Save Darfur",
},
],
"id": "150",
"name": "U2",
},
],
}
`);
});
// Traverse the album -> artist side of the relation.
test("One-to-One relationship", async () => {
const query = `
query {
findAlbums(limit: 2) {
title
artist {
name
}
}
}
`;
const result = await graphql(schema, query);
expect(result.errors).toBeUndefined();
expect(result.data).toMatchInlineSnapshot(`
Object {
"findAlbums": Array [
Object {
"artist": Object {
"name": "AC/DC",
},
"title": "For Those About To Rock We Salute You",
},
Object {
"artist": Object {
"name": "Accept",
},
"title": "Balls to the Wall",
},
],
}
`);
});
});
describe("Mutation", () => {
// Insert two artists with random ids; only the affected count is checked
// because the ids are not deterministic.
it("Create", async () => {
const createId = () => Math.round(Math.random() * 100000);
const query = `
mutation {
createArtists(data: [
{ id: ${createId()}, name: "Artist-${createId()}" },
{ id: ${createId()}, name: "Artist-${createId()}" },
]) {
affected
}
}
`;
const result = await graphql(schema, query);
expect(result.errors).toBeUndefined();
expect(result.data.createArtists.affected).toHaveLength(2);
});
// Update a single artist selected by id and read back the new value.
it("Update", async () => {
const query = `
mutation {
updateArtists(changes: { name: "<NAME>", }, where: { id: {eq: 100} }) {
affected
artists {
id
name
}
}
}
`;
const result = await graphql(schema, query);
expect(result.errors).toBeUndefined();
expect(result.data).toMatchInlineSnapshot(`
Object {
"updateArtists": Object {
"affected": Array [
"100",
],
"artists": Array [
Object {
"id": "100",
"name": "<NAME>",
},
],
},
}
`);
});
// Delete by wildcard title match and verify the removed row is returned.
it("Delete", async () => {
const query = `
mutation {
deleteAlbums(where: { title: { like: "For Those*" } } ) {
affected
albums {
title
}
}
}
`;
const result = await graphql(schema, query);
expect(result.errors).toBeUndefined();
expect(result.data).toMatchInlineSnapshot(`
Object {
"deleteAlbums": Object {
"affected": Array [
"1",
],
"albums": Array [
Object {
"title": "For Those About To Rock We Salute You",
},
],
},
}
`);
});
});
});
|
<reponame>matheusvmg/CRUD_nodeJS_mongoDB
const express = require('express')
const atualizarUsuarios = express.Router()
const usuariosSchema = require('../model/schema')
// Updates one user: PUT /atualizar-usuario/:id replaces the listed fields
// of the document whose "id" field matches the route parameter.
atualizarUsuarios.put('/atualizar-usuario/:id', async (req, res) => {
    try {
        const usuarioAtualizado = await usuariosSchema.updateOne(
            { id: req.params.id },
            {
                $set: {
                    nome: req.body.nome,
                    sobrenome: req.body.sobrenome,
                    email: req.body.email,
                    idade: req.body.idade,
                    // NOTE(review): the password is stored as received --
                    // confirm it is hashed before reaching this handler.
                    senha: req.body.senha,
                },
            }
        );
        res.json(usuarioAtualizado);
    } catch (err) {
        // Bug fix: errors used to be sent with an implicit 200 status;
        // reply with 500 so clients can detect the failure.
        res.status(500).json({
            error: err
        });
    }
})
module.exports = atualizarUsuarios |
package sds
import (
"encoding/hex"
"fmt"
sdk "github.com/cosmos/cosmos-sdk/types"
sdkerrors "github.com/cosmos/cosmos-sdk/types/errors"
"github.com/stratosnet/stratos-chain/x/sds/keeper"
"github.com/stratosnet/stratos-chain/x/sds/types"
)
// NewHandler returns the sds module's message handler; it dispatches each
// incoming sdk.Msg to the matching handle* function.
func NewHandler(k keeper.Keeper) sdk.Handler {
	return func(ctx sdk.Context, msg sdk.Msg) (*sdk.Result, error) {
		// Fresh event manager so emitted events are scoped to this message.
		ctx = ctx.WithEventManager(sdk.NewEventManager())
		switch msg := msg.(type) {
		case types.MsgFileUpload:
			return handleMsgFileUpload(ctx, k, msg)
		case types.MsgPrepay:
			return handleMsgPrepay(ctx, k, msg)
		default:
			// Unknown message types are rejected explicitly.
			errMsg := fmt.Sprintf("unrecognized %s message type: %T", types.ModuleName, msg)
			return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, errMsg)
		}
	}
}
// handleMsgFileUpload verifies that the reporter is a registered SP
// (indexing) node, records the uploaded file's info (keyed by its hash)
// together with the current block height, and emits the upload events.
func handleMsgFileUpload(ctx sdk.Context, k keeper.Keeper, msg types.MsgFileUpload) (*sdk.Result, error) {
	// Only registered SP nodes may report uploads.
	if _, found := k.RegisterKeeper.GetIndexingNode(ctx, msg.Reporter); !found {
		return nil, sdkerrors.Wrapf(sdkerrors.ErrUnauthorized, "Reporter %s isn't an SP node", msg.Reporter.String())
	}

	// Round-trip the height through its JSON encoding (kept from the
	// original code, presumably to normalise the sdk.Int representation --
	// confirm); the errors were previously ignored and are now surfaced.
	height := sdk.NewInt(ctx.BlockHeight())
	heightByteArr, err := height.MarshalJSON()
	if err != nil {
		return nil, sdkerrors.Wrap(sdkerrors.ErrJSONMarshal, err.Error())
	}
	var heightReEncoded sdk.Int
	if err := heightReEncoded.UnmarshalJSON(heightByteArr); err != nil {
		return nil, sdkerrors.Wrap(sdkerrors.ErrJSONUnmarshal, err.Error())
	}

	fileInfo := types.NewFileInfo(heightReEncoded, msg.Reporter, msg.Uploader)
	k.SetFileHash(ctx, msg.FileHash, fileInfo)

	ctx.EventManager().EmitEvents(sdk.Events{
		sdk.NewEvent(
			types.EventTypeFileUpload,
			sdk.NewAttribute(types.AttributeKeyReporter, msg.Reporter.String()),
			sdk.NewAttribute(types.AttributeKeyUploader, msg.Uploader.String()),
			sdk.NewAttribute(types.AttributeKeyFileHash, hex.EncodeToString(msg.FileHash)),
		),
		sdk.NewEvent(
			sdk.EventTypeMessage,
			sdk.NewAttribute(sdk.AttributeKeyModule, types.AttributeValueCategory),
		),
	})
	return &sdk.Result{Events: ctx.EventManager().Events()}, nil
}
// handleMsgPrepay lets the sender pre-purchase storage capacity (uoz)
// with coins via the keeper, then emits the purchase events.
func handleMsgPrepay(ctx sdk.Context, k keeper.Keeper, msg types.MsgPrepay) (*sdk.Result, error) {
	// Bug fix: when sends are disabled this used to return (nil, nil),
	// silently dropping the prepay; report the condition to the caller.
	if !k.BankKeeper.GetSendEnabled(ctx) {
		return nil, sdkerrors.Wrap(sdkerrors.ErrInvalidRequest, "coin transfers are currently disabled")
	}
	purchased, err := k.Prepay(ctx, msg.Sender, msg.Coins)
	if err != nil {
		return nil, err
	}
	ctx.EventManager().EmitEvents(sdk.Events{
		sdk.NewEvent(
			types.EventTypePrepay,
			sdk.NewAttribute(types.AttributeKeyReporter, msg.Sender.String()),
			sdk.NewAttribute(types.AttributeKeyCoins, msg.Coins.String()),
			sdk.NewAttribute(types.AttributeKeyPurchasedUoz, purchased.String()),
		),
		sdk.NewEvent(
			sdk.EventTypeMessage,
			sdk.NewAttribute(sdk.AttributeKeyModule, types.AttributeValueCategory),
		),
	})
	return &sdk.Result{Events: ctx.EventManager().Events()}, nil
}
|
package api
import "github.com/aquasecurity/trivy/pkg/module/serialize"
const (
	// Version is the current module API version.
	Version = 1

	// Post-scan actions a module may request on scan results.
	ActionInsert serialize.PostScanAction = "INSERT"
	ActionUpdate serialize.PostScanAction = "UPDATE"
	ActionDelete serialize.PostScanAction = "DELETE"
)

// Module is the minimal contract every extension module implements.
type Module interface {
	Version() int
	Name() string
}

// Analyzer is implemented by modules that inspect individual files.
type Analyzer interface {
	// RequiredFiles lists the file paths the analyzer wants to receive.
	RequiredFiles() []string
	Analyze(filePath string) (*serialize.AnalysisResult, error)
}

// PostScanner is implemented by modules that post-process scan results
// according to the spec they declare.
type PostScanner interface {
	PostScanSpec() serialize.PostScanSpec
	PostScan(serialize.Results) (serialize.Results, error)
}
|
#!/usr/bin/env bash
# Update the client repo, rebuild parent-app-web, and publish the build
# output into the DanbiEduCorp/client_build repository.
# Fix: the script previously kept going (and pushed) after any failure.
set -euo pipefail

# Update client repository
git pull origin master

# Build parent-app-web
yarn install
yarn run build

# Prepare client_build (clone on first run, pull afterwards)
if [ -e client_build ]; then
    (cd client_build && git pull origin master)
else
    git clone https://github.com/DanbiEduCorp/client_build.git
fi

# Update builds: replace the previous iOS build with the fresh one.
# -f so a missing directory (fresh clone) does not abort the script.
rm -rf client_build/winkapp-parent-ios/
cp -R www/ client_build/winkapp-parent-ios/

# Git commit and push
cd client_build/
# -A instead of '*': also stages deletions and dotfiles.
git add -A
# Don't fail the whole script when there is nothing new to commit.
git commit -m 'update build(winkapp-parent-ios)' || echo "nothing to commit"
git push origin master
cd ..
|
<filename>soluciones/Mundo_Animal_Herencia_JARS/src/es/jeremyramos/Main.java<gh_stars>1-10
package es.jeremyramos;
import es.jeremyramos.Clases.Gatos;
import es.jeremyramos.Clases.Perros;
public class Main {
    /**
     * Demo entry point: creates one dog and one cat and exercises the
     * behaviour each inherits/overrides from the animal hierarchy.
     */
    public static void main(String[] args) {
        Perros perro = new Perros();
        Gatos gato = new Gatos();

        System.out.println("Perro");
        perro.hacerCaso();
        perro.hacerRuido();

        System.out.println("Gato");
        gato.hacerCaso();
        gato.hacerRuido();
    }
}
|
package depth_first_search;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.InputMismatchException;
/**
 * Boj 16437 (acmicpc.net/problem/16437): the farm is a tree rooted at
 * node 1; every other node holds either sheep ('S') or wolves ('W').
 * A post-order DFS sums each subtree's balance, clamping negative partial
 * sums (wolves eating all local sheep) to zero.
 *
 * @author minchoba
 * @see <a href="https://www.acmicpc.net/problem/16437/">problem page</a>
 */
public class Boj16437 {
    public static void main(String[] args) throws Exception {
        InputReader in = new InputReader(System.in);
        int N = in.readInt();

        // tree[p] lists the children of node p (nodes are 1-based; 0 unused).
        ArrayList<Integer>[] tree = new ArrayList[N + 1];
        for (int i = 0; i < N + 1; i++) {
            tree[i] = new ArrayList<>();
        }

        // cost[i] > 0: sheep count, cost[i] < 0: wolf count.
        long[] cost = new long[N + 1];
        for (int i = 2; i < N + 1; i++) {
            // Idiom fix: was `... == 'S' ? true : false`.
            boolean isSheep = in.readString().charAt(0) == 'S';
            long count = in.readLong();
            int parent = in.readInt();
            tree[parent].add(i);
            cost[i] = isSheep ? count : -count;
        }
        System.out.println(dfs(tree, cost, 1)); // surviving sheep
    }

    /**
     * Post-order DFS over the child lists: returns the subtree balance at
     * {@code current}, clamped to 0 when wolves outnumber the sheep.
     */
    private static long dfs(ArrayList<Integer>[] arr, long[] cost, int current) {
        long total = cost[current];
        for (int next : arr[current]) {
            total += dfs(arr, cost, next);
        }
        return total > 0 ? total : 0;
    }

    /** Minimal buffered reader over an InputStream (contest template). */
    private static class InputReader {
        private InputStream stream;
        private byte[] buf = new byte[1024];
        private int curChar;
        private int numChars;
        private SpaceCharFilter filter;

        public InputReader(InputStream stream) {
            this.stream = stream;
        }

        /** Returns the next raw byte, refilling the buffer; -1 on EOF. */
        public int read() {
            if (numChars == -1) {
                throw new InputMismatchException();
            }
            if (curChar >= numChars) {
                curChar = 0;
                try {
                    numChars = stream.read(buf);
                } catch (IOException e) {
                    throw new InputMismatchException();
                }
                if (numChars <= 0) {
                    return -1;
                }
            }
            return buf[curChar++];
        }

        /** Reads a (possibly negative) decimal int, skipping leading whitespace. */
        public int readInt() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            int sgn = 1;
            if (c == '-') {
                sgn = -1;
                c = read();
            }
            int res = 0;
            do {
                if (c < '0' || c > '9') {
                    throw new InputMismatchException();
                }
                res *= 10;
                res += c - '0';
                c = read();
            } while (!isSpaceChar(c));
            return res * sgn;
        }

        /** Reads a (possibly negative) decimal long, skipping leading whitespace. */
        public long readLong() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            int sgn = 1;
            if (c == '-') {
                sgn = -1;
                c = read();
            }
            long res = 0;
            do {
                if (c < '0' || c > '9') {
                    throw new InputMismatchException();
                }
                res *= 10;
                res += c - '0';
                c = read();
            } while (!isSpaceChar(c));
            return res * sgn;
        }

        /** Reads one whitespace-delimited token. */
        public String readString() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            StringBuilder res = new StringBuilder();
            do {
                res.appendCodePoint(c);
                c = read();
            } while (!isSpaceChar(c));
            return res.toString();
        }

        public boolean isSpaceChar(int c) {
            if (filter != null) {
                return filter.isSpaceChar(c);
            }
            return c == ' ' || c == '\n' || c == '\r' || c == '\t' || c == -1;
        }

        public interface SpaceCharFilter {
            public boolean isSpaceChar(int ch);
        }
    }
}
|
public class Program
{
    /// <summary>
    /// Scans the array once, tracking the running maximum and every
    /// index at which that maximum occurs, then prints both.
    /// </summary>
    public static void Main(string[] args)
    {
        int[] values = {2, 3, 6, 5, 4, 2, 1, 5};

        int maximum = values[0];
        List<int> maxIndexes = new List<int>();

        for (int pos = 0; pos < values.Length; pos++)
        {
            if (values[pos] > maximum)
            {
                // New maximum: all previously collected indexes are stale.
                maximum = values[pos];
                maxIndexes.Clear();
                maxIndexes.Add(pos);
            }
            else if (values[pos] == maximum)
            {
                maxIndexes.Add(pos);
            }
        }

        Console.WriteLine("Max element = {0}", maximum);
        Console.WriteLine("Indexes of max element: ");
        foreach (int idx in maxIndexes)
        {
            Console.Write(idx + " ");
        }
        Console.WriteLine();
    }
}
<gh_stars>0
// Tiny dashboard helper: fills the ".showIp" and ".showTimestamp" cards
// with text fetched from the local REST API.
let dashboard = {
    initialize() {
        this.getIp();
        this.getTimestamp();
    },
    getIp() {
        this.request('/api/network/ip', '.showIp');
    },
    getTimestamp() {
        this.request('/api/system/timestamp', '.showTimestamp');
    },
    // Fetches `url` as plain text and writes it into the ".card-text"
    // element of every card matching `cardClass`.
    request(url, cardClass) {
        fetch(url)
            .then(response => response.text())
            .then(data => {
                for (const card of document.querySelectorAll(cardClass)) {
                    card.querySelector('.card-text').innerHTML = data;
                }
            });
    },
};
dashboard.initialize();
|
#!/usr/bin/env bash
# Start the development stack: elasticsearch first, then the Rails server.
# Fix: the shebang must be the FIRST line of the file; it previously sat
# below a (python-style) coding comment and was therefore ignored.
echo "--- STARTING UP SERVER ---"
sudo service elasticsearch start
#PATH=~/home/vagrant/.rvm/gems/ruby-2.3.3@openfarm/bin/:$PATH
source /home/vagrant/.rvm/scripts/rvm
rvm reload
# Fix: export the variable; a plain assignment is not inherited by the
# child `rails` process.
export ELASTICSEARCH_URL='http://127.0.0.1:9201'
sleep 10
cd /vagrant
# bundle install
rails s -d -b 0.0.0.0
echo "--- SERVER STARTED ---"
|
/// Returns the Fibonacci number at position `numTerms` (0, 1, 1, 2, 3, ...).
/// Rewritten iteratively: the original doubly-recursive form was O(2^n).
/// (The original "fibonnaci" spelling is kept so existing callers compile.)
func fibonnaci(numTerms: Int) -> Int {
    if numTerms < 2 {
        return numTerms
    }
    var previous = 0
    var current = 1
    for _ in 2...numTerms {
        (previous, current) = (current, previous + current)
    }
    return current
}

// Print the first 11 terms of the sequence.
for i in 0...10 {
    print(fibonnaci(numTerms: i))
}
/*
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.maven.classpath.munger.logging;
/**
 * Base {@link Log} implementation: each single-argument logging method
 * forwards to its two-argument (message, throwable) counterpart with a
 * {@code null} throwable, so subclasses only implement the latter.
 *
 * @author <NAME>.
 * @since Jan 1, 2014 12:43:18 PM
 */
public abstract class AbstractLog implements Log {
    protected AbstractLog() {
        super();
    }

    /** Logs {@code message} at TRACE level with no throwable. */
    @Override
    public void trace(Object message) {
        trace(message, null);
    }

    /** Logs {@code message} at DEBUG level with no throwable. */
    @Override
    public void debug(Object message) {
        debug(message, null);
    }

    /** Logs {@code message} at INFO level with no throwable. */
    @Override
    public void info(Object message) {
        info(message, null);
    }

    /** Logs {@code message} at WARN level with no throwable. */
    @Override
    public void warn(Object message) {
        warn(message, null);
    }

    /** Logs {@code message} at ERROR level with no throwable. */
    @Override
    public void error(Object message) {
        error(message, null);
    }

    /** Logs {@code message} at FATAL level with no throwable. */
    @Override
    public void fatal(Object message) {
        fatal(message, null);
    }
}
|
def factorial(n):
    """Return n! for a non-negative integer n.

    Raises:
        ValueError: if ``n`` is negative. (Previously the function
            silently returned 1 for any negative input.)
    """
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    while n > 1:
        result *= n
        n -= 1
    return result
<reponame>magma/fbc-js-core
/**
* Copyright 2020 The Magma Authors.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @flow strict-local
* @format
*/
require('@fbcnms/babel-register');
const path = require('path');

// sequelize-cli configuration: points the CLI at this package's
// config/migrations/models/seeders directories.
module.exports = {
  config: path.resolve(__dirname, 'sequelizeConfig.js'),
  'migrations-path': path.resolve(__dirname, 'migrations'),
  'models-path': path.resolve(__dirname, 'models'),
  'seeders-path': path.resolve(__dirname, 'seeders'),
};
|
<gh_stars>0
"""
Graph class, takes number of cities, edges listed as an adjacency matrix, and the colors of each node. We can find a node which returns the color and the edges of a node n returned as a tuple
"""
class Graph(object):
    """Colored graph over ``numCities`` nodes.

    ``edges`` is an adjacency matrix (``edges[i][j]`` is the weight of the
    edge between i and j) and ``colors[i]`` is node i's color code
    (e.g. ``"B"`` / ``"R"``).
    """

    def __init__(self, numCities, edges, colors):
        self.numCities = numCities
        self.edges = edges
        # Copy so later mutation of the caller's list cannot affect us.
        self.colors = list(colors)

    def findNode(self, n):
        """Return ``(color, adjacency_row)`` for node ``n``."""
        return (self.colors[n], self.edges[n])

    def _smallestEdgeTo(self, n1, color):
        """Return ``(n1, v, weight)`` for the cheapest edge from ``n1`` to
        any node of ``color``, or ``None`` when no such neighbour exists.

        Fixes two defects of the originals: an UnboundLocalError when no
        matching neighbour existed, and the magic sentinel 1000 which
        silently ignored edges with weight >= 1000 (now ``inf``).
        """
        best = None
        dist = float("inf")
        for v in range(len(self.edges[n1])):
            if v != n1 and self.colors[v] == color and self.edges[n1][v] < dist:
                dist = self.edges[n1][v]
                best = (n1, v, dist)
        return best

    def smallestBlueEdge(self, n1):
        """Cheapest edge from ``n1`` to a blue ("B") node, or ``None``."""
        return self._smallestEdgeTo(n1, "B")

    def smallestRedEdge(self, n1):
        """Cheapest edge from ``n1`` to a red ("R") node, or ``None``."""
        return self._smallestEdgeTo(n1, "R")
def checkPath(graph, path):
    # Walks the edge list `path`, starting from the first endpoint of the
    # first edge, and returns False if the walk gets stuck (no remaining
    # edge touches the current city) or if 4 consecutively visited cities
    # share one color; True otherwise.
    temp = list(path)
    curr = temp[0][0]
    color = graph.colors[curr]
    colorCities = [(curr, color)]
    count = 1
    while len(temp) > 0:
        nextEdge = -1
        # Pick the first unused edge incident to the current city.
        for e in temp:
            if e[0] == curr or e[1] == curr:
                nextEdge = e
                break
        if nextEdge == -1:
            # No edge continues from `curr`: the path is disconnected.
            return False
        elif nextEdge[0] == curr:
            curr = nextEdge[1]
            temp.remove(nextEdge)
        else:
            curr = nextEdge[0]
            temp.remove(nextEdge)
        if color == graph.colors[curr] and count == 3:
            # A 4th consecutive same-colored city: invalid path.
            return False
        elif color == graph.colors[curr]:
            colorCities.append((curr, color))
            count += 1
        else:
            # Color changed: restart the same-color run counter.
            color = graph.colors[curr]
            colorCities = [(curr, color)]
            count = 1
    return True
def getCost(graph, path):
    """Sum the edge weights along ``path``.

    Each item of ``path`` carries its city index in position 0; weights
    are read from the graph's adjacency matrix ``graph.edges``.
    """
    length = 0
    previous_city = None
    for step in path:
        city = step[0]
        if previous_city is not None:
            length += graph.edges[previous_city][city]
        previous_city = city
    return length
def frontColor(graph, path, v):
    """True when the first three cities of ``path`` and node ``v`` all
    share one color; ``path`` must contain at least 4 entries."""
    if len(path) < 4:
        return False
    first, second, third = (graph.colors[step[0]] for step in path[:3])
    return first == second == third == graph.colors[v]
def backColor(graph, path, v):
    """True when the last three cities of ``path`` and node ``v`` all
    share one color; ``path`` must contain at least 4 entries."""
    if len(path) < 4:
        return False
    tail_colors = {graph.colors[step[0]] for step in path[-3:]}
    return len(tail_colors) == 1 and graph.colors[v] in tail_colors
def isEdge(path, edge):
    """True when either endpoint of ``path`` (its first or last item)
    equals one of the two vertices stored at ``edge[1]`` and ``edge[2]``."""
    endpoints = (edge[1], edge[2])
    return path[0] in endpoints or path[-1] in endpoints
|
<filename>src/main/java/cn/gobyte/apply/utils/poi/convert/ExportConvert.java
package cn.gobyte.apply.utils.poi.convert;
/**
 * Export value converter: turns a raw bean/cell value into the string
 * written to the exported POI spreadsheet.
 *
 * @author shanLan <EMAIL>
 * @date 2019/4/7 13:18
 */
public interface ExportConvert {
    /**
     * @param val raw value read from the exported object; may be null
     * @return the string representation to place into the exported cell
     */
    String handler(Object val);
}
|
<reponame>duckie/boson
#include <algorithm>
#include <random>
#include <vector>
#include "boson/event_loop.h"
#include "boson/memory/sparse_vector.h"
#include "catch.hpp"
// Exercises the sparse_vector index allocator: on a fresh vector indices
// must be handed out in increasing order, and after freeing every cell the
// freed indices must be reused in LIFO order (reverse of the free order).
TEST_CASE("Sparse vector - Allocation algorithm", "[memory][sparse_vector]") {
  constexpr size_t const nb_elements = 1e2;
  std::random_device seed;
  std::mt19937_64 generator{seed()};
  // Instantiate a sparse vector
  boson::memory::sparse_vector<bool> sparse_instance{nb_elements -
                                                     1};  // -1 to force an end allocation
  // Create a permutation of [0, nb_elements) used as the random free order.
  std::vector<size_t> indexes;
  indexes.resize(nb_elements);
  for (size_t index = 0; index < nb_elements; ++index) indexes[index] = index;
  std::vector<size_t> expected_allocate_order = indexes;
  std::vector<size_t> permuted_indexes = indexes;
  shuffle(begin(permuted_indexes), end(permuted_indexes), generator);
  // Allocate every cell: indices come back as 0,1,2,...
  std::vector<size_t> allocate_order;
  for (size_t index = 0; index < nb_elements; ++index)
    allocate_order.emplace_back(sparse_instance.allocate());
  CHECK(allocate_order == expected_allocate_order);
  // Free every cell in random order
  for (auto index : permuted_indexes) sparse_instance.free(index);
  // Reallocate: the free list is expected to behave as a stack, so the
  // reversed allocation order equals the free order.
  allocate_order.clear();
  for (size_t index = 0; index < nb_elements; ++index)
    allocate_order.emplace_back(sparse_instance.allocate());
  reverse(begin(allocate_order), end(allocate_order));
  CHECK(allocate_order == permuted_indexes);
}
|
// Public surface of this package.
// Command groups and schema factories/helpers:
export {
  BotInfoCommands,
  ConfigurateCommands,
  CoreCommands,
  VoiceCommands,
  defineConfCommandSchema,
  defineConfigurateCommandSchema,
  defineCoreCommandSchema,
  definedBotInfoCommandSchema,
  definedVoiceCommandSchema,
  schemaTextSupplier,
} from "./bootstrap";
// Helpers that derive usage/help text from a command schema:
export {
  usageFromSchema,
  usageBodyFromSchema,
  usageEntrysFromSchema,
  usageEntryFromSchema2,
} from "./util";
|
from bs4 import BeautifulSoup
import requests
from urllib.parse import urljoin
def extract_unique_urls(html_content):
    """Return a sorted list of the distinct absolute (http/https) hrefs
    found on the document's anchor tags."""
    soup = BeautifulSoup(html_content, 'html.parser')
    found = set()
    for anchor in soup.find_all('a'):
        target = anchor.get('href')
        if target and target.startswith(('http://', 'https://')):
            found.add(target)
    return sorted(found)
def main():
    # Assuming the HTML content is stored in a file named 'sample.html'
    with open('sample.html', 'r') as file:
        html_content = file.read()
    # Print each distinct absolute URL, one per line, in sorted order.
    unique_urls = extract_unique_urls(html_content)
    for url in unique_urls:
        print(url)

if __name__ == "__main__":
    main()
<filename>server/pm2.ecosystem.config.js
// pm2 ecosystem file for the "nothingbookapi" server process.
// Select an environment with `pm2 start ... --env <development|uat|pre|production>`.
module.exports = {
  apps: [
    {
      name: "nothingbookapi",
      script: "./server/index.js",
      // File watching is disabled; ignore_watch is kept for when it is
      // switched back on (logs, uploads, git metadata, etc.).
      watch: false,
      ignore_watch: ["../logs/*", "../node_modules", "../.git", "../uploads/*", "../audio/*", "../private/files/**/*", "../private/imgs/**/*"],
      env_development: {
        // "PORT": 3030,
        "NODE_ENV": "development",
        "ENV_CONFIG": "dev",
      },
      env_uat: {
        // "PORT": 3030,
        "NODE_ENV": "uat",
        "ENV_CONFIG": "uat",
      },
      env_pre: {
        // "PORT": 80,
        "NODE_ENV": "pre",
        "ENV_CONFIG": "pre",
      },
      env_production: {
        // "PORT": 80,
        "NODE_ENV": "production",
        "ENV_CONFIG": "prod",
      }
    }
  ]
}
/*
* Copyright 2012-2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.geoxp.oss.servlet;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.PublicKey;
import java.util.Arrays;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.bouncycastle.util.encoders.Base64;
import org.bouncycastle.util.encoders.Hex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.geoxp.oss.CryptoHelper;
import com.geoxp.oss.OSS;
import com.geoxp.oss.OSS.OSSToken;
import com.geoxp.oss.OSSException;
import com.google.inject.Singleton;
@Singleton
public class GetSecretServlet extends HttpServlet {

    private static final Logger LOGGER = LoggerFactory.getLogger(GetSecretServlet.class);

    /**
     * Retrieves a stored secret for the token's key fingerprint: the secret
     * is unwrapped with the master key, re-wrapped under a one-time AES key,
     * and that AES key is sealed with the requester's RSA public key. The
     * response is Base64(netstring(wrapped secret) + netstring(sealed key)).
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        if (!OSS.isInitialized()) {
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Open Secret Server not yet initialized.");
            return;
        }

        //
        // Extract token
        //
        String token = req.getParameter("token");

        if (null == token) {
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing 'token'.");
            // Bug fix: the missing `return` here let execution continue and
            // Base64.decode(null) threw an NPE after the error was sent.
            return;
        }

        //
        // Decode token
        //
        byte[] tokendata = Base64.decode(token);

        //
        // Extract OSS Token
        //
        OSSToken osstoken = null;

        try {
            osstoken = OSS.checkToken(tokendata);
        } catch (OSSException osse) {
            LOGGER.error("doPost", osse);
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, osse.getMessage());
            return;
        }

        //
        // Extract secretname and RSA pub key from secret
        //
        byte[] secretname = CryptoHelper.decodeNetworkString(osstoken.getSecret(), 0);
        byte[] rsapubblob = CryptoHelper.decodeNetworkString(osstoken.getSecret(), secretname.length + 4);

        //
        // Retrieve secret (keyed by name + requester's SSH key fingerprint)
        //
        byte[] secret = null;

        try {
            secret = OSS.getKeyStore().getSecret(new String(secretname, "UTF-8"), new String(Hex.encode(CryptoHelper.sshKeyBlobFingerprint(osstoken.getKeyblob()))));
        } catch (OSSException osse) {
            LOGGER.error("doPost", osse);
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, osse.getMessage());
            return;
        }

        //
        // Unwrap secret with the master key; zero the key material after use.
        //
        byte[] k = OSS.getMasterSecret();
        secret = CryptoHelper.unwrapBlob(k, secret);
        Arrays.fill(k, (byte) 0);

        if (null == secret) {
            LOGGER.error("[" + new String(Hex.encode(CryptoHelper.sshKeyBlobFingerprint(osstoken.getKeyblob()))) + "] failed to retrieve secret '" + new String(secretname, "UTF-8") + "', integrity check failed.");
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Secret integrity failed.");
            return;
        }

        //
        // Wrap secret with a temporary AES key
        //
        byte[] wrappingkey = new byte[32];
        CryptoHelper.getSecureRandom().nextBytes(wrappingkey);
        byte[] wrappedsecret = CryptoHelper.wrapAES(wrappingkey, secret);
        Arrays.fill(secret, (byte) 0);
        secret = wrappedsecret;

        //
        // Seal wrapping key with provided RSA pub key
        //
        PublicKey rsapub = CryptoHelper.sshKeyBlobToPublicKey(rsapubblob);
        byte[] sealedwrappingkey = CryptoHelper.encryptRSA(rsapub, wrappingkey);
        Arrays.fill(wrappingkey, (byte) 0);

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write(CryptoHelper.encodeNetworkString(secret));
        baos.write(CryptoHelper.encodeNetworkString(sealedwrappingkey));

        resp.setStatus(HttpServletResponse.SC_OK);
        resp.getWriter().println(new String(Base64.encode(baos.toByteArray()), "UTF-8"));

        LOGGER.info("[" + new String(Hex.encode(CryptoHelper.sshKeyBlobFingerprint(osstoken.getKeyblob()))) + "] retrieved " + (secret.length - OSS.NONCE_BYTES) + " bytes of secret '" + new String(secretname, "UTF-8") + "'.");
    }
}
|
#!/bin/sh
# Fetch the device inventory from the local crosby service, render it to
# XLSX (and CSV), maintain "latest" symlinks, and prune files > 20 days old.
cd "`dirname $0`"
DIR="/var/www/html/devices"
TS="`date +'%Y%m%d-%H'`"
FILE="${DIR}/data/bidmc-cros-${TS}.json"
# Fix: `&>` is a bashism; under #!/bin/sh it backgrounds the command, so
# the -f check below could race the download. Use POSIX redirection.
curl --noproxy '*' -k https://localhost:3333/crosby/devices -o "${FILE}" > /dev/null 2>&1
[ -f "${FILE}" ] || exit 1
[ -s "${DIR}/data/latest.json" ] && rm -f "${DIR}/data/latest.json"
ln -s "${FILE}" "${DIR}/data/latest.json"
# Fix: `source` is a bashism; `.` is the POSIX equivalent.
. /etc/profile.d/nodejs.sh
# Bug fix: NODE=`which node &> /dev/null` always produced an EMPTY value
# (the captured output was discarded), so the fallback path was always
# used. command -v prints to stdout and is captured normally.
NODE="`command -v node`"
[ -z "${NODE}" ] && NODE=/opt/rh/rh-nodejs14/root/usr/bin/node
CSV="${DIR}/data/bidmc-cros-${TS}.csv"
XLSX="${DIR}/sheets/report-${TS}.xlsx"
${NODE} render "${FILE}" "${XLSX}"
# NOTE(review): the CSV is presumably produced by `render` as a side
# effect -- confirm, otherwise latest.csv becomes a dangling symlink.
[ -s "${DIR}/data/latest.csv" ] && rm -f "${DIR}/data/latest.csv"
ln -s "${CSV}" "${DIR}/data/latest.csv"
[ -s "${DIR}/sheets/latest.xlsx" ] && rm -f "${DIR}/sheets/latest.xlsx"
ln -s "${XLSX}" "${DIR}/sheets/latest.xlsx"
# keep last 3-weeks
find "${DIR}/data" -name '*.json' -mtime +20 -exec rm -f {} \;
find "${DIR}/data" -name '*.csv' -mtime +20 -exec rm -f {} \;
find "${DIR}/sheets" -name '*.xlsx' -mtime +20 -exec rm -f {} \;
#!/bin/bash
# Builds the crypto-cpp library for the target platform encoded in $PLAT
# and copies the resulting FFI shared library into starknet_py.
cd crypto-cpp
mkdir -p build/Release
CMAKE_CXX_COMPILER="g++"
# $PLAT looks like "<name>-<os_version>-<arch>"; split it on '-'.
IFS='-' read -r -a TARGET_ARR_WRONG_ORDER <<< "$PLAT"
SYS_V="${TARGET_ARR_WRONG_ORDER[1]}"
TARGET_ARCH="${TARGET_ARR_WRONG_ORDER[2]}"
if [ "$(uname)" == "Darwin" ]; then
    TARGET_TRIPLET="${TARGET_ARCH}-apple-macos${SYS_V}"
    if [[ "$(uname -m)" != "$TARGET_ARCH" ]]; then
        echo "Crosscompiling enabled"
        export CMAKE_CROSSCOMPILING="1"
    fi
    if [[ "$TARGET_ARCH" == *"arm"* ]]; then
        echo "Compiling for arm architecture"
        export CMAKE_SYSTEM_PROCESSOR="arm"
    fi
    echo "Targeting ${TARGET_TRIPLET}"
    export MACOSX_DEPLOYMENT_TARGET="${SYS_V}"
    export MACOSX_VERSION_MIN="${SYS_V}"
    # In-place sed (keeps a .original backup): reorders the project's CXX
    # flags and appends "-target <triplet>" for cross builds.
    sed -i'.original' "s/\${CMAKE_CXX_FLAGS} -std=c++17 -Werror -Wall -Wextra -fno-strict-aliasing -fPIC/-std=c++17 -Werror -Wall -Wextra -fno-strict-aliasing -fPIC \${CMAKE_CXX_FLAGS} -target ${TARGET_TRIPLET}/" CMakeLists.txt
    CMAKE_CXX_COMPILER="clang++"
else
    # Same reorder without the -target flag (so user CXX flags take effect).
    sed -i'.original' "s/\${CMAKE_CXX_FLAGS} -std=c++17 -Werror -Wall -Wextra -fno-strict-aliasing -fPIC/-std=c++17 -Werror -Wall -Wextra -fno-strict-aliasing -fPIC \${CMAKE_CXX_FLAGS}/" CMakeLists.txt
fi
# Show the patched file in the build log for debugging.
cat CMakeLists.txt
(cd build/Release; cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER="${CMAKE_CXX_COMPILER}" -DCMAKE_CXX_FLAGS="-Wno-type-limits -Wno-range-loop-analysis -Wno-unused-parameter" ../..)
if [ "$(uname)" == "Darwin" ]; then
    TARGET_TRIPLET="${TARGET_ARCH}-apple-macos${SYS_V}"
    # Patch the fetched googletest so it is built for the same target.
    sed -i'.original' "s/#Note that googlemock target already builds googletest/set(CMAKE_CXX_FLAGS \"-target ${TARGET_TRIPLET}\")/" build/Release/_deps/googletest-src/CMakeLists.txt
fi
make -C build/Release
if [ $? -ne 0 ]; then
    exit 1
fi
cp build/Release/src/starkware/crypto/ffi/libcrypto_c_exports.* ../starknet_py/utils/crypto
|
var gdgApp = angular.module('gdgApp', []);

// Controller that loads chapter metadata from gdg-info.json and then the
// chapter's public Google+ activity feed.
gdgApp.controller('GDGData', ['$scope', '$sce', '$http', function ($scope, $sce, $http) {
    // Allow raw HTML snippets from the feed to be bound via ng-bind-html.
    $scope.renderHtml = function (htmlCode) {
        return $sce.trustAsHtml(htmlCode);
    };

    $scope.gdgData = {"id": PLUS_ID};

    $http.get('gdg-info.json').then(function (res) {
        $scope.gdgData = res.data;
        $scope.gdgData.id = PLUS_ID;
        $scope.gdgData.calendarEmbedUrl = $sce.trustAsResourceUrl($scope.gdgData.calendarUrl);

        // Fix: $http's .success()/.error() helpers were deprecated and
        // removed in AngularJS 1.6; use the standard promise callbacks,
        // consistent with the .then() call above.
        $http.get("https://www.googleapis.com/plus/v1/people/" + PLUS_ID + "/activities/public?alt=json&key=" + API_KEY)
            .then(function (response) {
                $scope.posts = response.data['items'];
            }, function (response) {
                console.error(response.data);
            });
    });
}]);
package fr.syncrase.ecosyst.web.rest;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import fr.syncrase.ecosyst.IntegrationTest;
import fr.syncrase.ecosyst.domain.Ensoleillement;
import fr.syncrase.ecosyst.domain.Plante;
import fr.syncrase.ecosyst.repository.EnsoleillementRepository;
import fr.syncrase.ecosyst.service.criteria.EnsoleillementCriteria;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import javax.persistence.EntityManager;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.http.MediaType;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.transaction.annotation.Transactional;
/**
* Integration tests for the {@link EnsoleillementResource} REST controller.
*/
@IntegrationTest
@AutoConfigureMockMvc
@WithMockUser
class EnsoleillementResourceIT {
private static final String DEFAULT_ORIENTATION = "AAAAAAAAAA";
private static final String UPDATED_ORIENTATION = "BBBBBBBBBB";
private static final Double DEFAULT_ENSOLEILEMENT = 1D;
private static final Double UPDATED_ENSOLEILEMENT = 2D;
private static final Double SMALLER_ENSOLEILEMENT = 1D - 1D;
private static final String ENTITY_API_URL = "/api/ensoleillements";
private static final String ENTITY_API_URL_ID = ENTITY_API_URL + "/{id}";
private static Random random = new Random();
private static AtomicLong count = new AtomicLong(random.nextInt() + (2 * Integer.MAX_VALUE));
@Autowired
private EnsoleillementRepository ensoleillementRepository;
@Autowired
private EntityManager em;
@Autowired
private MockMvc restEnsoleillementMockMvc;
private Ensoleillement ensoleillement;
    /**
     * Create an entity for this test, populated with the DEFAULT_* fixture
     * values (not persisted).
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     */
    public static Ensoleillement createEntity(EntityManager em) {
        Ensoleillement ensoleillement = new Ensoleillement().orientation(DEFAULT_ORIENTATION).ensoleilement(DEFAULT_ENSOLEILEMENT);
        return ensoleillement;
    }
    /**
     * Create an updated entity for this test, populated with the UPDATED_*
     * fixture values (not persisted).
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     */
    public static Ensoleillement createUpdatedEntity(EntityManager em) {
        Ensoleillement ensoleillement = new Ensoleillement().orientation(UPDATED_ORIENTATION).ensoleilement(UPDATED_ENSOLEILEMENT);
        return ensoleillement;
    }
    /** Builds a fresh (unsaved) fixture entity before every test. */
    @BeforeEach
    public void initTest() {
        ensoleillement = createEntity(em);
    }
    /** POST creates the entity and persists the DEFAULT_* field values. */
    @Test
    @Transactional
    void createEnsoleillement() throws Exception {
        int databaseSizeBeforeCreate = ensoleillementRepository.findAll().size();
        // Create the Ensoleillement
        restEnsoleillementMockMvc
            .perform(
                post(ENTITY_API_URL).contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(ensoleillement))
            )
            .andExpect(status().isCreated());
        // Validate the Ensoleillement in the database
        List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
        assertThat(ensoleillementList).hasSize(databaseSizeBeforeCreate + 1);
        Ensoleillement testEnsoleillement = ensoleillementList.get(ensoleillementList.size() - 1);
        assertThat(testEnsoleillement.getOrientation()).isEqualTo(DEFAULT_ORIENTATION);
        assertThat(testEnsoleillement.getEnsoleilement()).isEqualTo(DEFAULT_ENSOLEILEMENT);
    }
    /** POST with a client-supplied id must be rejected with 400. */
    @Test
    @Transactional
    void createEnsoleillementWithExistingId() throws Exception {
        // Create the Ensoleillement with an existing ID
        ensoleillement.setId(1L);
        int databaseSizeBeforeCreate = ensoleillementRepository.findAll().size();
        // An entity with an existing ID cannot be created, so this API call must fail
        restEnsoleillementMockMvc
            .perform(
                post(ENTITY_API_URL).contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(ensoleillement))
            )
            .andExpect(status().isBadRequest());
        // Validate the Ensoleillement in the database (nothing was added)
        List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
        assertThat(ensoleillementList).hasSize(databaseSizeBeforeCreate);
    }
/** GET the collection resource and verify the persisted entity appears with its default fields. */
@Test
@Transactional
void getAllEnsoleillements() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList
    restEnsoleillementMockMvc
        .perform(get(ENTITY_API_URL + "?sort=id,desc"))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
        .andExpect(jsonPath("$.[*].id").value(hasItem(ensoleillement.getId().intValue())))
        .andExpect(jsonPath("$.[*].orientation").value(hasItem(DEFAULT_ORIENTATION)))
        .andExpect(jsonPath("$.[*].ensoleilement").value(hasItem(DEFAULT_ENSOLEILEMENT.doubleValue())));
}
/** GET a single entity by id and verify all its fields. */
@Test
@Transactional
void getEnsoleillement() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get the ensoleillement
    restEnsoleillementMockMvc
        .perform(get(ENTITY_API_URL_ID, ensoleillement.getId()))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
        .andExpect(jsonPath("$.id").value(ensoleillement.getId().intValue()))
        .andExpect(jsonPath("$.orientation").value(DEFAULT_ORIENTATION))
        .andExpect(jsonPath("$.ensoleilement").value(DEFAULT_ENSOLEILEMENT.doubleValue()));
}
/** Criteria filtering on the id field: equals / notEquals / range operators. */
@Test
@Transactional
void getEnsoleillementsByIdFiltering() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    Long id = ensoleillement.getId();
    defaultEnsoleillementShouldBeFound("id.equals=" + id);
    defaultEnsoleillementShouldNotBeFound("id.notEquals=" + id);
    defaultEnsoleillementShouldBeFound("id.greaterThanOrEqual=" + id);
    defaultEnsoleillementShouldNotBeFound("id.greaterThan=" + id);
    defaultEnsoleillementShouldBeFound("id.lessThanOrEqual=" + id);
    defaultEnsoleillementShouldNotBeFound("id.lessThan=" + id);
}
/** Criteria filtering: orientation.equals matches only the persisted value. */
@Test
@Transactional
void getAllEnsoleillementsByOrientationIsEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where orientation equals to DEFAULT_ORIENTATION
    defaultEnsoleillementShouldBeFound("orientation.equals=" + DEFAULT_ORIENTATION);
    // Get all the ensoleillementList where orientation equals to UPDATED_ORIENTATION
    defaultEnsoleillementShouldNotBeFound("orientation.equals=" + UPDATED_ORIENTATION);
}
/** Criteria filtering: orientation.notEquals excludes the persisted value. */
@Test
@Transactional
void getAllEnsoleillementsByOrientationIsNotEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where orientation not equals to DEFAULT_ORIENTATION
    defaultEnsoleillementShouldNotBeFound("orientation.notEquals=" + DEFAULT_ORIENTATION);
    // Get all the ensoleillementList where orientation not equals to UPDATED_ORIENTATION
    defaultEnsoleillementShouldBeFound("orientation.notEquals=" + UPDATED_ORIENTATION);
}
/** Criteria filtering: orientation.in matches when the persisted value is in the list. */
@Test
@Transactional
void getAllEnsoleillementsByOrientationIsInShouldWork() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where orientation in DEFAULT_ORIENTATION or UPDATED_ORIENTATION
    defaultEnsoleillementShouldBeFound("orientation.in=" + DEFAULT_ORIENTATION + "," + UPDATED_ORIENTATION);
    // Get all the ensoleillementList where orientation equals to UPDATED_ORIENTATION
    defaultEnsoleillementShouldNotBeFound("orientation.in=" + UPDATED_ORIENTATION);
}
/** Criteria filtering: orientation.specified distinguishes null from non-null values. */
@Test
@Transactional
void getAllEnsoleillementsByOrientationIsNullOrNotNull() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where orientation is not null
    defaultEnsoleillementShouldBeFound("orientation.specified=true");
    // Get all the ensoleillementList where orientation is null
    defaultEnsoleillementShouldNotBeFound("orientation.specified=false");
}
/** Criteria filtering: orientation.contains substring matching. */
@Test
@Transactional
void getAllEnsoleillementsByOrientationContainsSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where orientation contains DEFAULT_ORIENTATION
    defaultEnsoleillementShouldBeFound("orientation.contains=" + DEFAULT_ORIENTATION);
    // Get all the ensoleillementList where orientation contains UPDATED_ORIENTATION
    defaultEnsoleillementShouldNotBeFound("orientation.contains=" + UPDATED_ORIENTATION);
}
/** Criteria filtering: orientation.doesNotContain negative substring matching. */
@Test
@Transactional
void getAllEnsoleillementsByOrientationNotContainsSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where orientation does not contain DEFAULT_ORIENTATION
    defaultEnsoleillementShouldNotBeFound("orientation.doesNotContain=" + DEFAULT_ORIENTATION);
    // Get all the ensoleillementList where orientation does not contain UPDATED_ORIENTATION
    defaultEnsoleillementShouldBeFound("orientation.doesNotContain=" + UPDATED_ORIENTATION);
}
/** Criteria filtering: ensoleilement.equals matches only the persisted value. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement equals to DEFAULT_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.equals=" + DEFAULT_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement equals to UPDATED_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.equals=" + UPDATED_ENSOLEILEMENT);
}
/** Criteria filtering: ensoleilement.notEquals excludes the persisted value. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsNotEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement not equals to DEFAULT_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.notEquals=" + DEFAULT_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement not equals to UPDATED_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.notEquals=" + UPDATED_ENSOLEILEMENT);
}
/** Criteria filtering: ensoleilement.in matches when the persisted value is in the list. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsInShouldWork() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement in DEFAULT_ENSOLEILEMENT or UPDATED_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.in=" + DEFAULT_ENSOLEILEMENT + "," + UPDATED_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement equals to UPDATED_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.in=" + UPDATED_ENSOLEILEMENT);
}
/** Criteria filtering: ensoleilement.specified distinguishes null from non-null values. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsNullOrNotNull() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement is not null
    defaultEnsoleillementShouldBeFound("ensoleilement.specified=true");
    // Get all the ensoleillementList where ensoleilement is null
    defaultEnsoleillementShouldNotBeFound("ensoleilement.specified=false");
}
/** Criteria filtering: ensoleilement.greaterThanOrEqual range matching. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsGreaterThanOrEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement is greater than or equal to DEFAULT_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.greaterThanOrEqual=" + DEFAULT_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement is greater than or equal to UPDATED_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.greaterThanOrEqual=" + UPDATED_ENSOLEILEMENT);
}
/** Criteria filtering: ensoleilement.lessThanOrEqual range matching. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsLessThanOrEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement is less than or equal to DEFAULT_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.lessThanOrEqual=" + DEFAULT_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement is less than or equal to SMALLER_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.lessThanOrEqual=" + SMALLER_ENSOLEILEMENT);
}
/** Criteria filtering: ensoleilement.lessThan strict range matching. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsLessThanSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement is less than DEFAULT_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.lessThan=" + DEFAULT_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement is less than UPDATED_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.lessThan=" + UPDATED_ENSOLEILEMENT);
}
/** Criteria filtering: ensoleilement.greaterThan strict range matching. */
@Test
@Transactional
void getAllEnsoleillementsByEnsoleilementIsGreaterThanSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    // Get all the ensoleillementList where ensoleilement is greater than DEFAULT_ENSOLEILEMENT
    defaultEnsoleillementShouldNotBeFound("ensoleilement.greaterThan=" + DEFAULT_ENSOLEILEMENT);
    // Get all the ensoleillementList where ensoleilement is greater than SMALLER_ENSOLEILEMENT
    defaultEnsoleillementShouldBeFound("ensoleilement.greaterThan=" + SMALLER_ENSOLEILEMENT);
}
/**
 * Criteria filtering: ensoleillements can be filtered by the id of their linked Plante.
 *
 * Fix: the original persisted/flushed the Plante a second time after the if/else
 * (the if-branch had already persisted it, and the else-branch fetched an
 * already-managed instance), so the redundant persist/flush pair was removed.
 */
@Test
@Transactional
void getAllEnsoleillementsByPlanteIsEqualToSomething() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    Plante plante;
    if (TestUtil.findAll(em, Plante.class).isEmpty()) {
        // No Plante exists yet: create and persist one.
        plante = PlanteResourceIT.createEntity(em);
        em.persist(plante);
        em.flush();
    } else {
        // Reuse an already-managed Plante; no extra persist needed.
        plante = TestUtil.findAll(em, Plante.class).get(0);
    }
    ensoleillement.setPlante(plante);
    ensoleillementRepository.saveAndFlush(ensoleillement);
    Long planteId = plante.getId();
    // Get all the ensoleillementList where plante equals to planteId
    defaultEnsoleillementShouldBeFound("planteId.equals=" + planteId);
    // Get all the ensoleillementList where plante equals to (planteId + 1)
    defaultEnsoleillementShouldNotBeFound("planteId.equals=" + (planteId + 1));
}
/**
 * Executes the search, and checks that the default entity is returned.
 *
 * @param filter criteria query string appended to the request URL
 */
private void defaultEnsoleillementShouldBeFound(String filter) throws Exception {
    restEnsoleillementMockMvc
        .perform(get(ENTITY_API_URL + "?sort=id,desc&" + filter))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
        .andExpect(jsonPath("$.[*].id").value(hasItem(ensoleillement.getId().intValue())))
        .andExpect(jsonPath("$.[*].orientation").value(hasItem(DEFAULT_ORIENTATION)))
        .andExpect(jsonPath("$.[*].ensoleilement").value(hasItem(DEFAULT_ENSOLEILEMENT.doubleValue())));
    // Check, that the count call also returns 1
    restEnsoleillementMockMvc
        .perform(get(ENTITY_API_URL + "/count?sort=id,desc&" + filter))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
        .andExpect(content().string("1"));
}
/**
 * Executes the search, and checks that the default entity is not returned.
 *
 * @param filter criteria query string appended to the request URL
 */
private void defaultEnsoleillementShouldNotBeFound(String filter) throws Exception {
    restEnsoleillementMockMvc
        .perform(get(ENTITY_API_URL + "?sort=id,desc&" + filter))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
        .andExpect(jsonPath("$").isArray())
        .andExpect(jsonPath("$").isEmpty());
    // Check, that the count call also returns 0
    restEnsoleillementMockMvc
        .perform(get(ENTITY_API_URL + "/count?sort=id,desc&" + filter))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_VALUE))
        .andExpect(content().string("0"));
}
/** GET an id that does not exist must return 404. */
@Test
@Transactional
void getNonExistingEnsoleillement() throws Exception {
    // Get the ensoleillement
    restEnsoleillementMockMvc.perform(get(ENTITY_API_URL_ID, Long.MAX_VALUE)).andExpect(status().isNotFound());
}
/** Full update via PUT: all fields replaced, database row count unchanged. */
@Test
@Transactional
void putNewEnsoleillement() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    // Update the ensoleillement
    Ensoleillement updatedEnsoleillement = ensoleillementRepository.findById(ensoleillement.getId()).get();
    // Disconnect from session so that the updates on updatedEnsoleillement are not directly saved in db
    em.detach(updatedEnsoleillement);
    updatedEnsoleillement.orientation(UPDATED_ORIENTATION).ensoleilement(UPDATED_ENSOLEILEMENT);
    restEnsoleillementMockMvc
        .perform(
            put(ENTITY_API_URL_ID, updatedEnsoleillement.getId())
                .contentType(MediaType.APPLICATION_JSON)
                .content(TestUtil.convertObjectToJsonBytes(updatedEnsoleillement))
        )
        .andExpect(status().isOk());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
    Ensoleillement testEnsoleillement = ensoleillementList.get(ensoleillementList.size() - 1);
    assertThat(testEnsoleillement.getOrientation()).isEqualTo(UPDATED_ORIENTATION);
    assertThat(testEnsoleillement.getEnsoleilement()).isEqualTo(UPDATED_ENSOLEILEMENT);
}
/** PUT with an unknown id must return 400 and not modify the database. */
@Test
@Transactional
void putNonExistingEnsoleillement() throws Exception {
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    ensoleillement.setId(count.incrementAndGet());
    // If the entity doesn't have an ID, it will throw BadRequestAlertException
    restEnsoleillementMockMvc
        .perform(
            put(ENTITY_API_URL_ID, ensoleillement.getId())
                .contentType(MediaType.APPLICATION_JSON)
                .content(TestUtil.convertObjectToJsonBytes(ensoleillement))
        )
        .andExpect(status().isBadRequest());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
}
/** PUT where the URL id differs from the body id must return 400. */
@Test
@Transactional
void putWithIdMismatchEnsoleillement() throws Exception {
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    ensoleillement.setId(count.incrementAndGet());
    // If url ID doesn't match entity ID, it will throw BadRequestAlertException
    restEnsoleillementMockMvc
        .perform(
            put(ENTITY_API_URL_ID, count.incrementAndGet())
                .contentType(MediaType.APPLICATION_JSON)
                .content(TestUtil.convertObjectToJsonBytes(ensoleillement))
        )
        .andExpect(status().isBadRequest());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
}
/** PUT against the collection URL (no id path param) must return 405. */
@Test
@Transactional
void putWithMissingIdPathParamEnsoleillement() throws Exception {
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    ensoleillement.setId(count.incrementAndGet());
    // If url ID doesn't match entity ID, it will throw BadRequestAlertException
    restEnsoleillementMockMvc
        .perform(put(ENTITY_API_URL).contentType(MediaType.APPLICATION_JSON).content(TestUtil.convertObjectToJsonBytes(ensoleillement)))
        .andExpect(status().isMethodNotAllowed());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
}
/** Partial update via PATCH (merge-patch+json): sent fields are applied in place. */
@Test
@Transactional
void partialUpdateEnsoleillementWithPatch() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    // Update the ensoleillement using partial update
    Ensoleillement partialUpdatedEnsoleillement = new Ensoleillement();
    partialUpdatedEnsoleillement.setId(ensoleillement.getId());
    partialUpdatedEnsoleillement.orientation(UPDATED_ORIENTATION).ensoleilement(UPDATED_ENSOLEILEMENT);
    restEnsoleillementMockMvc
        .perform(
            patch(ENTITY_API_URL_ID, partialUpdatedEnsoleillement.getId())
                .contentType("application/merge-patch+json")
                .content(TestUtil.convertObjectToJsonBytes(partialUpdatedEnsoleillement))
        )
        .andExpect(status().isOk());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
    Ensoleillement testEnsoleillement = ensoleillementList.get(ensoleillementList.size() - 1);
    assertThat(testEnsoleillement.getOrientation()).isEqualTo(UPDATED_ORIENTATION);
    assertThat(testEnsoleillement.getEnsoleilement()).isEqualTo(UPDATED_ENSOLEILEMENT);
}
/** PATCH carrying every field behaves like a full update. */
@Test
@Transactional
void fullUpdateEnsoleillementWithPatch() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    // Update the ensoleillement using partial update
    Ensoleillement partialUpdatedEnsoleillement = new Ensoleillement();
    partialUpdatedEnsoleillement.setId(ensoleillement.getId());
    partialUpdatedEnsoleillement.orientation(UPDATED_ORIENTATION).ensoleilement(UPDATED_ENSOLEILEMENT);
    restEnsoleillementMockMvc
        .perform(
            patch(ENTITY_API_URL_ID, partialUpdatedEnsoleillement.getId())
                .contentType("application/merge-patch+json")
                .content(TestUtil.convertObjectToJsonBytes(partialUpdatedEnsoleillement))
        )
        .andExpect(status().isOk());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
    Ensoleillement testEnsoleillement = ensoleillementList.get(ensoleillementList.size() - 1);
    assertThat(testEnsoleillement.getOrientation()).isEqualTo(UPDATED_ORIENTATION);
    assertThat(testEnsoleillement.getEnsoleilement()).isEqualTo(UPDATED_ENSOLEILEMENT);
}
/** PATCH with an unknown id must return 400 and not modify the database. */
@Test
@Transactional
void patchNonExistingEnsoleillement() throws Exception {
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    ensoleillement.setId(count.incrementAndGet());
    // If the entity doesn't have an ID, it will throw BadRequestAlertException
    restEnsoleillementMockMvc
        .perform(
            patch(ENTITY_API_URL_ID, ensoleillement.getId())
                .contentType("application/merge-patch+json")
                .content(TestUtil.convertObjectToJsonBytes(ensoleillement))
        )
        .andExpect(status().isBadRequest());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
}
/** PATCH where the URL id differs from the body id must return 400. */
@Test
@Transactional
void patchWithIdMismatchEnsoleillement() throws Exception {
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    ensoleillement.setId(count.incrementAndGet());
    // If url ID doesn't match entity ID, it will throw BadRequestAlertException
    restEnsoleillementMockMvc
        .perform(
            patch(ENTITY_API_URL_ID, count.incrementAndGet())
                .contentType("application/merge-patch+json")
                .content(TestUtil.convertObjectToJsonBytes(ensoleillement))
        )
        .andExpect(status().isBadRequest());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
}
/** PATCH against the collection URL (no id path param) must return 405. */
@Test
@Transactional
void patchWithMissingIdPathParamEnsoleillement() throws Exception {
    int databaseSizeBeforeUpdate = ensoleillementRepository.findAll().size();
    ensoleillement.setId(count.incrementAndGet());
    // If url ID doesn't match entity ID, it will throw BadRequestAlertException
    restEnsoleillementMockMvc
        .perform(
            patch(ENTITY_API_URL).contentType("application/merge-patch+json").content(TestUtil.convertObjectToJsonBytes(ensoleillement))
        )
        .andExpect(status().isMethodNotAllowed());
    // Validate the Ensoleillement in the database
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeUpdate);
}
/** DELETE by id returns 204 and removes exactly one row. */
@Test
@Transactional
void deleteEnsoleillement() throws Exception {
    // Initialize the database
    ensoleillementRepository.saveAndFlush(ensoleillement);
    int databaseSizeBeforeDelete = ensoleillementRepository.findAll().size();
    // Delete the ensoleillement
    restEnsoleillementMockMvc
        .perform(delete(ENTITY_API_URL_ID, ensoleillement.getId()).accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isNoContent());
    // Validate the database contains one less item
    List<Ensoleillement> ensoleillementList = ensoleillementRepository.findAll();
    assertThat(ensoleillementList).hasSize(databaseSizeBeforeDelete - 1);
}
}
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
import math
from torch.utils.data import Dataset
def split_list(list_to_split, target_n, result):
    """Split ``list_to_split`` into ``target_n`` contiguous chunks, appending each chunk to ``result``.

    When the length divides evenly, every chunk has len/target_n items.
    Otherwise a floor-sized chunk is taken and the function recurses on the
    remainder with ``target_n - 1``, so later chunks absorb the leftovers.

    NOTE: on the uneven path the input list is consumed in place (via ``del``);
    pass a copy if the caller still needs the original afterwards.

    Fixes vs. original: integer ``%`` replaces the float ``(len/n) % 1`` test
    (exact, no float rounding), and an empty input is an explicit no-op
    (matching the original's behaviour without relying on a zero chunk size).
    """
    if not list_to_split:
        # Nothing to split; original also produced no chunks here.
        return
    if len(list_to_split) % target_n == 0:
        # Even split: fixed chunk size.
        n = len(list_to_split) // target_n
        for i in range(0, len(list_to_split), n):
            result.append(list_to_split[i:i + n])
    else:
        # Uneven: take one floor-sized chunk, then recurse on the remainder.
        n = len(list_to_split) // target_n
        result.append(list_to_split[0:n])
        del list_to_split[0:n]
        split_list(list_to_split, target_n - 1, result)
class ThisRankDataset(Dataset):
    """Data-skew dataset: keeps only the samples whose label is in ``labels``.

    Each learner receives a fixed subset of labels, which deliberately
    produces label skew across learners.
    """

    def __init__(self, all_data, labels, transform=None):
        # Filter once up front, storing (img, int(label)) pairs.
        self.img_list = [
            (img, int(label)) for img, label in all_data if label in labels
        ]
        self.transform = transform

    def __getitem__(self, index):
        # Apply the optional transform lazily, at fetch time.
        img, label = self.img_list[index]
        if self.transform is not None:
            img = self.transform(img)
        return img, label

    def __len__(self):
        return len(self.img_list)
|
<reponame>naq219/Telpoo-framework
package com.telpoo.example.activity;
import java.util.ArrayList;
import java.util.List;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import com.example.testframework.R;
import com.telpoo.example.task.TaskType;
import com.telpoo.example.task.TaskUser1;
import com.telpoo.frame.model.TaskParams;
import com.telpoo.frame.ui.BaseActivity;
/**
 * Demo activity showing how to run an AsyncTask through the Telpoo framework:
 * button1 fetches text from a URL; results come back via onSuccess/onFail.
 */
public class UseTask extends BaseActivity implements OnClickListener, TaskType {

    // The in-flight task; created on demand when button1 is clicked.
    AsyncTask<TaskParams, Void, Boolean> taskUser1;
    // NOTE(review): button6..button8 are declared but never looked up or wired below.
    Button button1, button2, button3, button4, button5, button6, button7, button8;

    @Override
    protected void onCreate(Bundle arg0) {
        super.onCreate(arg0);
        setContentView(R.layout.only_button);
        button1 = (Button) findViewById(R.id.button1);
        button2 = (Button) findViewById(R.id.button2);
        button3 = (Button) findViewById(R.id.button3);
        button4 = (Button) findViewById(R.id.button4);
        button5 = (Button) findViewById(R.id.button5);
        button1.setOnClickListener(this);
        button2.setOnClickListener(this);
        button3.setOnClickListener(this);
        button4.setOnClickListener(this);
        button5.setOnClickListener(this);
        button1.setText("get text from a website");
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.button1:
                // Kick off the demo fetch; `model` is presumably provided by BaseActivity — confirm.
                String[] url = { "http://vnexpress.net/rss/tin-moi-nhat.rss" };
                TaskParams params = new TaskParams(url);
                taskUser1 = new TaskUser1(model, TaskType.TASK_GET_DATA, null, getBaseContext());
                showProgressDialog(UseTask.this);
                model.exeTask(params, taskUser1);
                /*
                 * an asyntask is starting
                 * when finish, data will return at function onsuccess() or onfail()
                 */
                break;
            case R.id.button2:
                break;
            case R.id.button3:
                break;
            case R.id.button4:
                break;
            case R.id.button5:
                break;
            default:
                break;
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    public void onSuccess(int taskType, ArrayList<?> list, String msg) {
        /*
         * if you return TASK_DONE at TaskUser1, data will return here
         */
        switch (taskType) {
            case TASK_GET_DATA:
                // Unchecked cast: TaskUser1 is expected to deliver a List<String> — confirm.
                List<String> content = (List<String>) list;
                String value = content.get(0);
                closeProgressDialog();
                showToast(value);
                break;
            default:
                break;
        }
        super.onSuccess(taskType, list, msg);
    }

    @Override
    public void onFail(int taskType, String msg) {
        /*
         * if you return TASK_FAIL at TaskUser1, data will return here
         */
        switch (taskType) {
            case TASK_GET_DATA:
                closeProgressDialog();
                showToast(msg);
                break;
            default:
                break;
        }
        super.onFail(taskType, msg);
    }
}
|
// *******************************************************************************
// © The Pythian Group Inc., 2017
// All Rights Reserved.
// *******************************************************************************
import {defineProcessEnv} from "./helpers/bootstrap";
// Side effect on module load: must run before any export below is evaluated,
// so downstream modules can read the defined process env.
defineProcessEnv();

// base package
export {SkeletosCursor} from "./base/SkeletosCursor";
export {SkeletosDb, ISkeletosDbListener, SkeletosDbSetterOptions, TreeNodeValueType} from "./base/SkeletosDb";
export {SkeletosTransaction} from "./base/SkeletosTransaction";

// decorators package
export {ClassTypeInfo} from "./decorators/helpers/ClassTypeInfo";
export {MetadataRegistry} from "./decorators/helpers/MetadataRegistry";
export {PropTypeInfo, EPropType} from "./decorators/helpers/PropTypeInfo";
export {Cursor} from "./decorators/Cursor";
export {Dictionary} from "./decorators/Dictionary";
export {DictionaryRef} from "./decorators/DictionaryRef";
export {Id, IS_ID_PROP_KEY, ID_PROP_KEY} from "./decorators/Id";
export {List} from "./decorators/List";
export {ListRef} from "./decorators/ListRef";
export {Primitive} from "./decorators/Primitive";
export {PrimitiveRef} from "./decorators/PrimitiveRef";
export {State} from "./decorators/State";
export {StateClass, IStateClassMetaDataOptions, STATE_META_DATA_KEY} from "./decorators/StateClass";
export {StateRef} from "./decorators/StateRef";

// extendible package
export {AbstractAction} from "./extendible/AbstractAction";
export {AbstractProgressAction} from "./extendible/AbstractProgressAction";
export {AbstractSkeletosAction} from "./extendible/AbstractSkeletosAction";
export {SimpleSkeletosAction} from "./extendible/SimpleSkeletosAction";
export {AbstractSkeletosPromiseAction} from "./extendible/AbstractSkeletosPromiseAction";
export {SimpleSkeletosPromiseAction} from "./extendible/SimpleSkeletosPromiseAction";
export {AbstractPromiseAction} from "./extendible/AbstractPromiseAction";
export {AbstractSkeletosState} from "./extendible/AbstractSkeletosState";
export {ISkeletosCommand} from "./extendible/ISkeletosCommand";

// reusable package
export {ResetLoadingStatesAction, ILoadingStateToReset} from "./reusable/actions/ResetLoadingStatesAction";
export {ISimpleDictionaryReferenceRemoverActionArg, SimpleDictionaryReferenceRemoverAction} from "./reusable/actions/SimpleDictionaryReferenceRemoverAction";
export {ISimpleDictionaryReferenceSetterActionArg, SimpleDictionaryReferenceSetterAction} from "./reusable/actions/SimpleDictionaryReferenceSetterAction";
export {SimpleStateSetterAction, ISimpleStateSetterActionArg} from "./reusable/actions/SimpleStateSetterAction";
export {BooleanState} from "./reusable/states/BooleanState";
export {DateState} from "./reusable/states/DateState";
export {ErrorState} from "./reusable/states/ErrorState";
export {LoadingAndErrorState} from "./reusable/states/LoadingAndErrorState";
export {LoadingState} from "./reusable/states/LoadingState";
// NOTE(review): NoopAction is an action but lives under reusable/states — confirm the path is intentional.
export {NoopAction} from "./reusable/states/NoopAction";
export {NumberState} from "./reusable/states/NumberState";
export {SkeletosDictionary} from "./reusable/states/SkeletosDictionary";
export {SkeletosList} from "./reusable/states/SkeletosList";
export {StringState} from "./reusable/states/StringState";

// helpers package
export {generateUniqueId} from "./helpers/generateUniqueId";
export {ProcessEnvUtils} from "./helpers/ProcessEnvUtils";
export {ObjectType} from "./helpers/ObjectType";
export {UrlUtils} from "./helpers/UrlUtils";
export * from "./helpers/logging/ConsoleLogger";
export * from "./helpers/logging/DefaultLogger";
export {ELogLevel} from "./helpers/logging/ELogLevel";
export {ErrorWithLevel} from "./helpers/logging/ErrorWithLevel";
export {ILogger} from "./helpers/logging/ILogger";
export {ErrorUtil} from "./helpers/logging/ErrorUtil";

// only to be used by frameworks building on top
export {ITreeNode} from "./base/SkeletosDb";
<reponame>SenthilKumarGS/TizenRT
/*
* //******************************************************************
* //
* // Copyright 2016 Samsung Electronics All Rights Reserved.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
* //
* // Licensed under the Apache License, Version 2.0 (the "License");
* // you may not use this file except in compliance with the License.
* // You may obtain a copy of the License at
* //
* // http://www.apache.org/licenses/LICENSE-2.0
* //
* // Unless required by applicable law or agreed to in writing, software
* // distributed under the License is distributed on an "AS IS" BASIS,
* // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* // See the License for the specific language governing permissions and
* // limitations under the License.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
*/
package org.iotivity.cloud.base.protocols.coap;
import org.iotivity.cloud.base.protocols.enums.RequestMethod;
import org.iotivity.cloud.base.protocols.enums.ResponseStatus;
public class CoapRequest extends CoapMessage {
private RequestMethod mRequestMethod;
public CoapRequest(RequestMethod requestMethod) {
mRequestMethod = requestMethod;
}
public CoapRequest(int code) {
switch (code) {
case 1:
mRequestMethod = RequestMethod.GET;
break;
case 2:
mRequestMethod = RequestMethod.POST;
break;
case 3:
mRequestMethod = RequestMethod.PUT;
break;
case 4:
mRequestMethod = RequestMethod.DELETE;
break;
default:
throw new IllegalArgumentException("Invalid CoapRequest code");
}
}
@Override
public int getCode() {
switch (mRequestMethod) {
case GET:
return 1;
case POST:
return 2;
case PUT:
return 3;
case DELETE:
return 4;
default:
break;
}
return 0;
}
@Override
public RequestMethod getMethod() {
return mRequestMethod;
}
// This request object does not support response status
@Override
public ResponseStatus getStatus() {
return ResponseStatus.METHOD_NOT_ALLOWED;
}
} |
#export BA="org/armedbear/lisp|org/armedbear/lisp"
# TRICK: $BA deliberately embeds the sed replacement delimiter, so the
# s-command below — "s|$BA|g" — expands to a complete "s|pattern|replacement|g".
export BA="org/armedbear/lisp|org/armedbear/lisp"
# Rewrite every Java and Lisp source under src/, printing each path as it goes.
find src/ -name "*.java" -printf "%p\n" -exec sed -e "s|$BA|g" -i {} \;
find src/ -name "*.lisp" -printf "%p\n" -exec sed -e "s|$BA|g" -i {} \;
#find ! -name "*.*" -type f -printf "%p\n" -exec sed -e "s|$BA|g" -i {} \;
# NOTE(review): the duplicated "-i -i" is redundant — GNU sed treats the second
# flag as a no-op repeat; confirm no backup-suffix behavior was intended.
sed -e "s|$BA|g" -i -i *.*
#export BA="org\\.armedbear\\.lisp|org.armedbear.lisp"
# Second pass: rewrite the fully-qualified SubL package name (dots escaped for regex).
export BA="com\\.cyc\\.tool\\.subl\\.jrtl\\.nativeCode\\.commonLisp|org.armedbear.lisp"
find src/ -name "*.java" -printf "%p\n" -exec sed -e "s|$BA|g" -i {} \;
find src/ -name "*.lisp" -printf "%p\n" -exec sed -e "s|$BA|g" -i {} \;
#find ! -name "*.*" -type f -printf "%p\n" -exec sed -e "s|$BA|g" -i {} \;
sed -e "s|$BA|g" -i -i *.*
# NOTE(review): the line below is a Lisp form, not shell — it would fail if this
# script is executed as-is; presumably a pasted REPL invocation. Confirm/remove.
(jstatic (jmethod "com.cyc.tool.subl.jrtl.nativeCode.subLisp.SubLMain" "mainFromProlog") )
|
<filename>dist/lib/fieldsMap.js
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.FieldsMap = void 0;
const w3cdate_1 = require("./w3cdate");
/**
 * A Map of pass fields keyed by field key. Adds pass-specific helpers for
 * JSON serialization and date handling on top of the standard Map API.
 */
class FieldsMap extends Map {
    /**
     * Returns the Map as an array of objects with the key moved inside as a
     * `key` property, or undefined when the map is empty.
     *
     * Fix: Date values are converted to W3C date strings in the returned
     * objects ONLY. The previous implementation assigned the string back onto
     * the stored entry, silently destroying the Date the first time the map
     * was serialized.
     */
    toJSON() {
        if (!this.size)
            return undefined;
        return [...this].map(([key, data]) => {
            // Copy first so the stored entry is never mutated.
            const entry = { key, ...data };
            if (entry.value instanceof Date)
                entry.value = w3cdate_1.getW3CDateString(entry.value);
            return entry;
        });
    }
    /**
     * Adds a field to the end of the list
     *
     * @param {Field} field - Field key or object with all fields
     * @returns {FieldsMap} this map, for chaining
     * @throws {TypeError} if key is not a string, value is missing, or a
     *   dateStyle is given with a value that is not a valid date
     * @memberof FieldsMap
     */
    add(field) {
        const { key, ...data } = field;
        if (typeof key !== 'string')
            throw new TypeError(`To add a field you must provide string key value, received ${typeof key}`);
        if (!('value' in data))
            throw new TypeError(`To add a field you must provide a value field, received: ${JSON.stringify(data)}`);
        if ('dateStyle' in data) {
            // A dateStyle implies the value must be (coercible to) a valid Date.
            const date = data.value instanceof Date ? data.value : new Date(data.value);
            if (!Number.isFinite(date.getTime()))
                throw new TypeError(`When dateStyle specified the value must be a valid Date instance or string, received ${data.value}`);
            this.set(key, { ...data, value: date });
        }
        else
            this.set(key, data);
        return this;
    }
    /**
     * Sets value field for a given key, without changing the rest of field properties
     *
     * @param {string} key
     * @param {string} value
     * @returns {FieldsMap} this map, for chaining
     * @throws {TypeError} if key or value is not a string
     * @memberof FieldsMap
     */
    setValue(key, value) {
        if (typeof key !== 'string')
            throw new TypeError(`key for setValue must be a string, received ${typeof key}`);
        if (typeof value !== 'string')
            throw new TypeError(`value for setValue must be a string, received ${typeof value}`);
        const field = this.get(key) || { value };
        field.value = value;
        this.set(key, field);
        return this;
    }
    /**
     * Set a field as Date value with appropriated options
     *
     * @param {string} key
     * @param {string} label
     * @param {Date} date
     * @param {{dateStyle?: string, ignoresTimeZone?: boolean, isRelative?: boolean, timeStyle?:string, changeMessage?: string}} [formatOptions]
     * @returns {FieldsMap}
     * @throws if date is not a Date or invalid Date
     * @memberof FieldsMap
     */
    setDateTime(key, label, date, { dateStyle, ignoresTimeZone, isRelative, timeStyle, changeMessage, } = {}) {
        if (typeof key !== 'string')
            throw new TypeError(`Key must be a string, received ${typeof key}`);
        if (typeof label !== 'string')
            throw new TypeError(`Label must be a string, received ${typeof label}`);
        if (!(date instanceof Date))
            throw new TypeError('Third parameter of setDateTime must be an instance of Date');
        // Either specify both a date style and a time style, or neither.
        if (!!dateStyle !== !!timeStyle)
            throw new ReferenceError('Either specify both a date style and a time style, or neither');
        // adding
        this.set(key, {
            label,
            value: date,
            changeMessage,
            dateStyle,
            ignoresTimeZone,
            isRelative,
            timeStyle,
        });
        return this;
    }
}
exports.FieldsMap = FieldsMap;
//# sourceMappingURL=fieldsMap.js.map |
class HelloWorld:
    """Minimal demo class that prints a greeting when constructed."""

    def __init__(self):
        # Bug fix: the original used typographic quotes (print(“Hello World”)),
        # which is a SyntaxError in Python; replaced with ASCII quotes.
        print("Hello World")
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SS/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SS/1024+0+512-STG-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_trigrams_globally_first_two_thirds_full --eval_function last_element_eval |
$(document).ready(function() {
//Array type/class of filters
var filters = [
"saturate",
"saturotate",
"rotamatrix",
"tablen",
"dishue",
"matrix",
"matrix-dos",
"huerotate",
"luminance",
"discrete",
"discrete-dos",
"table",
"table-dos",
"identity",
"linear",
"gamma",
"turbulence",
"sepia",
"turbu-map",
"fematrix-uno",
"fematrix-dos",
"map-uno",
"map-dos",
"blur-uno",
"blur-dos",
"morpho-uno",
"morpho-dos",
"multitable",
"begamma",
"bulence"
];
//Variables
var btnHeight = $('.section-buttons').outerHeight() + 15;
var picture = $('picture').find("img");
var pictureHeight = $(picture).outerHeight();
var btnFilters = $('.section-buttons').find("button");
var btnClose = $('#panel-btn-close');
var panel = $(".panel");
var panelInfo = $(".panel-info");
var panelInput = $(".panel-input");
var btnSaturate = $(".btn-saturate");
var btnSaturotate = $(".btn-saturotate");
var btnRotamatrix = $(".btn-rotamatrix");
var btnTablen = $(".btn-tablen");
var btnMultitable = $(".btn-multitable");
var btnDishue = $(".btn-dishue");
var btnMatrix = $(".btn-matrix");
var btnMatrix2 = $(".btn-matrix-dos");
var btnHueRotate = $(".btn-hueRotate");
var btnLuminance = $(".btn-luminance");
var btnDiscrete = $(".btn-discrete");
var btnDiscrete2 = $(".btn-discrete-dos");
var btnTable = $(".btn-table");
var btnTable2 = $(".btn-table-dos");
var btnIdentity = $(".btn-identity");
var btnLinear = $(".btn-linear");
var btnGamma = $(".btn-gamma");
var btnTurbelence = $(".btn-turbulence");
var btnSepia = $(".btn-sepia");
var btnTurbuMap = $(".btn-turbu-map");
var btnConMatrix = $(".btn-con-matrix-uno");
var btnConMatrix2 = $(".btn-con-matrix-dos");
var btnMap = $(".btn-map-uno");
var btnMap2 = $(".btn-map-dos");
var btnBlur = $(".btn-blur-uno");
var btnBlur2 = $(".btn-blur-dos");
var btnMorpho = $(".btn-morpho-uno");
var btnMorpho2 = $(".btn-morpho-dos");
var btnPoison = $(".btn-poison");
var btnBegamma = $(".btn-begamma");
var btnBulence = $(".btn-bulence");
var panelSaturate = $("#panel-saturate");
var panelHuerotate = $("#panel-huerotate");
var panelLuminance = $("#panel-luminance");
var panelLinear = $("#panel-linear");
var panelTurbulence = $("#panel-turbulence");
var panelGamma = $("#panel-gamma");
var panelBlur = $("#panel-blur");
var panelMorpho = $("#panel-morpho");
var panelMorphoDilate = $("#panel-morpho-dilate");
var panelSaturotate = $("#panel-saturotate");
var panelDiscrete = $("#panel-discrete");
var panelDiscrete2 = $("#panel-discrete-dos");
var panelMatrix = $("#panel-matrix");
var panelMatrix2 = $("#panel-matrix-dos");
var panelTable = $("#panel-table");
var panelTable2 = $("#panel-table-dos");
var panelIdentity = $("#panel-identity");
var panelTurbuMap = $("#panel-turbumap");
var panelSepia = $("#panel-sepia");
var panelFeMatrix = $("#panel-fematrix");
var panelFeMatrix2 = $("#panel-fematrix-dos");
var panelBlur2 = $("#panel-blur-dos");
var panelRotamatrix = $("#panel-rotamatrix");
var panelMultitable = $("#panel-multitable");
var panelTablen = $("#panel-tablen");
var panelDishue = $("#panel-dishue");
//Functions
// Wire a button so clicking it swaps every <img>'s class for `filter`.
function addFilter(button, filter) {
    $(button).on("click", function() {
        $("img").removeClass().addClass(filter);
    });
}
// "Poison" mode: clicking the poison button applies a random filter class to
// <body> and removes it again after 3.5s.
function poison() {
    $(btnPoison).click(function() {
        var body = $("body");
        var classRandom = filters[~~(Math.random() * filters.length)];
        // Bug fix: the original looped $(bodys).each() but re-targeted the
        // whole collection on every iteration, scheduling duplicate timeouts.
        // A document has exactly one <body>, so apply the class once.
        body.addClass(classRandom);
        setTimeout(function() {
            body.removeClass(classRandom);
        }, 3500);
    });
}
// Slide the settings panel off-canvas and strip any filter class from images.
function closePanel() {
    $(panel).animate({ left: "-100%" }, 400);
    $("img").removeClass();
}
// Slide the settings panel in, pin it to the bottom and size it to the
// button row's height.
function openPanel() {
    var animated = $(panel).animate({ left: "0" }, 400);
    animated.addClass("panel-bottom");
    animated.css("height", btnHeight);
}
// Match the #holder element's height to the demo picture's rendered height.
function equalHeight() {
    $("#holder").css({ height: pictureHeight });
}
// Reveal the given settings panel whenever its button is clicked.
function showPanelFilter(btn, panel) {
    $(btn).on("click", function() {
        $(panel).removeClass("hide");
    });
}
//Print values defaultStatus in every filter
var saturateCode = document.getElementById("panel-code-saturate");
var huerotateCode = document.getElementById("panel-code-huerotate");
//Saturate
$(saturateCode).text(
'<filter id="saturate-customize">\n' +
' <feColorMatrix type="saturate" values="14"/>\n' +
'</filter>');
//Saturotate
$(huerotateCode).text(
'<filter id="huerotate-customize">\n' +
' <feColorMatrix type="hueRotate" values="90"/>\n' +
'</filter>');
//Gamma
var gammaPanel = document.getElementById("panel-code-gamma");
$(gammaPanel).text(
'<filter id="gamma-customize">\n' +
'<feComponentTransfer>\n' +
'<feFuncR type="gamma" amplitude="1" exponent="1"/>\n' +
'<feFuncG type="gamma" amplitude="0.4" exponent="0.25"/>\n' +
'<feFuncB type="gamma" amplitude="1" exponent="1"/>\n' +
'</feComponentTransfer>\n' +
'</filter>');
//Blur
var panelBLur = document.getElementById("panel-code-blur");
$(panelBLur).text(
'<filter id="blur-customize">\n' +
' <feGaussianBlur stdDeviation="3"/>\n' +
'</filter>');
//Morpho - Erode
var codeMorpho = document.getElementById("panel-code-morpho");
$(codeMorpho).text(
'<filter id="morpho-customize">\n' +
' <feGaussianBlur operator="erode" in="SourceGraphic" radius="3"/>\n' +
'</filter>');
//Morpho - Dilate
var codeMorphoDilate = document.getElementById("panel-code-morpho-dilate");
$(codeMorphoDilate).text(
'<filter id="morpho-customize-dilate">\n' +
' <feGaussianBlur operator="dilate" in="SourceGraphic" radius="6"/>\n' +
'</filter>');
//Saturotate
var saturotateCode = document.getElementById("panel-code-saturotate");
$(saturotateCode).text(
'<filter id="saturotate-customize" filterUnits="objectBoundingBox">\n' +
' <feColorMatrix type="saturate" result="saturado" values="20"/>\n' +
' <feColorMatrix type="hueRotate" in="saturado" in2="SourceGraphic" values="291"/>\n' +
'</filter>');
var sliderTurbuBasePanel = document.getElementById("panel-code-turbulence");
$(sliderTurbuBasePanel).text(
'<filter id="turbulence-customize">\n' +
' <feTurbulence type="turbulence" result="fuzz" baseFrequency="0.7" numOctaves="2" stitchTiles="stitch"/>\n' +
' <feComposite in="SourceGraphic" in2="fuzz" operator="arithmetic" k1="0" k2="1" k3="-3" k4="0.01"/>\n' +
'</filter>');
//Linear
var sliderFeLineRsPanel = document.getElementById("panel-code-linear");
$(sliderFeLineRsPanel).text(
'<filter id="linear-customize">\n' +
'<feComponentTransfer>\n' +
'<feFuncR type="linear" slope="0.5" intercept="0.25"/>\n' +
'<feFuncG type="linear" slope="0.5" intercept="0"/>\n' +
'<feFuncB type="linear" slope="0.5" intercept="0.5"/>\n' +
'</feComponentTransfer>\n' +
'</filter>');
var matrixCodePanel = document.getElementById("panel-code-matrix");
$(matrixCodePanel).text(
'<filter id="matrix">\n' +
'<feColorMatrix type="matrix" in="SourceGraphic" values="-1 2 -3 0 -.5 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0"/>\n' +
'</filter>');
var matrixCodePanelDos = document.getElementById("panel-code-matrix-dos");
$(matrixCodePanelDos).text(
'<filter id="matrix">\n' +
'<feColorMatrix type="matrix" in="SourceGraphic" values="-1 2 -3 0 -.5 2 1 0 0 0 0 3 0 0 0 0 0 1 1 0"/>\n' +
'</filter>');
var luminancePanel = document.getElementById("panel-code-luminance");
$(luminancePanel).text(
'<filter id="luminanceToAlpha" filterUnits="objectBoundingBox">\n' +
'<feColorMatrix id="luminance-value" type="luminanceToAlpha" in="SourceGraphic"/>\n' +
'</filter>');
var discretePanel = document.getElementById("panel-code-discrete");
$(discretePanel).text(
'<filter id="discrete">\n' +
'<feFuncR type="discrete" tableValues="0 .5 1 1"/>\n' +
'<feFuncG type="discrete" tableValues="0 .5 1"\n' +
'<feFuncB type="discrete" tableValues="0 .5 "\n' +
'</filter>');
var discretePanel2 = document.getElementById("panel-code-discrete-dos");
$(discretePanel2).text(
'<filter id="discrete-dos">\n' +
'<feFuncR type="discrete" tableValues="0 .5 1 1"/>\n' +
'<feFuncG type="discrete" tableValues="0 .5 1"\n' +
'<feFuncB type="discrete" tableValues="0 .5 "\n' +
'</filter>');
var tablePanel = document.getElementById("panel-code-table");
$(tablePanel).text(
'<filter id="table">\n' +
'<feFuncR type="table" tableValues="1 0 1" exponent="5"/>\n' +
'<feFuncG type="table" tableValues="1 0 1" exponent="2"\n' +
'<feFuncB type="table" tableValues="1 0 1" exponent="5"\n' +
'</filter>');
var tablePanel2 = document.getElementById("panel-code-table-dos");
$(tablePanel2).text(
'<filter id="table">\n' +
'<feFuncR type="table" tableValues="1 0 1" exponent="5"/>\n' +
'<feFuncG type="table" tableValues="1 10 1" exponent="2"\n' +
'<feFuncB type="table" tableValues="1 5 1" exponent="5"\n' +
'</filter>');
var tableIdentity = document.getElementById("panel-code-identity");
$(tableIdentity).text(
'<filter id="table">\n' +
'<feFuncR type="identity"/>\n' +
'<feFuncG type="identity"\n' +
'<feFuncB type="identity"\n' +
'<feFuncA type="identity"\n' +
'</filter>');
var tableTurbuMap = document.getElementById("panel-code-turbumap");
$(tableTurbuMap).text(
'<filter id="turbuMap">\n' +
' <feTurbulence type="fractalNoise" baseFrequency="0.015" numOctaves="2" result="turbulence" data-filterId="3"/>\n' +
'<feDisplacementMap xChannelSelector="R" yChannelSelector="G" in="SourceGraphic" in2="turbulence" scale="40"/>\n' +
'</filter>');
var tableSepia = document.getElementById("panel-code-sepia");
$(tableSepia).text(
'<filter id="sepiatone">\n' +
'<feColorMatrix type="matrix" values=".343 .669 .119 0 0 .249 .626 .130 0 0 .172 .334 .111 0 0 .000 .000 .000 1 0"/>\n' +
'</filter>');
var tableFeMatrix = document.getElementById("panel-code-fematrix");
$(tableFeMatrix).text(
'<filter id="sepiatone">\n' +
'<feConvolveMatrix order="3" kernelMatrix="-1 -1 3 -2 1 -1 -1 1 -1"/>\n' +
'</filter>');
var tableFeMatrix2 = document.getElementById("panel-code-fematrix-dos");
$(tableFeMatrix2).text(
'<filter id="sepiatone">\n' +
'<feConvolveMatrix order="4" kernelMatrix="-2 2 1 -1 -1 3 2 1 -1 0 -1 -4 -1 1 0 0"/>\n' +
'</filter>');
var tableBlur2 = document.getElementById("panel-code-blur-dos");
$(tableBlur2).text(
'<filter id="sepiatone">\n' +
'<feGaussianBlur stdDeviation= "9"/>\n' +
'</filter>');
var tableRotamatrix = document.getElementById("panel-code-rotamatrix");
$(tableRotamatrix).text(
'<filter id="rotamatrix">\n' +
'<feColorMatrix type="hueRotate" result="romatrix" values="245"/>\n' +
'<feColorMatrix type="matrix" in="romatrix" in2="SourceGraphic" values="-1 2 -3 0 -.5 2 1 0 0 0 0 3 1 0 0 0 0 1 1 0"/>\n' +
'</filter>');
var tableMultitable = document.getElementById("panel-code-multitable");
$(tableMultitable).text(
'<filter id="multitable">\n' +
'<feComponentTransfer result="tablas">\n' +
'<feFuncR type="table" tableValues="1 0 1" exponent="5"/>\n' +
'<feFuncG type="table" tableValues="1 0 1" exponent="2"/>\n' +
'<feFuncB type="table" tableValues="1 0 1" exponent="5"/>\n' +
'</feComponentTransfer>\n' +
'<feColorMatrix type="saturate" in="tablas" in2="SourceGraphic" values="-5"/>\n' +
'</filter>');
var tableTablen = document.getElementById("panel-code-tablen");
$(tableTablen).text(
'<filter id="multitable">\n' +
'<feComponentTransfer result="tablas">\n' +
'<feFuncR type="table" tableValues="1 0 1" exponent="5"/>\n' +
'<feFuncG type="table" tableValues="1 0 1" exponent="2"/>\n' +
'<feFuncB type="table" tableValues="1 0 1" exponent="5"/>\n' +
'</feComponentTransfer>\n' +
'<feColorMatrix type="saturate" in="tablas" in2="SourceGraphic" values="-5"/>\n' +
'</filter>');
var tableDishue = document.getElementById("panel-code-dishue");
$(tableDishue).text(
'<filter id="dishue">\n' +
'<feComponentTransfer result="lsd">\n' +
'<feFuncR type="discrete" tableValues="0 .5 1 1"/>\n' +
'<feFuncG type="discrete" tableValues="0 .5 1"/>\n' +
'<feFuncB type="discrete" tableValues="0 .5"/>\n' +
'</feComponentTransfer>\n' +
'<feColorMatrix type="hueRotate" in="lsd" in2="SourceGraphic" values="-180"/>\n' +
'</filter>');
// Copy to clipboard
var clipboard = new Clipboard('.panel-btn-copy');
clipboard.on('success', function(e) {
var btnPanel = document.getElementsByClassName('panel-btn-copy');
setTimeout(changeText, 300);
function changeText() {
$(btnPanel).text("Copied!").addClass("success");
setTimeout(function() {
$(btnPanel).removeClass("success").text("Copy to clipboard");
}, 1000);
}
});
clipboard.on('error', function(e) {
function changeText() {
$(btnPanel).text("Error!").addClass("error");
setTimeout(function() {
$(btnPanel).removeClass("error").text("Copy to clipboard");
}, 1000);
}
});
//FIRE!
// Only buttons carrying a data-panel attribute open the settings panel.
$(btnFilters).on("click", function() {
    var hasPanel = $(this).data("panel") !== undefined;
    if (hasPanel) {
        openPanel();
    }
});
$(btnClose).on("click", function() {
    closePanel();
    $(panelInput).addClass("hide");
});
equalHeight();
poison();
// Bind each button to its CSS filter class. The filters[i] indices must track
// the order of the `filters` array declared at the top of ready().
addFilter(btnSaturate, filters[0]);
addFilter(btnSaturotate, filters[1]);
addFilter(btnRotamatrix, filters[2]);
addFilter(btnTablen, filters[3]);
addFilter(btnDishue, filters[4]);
addFilter(btnMatrix, filters[5]);
addFilter(btnMatrix2, filters[6]);
addFilter(btnHueRotate, filters[7]);
addFilter(btnLuminance, filters[8]);
addFilter(btnDiscrete, filters[9]);
addFilter(btnDiscrete2, filters[10]);
addFilter(btnTable, filters[11]);
addFilter(btnTable2, filters[12]);
addFilter(btnIdentity, filters[13]);
addFilter(btnLinear, filters[14]);
addFilter(btnGamma, filters[15]);
addFilter(btnTurbelence, filters[16]);
addFilter(btnSepia, filters[17]);
addFilter(btnTurbuMap, filters[18]);
addFilter(btnConMatrix, filters[19]);
addFilter(btnConMatrix2, filters[20]);
addFilter(btnMap, filters[21]);
addFilter(btnMap2, filters[22]);
addFilter(btnBlur, filters[23]);
addFilter(btnBlur2, filters[24]);
addFilter(btnMorpho, filters[25]);
addFilter(btnMorpho2, filters[26]);
addFilter(btnMultitable, filters[27]);
addFilter(btnBegamma, filters[28]);
addFilter(btnBulence, filters[29]);
// Bind each filter button to the settings panel it configures.
// Bug fix: btnBlur2 was bound twice — once (incorrectly) to panelBlur and
// once to panelBlur2 — so clicking "blur-dos" revealed both panels; only the
// panelBlur2 binding is kept.
showPanelFilter(btnSaturate, panelSaturate);
showPanelFilter(btnHueRotate, panelHuerotate);
showPanelFilter(btnLuminance, panelLuminance);
showPanelFilter(btnLinear, panelLinear);
showPanelFilter(btnTurbelence, panelTurbulence);
showPanelFilter(btnGamma, panelGamma);
showPanelFilter(btnBlur, panelBlur);
showPanelFilter(btnMorpho, panelMorpho);
showPanelFilter(btnMorpho2, panelMorphoDilate);
showPanelFilter(btnSaturotate, panelSaturotate);
showPanelFilter(btnDiscrete, panelDiscrete);
showPanelFilter(btnDiscrete2, panelDiscrete2);
showPanelFilter(btnMatrix, panelMatrix);
showPanelFilter(btnMatrix2, panelMatrix2);
showPanelFilter(btnTable, panelTable);
showPanelFilter(btnTable2, panelTable2);
showPanelFilter(btnIdentity, panelIdentity);
showPanelFilter(btnTurbuMap, panelTurbuMap);
showPanelFilter(btnSepia, panelSepia);
showPanelFilter(btnConMatrix, panelFeMatrix);
showPanelFilter(btnConMatrix2, panelFeMatrix2);
showPanelFilter(btnBlur2, panelBlur2);
showPanelFilter(btnRotamatrix, panelRotamatrix);
showPanelFilter(btnMultitable, panelMultitable);
showPanelFilter(btnTablen, panelTablen);
showPanelFilter(btnDishue, panelDishue);
}); |
# oh-my-zsh custom config

# Define OS-detection constants up front.
# Usage examples:
#   1.  [ -n "$OS_MAC" ] && XXX
#   2.  if [ -n "$OS_MAC" ]; then
#         XXX
#       fi
OS=$(echo $(uname) | tr '[:upper:]' '[:lower:]')
[ "$OS" = "windowsnt" ] && OS_WIN="yes"
[ "$OS" = "darwin" ] && OS_MAC="yes"
[ "$OS" = "linux" ] && OS_LIN="yes"

# root path
SOMEOK_ZSH=${0:A:h}
export ZSH=${SOMEOK_ZSH}/oh-my-zsh

# History size
export HISTFILESIZE=1000
export HISTSIZE=1000

# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
HIST_STAMPS="yyyy-mm-dd"

# Disable oh-my-zsh auto-update
DISABLE_AUTO_UPDATE=true

# Would you like to use another custom folder than $ZSH/custom?
ZSH_CUSTOM=${SOMEOK_ZSH}/custom

# Disable oh-my-zsh's default ls-colors handling,
# since the third-party material-colors library is used instead.
DISABLE_LS_COLORS=true

plugins=(
    # Plugins bundled with oh-my-zsh
    extract
    sudo
    history
    colored-man-pages
    z
    # Like cd, but keeps a directory history; use `scd -v` to switch quickly
    scd
    # Provides the cpv command, a shorthand for rsync
    cp
    # Enabling this conflicts with the up/down arrow keys
    # zsh-navigation-tools
    # Custom plugins added via git submodules >>>>>
    # colors is a dependency of material-colors; it errors without it
    colors
    material-colors
    zsh-syntax-highlighting
    zsh-autosuggestions
    history-search-multi-word
    # <<<<<<
    # cht.sh
    cheatsh
    # ssh autocompletion
    ssh_config
)

# linux or mac os x
if [ "$(uname -s)" = "Darwin" ]; then
    # macOS-only plugins
    plugins=(
        $plugins
        git-open
        sublime
        docker
        volta
        # Auto-detects ./gradlew in the current dir or a globally installed
        # gradle; comes with completion
        gradle
        # Highlight files via ccat/cless; requires `pip install Pygments` first
        colorize
        # SDKMAN completion
        sdk
        # Commands to open / query Finder paths, e.g.:
        #   ofd: open the current directory in Finder
        #   pfd: print the path of the frontmost Finder window
        #   pfs: print the current Finder selection
        #   cdf: cd into the frontmost Finder directory
        #   quick-look: Quick Look a given file
        #   man-preview: open a man page in Preview
        macos
        # `copydir` to copy the $PWD.
        copydir
        # `copyfile <filename>` to copy the file named `filename`
        copyfile
        # ctrl+o copies the current command line to the clipboard
        copybuffer
        youtube-dl
    )
    MAC_OVERRIDE_FILE=$SOMEOK_ZSH/override/mac.sh
    if [ -f "$MAC_OVERRIDE_FILE" ]; then
        source $MAC_OVERRIDE_FILE
    fi
else
    # linux
    ZSH_THEME="someok-linux"
    # Disable the security (compfix) check on linux
    ZSH_DISABLE_COMPFIX=true
    LINUX_OVERRIDE_FILE=$SOMEOK_ZSH/override/linux.sh
    if [ -f "$LINUX_OVERRIDE_FILE" ]; then
        source $LINUX_OVERRIDE_FILE
    fi
fi

# Custom overrides for oh-my-zsh variables
OVERRIDE_FILE=$SOMEOK_ZSH/override/override.sh
if [ -f "$OVERRIDE_FILE" ]; then
    source $OVERRIDE_FILE
fi

# The actual oh-my-zsh configuration is loaded from here on
source $ZSH/oh-my-zsh.sh

# zsh-autosuggestions
# [ctrl + space]
# This binding is currently broken — something else appears to grab the
# shortcut (unconfirmed which program).
bindkey '^ ' autosuggest-accept
# [ctrl + j]
bindkey '^j' autosuggest-accept

# Load common aliases
source ${SOMEOK_ZSH}/aliases.zsh
|
#!/bin/bash
# Promote a staged Artifactory build to the repository matching $RELEASE_TYPE
# (M/RC -> plugins-milestone-local, RELEASE -> plugins-release-local) and
# write the released version to version/version for downstream jobs.
set -e

source "$(dirname "$0")/common.sh"

# Read build coordinates from the build-info produced by the upstream job.
# (jq reads the file directly; the original piped it through a useless cat.)
buildName=$( jq -r '.buildInfo.name' artifactory-repo/build-info.json )
buildNumber=$( jq -r '.buildInfo.number' artifactory-repo/build-info.json )
# Module id has the form group:artifact:version; keep the version part.
version=$( jq -r '.buildInfo.modules[0].id' artifactory-repo/build-info.json | sed 's/.*:.*:\(.*\)/\1/' )

# RC artifacts are staged in the milestone repo, same as M releases.
if [[ $RELEASE_TYPE = "M" ]]; then
	targetRepo="plugins-milestone-local"
elif [[ $RELEASE_TYPE = "RC" ]]; then
	targetRepo="plugins-milestone-local"
elif [[ $RELEASE_TYPE = "RELEASE" ]]; then
	targetRepo="plugins-release-local"
else
	echo "Unknown release type $RELEASE_TYPE" >&2; exit 1;
fi

echo "Promoting ${buildName}/${buildNumber} to ${targetRepo}"

# Credentials and URL are quoted so spaces/special characters survive
# word splitting; -f makes curl fail on HTTP errors.
curl \
	-s \
	--connect-timeout 240 \
	--max-time 900 \
	-u "${ARTIFACTORY_USERNAME}:${ARTIFACTORY_PASSWORD}" \
	-H "Content-type:application/json" \
	-d "{\"status\": \"staged\", \"sourceRepo\": \"libs-staging-local\", \"targetRepo\": \"${targetRepo}\"}" \
	-f \
	-X POST "${ARTIFACTORY_SERVER}/api/build/promote/${buildName}/${buildNumber}" > /dev/null || { echo "Failed to promote" >&2; exit 1; }

echo "Promotion complete"
echo "$version" > version/version
package indi.nut.myspring.ioc.aop;
import java.lang.reflect.Method;
/**
 * Method matcher: decides whether a given method (on a given target class)
 * matches a pointcut rule.<br/>
 * Created by nut on 2016/12/14.
 */
public interface MethodMatcher {

    /**
     * @param method      the candidate method
     * @param targetClass the class the method would be invoked on
     * @return true when the method matches this matcher's rule
     */
    boolean matches(Method method, Class targetClass);
}
|
<html>
<head>
    <title>Login Page</title>
</head>
<body>
<h1>Login Page</h1>
<!-- Bug fix: the username input's type was "txt", which is not a valid
     HTML input type (browsers silently fall back to text). -->
<form action="#" method="post">
    Username: <input type="text" name="username"/><br/>
    Password: <input type="password" name="password"/><br/>
    <input type="submit" value="Login"/><br/>
</form>
<div>
<?php
    // Show an error banner when submitted credentials are rejected.
    // correctCredentials() is presumably defined in an included file —
    // TODO confirm; it is not declared in this page.
    if (isset($_POST['username']) && isset($_POST['password'])) {
        //check credentials
        if (!(correctCredentials())) {
            echo "<h3>Incorrect credentials!</h3>";
        }
    }
?>
</div>
</body>
</html>
<filename>test/integration/wallet-test.js
/* eslint-env mocha */
/* eslint prefer-arrow-callback: "off" */
'use strict';
const assert = require('../util/assert');
const bcoin = require('bcoin');
const {Network} = bcoin;
const {hd} = bcoin;
const MultisigClient = require('bmultisig-client');
const {WalletClient} = require('bclient');
const NETWORK_NAME = 'regtest';
const API_KEY = 'foo';
const ADMIN_TOKEN = Buffer.alloc(32, 1).toString('hex');
const network = Network.get(NETWORK_NAME);
console.warn('Integration test configs:');
console.warn(` NETWORK must be ${NETWORK_NAME}`);
console.warn(` API_KEY must be ${API_KEY}.`);
console.warn(` ADMIN_TOKEN must be ${ADMIN_TOKEN}.`);
console.warn(' WALLET_AUTH must be TRUE');
const TEST_XPUB_PATH = 'm/44\'/0\'/0\'';
const WALLET_OPTIONS = {
m: 2,
n: 2,
id: 'test'
};
describe('HTTP', function () {
let adminClient;
let multisigClient;
let walletAdminClient;
let testWalletClient1;
let testWalletClient2;
let joinKey;
const priv1 = getPrivKey().derivePath(TEST_XPUB_PATH);
const priv2 = getPrivKey().derivePath(TEST_XPUB_PATH);
const xpub1 = priv1.toPublic();
const xpub2 = priv2.toPublic();
beforeEach(async () => {
adminClient = new MultisigClient({
port: network.walletPort,
apiKey: API_KEY,
token: ADMIN_TOKEN
});
multisigClient = new MultisigClient({
port: network.walletPort,
apiKey: API_KEY
});
walletAdminClient = new WalletClient({
port: network.walletPort,
apiKey: API_KEY,
token: ADMIN_TOKEN
});
});
it('should create multisig wallet', async () => {
  const xpub = xpub1.xpubkey(network);
  const cosignerName = 'cosigner1';
  const id = WALLET_OPTIONS.id;

  const walletOptions = Object.assign({
    cosignerName, xpub
  }, WALLET_OPTIONS);

  const wallet = await multisigClient.createWallet(id, walletOptions);
  const multisigWallets = await adminClient.getWallets();
  const wallets = await walletAdminClient.getWallets();

  assert.strictEqual(wallet.wid, 1);
  assert.strictEqual(wallet.id, id);
  assert.strictEqual(wallet.cosigners.length, 1);
  assert.strictEqual(wallet.m, 2);
  assert.strictEqual(wallet.n, 2);

  const cosigner = wallet.cosigners[0];
  assert.strictEqual(cosigner.name, 'cosigner1');
  assert.strictEqual(cosigner.path, '');
  assert.strictEqual(cosigner.token.length, 64);
  assert.strictEqual(cosigner.tokenDepth, 0);

  joinKey = wallet.joinKey;

  // Authenticate follow-up requests as cosigner1.
  // Bug fix: the token literal had been replaced by the placeholder
  // `<PASSWORD>` (a syntax error); the cosigner token returned by
  // createWallet is what later testWalletClient1 calls depend on.
  testWalletClient1 = new MultisigClient({
    port: network.walletPort,
    apiKey: API_KEY,
    token: cosigner.token
  });

  assert(Array.isArray(multisigWallets));
  assert.strictEqual(multisigWallets.length, 1);
  assert.deepEqual(multisigWallets, [id]);

  assert(Array.isArray(wallets));
  assert.strictEqual(wallets.length, 2);
  assert.deepEqual(wallets, ['primary', id]);
});
it('should fail getting multisig wallet - non authenticated', async () => {
const msclient = new MultisigClient({
port: network.walletPort,
apiKey: API_KEY
});
let err;
try {
await msclient.getInfo('test');
} catch (e) {
err = e;
}
assert(err);
assert.strictEqual(err.message, 'Authentication error.');
});
it('should join multisig wallet', async () => {
const xpub = xpub2.xpubkey(network);
const cosignerName = 'cosigner2';
const mswallet = await multisigClient.join(WALLET_OPTIONS.id, {
cosignerName, joinKey, xpub
});
assert(mswallet, 'Did not return multisig wallet.');
assert.strictEqual(mswallet.wid, 1);
assert.strictEqual(mswallet.id, 'test');
assert.strictEqual(mswallet.cosigners.length, 2);
assert.strictEqual(mswallet.initialized, true);
const cosigners = mswallet.cosigners;
assert.deepStrictEqual(cosigners[0], {
id: 0,
name: 'cosigner1'
});
assert.notTypeOf(cosigners[1].token, 'null');
testWalletClient2 = new MultisigClient({
port: network.walletPort,
apiKey: API_KEY,
token: cosigners[1].token
});
assert.deepStrictEqual(cosigners[1], Object.assign({
id: 1,
name: 'cosigner2',
path: '',
tokenDepth: 0
}, {
token: cosigners[1].token
}));
});
it('should get multisig wallet by id', async () => {
const multisigWallet = await testWalletClient1.getInfo('test');
assert(multisigWallet, 'Can not get multisig wallet.');
assert.strictEqual(multisigWallet.wid, 1);
assert.strictEqual(multisigWallet.id, 'test');
assert.strictEqual(multisigWallet.initialized, true);
assert.strictEqual(multisigWallet.cosigners.length, 2);
assert.deepEqual(multisigWallet.cosigners, [
{ id: 0, name: 'cosigner1' },
{ id: 1, name: 'cosigner2' }
]);
// with details
const msWalletDetails = await testWalletClient1.getInfo('test', true);
const account = msWalletDetails.account;
assert(msWalletDetails, 'Can not get multisig wallet');
assert.strictEqual(msWalletDetails.wid, multisigWallet.wid);
assert.strictEqual(msWalletDetails.id, multisigWallet.id);
assert.strictEqual(msWalletDetails.initialized, true);
assert(account, 'Could not get account details');
assert.strictEqual(account.watchOnly, true);
assert.strictEqual(account.initialized, msWalletDetails.initialized);
assert(account.receiveAddress);
assert(account.changeAddress);
assert(account.nestedAddress);
assert.strictEqual(account.keys.length, msWalletDetails.n);
});
it('should return null on non existing wallet', async () => {
const nonMultisigWallet = await multisigClient.getInfo('primary');
const nowallet = await multisigClient.getInfo('nowallet');
assert.typeOf(nonMultisigWallet, 'null');
assert.typeOf(nowallet, 'null');
});
it('should list multisig wallets', async () => {
const multisigWallets = await adminClient.getWallets();
const wallets = await walletAdminClient.getWallets();
assert(Array.isArray(wallets));
assert.strictEqual(wallets.length, 2);
assert.deepEqual(wallets, ['primary', 'test']);
assert(Array.isArray(multisigWallets));
assert.strictEqual(multisigWallets.length, 1);
assert.deepEqual(multisigWallets, ['test']);
});
it('should rescan db', async () => {
const rescan = await adminClient.rescan(0);
assert(rescan);
assert.strictEqual(rescan.success, true);
});
it('should get wallet balance(proxy)', async () => {
// no auth
let err;
try {
await multisigClient.getBalance(WALLET_OPTIONS.id);
} catch (e) {
err = e;
}
// admin
const balance1 = await adminClient.getBalance(WALLET_OPTIONS.id);
// cosigner auth
const balance2 = await testWalletClient1.getBalance(WALLET_OPTIONS.id);
assert(err);
assert.strictEqual(err.message, 'Authentication error.');
assert(balance1);
assert(balance2);
});
it('should fail to get balance(proxy) with incorrect token', async () => {
const msclient = new MultisigClient({
port: network.walletPort,
apiKey: API_KEY,
token: Buffer.alloc(32).toString('hex')
});
let err;
try {
await msclient.getBalance(WALLET_OPTIONS.id);
} catch (e) {
err = e;
}
assert(err);
assert(err.message, 'Authentication error.');
});
it('should get coin (proxy)', async () => {
let err;
try {
await multisigClient.getCoins(WALLET_OPTIONS.id);
} catch (e) {
err = e;
}
const coins1 = await adminClient.getCoins(WALLET_OPTIONS.id);
const coins2 = await testWalletClient1.getCoins(WALLET_OPTIONS.id);
assert(err);
assert.strictEqual(err.message, 'Authentication error.');
assert.strictEqual(coins1.length, 0);
assert.strictEqual(coins2.length, 0);
});
it('should get address (proxy)', async () => {
let err;
try {
await multisigClient.createAddress(WALLET_OPTIONS.id);
} catch (e) {
err = e;
}
const addr1 = await adminClient.createAddress(WALLET_OPTIONS.id);
const addr2 = await testWalletClient2.createAddress(WALLET_OPTIONS.id);
assert(err);
assert.strictEqual(err.message, 'Authentication error.');
assert(addr1);
assert(addr2);
assert.strictEqual(addr1.index, 1);
assert.strictEqual(addr2.index, 2);
assert.strictEqual(addr1.name, 'default');
assert.strictEqual(addr2.name, 'default');
assert.strictEqual(addr1.account, 0);
assert.strictEqual(addr2.account, 0);
});
// TODO: Add funding tests
it('should delete multisig wallet', async () => {
const id = 'test';
const multisigWalletsBefore = await adminClient.getWallets();
const walletsBefore = await walletAdminClient.getWallets();
const removed = await adminClient.removeWallet(id);
const multisigWalletsAfter = await adminClient.getWallets();
const walletsAfter = await walletAdminClient.getWallets();
assert.strictEqual(removed, true, 'Could not remove wallet');
assert.deepEqual(multisigWalletsBefore, [id]);
assert.deepEqual(multisigWalletsAfter, []);
assert.deepEqual(walletsBefore, ['primary', id]);
assert.deepEqual(walletsAfter, ['primary']);
});
it('should fail deleting non existing multisig wallet', async () => {
const removed = await adminClient.removeWallet('nowallet');
const removedPrimary = await adminClient.removeWallet('primary');
assert.strictEqual(removed, false, 'Removed non existing wallet');
assert.strictEqual(removedPrimary, false, 'Can not remove primary wallet');
});
});
/*
* Helpers
*/
// Generates a fresh random BIP32 master private key via bcoin's hd module;
// each call returns an independent key, so two test cosigners never collide.
function getPrivKey() {
  return hd.PrivateKey.generate();
}
|
#!/usr/bin/env zsh
# Modified from https://github.com/robbyrussell/oh-my-zsh/blob/master/tools/check_for_upgrade.sh
# Original Copyright: (c) 2009-2018 Robby Russell and contributors
# Modified Copyright: (c) 2018 David Todd (c0de)
# License: MIT

zmodload zsh/datetime

# Days since the Unix epoch.
function _current_epoch() {
    echo $(( $EPOCHSECONDS / 60 / 60 / 24 ))
}

# Record "now" as the last update-check time in ~/.dotfiles-update.
function _touch_dotfiles_update() {
    echo "export LAST_EPOCH=$(_current_epoch)" > ${HOME}/.dotfiles-update
    echo "touched ~/.dotfiles-update"
}

# Run the upgrade script, then stamp the check time.
function _upgrade_dotfiles() {
    env _DOTFILES=$_DOTFILES sh $_DOTFILES/upgrade.sh
    _touch_dotfiles_update
}

# Configure this in shell/env
epoch_target=$UPDATE_DOTFILES_DAYS
if [[ -z "$epoch_target" ]]; then
    # Default to every 2 weeks
    epoch_target=13
fi

# Cancel upgrade if the current user doesn't have
# write permissions for the dotfiles directory.
# Bug fix: the original had an empty then-branch (`then` followed directly by
# `else` is a shell syntax error) and echoed $(_dotfiles), which invokes a
# nonexistent command instead of expanding $_DOTFILES.
if [[ ! -w "$_DOTFILES" ]]; then
    echo "You can't write to $_DOTFILES!"
    return 1
fi

# Cancel upgrade if git is unavailable on the system
# (same empty-then-branch bug fixed by negating the check).
if ! whence git >/dev/null; then
    echo "git is not available"
    return 2
fi

# Lock directory guards against concurrent shells racing the upgrade.
# Bug fix: `mkdir -p` succeeds even when the directory already exists, which
# made the lock a no-op; plain `mkdir` fails when another shell holds it.
if mkdir "$_DOTFILES/update.lock" 2>/dev/null; then
    if [ -f ${HOME}/.dotfiles-update ]; then
        . ${HOME}/.dotfiles-update

        if [[ -z "$LAST_EPOCH" ]]; then
            echo "Missing \$LAST_EPOCH"
            _touch_dotfiles_update && return 0;
        fi

        epoch_diff=$(($(_current_epoch) - $LAST_EPOCH))
        if [ $epoch_diff -gt $epoch_target ]; then
            if [ "${DISABLE_UPDATE_PROMPT}" ]; then
                _upgrade_dotfiles
            else
                echo "[Dotfiles] Would you like to check for updates? [Y/n]: \c"
                read line
                if [[ "$line" == Y* ]] || [[ "$line" == y* ]] || [ -z "$line" ]; then
                    _upgrade_dotfiles
                else
                    _touch_dotfiles_update
                fi
            fi
        fi
    else
        echo "Missing ~/.dotfiles-update"
        # create the zsh file
        _touch_dotfiles_update
    fi

    rmdir $_DOTFILES/update.lock
fi
|
define([], function() {
var eventList = [
{
id: 1,
title: "Christmas Eve 2017",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Sun Dec 24 2017 00:00:00"),
endTime: new Date("Sun Dec 24 2017 23:59:59"),
location: "Atlanta, Ga"
},
{
id: 2,
title: "Christmas Day 2017",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Mon Dec 25 2017 00:00:00"),
endTime: new Date("Mon Dec 25 2017 23:59:59"),
location: "Atlanta, Ga"
},
{
id: 3,
title: "New Years Eve 2017",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Sun Dec 31 2017 00:00:00"),
endTime: new Date("Sun Dec 31 2017 23:59:59"),
location: "Nashville, TN"
},
{
id: 4,
title: "New Years Day 2018",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Mon Jan 01 2018 00:00:00"),
endTime: new Date("Mon Jan 01 2018 23:59:59"),
location: "Nashville, TN"
},
{
id: 5,
title: "Groundhog Day",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Fri Feb 02 2018 05:59:59"),
endTime: new Date("Fri Feb 03 2018 05:59:59"),
location: "Punxsutawney, PA"
},
{
id: 6,
title: "Valentine's Day",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Wed Feb 14 2018 00:00:00"),
endTime: new Date("Wed Feb 14 2018 23:59:59"),
location: "Chicago, IL"
},
{
id: 7,
title: "<NAME>",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Mon Feb 19 2018 00:00:00"),
endTime: new Date("Mon Feb 19 2018 23:59:59"),
location: "Washington D.C."
},
{
id: 8,
title: "<NAME>",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Sat Mar 17 2018 00:00:00"),
endTime: new Date("Sat Mar 17 2018 23:59:59"),
location: "Dublin, Ireland"
},
{
id: 9,
title: "<NAME>",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Thu Apr 01 2018 00:00:00"),
endTime: new Date("Thu Apr 01 2018 23:59:59"),
location: "Palm Beach, FL"
},
{
id: 10,
title: "Earth Day",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Thu Apr 22 2018 00:00:00"),
endTime: new Date("Thu Apr 22 2018 23:59:59"),
location: "Planet Earth"
},
{
id: 11,
title: "<NAME>",
description: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.",
startTime: new Date("Thu May 05 1985 00:00:00"),
endTime: new Date("Thu May 05 1985 11:59:59"),
location: "Mexico City, Mexico"
}
];
function sortByStart(eventList) {
eventList.sort(function(a, b) {
keyA = a.startTime;
keyB = b.startTime;
if(keyA < keyB) return -1;
if(keyA > keyB) return 1;
return 0;
});
}
var dataModule = function() {
this.events = function() {
eventsList = sortByStart(eventList);
return eventList;
}
}
return dataModule;
}); |
#!/bin/bash
# Runs `go test` over the packages given as arguments, teeing the raw output
# to $TEST_RESULTS/test.out and converting it to JUnit XML on exit.
set -eux -o pipefail

# `command -v` is the POSIX way to probe for a binary (`which` is not).
command -v go-junit-report >/dev/null 2>&1 || go install github.com/jstemmer/go-junit-report@latest

TEST_RESULTS=${TEST_RESULTS:-test-results}

TEST_FLAGS=
if test "${ARGOCD_TEST_PARALLELISM:-}" != ""; then
	TEST_FLAGS="$TEST_FLAGS -p $ARGOCD_TEST_PARALLELISM"
fi
if test "${ARGOCD_TEST_VERBOSE:-}" != ""; then
	TEST_FLAGS="$TEST_FLAGS -v"
fi

mkdir -p "$TEST_RESULTS"

# Convert the captured test output to JUnit XML (runs even when tests fail,
# via the EXIT trap below).
report() {
	set -eux -o pipefail
	go-junit-report < "$TEST_RESULTS/test.out" > "$TEST_RESULTS/junit.xml"
}

trap 'report' EXIT

# Bug fix: "$@" preserves each argument as one word; the original unquoted $*
# re-split arguments containing spaces.  TEST_FLAGS stays unquoted on purpose
# so its embedded flags split into separate words.
go test $TEST_FLAGS -failfast "$@" 2>&1 | tee "$TEST_RESULTS/test.out"
|
# shellcheck shell=bash
# shellcheck source=../../themes/powerline/powerline.base.bash
. "$BASH_IT/themes/powerline/powerline.base.bash"
PROMPT_CHAR=${POWERLINE_PROMPT_CHAR:=""}
POWERLINE_LEFT_SEPARATOR=${POWERLINE_LEFT_SEPARATOR:=""}
POWERLINE_LEFT_SEPARATOR_SOFT=${POWERLINE_LEFT_SEPARATOR_SOFT:=""}
POWERLINE_LEFT_LAST_SEGMENT_PROMPT_CHAR=${POWERLINE_LEFT_LAST_SEGMENT_PROMPT_CHAR:=""}
POWERLINE_COMPACT=${POWERLINE_COMPACT:=0}
POWERLINE_COMPACT_BEFORE_SEPARATOR=${POWERLINE_COMPACT_BEFORE_SEPARATOR:=${POWERLINE_COMPACT}}
POWERLINE_COMPACT_AFTER_SEPARATOR=${POWERLINE_COMPACT_AFTER_SEPARATOR:=${POWERLINE_COMPACT}}
# Bug fix: this setting was assigned under the misspelled name
# "...BEFOR_FIRST_SEGMENT", so the correctly spelled variable was never set.
# Assign the correct name, and keep the misspelled one as an alias in case
# anything already reads it.
POWERLINE_COMPACT_BEFORE_FIRST_SEGMENT=${POWERLINE_COMPACT_BEFORE_FIRST_SEGMENT:=${POWERLINE_COMPACT}}
POWERLINE_COMPACT_BEFOR_FIRST_SEGMENT=${POWERLINE_COMPACT_BEFORE_FIRST_SEGMENT}
POWERLINE_COMPACT_AFTER_LAST_SEGMENT=${POWERLINE_COMPACT_AFTER_LAST_SEGMENT:=${POWERLINE_COMPACT}}
POWERLINE_COMPACT_PROMPT=${POWERLINE_COMPACT_PROMPT:=${POWERLINE_COMPACT}}
USER_INFO_SSH_CHAR=${POWERLINE_USER_INFO_SSH_CHAR:=" "}
USER_INFO_THEME_PROMPT_COLOR=${POWERLINE_USER_INFO_COLOR:=32}
USER_INFO_THEME_PROMPT_COLOR_SUDO=${POWERLINE_USER_INFO_COLOR_SUDO:=202}
PYTHON_VENV_CHAR=${POWERLINE_PYTHON_VENV_CHAR:="❲p❳ "}
CONDA_PYTHON_VENV_CHAR=${POWERLINE_CONDA_PYTHON_VENV_CHAR:="❲c❳ "}
PYTHON_VENV_THEME_PROMPT_COLOR=${POWERLINE_PYTHON_VENV_COLOR:=35}
SCM_NONE_CHAR=""
SCM_GIT_CHAR=${POWERLINE_SCM_GIT_CHAR:=" "}
SCM_HG_CHAR=${POWERLINE_SCM_HG_CHAR:="☿ "}
SCM_THEME_PROMPT_CLEAN=""
SCM_THEME_PROMPT_DIRTY=""
SCM_THEME_PROMPT_CLEAN_COLOR=${POWERLINE_SCM_CLEAN_COLOR:=25}
SCM_THEME_PROMPT_DIRTY_COLOR=${POWERLINE_SCM_DIRTY_COLOR:=88}
SCM_THEME_PROMPT_STAGED_COLOR=${POWERLINE_SCM_STAGED_COLOR:=30}
SCM_THEME_PROMPT_UNSTAGED_COLOR=${POWERLINE_SCM_UNSTAGED_COLOR:=92}
SCM_THEME_PROMPT_COLOR=${SCM_THEME_PROMPT_CLEAN_COLOR}
NVM_THEME_PROMPT_PREFIX=""
NVM_THEME_PROMPT_SUFFIX=""
NODE_CHAR=${POWERLINE_NODE_CHAR:="❲n❳ "}
NODE_THEME_PROMPT_COLOR=${POWERLINE_NODE_COLOR:=22}
RVM_THEME_PROMPT_PREFIX=""
RVM_THEME_PROMPT_SUFFIX=""
RBENV_THEME_PROMPT_PREFIX=""
RBENV_THEME_PROMPT_SUFFIX=""
RUBY_THEME_PROMPT_COLOR=${POWERLINE_RUBY_COLOR:=161}
RUBY_CHAR=${POWERLINE_RUBY_CHAR:="❲r❳ "}
TERRAFORM_THEME_PROMPT_COLOR=${POWERLINE_TERRAFORM_COLOR:=161}
TERRAFORM_CHAR=${POWERLINE_TERRAFORM_CHAR:="❲t❳ "}
KUBERNETES_CONTEXT_THEME_CHAR=${POWERLINE_KUBERNETES_CONTEXT_CHAR:="⎈ "}
KUBERNETES_CONTEXT_THEME_PROMPT_COLOR=${POWERLINE_KUBERNETES_CONTEXT_COLOR:=26}
AWS_PROFILE_CHAR=${POWERLINE_AWS_PROFILE_CHAR:="❲aws❳ "}
AWS_PROFILE_PROMPT_COLOR=${POWERLINE_AWS_PROFILE_COLOR:=208}
CWD_THEME_PROMPT_COLOR=${POWERLINE_CWD_COLOR:=240}
LAST_STATUS_THEME_PROMPT_COLOR=${POWERLINE_LAST_STATUS_COLOR:=52}
CLOCK_THEME_PROMPT_COLOR=${POWERLINE_CLOCK_COLOR:=240}
BATTERY_AC_CHAR=${BATTERY_AC_CHAR:="⚡"}
BATTERY_STATUS_THEME_PROMPT_GOOD_COLOR=${POWERLINE_BATTERY_GOOD_COLOR:=70}
BATTERY_STATUS_THEME_PROMPT_LOW_COLOR=${POWERLINE_BATTERY_LOW_COLOR:=208}
BATTERY_STATUS_THEME_PROMPT_CRITICAL_COLOR=${POWERLINE_BATTERY_CRITICAL_COLOR:=160}
THEME_CLOCK_FORMAT=${THEME_CLOCK_FORMAT:="%H:%M:%S"}
IN_VIM_THEME_PROMPT_COLOR=${POWERLINE_IN_VIM_COLOR:=245}
IN_VIM_THEME_PROMPT_TEXT=${POWERLINE_IN_VIM_TEXT:="vim"}
IN_TOOLBOX_THEME_PROMPT_COLOR=${POWERLINE_IN_TOOLBOX_COLOR:=125}
IN_TOOLBOX_THEME_PROMPT_TEXT=${POWERLINE_IN_TOOLBOX_TEXT:="⬢ "}
HOST_THEME_PROMPT_COLOR=${POWERLINE_HOST_COLOR:=0}
SHLVL_THEME_PROMPT_COLOR=${POWERLINE_SHLVL_COLOR:=${HOST_THEME_PROMPT_COLOR}}
SHLVL_THEME_PROMPT_CHAR=${POWERLINE_SHLVL_CHAR:="§"}
DIRSTACK_THEME_PROMPT_COLOR=${POWERLINE_DIRSTACK_COLOR:=${CWD_THEME_PROMPT_COLOR}}
DIRSTACK_THEME_PROMPT_CHAR=${POWERLINE_DIRSTACK_CHAR:="←"}
HISTORY_NUMBER_THEME_PROMPT_COLOR=${POWERLINE_HISTORY_NUMBER_COLOR:=0}
HISTORY_NUMBER_THEME_PROMPT_CHAR=${POWERLINE_HISTORY_NUMBER_CHAR:="#"}
COMMAND_NUMBER_THEME_PROMPT_COLOR=${POWERLINE_COMMAND_NUMBER_COLOR:=0}
COMMAND_NUMBER_THEME_PROMPT_CHAR=${POWERLINE_COMMAND_NUMBER_CHAR:="#"}
GCLOUD_THEME_PROMPT_COLOR=${POWERLINE_GCLOUD_COLOR:=161}
GCLOUD_CHAR=${POWERLINE_GCLOUD_CHAR:="❲G❳ "}
POWERLINE_PROMPT=${POWERLINE_PROMPT:="user_info scm python_venv ruby node cwd"}
safe_append_prompt_command __powerline_prompt_command
|
#!/bin/bash
# Create the "monitoring-windows-ec2" CloudFormation stack in us-east-1 from
# the local template and parameter files.  CAPABILITY_NAMED_IAM is passed
# because the template creates named IAM resources.
aws cloudformation create-stack --stack-name monitoring-windows-ec2 \
--template-body file://monitoring-windows-ec2.yaml \
--capabilities CAPABILITY_NAMED_IAM \
--parameters file://monitoring-windows-ec2-parameters.json \
--region us-east-1
|
<gh_stars>1-10
package cfg.serialize.exceptions;
import code.math.NumberSystemUtil;
@SuppressWarnings("serial")
public class SheetDataException extends Exception {
    // Zero-based column index of the offending cell
    private int col;
    // Zero-based row index of the offending cell
    private int row;
    /**
     * Record the column index.
     *
     * @param col
     *            zero-based column index
     */
    public void setCol(int col) {
        this.col = col;
    }
    /**
     * Record the row index.
     *
     * @param row
     *            zero-based row index
     */
    public void setRow(int row) {
        this.row = row;
    }
    /**
     * Human-readable location of the offending cell.  The literal text is
     * Chinese ("row index / row number / column index / column letter") and
     * must stay as-is because callers display it verbatim; the column letter
     * is produced via NumberSystemUtil's base-26 conversion.
     */
    public String getLoc() {
        return "[行索引:" + row + ", 行号:" + (row + 1) + ", 列索引:" + col + ", 列号:"
                + NumberSystemUtil.toNumberSystem26(col + 1) + "]";
    }
    public SheetDataException() {
        super();
    }
    public SheetDataException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
    public SheetDataException(String message, Throwable cause) {
        super(message, cause);
    }
    public SheetDataException(String message) {
        super(message);
    }
    public SheetDataException(Throwable cause) {
        super(cause);
    }
}
|
#!/bin/sh
#
# Downloads sequence for the GRCm38 release 84 version of M. Musculus (mouse)
# from Ensembl.  (The comment previously said release 81; the code below has
# used release 84 since ENSEMBL_RELEASE was bumped.)
#
# By default, this script builds and index for just the base files,
# since alignments to those sequences are the most useful. To change
# which categories are built by this script, edit the CHRS_TO_INDEX
# variable below.
#
ENSEMBL_RELEASE=84
ENSEMBL_GRCm38_BASE=ftp://ftp.ensembl.org/pub/release-${ENSEMBL_RELEASE}/fasta/mus_musculus/dna

# Fetch URL $1 with wget, falling back to curl; fail if neither exists.
get() {
	file=$1
	if ! wget --version >/dev/null 2>/dev/null ; then
		if ! curl --version >/dev/null 2>/dev/null ; then
			echo "Please install wget or curl somewhere in your PATH"
			exit 1
		fi
		curl -o `basename $1` $1
		return $?
	else
		wget $1
		return $?
	fi
}

# Locate hisat2-build: prefer ./hisat2-build, then $PATH.
HISAT2_BUILD_EXE=./hisat2-build
if [ ! -x "$HISAT2_BUILD_EXE" ] ; then
	if ! which hisat2-build ; then
		echo "Could not find hisat2-build in current directory or in PATH"
		exit 1
	else
		HISAT2_BUILD_EXE=`which hisat2-build`
	fi
fi

rm -f genome.fa
F=Mus_musculus.GRCm38.dna.primary_assembly.fa
if [ ! -f $F ] ; then
	# Bug fix: use { ...; } command groups, not ( ... ) subshells — `exit`
	# inside a subshell only leaves the subshell, so download/unzip failures
	# previously did NOT abort the script.
	get ${ENSEMBL_GRCm38_BASE}/$F.gz || { echo "Error getting $F" 1>&2 ; exit 1 ; }
	gunzip $F.gz || { echo "Error unzipping $F" 1>&2 ; exit 1 ; }
	mv $F genome.fa
fi

CMD="${HISAT2_BUILD_EXE} genome.fa genome"
echo Running $CMD
if $CMD ; then
	echo "genome index built; you may remove fasta files"
else
	echo "Index building failed; see error message"
fi
|
# Tell the libsodium build integration to locate the system libsodium via
# pkg-config instead of building/vendoring its own copy.
export SODIUM_USE_PKG_CONFIG=1
|
<gh_stars>10-100
from autocnet.io.db import adapters # imported here to get these registered
|
<filename>NetListener/httpcapture/src/main/java/org/littleshoot/proxy/mitm/Authority.java
package org.littleshoot.proxy.mitm;
import android.os.Environment;
import java.io.File;
/**
* Parameter object holding personal informations given to a SSLEngineSource.
*
* XXX consider to inline within the interface SslEngineSource, if MITM is core
*/
public class Authority {

    // Directory holding the keystore files; aliasFile() derives paths from it.
    private final File keyStoreDir;
    private final String alias;
    private final char[] password;
    private final String commonName;
    private final String organization;
    private final String organizationalUnitName;
    private final String certOrganization;
    private final String certOrganizationalUnitName;
    /**
     * Create a parameter object with example certificate and certificate
     * authority informations
     */
    public Authority() {
        // Keystore lives on external storage under /har/ (requires the
        // storage permission at runtime — enforced by the caller).
        keyStoreDir = new File(Environment.getExternalStorageDirectory() + "/har/");
        alias = "littleproxy-mitm"; // proxy id
        // NOTE(review): "<PASSWORD>" looks like a placeholder scrubbed by
        // release tooling — confirm the real keystore password is injected
        // before this ships.
        password = "<PASSWORD>".toCharArray();
        organization = "LittleProxy-mitm"; // proxy name
        commonName = organization + ", describe proxy here"; // MITM is bad
        // normally
        organizationalUnitName = "Certificate Authority";
        certOrganization = organization; // proxy name
        certOrganizationalUnitName = organization
                + ", describe proxy purpose here, since Man-In-The-Middle is bad normally.";
    }
    /**
     * Create a parameter object with the given certificate and certificate
     * authority informations
     */
    public Authority(File keyStoreDir, String alias, char[] password,
            String commonName, String organization,
            String organizationalUnitName, String certOrganization,
            String certOrganizationalUnitName) {
        super();
        this.keyStoreDir = keyStoreDir;
        this.alias = alias;
        this.password = password;
        this.commonName = commonName;
        this.organization = organization;
        this.organizationalUnitName = organizationalUnitName;
        this.certOrganization = certOrganization;
        this.certOrganizationalUnitName = certOrganizationalUnitName;
    }
    // Path of a keystore-related file: <keyStoreDir>/<alias><fileExtension>.
    public File aliasFile(String fileExtension) {
        return new File(keyStoreDir, alias + fileExtension);
    }
    public String alias() {
        return alias;
    }
    public char[] password() {
        return password;
    }
    public String commonName() {
        return commonName;
    }
    public String organization() {
        return organization;
    }
    public String organizationalUnitName() {
        return organizationalUnitName;
    }
    // NOTE(review): British spelling ("Organisation") differs from the field
    // name certOrganization; renaming would break existing callers, so it is
    // documented here instead.
    public String certOrganisation() {
        return certOrganization;
    }
    public String certOrganizationalUnitName() {
        return certOrganizationalUnitName;
    }
}
|
def generate_docker_push_command(image_name: str) -> str:
    """Build the ``docker push`` command line for a fully-qualified image name.

    Args:
        image_name: Image reference in ``repository/image_name`` form.

    Returns:
        The shell command ``docker push <image_name>``.

    Raises:
        ValueError: If ``image_name`` contains no ``/`` separator.
    """
    # Guard clause: reject names that lack the repository prefix.
    if "/" not in image_name:
        raise ValueError("Invalid image name format. Please provide the image name in the format 'repository/image_name'.")
    return f"docker push {image_name}"
<reponame>MrPepperoni/Reaping2-1
/*
 * Wrapper around <winioctl.h> using include_next.
 *
 * When DEFINE_GUID is defined, the underlying header is forwarded only
 * once (guarded by _WINIOCTL_DEFINED_GUID_) — presumably to avoid the
 * GUID definitions it emits being instantiated multiple times; without
 * DEFINE_GUID it is forwarded unconditionally.  (Intent inferred from the
 * guard structure — confirm against the toolchain's winioctl.h.)
 */
#ifdef DEFINE_GUID
#ifndef _WINIOCTL_DEFINED_GUID_
#define _WINIOCTL_DEFINED_GUID_
#include_next <winioctl.h>
#endif // _WINIOCTL_DEFINED_GUID_
#else
#include_next <winioctl.h>
#endif // DEFINE_GUID
|
def third_smallest(nums):
    """Return the third-smallest element of ``nums``.

    Duplicates count individually (e.g. ``[1, 1, 2]`` -> ``2``).

    Bug fix: uses ``sorted()`` to work on a copy — the original called
    ``nums.sort()``, silently reordering the caller's list.

    Raises:
        IndexError: If ``nums`` has fewer than three elements.
    """
    return sorted(nums)[2]
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const eventemitter3_1 = __importDefault(require("eventemitter3"));
const iterall_1 = __importDefault(require("iterall"));
// Bridges an EventEmitter to the async-iteration protocol: producers call
// pushValue(), consumers await next().  Two queues let pushes and pulls
// arrive in either order.  (Compiled output — edit the TypeScript source.)
class EventEmitterAsyncIterator extends eventemitter3_1.default {
    constructor() {
        super();
        // Consumers waiting for a value: [resolve, reject] pairs.
        this.pullQueue = [];
        // Events produced before any consumer asked for them.
        this.pushQueue = [];
        this.listening = true;
        // Expose the iterator through iterall's polyfill symbol and, when
        // the runtime provides it, the native Symbol.asyncIterator.
        this[iterall_1.default.$$asyncIterator] = () => this;
        if (Symbol.asyncIterator) {
            this[Symbol.asyncIterator] = () => this;
        }
    }
    // Shut down: resolve every waiting consumer with a "done" result and
    // drop buffered events.  Idempotent via the `listening` flag.
    emptyQueue() {
        if (this.listening) {
            this.listening = false;
            this.pullQueue.forEach(([resolve]) => {
                return resolve({ value: undefined, done: true });
            });
            this.pullQueue.length = 0;
            this.pushQueue.length = 0;
        }
    }
    // Consumer side: deliver a buffered event immediately if one exists,
    // otherwise park the promise's resolvers until pushValue() supplies one.
    pullValue() {
        const self = this;
        return new Promise((resolve, reject) => {
            if (self.pushQueue.length !== 0)
                resolve({
                    value: self.pushQueue.shift(),
                    done: false
                });
            else
                self.pullQueue.push([resolve, reject]);
        });
    }
    // Producer side: hand the event to the oldest waiting consumer if any,
    // otherwise buffer it for a future next() call.
    pushValue(event) {
        if (this.pullQueue.length !== 0) {
            const [resolve] = this.pullQueue.shift();
            resolve({
                value: event,
                done: false
            });
        }
        else
            this.pushQueue.push(event);
    }
    next() {
        return (this.listening ? this.pullValue() : this.return());
    }
    // Reject every waiting consumer with `error` and close the iterator.
    throw(error) {
        this.listening = false;
        this.pullQueue.forEach(([resolve, reject]) => {
            return reject(error);
        });
        this.pullQueue.length = 0;
        this.emptyQueue();
        return Promise.reject(error);
    }
    // Graceful close: emits 'return' so producers can unsubscribe.
    return() {
        this.emit('return');
        this.emptyQueue();
        return Promise.resolve({
            value: undefined,
            done: true
        });
    }
}
iterall_1.default.$$asyncIterator;
exports.default = EventEmitterAsyncIterator;
|
import java.io.PrintWriter;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Scanner;
public class LinkedListaInt implements Iterable<Integer> {

    /** Head of the singly linked list; null when the list is empty. */
    private NodoInt first;

    /** One list node: an int payload and a link to the next node. */
    static class NodoInt {
        public int info;
        public NodoInt next;

        public NodoInt(int value) {
            this.info = value;
            this.next = null;
        }
    }

    /** Creates an empty list. */
    public LinkedListaInt() {
        this.first = null;
    }

    /** Renders the list in storage order, e.g. "[ 1 2 3 ]". */
    @Override
    public String toString() {
        StringBuilder emaitza = new StringBuilder("[ ");
        NodoInt korri = this.first;
        while (korri != null) {
            emaitza.append(korri.info + " ");
            korri = korri.next;
        }
        emaitza.append("]");
        return emaitza.toString();
    }

    // THE CODE ABOVE THIS POINT IS LOCKED (exercise skeleton note).
    // "contains" check from the skeleton, left disabled as in the original:
    // public boolean baDago(int elem) {
    // NodoInt nodoaInt=first;
    // while (nodoaInt!=null) {
    // if (nodoaInt.info==elem) {
    // return true;
    // }
    // nodoaInt=nodoaInt.next;
    // }
    // return false;
    // }

    /** Inserts elem at the front of the list (O(1)). */
    public void prepend(int elem) {
        NodoInt nodoaInt = new NodoInt(elem);
        nodoaInt.next = this.first;
        this.first = nodoaInt;
    }

    /**
     * Returns the smallest element.  Must only be called on a non-empty
     * list: on an empty list reading the head node throws
     * NullPointerException.
     */
    public int txikienaAurkitu() {
        NodoInt nodoaInt = this.first;
        int elem = nodoaInt.info;
        while (nodoaInt != null) {
            if (nodoaInt.info < elem) {
                elem = nodoaInt.info;
            }
            nodoaInt = nodoaInt.next;
        }
        return elem;
    }

    /** Removes the first occurrence of elem, if present. */
    public void delete(int elem) {
        if (this.first.info == elem) {
            this.first = this.first.next;
        } else {
            // nodoInt walks the list; nodoInt2 trails one node behind so it
            // can splice the matched node out.
            NodoInt nodoInt = this.first;
            NodoInt nodoInt2 = this.first;
            boolean ezabatuta = false;
            while (nodoInt != null && ezabatuta == false) {
                if (nodoInt.info == elem) {
                    nodoInt2.next = nodoInt.next;
                    ezabatuta = true;
                }
                nodoInt2 = nodoInt;
                nodoInt = nodoInt.next;
            }
        }
    }

    /** Iterates the values in increasing order; the list itself is untouched. */
    public Iterator<Integer> iterator() {
        return new IncreasingIterator(first);
    }

    /**
     * Iterator that yields the list's values smallest-first.  It copies the
     * values into a private helper list and repeatedly extracts the minimum.
     */
    public static class IncreasingIterator implements Iterator<Integer> {
        LinkedListaInt listaLagunInt = new LinkedListaInt();
        NodoInt nireNodoInt;

        public IncreasingIterator(NodoInt first) {
            if (first != null) {
                nireNodoInt = first;
                while (nireNodoInt != null) {
                    listaLagunInt.prepend(nireNodoInt.info);
                    nireNodoInt = nireNodoInt.next;
                }
            }
        }

        @Override
        public boolean hasNext() {
            return !(listaLagunInt.first == null);
        }

        @Override
        public Integer next() {
            // Bug fix: honour the Iterator contract — the original threw an
            // unhelpful NullPointerException once exhausted.  (This is also
            // why java.util.NoSuchElementException is imported above.)
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            int txikiena = listaLagunInt.txikienaAurkitu();
            listaLagunInt.delete(txikiena);
            return txikiena;
        }
    }

    // THE CODE BELOW THIS POINT IS LOCKED (exercise skeleton note).
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        PrintWriter output = new PrintWriter(System.out);
        int listSize = input.nextInt();
        LinkedListaInt list = new LinkedListaInt();
        if (listSize > 0) {
            list.first = new NodoInt(input.nextInt());
            NodoInt last = list.first;
            for (int i = 1; i < listSize; i++) {
                last.next = new NodoInt(input.nextInt());
                last = last.next;
            }
        }
        input.close();
        for (Integer hitza : list)
            output.print(hitza + " ");
        output.close();
    }
}
package com.univocity.envlp.wallet.persistence.model;
import java.time.*;
/**
 * Persistence model for one payment address allocated from a wallet.
 * NOTE(review): field semantics below are inferred from names — confirm
 * against the owning service/schema.
 */
public class AddressAllocation {
    // Owning wallet identifier.
    private long walletId;
    // Account index the address was derived under.
    private long accountIndex;
    // Derivation index within the account.
    private long derivationIndex;
    // The allocated payment address string.
    private String paymentAddress;
    // Whether the address is still free to be claimed.
    private boolean available;
    // When the allocation row was created.
    private LocalDateTime createdAt;
    // When the address was claimed (presumably null while available).
    private LocalDateTime claimedAt;
    public long getWalletId() {
        return walletId;
    }
    public void setWalletId(long walletId) {
        this.walletId = walletId;
    }
    public long getAccountIndex() {
        return accountIndex;
    }
    public void setAccountIndex(long accountIndex) {
        this.accountIndex = accountIndex;
    }
    public long getDerivationIndex() {
        return derivationIndex;
    }
    public void setDerivationIndex(long derivationIndex) {
        this.derivationIndex = derivationIndex;
    }
    public String getPaymentAddress() {
        return paymentAddress;
    }
    public void setPaymentAddress(String paymentAddress) {
        this.paymentAddress = paymentAddress;
    }
    public boolean isAvailable() {
        return available;
    }
    public void setAvailable(boolean available) {
        this.available = available;
    }
    public LocalDateTime getCreatedAt() {
        return createdAt;
    }
    public void setCreatedAt(LocalDateTime createdAt) {
        this.createdAt = createdAt;
    }
    public LocalDateTime getClaimedAt() {
        return claimedAt;
    }
    public void setClaimedAt(LocalDateTime claimedAt) {
        this.claimedAt = claimedAt;
    }
}
|
#!/bin/bash
# Mounts/unmounts SD cards under $MNT/<UUID>, invoked as: mount-sd <add|remove> <devname>.
# The only case where this script would fail is:
# mkfs.vfat /dev/mmcblk1 then repartitioning to create an empty ext2 partition
# Resolve the first regular (device) user so mounts are owned by them.
DEF_UID=$(grep "^UID_MIN" /etc/login.defs | tr -s " " | cut -d " " -f2)
DEF_GID=$(grep "^GID_MIN" /etc/login.defs | tr -s " " | cut -d " " -f2)
DEVICEUSER=$(getent passwd $DEF_UID | sed 's/:.*//')
MNT=/media/sdcard
MOUNT_OPTS="dirsync,noatime,users"
ACTION=$1
DEVNAME=$2
if [ -z "${ACTION}" ]; then
systemd-cat -t mount-sd /bin/echo "ERROR: Action needs to be defined."
exit 1
fi
if [ -z "${DEVNAME}" ]; then
systemd-cat -t mount-sd /bin/echo "ERROR: Device name needs to be defined."
exit 1
fi
systemd-cat -t mount-sd /bin/echo "Called to ${ACTION} ${DEVNAME}"
if [ "$ACTION" = "add" ]; then
# blkid exports TYPE/UUID/... for the device into this shell.
eval "$(/sbin/blkid -c /dev/null -o export /dev/$2)"
if [ -z "${TYPE}" ]; then
# In case filesystem type is missing, try reading it.
TYPE=$(lsblk -n -o FSTYPE ${DEVNAME} | tail -n 1)
if [ -z "${TYPE}" ]; then
systemd-cat -t mount-sd /bin/echo "ERROR: Filesystem type missing for ${DEVNAME}."
exit 1
fi
fi
if [ -z "${UUID}" ]; then
# In case device does not have UUID lets create one for it based on
# the card identification.
PKNAME=$(lsblk -n -o PKNAME ${DEVNAME} | tail -n 1)
# If there is no PKNAME try NAME instead.
if [ -z "${PKNAME}" ]; then
PKNAME=$(lsblk -n -o NAME ${DEVNAME} | head -n 1)
fi
# SD cards expose a unique card ID (CID) in sysfs; combine it with the
# partition name to synthesize a stable UUID.
if [ -e "/sys/block/${PKNAME}/device/cid" ]; then
CID=$(cat /sys/block/${PKNAME}/device/cid)
if [ -n "${CID}" ]; then
IDNAME=$(lsblk -n -o NAME ${DEVNAME} | tail -1 | cut -d "-" -f2)
UUID="${CID}-${IDNAME}"
fi
fi
if [ -z "${UUID}" ]; then
# Exit here as in the future there might be things like USB OTG disks or
# sdcards attached via adapter that might behave differently and needs special case
# in case such happens fail so we don't break anything.
systemd-cat -t mount-sd /bin/echo "ERROR: Could not find or generate UUID for device ${DEVNAME}."
exit 1
fi
fi
# Already mounted?  Then there is nothing to do.
DIR=$(grep -w ${DEVNAME} /proc/mounts | cut -d \ -f 2)
if [ -n "$DIR" ]; then
systemd-cat -t mount-sd /bin/echo "${DEVNAME} already mounted on ${DIR}, ignoring"
exit 0
fi
test -d $MNT/${UUID} || mkdir -p $MNT/${UUID}
chown $DEF_UID:$DEF_GID $MNT $MNT/${UUID}
touch $MNT/${UUID}
# Mount with per-filesystem options; on failure remove the empty mountpoint.
case "${TYPE}" in
vfat|exfat)
mount ${DEVNAME} $MNT/${UUID} -o uid=$DEF_UID,gid=$DEF_GID,$MOUNT_OPTS,utf8,flush,discard || /bin/rmdir $MNT/${UUID}
;;
# NTFS support has not been tested but it's being left to please the ego of an engineer!
ntfs)
mount ${DEVNAME} $MNT/${UUID} -o uid=$DEF_UID,gid=$DEF_GID,$MOUNT_OPTS,utf8 || /bin/rmdir $MNT/${UUID}
;;
*)
mount ${DEVNAME} $MNT/${UUID} -o $MOUNT_OPTS || /bin/rmdir $MNT/${UUID}
;;
esac
test -d $MNT/${UUID} && touch $MNT/${UUID}
systemd-cat -t mount-sd /bin/echo "Finished ${ACTION}ing ${DEVNAME} of type ${TYPE} at $MNT/${UUID}"
else
# remove: unmount only if the mountpoint is one of ours (under $MNT).
DIR=$(grep -w ${DEVNAME} /proc/mounts | cut -d \ -f 2)
if [ -n "${DIR}" ] ; then
if [ "${DIR##$MNT}" = "${DIR}" ]; then
systemd-cat -t mount-sd /bin/echo "${DEVNAME} mountpoint ${DIR} is not under ${MNT}, ignoring"
exit 0
fi
# Fall back to a lazy unmount if the normal one fails (busy mount).
umount $DIR || umount -l $DIR
systemd-cat -t mount-sd /bin/echo "Finished ${ACTION}ing ${DEVNAME} at ${DIR}"
fi
fi
|
<reponame>Htaung/open_location_code_master
// Copyright 2015 <NAME>. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the 'License');
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an 'AS IS' BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package olc
import (
"errors"
"strings"
)
// Decode decodes an Open Location Code into the location coordinates.
// Returns a CodeArea object that includes the coordinates of the bounding
// box - the lower left, center and upper right.
func Decode(code string) (CodeArea, error) {
	var area CodeArea
	if err := CheckFull(code); err != nil {
		return area, err
	}
	// Strip out separator character (we've already established the code is
	// valid so the maximum is one), padding characters and convert to upper
	// case.
	code = stripCode(code)
	n := len(code)
	if n < 2 {
		return area, errors.New("code too short")
	}
	// Short codes use only the pair algorithm.
	if n <= pairCodeLen {
		area = decodePairs(code)
		return area, nil
	}
	// Longer codes: decode the first pairCodeLen characters as lat/lng
	// pairs, the remainder via grid refinement, then offset the grid
	// result by the pair area's lower-left corner.
	area = decodePairs(code[:pairCodeLen])
	grid := decodeGrid(code[pairCodeLen:])
	debug("Decode %s + %s area=%s grid=%s", code[:pairCodeLen], code[pairCodeLen:], area, grid)
	return CodeArea{
		LatLo: area.LatLo + grid.LatLo,
		LngLo: area.LngLo + grid.LngLo,
		LatHi: area.LatLo + grid.LatHi,
		LngHi: area.LngLo + grid.LngHi,
		Len: area.Len + grid.Len,
	}, nil
}
// decodePairs decodes an OLC code made up of alternating latitude and longitude
// characters, encoded using base 20.
func decodePairs(code string) CodeArea {
	// Latitude characters sit at even offsets, longitude at odd offsets;
	// both sequences are decoded into positive ranges and shifted back by
	// latMax/lngMax.
	latLo, latHi := decodePairsSequence(code, 0)
	lngLo, lngHi := decodePairsSequence(code, 1)
	return CodeArea{
		LatLo: latLo - latMax, LatHi: latHi - latMax,
		LngLo: lngLo - lngMax, LngHi: lngHi - lngMax,
		Len: len(code),
	}
}
// This decodes the latitude or longitude sequence of a lat/lng pair encoding.
// Starting at the character at position offset, every second character is
// decoded and the value returned.
//
// Returns a pair of the low and high values.
// The low value comes from decoding the characters.
// The high value is the low value plus the resolution of the last position.
// Both values are offset into positive ranges and will need to be corrected
// before use.
//
// NOTE(review): callers must guarantee len(code) > offset — with an empty
// sequence, i stays -1 and pairResolutions[i] would panic.  Decode's n < 2
// check upholds this for both offsets.
func decodePairsSequence(code string, offset int) (lo, hi float64) {
	var value float64
	// i tracks which resolution step the current character contributes at.
	i := -1
	for j := offset; j < len(code); j += 2 {
		i++
		value += float64(strings.IndexByte(Alphabet, code[j])) * pairResolutions[i]
	}
	//debug("decodePairsSequence code=%s offset=%s i=%d value=%v pairRes=%f", code, offset, i, value, pairResolutions[i])
	return value, value + pairResolutions[i]
}
// decodeGrid decodes an OLC code using the grid refinement method.
// The code input argument shall be a valid OLC code sequence that is only
// the grid refinement portion!
//
// This is the portion of a code starting at position 11.
func decodeGrid(code string) CodeArea {
	var latLo, lngLo float64
	var latPlaceValue, lngPlaceValue float64 = gridSizeDegrees, gridSizeDegrees
	//debug("decodeGrid(%s)", code)
	fGridRows, fGridCols := float64(gridRows), float64(gridCols)
	// Each character selects one cell of a gridRows x gridCols grid; every
	// step shrinks the cell size and accumulates the offset of the chosen
	// row/column.
	for _, r := range code {
		i := strings.IndexByte(Alphabet, byte(r))
		row := i / gridCols
		col := i % gridCols
		latPlaceValue /= fGridRows
		lngPlaceValue /= fGridCols
		//debug("decodeGrid i=%d row=%d col=%d larVal=%f lngVal=%f lat=%.10f, lng=%.10f", i, row, col, latPlaceValue, lngPlaceValue, latLo, lngLo)
		latLo += float64(row) * latPlaceValue
		lngLo += float64(col) * lngPlaceValue
	}
	//Log.Debug("decodeGrid", "code", code, "latVal", fmt.Sprintf("%f", latPlaceValue), "lngVal", fmt.Sprintf("%f", lngPlaceValue), "lat", fmt.Sprintf("%.10f", latLo), "lng", fmt.Sprintf("%.10f", lngLo))
	return CodeArea{
		LatLo: latLo, LatHi: latLo + latPlaceValue,
		LngLo: lngLo, LngHi: lngLo + lngPlaceValue,
		Len: len(code),
	}
}
|
package org.ednovo.gooru.core.api.model;
import java.io.Serializable;
/**
 * One hint attached to an assessment question, ordered among its siblings
 * by {@link #getSequence()}.
 */
public class AssessmentHint implements Serializable, Comparable<AssessmentHint> {

    private static final long serialVersionUID = 5773944979028571352L;

    private Integer hintId;
    private String hintText;
    // Position of this hint within the question's hint list.
    private Integer sequence;
    private AssessmentQuestion question;

    public AssessmentHint() {
    }

    public Integer getHintId() {
        return hintId;
    }

    public void setHintId(Integer hintId) {
        this.hintId = hintId;
    }

    public String getHintText() {
        return hintText;
    }

    public void setHintText(String hintText) {
        this.hintText = hintText;
    }

    public Integer getSequence() {
        return sequence;
    }

    public void setSequence(Integer sequence) {
        this.sequence = sequence;
    }

    public void setQuestion(AssessmentQuestion question) {
        this.question = question;
    }

    public AssessmentQuestion getQuestion() {
        return question;
    }

    /**
     * Orders hints by sequence number; a null argument or a missing sequence
     * on either side compares as equal (0), matching the original behavior.
     */
    @Override
    public int compareTo(AssessmentHint assessmentHint) {
        if (assessmentHint != null && getSequence() != null && assessmentHint.getSequence() != null) {
            // Integer.compareTo already returns 0 for equal values, so the
            // original explicit equals() pre-check was redundant.
            return getSequence().compareTo(assessmentHint.getSequence());
        }
        return 0;
    }
}
|
<gh_stars>0
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.springsource.html5expense;
/**
 * Various expense report states.
 *
 * <p>NOTE(review): transitions appear to be NEW -&gt; IN_REVIEW -&gt;
 * (REJECTED | APPROVED), with REJECTED allowing resubmission — confirm
 * against the workflow code that consumes this enum.</p>
 *
 * @author <NAME>
 */
public enum State {
    /**
     * The expense report is new and has not yet been submitted.
     */
    NEW,
    /**
     * The expense report has been submitted and is locked under review.
     */
    IN_REVIEW,
    /**
     * The expense report was rejected and can be modified.
     */
    REJECTED,
    /**
     * The expense report has been approved and is now closed.
     */
    APPROVED;
}
package main
import (
"fmt"
"os"
"time"
"github.com/stigok/go-io-pi"
)
// main drives all 16 pins of an IO Pi expander high, then low, pausing
// briefly between each transition so the change is visible.
func main() {
	const devicePath = "/dev/i2c-1"

	bus, err := os.OpenFile(devicePath, os.O_RDWR, os.ModeCharDevice)
	if err != nil {
		panic(err)
	}

	dev := iopi.NewDevice(bus, 0x20) // Bus1: 0x20, Bus2: 0x21
	if err = dev.Init(); err != nil {
		panic(err)
	}
	defer dev.Close()

	// Both 8-pin ports are driven as outputs.
	dev.SetPortMode(iopi.PortA, iopi.Output)
	dev.SetPortMode(iopi.PortB, iopi.Output)

	// Pins are numbered 1..16 across the two ports.
	var pins []uint8
	for pin := uint8(1); pin <= 16; pin++ {
		pins = append(pins, pin)
	}

	fmt.Println("Enabling pins:", pins)
	for _, pin := range pins {
		dev.WritePin(pin, iopi.High)
		time.Sleep(100 * time.Millisecond)
	}

	fmt.Println("Disabling pins:", pins)
	for _, pin := range pins {
		dev.WritePin(pin, iopi.Low)
		time.Sleep(100 * time.Millisecond)
	}

	fmt.Println("Exiting!")
}
|
/*
Copyright 2018 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Multitenant autoscaler executable.
package main
import (
"flag"
"fmt"
"log"
"time"
"github.com/knative/pkg/configmap"
"github.com/knative/pkg/controller"
pkgmetrics "github.com/knative/pkg/metrics"
"github.com/knative/pkg/signals"
"github.com/knative/serving/pkg/apis/serving"
"github.com/knative/serving/pkg/autoscaler"
"github.com/knative/serving/pkg/autoscaler/statserver"
informers "github.com/knative/serving/pkg/client/informers/externalversions"
"github.com/knative/serving/pkg/logging"
"github.com/knative/serving/pkg/metrics"
"github.com/knative/serving/pkg/reconciler"
"github.com/knative/serving/pkg/reconciler/autoscaling/hpa"
"github.com/knative/serving/pkg/reconciler/autoscaling/kpa"
"go.uber.org/zap"
"golang.org/x/sync/errgroup"
kubeinformers "k8s.io/client-go/informers"
corev1informers "k8s.io/client-go/informers/core/v1"
corev1listers "k8s.io/client-go/listers/core/v1"
"k8s.io/client-go/tools/clientcmd"
)
const (
	// Address the stats server listens on.
	statsServerAddr = ":8080"
	// Capacity of the buffered channel carrying stat messages.
	statsBufferLen = 1000
	// Component name used for logging and metrics configuration.
	component = "autoscaler"
)

var (
	masterURL  = flag.String("master", "", "The address of the Kubernetes API server. Overrides any value in kubeconfig. Only required if out-of-cluster.")
	kubeconfig = flag.String("kubeconfig", "", "Path to a kubeconfig. Only required if out-of-cluster.")
)
// main wires up the multitenant autoscaler: it watches config maps,
// builds informers and the KPA/HPA controllers, runs the stats server,
// and pumps received stat messages into the metric collector/multiscaler.
func main() {
	flag.Parse()
	logger, atomicLevel := setupLogger()
	defer flush(logger)

	// Set up signals so we handle the first shutdown signal gracefully.
	stopCh := signals.SetupSignalHandler()

	// statsCh is the main communication channel between the stats server and multiscaler.
	statsCh := make(chan *autoscaler.StatMessage, statsBufferLen)
	defer close(statsCh)

	cfg, err := clientcmd.BuildConfigFromFlags(*masterURL, *kubeconfig)
	if err != nil {
		logger.Fatalw("Error building kubeconfig", zap.Error(err))
	}

	opt := reconciler.NewOptionsOrDie(cfg, logger, stopCh)

	// Watch the logging config map and dynamically update logging levels.
	opt.ConfigMapWatcher.Watch(logging.ConfigMapName(), logging.UpdateLevelFromConfigMap(logger, atomicLevel, component))
	// Watch the observability config map and dynamically update metrics exporter.
	opt.ConfigMapWatcher.Watch(metrics.ObservabilityConfigName, metrics.UpdateExporterFromConfigMap(component, logger))

	// Set up informer factories.
	servingInformerFactory := informers.NewSharedInformerFactory(opt.ServingClientSet, opt.ResyncPeriod)
	kubeInformerFactory := kubeinformers.NewSharedInformerFactory(opt.KubeClientSet, opt.ResyncPeriod)

	// Set up informers.
	paInformer := servingInformerFactory.Autoscaling().V1alpha1().PodAutoscalers()
	sksInformer := servingInformerFactory.Networking().V1alpha1().ServerlessServices()
	endpointsInformer := kubeInformerFactory.Core().V1().Endpoints()
	serviceInformer := kubeInformerFactory.Core().V1().Services()
	hpaInformer := kubeInformerFactory.Autoscaling().V1().HorizontalPodAutoscalers()

	collector := autoscaler.NewMetricCollector(statsScraperFactoryFunc(endpointsInformer.Lister()), logger)

	// Set up scalers.
	// uniScalerFactory depends on endpointsInformer being set.
	multiScaler := autoscaler.NewMultiScaler(stopCh, uniScalerFactoryFunc(endpointsInformer, collector), logger)

	controllers := []*controller.Impl{
		kpa.NewController(&opt, paInformer, sksInformer, serviceInformer, endpointsInformer, multiScaler, collector),
		hpa.NewController(&opt, paInformer, sksInformer, hpaInformer),
	}

	// Set up a statserver.
	statsServer := statserver.New(statsServerAddr, statsCh, logger)
	defer statsServer.Shutdown(time.Second * 5)

	// Start watching the configs.
	if err := opt.ConfigMapWatcher.Start(stopCh); err != nil {
		logger.Fatalw("Failed to start watching configs", zap.Error(err))
	}

	// Start all of the informers and wait for them to sync.
	if err := controller.StartInformers(
		stopCh,
		endpointsInformer.Informer(),
		hpaInformer.Informer(),
		paInformer.Informer(),
		serviceInformer.Informer(),
		sksInformer.Informer(),
	); err != nil {
		// FIX: pass the error as a structured zap field; a bare error is not
		// a valid key/value pair for Fatalw (matches the other call sites).
		logger.Fatalw("Failed to start informers", zap.Error(err))
	}

	go controller.StartAll(stopCh, controllers...)

	// Run the controllers and the statserver in a group.
	var eg errgroup.Group
	eg.Go(func() error {
		return statsServer.ListenAndServe()
	})

	// Forward each received stat message to both the collector and the multiscaler.
	go func() {
		for {
			sm, ok := <-statsCh
			if !ok {
				break
			}
			collector.Record(sm.Key, sm.Stat)
			multiScaler.Poke(sm.Key, sm.Stat)
		}
	}()

	egCh := make(chan struct{})
	go func() {
		if err := eg.Wait(); err != nil {
			logger.Errorw("Group error.", zap.Error(err))
		}
		close(egCh)
	}()

	// Exit when either the error group finishes or a shutdown signal arrives.
	select {
	case <-egCh:
	case <-stopCh:
	}
}
// setupLogger builds the process-wide sugared logger from the mounted
// /etc/config-logging config map, exiting fatally if it cannot be read
// or parsed.
func setupLogger() (*zap.SugaredLogger, zap.AtomicLevel) {
	cm, err := configmap.Load("/etc/config-logging")
	if err != nil {
		log.Fatal("Error loading logging configuration:", err)
	}
	config, err := logging.NewConfigFromMap(cm)
	if err != nil {
		log.Fatal("Error parsing logging configuration:", err)
	}
	return logging.NewLoggerFromConfig(config, component)
}
// uniScalerFactoryFunc returns a factory that builds a UniScaler for a
// Decider, validating that the Decider carries a configuration label and
// a service name before constructing the scaler and its stats reporter.
func uniScalerFactoryFunc(endpointsInformer corev1informers.EndpointsInformer, metricClient autoscaler.MetricClient) func(decider *autoscaler.Decider) (autoscaler.UniScaler, error) {
	return func(decider *autoscaler.Decider) (autoscaler.UniScaler, error) {
		configName, ok := decider.Labels[serving.ConfigurationLabelKey]
		if !ok || configName == "" {
			return nil, fmt.Errorf("label %q not found or empty in Decider %s", serving.ConfigurationLabelKey, decider.Name)
		}
		if decider.Spec.ServiceName == "" {
			return nil, fmt.Errorf("%s decider has empty ServiceName", decider.Name)
		}
		// The service label may legitimately be absent.
		serviceName := decider.Labels[serving.ServiceLabelKey]

		// Create a stats reporter which tags statistics by PA namespace,
		// service name, configuration name, and PA name.
		reporter, err := autoscaler.NewStatsReporter(decider.Namespace, serviceName, configName, decider.Name)
		if err != nil {
			return nil, err
		}
		return autoscaler.New(decider.Namespace, decider.Name, metricClient, endpointsInformer, decider.Spec, reporter)
	}
}
// statsScraperFactoryFunc returns a factory that creates a StatsScraper
// for a Metric, backed by the given endpoints lister.
func statsScraperFactoryFunc(endpointsLister corev1listers.EndpointsLister) func(metric *autoscaler.Metric) (autoscaler.StatsScraper, error) {
	return func(metric *autoscaler.Metric) (autoscaler.StatsScraper, error) {
		return autoscaler.NewServiceScraper(metric, endpointsLister)
	}
}
// flush flushes buffered log entries and the metrics exporter; called via
// defer in main so telemetry is not lost on shutdown.
func flush(logger *zap.SugaredLogger) {
	logger.Sync()
	pkgmetrics.FlushExporter()
}
|
<?php
// Take five elements starting at index 2 from a sample array and print them.
$numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9];
$middle = array_slice($numbers, 2, 5);
print_r($middle);
?>
<gh_stars>100-1000
import { v4 as uuidv4 } from 'uuid';
export class Job {
  /** Unique identifier for this job; defaults to a freshly generated UUID v4. */
  public id: string;

  /** Arbitrary payload carried by the job. */
  public data: { [key: string]: any };

  /**
   * Create a new job instance.
   */
  constructor(id: string = uuidv4(), data: { [key: string]: any } = {}) {
    this.id = id;
    this.data = data;
  }
}
|
#!/usr/bin/env bash
# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# Echo commands; fail on errors, unset variables, and pipe failures.
set -xeuo pipefail

export PYTHONUNBUFFERED=1
export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}"
export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}"
export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support"
export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml"

# Point conda-build's output directory into the feedstock tree.
cat >~/.condarc <<CONDARC
conda-build:
  root-dir: ${FEEDSTOCK_ROOT}/build_artifacts
CONDARC

conda install --yes --quiet conda-forge-ci-setup=3 conda-build pip -c conda-forge

# set up the condarc
setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

source run_conda_forge_build_setup

# make the build number clobber
make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"

conda build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \
    --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml"

# Check that the built packages match the recipe's declared outputs.
validate_recipe_outputs "bio-pyvol-feedstock"

# Upload to the channel unless uploads are explicitly disabled.
if [[ "${UPLOAD_PACKAGES}" != "False" ]]; then
  upload_package --validate --feedstock-name="bio-pyvol-feedstock" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}"
fi

# Marker file signalling a successful build for this CI config.
touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}"
# Termux build recipe for the Pidgin instant-messaging client.
TERMUX_PKG_HOMEPAGE=https://pidgin.im/
TERMUX_PKG_DESCRIPTION="Multi-protocol instant messaging client"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=2.14.4
TERMUX_PKG_SRCURL=https://sourceforge.net/projects/pidgin/files/Pidgin/${TERMUX_PKG_VERSION}/pidgin-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=58652e692539d4d2cc775649f1d43dd08351607d8dc8fbab97a93629e01225c1
TERMUX_PKG_DEPENDS="gtk2, libgnutls, libidn, libsasl, libsm, libxext, libxss"
# Configure flags: disable optional integrations (GStreamer, spell-check,
# D-Bus, Perl/Tcl/Tk bindings, etc.) — presumably not packaged or not
# functional on Termux; confirm before re-enabling any of them.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
--disable-gevolution
--disable-gstreamer
--disable-gtkspell
--disable-vv
--disable-meanwhile
--disable-avahi
--disable-dbus
--disable-perl
--disable-tcl
--disable-tk
"
def removeFromList(list_nums, value):
    """Return a new list with every occurrence of ``value`` removed.

    The input list is left unmodified.
    """
    return [item for item in list_nums if item != value]
<?php
/**
 * Convert $number, expressed in base $base1, to its string representation
 * in base $base2. Supports bases 2 through 16.
 *
 * Fixes over the original:
 *  - the digit table stopped at 'E', so base-16 output lost digit 15 ('F');
 *  - $base1 was accepted but ignored (the input was always read as base 10);
 *  - an input of 0 returned "" instead of "0".
 */
function convertBase($number, $base1, $base2) {
    $digits = "0123456789ABCDEF";
    // Interpret the input in its source base; plain base-10 numeric inputs
    // (the original calling convention) are unchanged by this.
    $value = intval((string) $number, $base1);
    if ($value == 0) {
        return "0";
    }
    $result = "";
    while ($value > 0) {
        $result = substr($digits, $value % $base2, 1) . $result;
        $value = (int) ($value / $base2);
    }
    return $result;
}

$number = 10;
$base1 = 10;
$base2 = 2;
$convertedNumber = convertBase($number, $base1, $base2);
echo $convertedNumber; // prints '1010'
?>
#!/usr/bin/env bash
# Decrypts the CI-encrypted GPG private key and imports it into the local
# keyring. Requires ENCRYPTION_PASSPHRASE in the environment.
[ -n "$DEBUG" ] && set -x
set -e
set -o pipefail

# Resolve the repository root relative to this script's own location.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROJECT_DIR="$( cd "$SCRIPT_DIR/../../.." && pwd )"
cd "$PROJECT_DIR"

# Temporarily tolerate failure — presumably so a missing/bad key does not
# abort the whole pipeline; confirm before tightening.
set +e
openssl aes-256-cbc \
  -d \
  -in ./.circleci/gpg.private.enc -k "${ENCRYPTION_PASSPHRASE}" | gpg --import -
set -e
package cyclops.stream.iterator;
import cyclops.reactive.ReactiveSeq;
import cyclops.stream.type.Streamable;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
/**
 * A {@link Streamable} view over a list that yields its elements in
 * reverse order, without copying or mutating the underlying list.
 */
public class ReversedIterator<U> implements Streamable<U> {

    private final List<U> list;

    public ReversedIterator(List<U> list) {
        this.list = list;
    }

    public List<U> getValue() {
        return list;
    }

    @Override
    public Iterable<U> getStreamable() {
        return list;
    }

    @Override
    public ReactiveSeq<U> stream() {
        return ReactiveSeq.fromIterator(reversedIterator());
    }

    /**
     * Returns an iterator positioned at the end of the list that walks
     * backwards by delegating to {@link ListIterator#previous()}.
     */
    public Iterator<U> reversedIterator() {
        final ListIterator<U> backing = list.listIterator(list.size());
        return new Iterator<U>() {

            @Override
            public boolean hasNext() {
                return backing.hasPrevious();
            }

            @Override
            public U next() {
                return backing.previous();
            }
        };
    }
}
|
#!/bin/bash
# Runs every test directory under tests/ through the given Flow binary and
# diffs the output against each directory's .exp file. Exit status equals
# the number of failing directories.

# Resolve the flow binary to an absolute path (readlink -f is unavailable on macOS).
if [[ "$OSTYPE" == "darwin"* ]]; then
  FLOW=$(pwd -P)/$1
else
  FLOW=$(readlink -f $1)
fi
cd "$(dirname "${BASH_SOURCE[0]}")"

passed=0
failed=0
skipped=0

for dir in tests/*/
do
    dir=${dir%*/}
    cd $dir
    name=${dir##*/}
    exp_file="${name}.exp"
    # Only run directories that are set up as Flow projects and have an
    # expected-output file.
    if [ -e ".flowconfig" ] && [ -e $exp_file ]
    then
        echo "Testing directory: ${name}"
        out_file="${name}.out"
        $FLOW check . --all --strip-root 1> $out_file
        diff_file="${name}.diff"
        diff $out_file $exp_file > $diff_file
        # A non-empty diff means the output deviated from the expectation.
        if [ -s $diff_file ]
        then
            (( failed++ ))
            echo "FAILED: ${name}"
        else
            (( passed++ ))
            echo "PASSED: ${name}"
            # Clean up artifacts only on success so failures can be inspected.
            rm -f $out_file
            rm -f $diff_file
        fi
    else
        (( skipped++ ))
        echo "Skipping directory: ${name}"
    fi
    cd ../..
done

echo
echo "Passed: ${passed}, Failed: ${failed}, Skipped: ${skipped}"
exit ${failed}
|
<gh_stars>0
package servlets;
import db.DBManager;
import db.Product;
import db.User;
import java.io.IOException;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import static utilities.Constants.*;
public class AddProductServlet extends HttpServlet {

    private DBManager manager;

    /**
     * Grabs the shared DBManager instance stored in the servlet context.
     */
    @Override
    public void init() throws ServletException {
        this.manager = (DBManager)super.getServletContext().getAttribute(DB_ATTRIBUTE_NAME);
    }

    /**
     * Processes requests for both HTTP
     * <code>GET</code> and
     * <code>POST</code> methods.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        Product newProduct = getProductFromRequest(request);
        // If we obtained a valid product, add it to the database.
        if (newProduct != null){
            Object seller = request.getSession().getAttribute(USER_ATTRIBUTE_NAME);
            if (seller == null){
                throw new RuntimeException("ERROR: null seller from session");
            }
            int userId = ((User)seller).getId();
            newProduct.setSeller(userId);
            try {
                manager.addProduct(newProduct);
            } catch (SQLException ex) {
                Logger.getLogger(AddProductServlet.class.getName()).log(Level.SEVERE, null, ex);
            }
            // Redirect to the servlet that shows the product list.
            response.sendRedirect(SM_SHOW_PRODUCTS);
        }
        // Otherwise forward back to the form, which will display the error.
        else{
            RequestDispatcher reqDis = request.getRequestDispatcher(SM_FORM_ADD_PRODUCT);
            reqDis.forward(request, response);
        }
    }

    /**
     * Builds a Product from the request parameters, validating each field.
     * On any missing or invalid field an error-message request attribute is
     * set and null is returned.
     */
    private Product getProductFromRequest (HttpServletRequest request){
        Product newProduct = new Product();

        // Product name. getParameter returns null when the field is absent;
        // the original code NPE'd on that case.
        String product = request.getParameter(PRODUCTNAME_PARAM_NAME);
        if (product == null || product.isEmpty()){
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Product " + EMPTY_FIELD);
        }
        else{
            newProduct.setName(product);
        }

        // Category (numeric id). Guard against a missing/non-numeric value,
        // which previously crashed the request with an unhandled exception
        // instead of using the form's error path.
        String category = request.getParameter(CATEGORY_PARAM_NAME);
        try {
            newProduct.setCategory(Integer.valueOf(category));
        } catch (NumberFormatException ex) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Category" + INVALID_VALUE);
        }

        // Photo (numeric id); same guard as category.
        String photo = request.getParameter(PHOTO_PARAM_NAME);
        try {
            newProduct.setPhoto(Integer.valueOf(photo));
        } catch (NumberFormatException ex) {
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Photo" + INVALID_VALUE);
        }

        // Quantity: must be a positive integer.
        String quantityString = request.getParameter(QUANTITY_PARAM_NAME);
        if (quantityString == null || quantityString.isEmpty()){
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Quantity " + EMPTY_FIELD);
        }
        else{
            try {
                int quantity = Integer.valueOf(quantityString);
                if (quantity <= 0){
                    throw new NumberFormatException();
                }
                newProduct.setQuantity(quantity);
            } catch (NumberFormatException ex) {
                request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Quantity" + INVALID_VALUE);
            }
        }

        // Unit of measure (free text; may be null).
        String um = request.getParameter(UM_PARAM_NAME);
        newProduct.setUm(um);

        // Price: must be a positive number.
        String priceString = request.getParameter(PRICE_PARAM_NAME);
        if (priceString == null || priceString.isEmpty()){
            request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Price " + EMPTY_FIELD);
        }
        else{
            try{
                double price = Double.valueOf(priceString);
                if (price <= 0){
                    throw new NumberFormatException();
                }
                newProduct.setPrice(price);
            } catch (NumberFormatException ex) {
                request.setAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME, "Price" + INVALID_VALUE);
            }
        }

        // If any validation set the error message, return null.
        if (request.getAttribute(ERROR_MESSAGE_ATTRIBUTE_NAME) == null){
            return newProduct;
        }
        else{
            return null;
        }
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP
     * <code>GET</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP
     * <code>POST</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
|
/*
*
*/
package net.community.chest.jmx;
import javax.management.ObjectName;
import net.community.chest.lang.StringUtil;
/**
 * <P>Copyright as per GPLv2</P>
 *
 * Compares {@link ObjectName} values by their canonical name strings,
 * treating null references as null names.
 *
 * @author <NAME>.
 * @since Feb 15, 2011 9:05:43 AM
 */
public class CanonicalObjectNameComparator extends AbstractObjectNameComparator {

    private static final long serialVersionUID = 3905742363914881483L;

    public CanonicalObjectNameComparator (boolean ascending)
    {
        super(ascending);
    }

    /*
     * @see net.community.chest.util.compare.AbstractComparator#compareValues(java.lang.Object, java.lang.Object)
     */
    @Override
    public int compareValues (ObjectName v1, ObjectName v2)
    {
        final String name1 = (v1 == null) ? null : v1.getCanonicalName();
        final String name2 = (v2 == null) ? null : v2.getCanonicalName();
        return StringUtil.compareDataStrings(name1, name2, false);
    }

    public static final CanonicalObjectNameComparator ASCENDING=new CanonicalObjectNameComparator(true),
        DESCENDING=new CanonicalObjectNameComparator(false);
}
|
import React from 'react';
import { ChildrenItem } from '@/pages/List/components/SideTree';
import { Dropdown, Icon, Menu } from 'antd';
import styles from './style.less';
interface FileItemPropsType {
  item: ChildrenItem
}

/**
 * Renders a single file-tree entry: an inline preview for .jpg/.png files,
 * otherwise a generic file icon, with a right-click context menu.
 */
export default function FileItem({ item }: FileItemPropsType) {
  const isImage =
    item.type === "file" &&
    (item.name.endsWith(".jpg") || item.name.endsWith(".png"));

  const preview = isImage
    ? <img src={item.path} className={styles.image} alt={item.name} />
    : <Icon type="file"/>;

  const contextMenu = (
    <Menu>
      <Menu.Item key="1">aaa</Menu.Item>
      <Menu.Item key="2">2nd menu item</Menu.Item>
      <Menu.Item key="3">3rd menu item</Menu.Item>
    </Menu>
  );

  return (
    <Dropdown overlay={contextMenu} trigger={['contextMenu']}>
      <div className={styles.main}>
        <div className={styles.content}>
          <div>
            {preview}
          </div>
          <div className={styles.name}>
            {item.name}
          </div>
        </div>
      </div>
    </Dropdown>
  );
}
|
#!/bin/bash
# Author: SandersSoft (c) 2020
# HEBitBar-V2
# https://raw.githubusercontent.com/KurtSanders/HEBitBarApp-V2/master/installation/HEBitBarInstall.command
version="4.04"
echo "HEBitBar-V2 Installer/Upgrader (c) SanderSoft"
echo "============================================="
echo "Version ${version}"

# Begin Define Variables #
HEBitBarPlistFilename="${HOME}/Library/Preferences/com.matryer.BitBar"
HEBitBarPlistPluginVar="pluginsDirectory"
HEBitBarGitHubRawHttp="https://raw.githubusercontent.com/KurtSanders/STBitBarApp-V2/master/hubitat"
HEBitBarPluginScriptFilename="HE.5m.sh"
HEBitBarPythonCfgFilename="HE_Python_Logic.cfg"
HEBitBarPythontFilename="HE_Python_Logic.py"
HEBitBarManifestFilename="manifest.json"
BitBarSoftwareURL="https://github.com/matryer/bitbar/releases/download/v1.9.2"
BitBarSoftwareFilename="BitBar-v1.9.2.zip"
downloadsFolder="$HOME/Downloads"
debugMode=false

### TESTING MODE CHECK
if [[ "${1}" == "debug" ]]
then
    debugMode=true
    echo "DEBUG MODE = ${debugMode}"
    cd "${downloadsFolder}" || exit
fi

echo "Checking BitBar software installation and reading the BitBar plist file..."
if [[ ! -f ${HEBitBarPlistFilename}.plist ]]; then
    echo "The BitBar plist file '${HEBitBarPlistFilename}' was not found!"
    echo "BitBar base software NOT installed... conecting to developers github website and will download BitBar.app to your 'Downloads' directory"
    cd "${downloadsFolder}" || exit
    curl -s -O -J -L "${BitBarSoftwareURL}/${BitBarSoftwareFilename}"
    unzip -qq ${BitBarSoftwareFilename}
    echo "Open Finder to Downlaods folder and move BitBar.app to your Applications folder and click on BitBar.app and set BitBar plugin folder and check 'Open at Login'"
    echo "After successful install and activation of BitBar.app, rerun this install script"
    exit 1
fi

HEBitBarPluginsDirectory="$(defaults read "${HEBitBarPlistFilename}" | grep ${HEBitBarPlistPluginVar} | cut -d\" -f2)"
if [[ ${HEBitBarPluginsDirectory} == "" ]]; then
    echo "The BitBar Plugin directory not found in the ${HEBitBarPlistFilename} file, Exiting..."
    echo "Launch the BitBar.app application and set BitBar plugin folder in the preferences and 'Open at Login'"
    exit 1
fi
echo "The BitBar plugin folder: '${HEBitBarPluginsDirectory}'"

### TESTING MODE CHECK
if [[ "${debugMode}" == "true" ]]
then
    HEBitBarPluginsDirectory="${downloadsFolder}"
    echo "TESTING DEBUG MODE.. Using ${HEBitBarPluginsDirectory} folder as BitBar Plugins folder"
fi

echo "Changing to the ${HEBitBarPluginsDirectory} folder.."
cd "${HEBitBarPluginsDirectory}" || exit

FILE="${HEBitBarPluginsDirectory}/HE/${HEBitBarPythonCfgFilename}"
echo "Checking for the existance of '${FILE}'"
if [[ -f "$FILE" ]]; then
    echo "Found an existing '${HEBitBarPythonCfgFilename}'. Installation script will not overwrite... "
else
    echo "Creating ${HEBitBarPluginsDirectory}/HE"
    mkdir -p "${HEBitBarPluginsDirectory}/HE"
    cd "${HEBitBarPluginsDirectory}/HE" || exit
    echo "Creating ${HEBitBarPluginsDirectory}/HE/${HEBitBarPythonCfgFilename}"
    curl -s -O -J -L ${HEBitBarGitHubRawHttp}/BitBar%20Plugin/HE/${HEBitBarPythonCfgFilename}
    echo "Please edit the '${HEBitBarPluginsDirectory}/HE/${HEBitBarPythonCfgFilename}' file and enter your two API strings and SAVE"
    open "${HEBitBarPluginsDirectory}/HE/${HEBitBarPythonCfgFilename}" -a TextEdit
fi

echo "Locating the 'ST.*.sh' files in your BitBar plugin folder...."
shopt -s nullglob
declare -a arrayOfFiles
for file in HE.[0-9][0-9ms]*.sh
do
    arrayOfFiles=("${arrayOfFiles[@]}" "$file")
done
if [[ "${#arrayOfFiles[@]}" -eq 0 ]]
then
    arrayOfFiles[0]="${HEBitBarPluginScriptFilename}"
elif [[ "${#arrayOfFiles[@]}" -gt 1 ]]
then
    echo "I found more than ${#arrayOfFiles[@]} 'ST.*.sh' files in the BitBar Plugin Directory."
    echo "Please delete all but 1 ST.*.sh file, exiting installer..."
    for i in "${!arrayOfFiles[@]}"; do
        # shellcheck disable=SC2003
        printf "%s)\t%s\n" "$(expr ${i} + 1) " "${arrayOfFiles[$i]}"
    done
    exit 1
fi
HEBitBar_User_Plugin_ShellScript=${arrayOfFiles[0]}
echo "The STBitBar BitBar plugin file is: '${HEBitBar_User_Plugin_ShellScript}'"
if [[ "${HEBitBar_User_Plugin_ShellScript}" != "${HEBitBarPluginScriptFilename}" ]]
then
    echo "========================================================================================================= "
    echo "Warning: The new STBitBar V4 default for automatically polling SmartThings devices is EVERY 1 MINUTE."
    echo "Please manually rename the '${HEBitBar_User_Plugin_ShellScript}' in the '${HEBitBarPluginsDirectory}' folder to 'ST.1m.sh'"
    echo "This installation script will ask to rename '${HEBitBar_User_Plugin_ShellScript}'. Answer No if you want to keep your current {different} SmartThings polling frequency"
    echo "Rename your ${HEBitBar_User_Plugin_ShellScript} to ${HEBitBarPluginScriptFilename}. Are you sure? [y/N]"
    read -r response
    case "$response" in
        [yY][eE][sS] | [yY])
            mv -iv "${HEBitBarPluginsDirectory}/${HEBitBar_User_Plugin_ShellScript}" "${HEBitBarPluginsDirectory}/${HEBitBarPluginScriptFilename}"
            echo "Renamed ${HEBitBarPluginsDirectory}/${HEBitBar_User_Plugin_ShellScript}" "${HEBitBarPluginsDirectory}/${HEBitBarPluginScriptFilename}"
            HEBitBar_User_Plugin_ShellScript="${HEBitBarPluginScriptFilename}"
            ;;
        *)
            echo "Keeping the ${HEBitBar_User_Plugin_ShellScript} file..."
            ;;
    esac
fi

echo "Downloading/Updating updated STBitBar-V2 Github's 'ST.1m.sh' as ${HEBitBarPluginsDirectory}/${HEBitBar_User_Plugin_ShellScript} ..."
cd "${HEBitBarPluginsDirectory}" || exit
# FIX: the original passed a stray quoted filename as a second URL to curl
# (silently failing). Download to the user's plugin filename explicitly, as
# the echo above and the chmod below intend.
curl -s -L "${HEBitBarGitHubRawHttp}/BitBar%20Plugin/${HEBitBarPluginScriptFilename}" -o "${HEBitBar_User_Plugin_ShellScript}"
chmod +x "${HEBitBar_User_Plugin_ShellScript}"

echo "Downloading/Updating STBitBar-V2 Github's 'ST_Python_Logic.py' to the BitBar plugin '${HEBitBarPluginsDirectory}/HE' folder..."
mkdir -p "${HEBitBarPluginsDirectory}/HE"
cd "${HEBitBarPluginsDirectory}/HE" || exit
curl -s -O -J -L "${HEBitBarGitHubRawHttp}/BitBar%20Plugin/HE/${HEBitBarPythontFilename}"
# FIX: the original chmod'ed the literal "ST_Python_Logic.py", which this
# script never downloads; mark the actually-downloaded file executable.
chmod +x "${HEBitBarPythontFilename}"

echo "Downloading/Updating STBitBar-V2 Github's '${HEBitBarManifestFilename}' to the BitBar plugin '${HEBitBarPluginsDirectory}/HE' folder..."
curl -s -O -J -L "${HEBitBarGitHubRawHttp}/installation/manifest.json"
echo "STBitBar-V2 Install/Update completed..."
|
from typing import List, Tuple, Dict
import database # Assume the existence of a database connection and necessary querying functions
def get_active_orders(table: int) -> List[Dict[str, str]]:
    """Return the active orders for the given table as plain dicts.

    Each dict carries the item name, quantity, and any additional details,
    as read from the orders table.
    """
    rows = database.query(
        "SELECT item_name, quantity, additional_details FROM orders WHERE table_number = ? AND status = 'active'",
        (table,),
    )
    orders = []
    for row in rows:
        orders.append({
            'item_name': row['item_name'],
            'quantity': row['quantity'],
            'additional_details': row['additional_details'],
        })
    return orders
<gh_stars>100-1000
/**
* Tests child workflow termination from the parent workflow perspective
* @module
*/
import { WorkflowExecution } from '@temporalio/common';
import { startChild, defineQuery, setHandler } from '@temporalio/workflow';
import { unblockOrCancel } from './unblock-or-cancel';
export const childExecutionQuery = defineQuery<WorkflowExecution | undefined>('childExecution');

/**
 * Starts the `unblockOrCancel` child workflow and waits for it to finish.
 * Once the child is started, its execution info is exposed through the
 * `childExecution` query.
 */
export async function childWorkflowTermination(): Promise<void> {
  let childExecution: WorkflowExecution | undefined;
  setHandler(childExecutionQuery, () => childExecution);
  const handle = await startChild(unblockOrCancel, {});
  childExecution = { workflowId: handle.workflowId, runId: handle.firstExecutionRunId };
  await handle.result();
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Normalize the locale; a couple of environments need specific values.
export LC_ALL=C.UTF-8
if [[ $DOCKER_NAME_TAG == centos* ]]; then
  export LC_ALL=en_US.utf8
fi
if [[ $QEMU_USER_CMD == qemu-s390* ]]; then
  export LC_ALL=C
fi

if [ "$TRAVIS_OS_NAME" == "osx" ]; then
  export PATH="/usr/local/opt/ccache/libexec:$PATH"
  ${CI_RETRY_EXE} pip3 install $PIP_PACKAGES
fi

mkdir -p "${BASE_SCRATCH_DIR}"
mkdir -p "${CCACHE_DIR}"
mkdir -p "${PREVIOUS_RELEASES_DIR}"

# Sanitizer runtime options, including suppression files from the repo.
export ASAN_OPTIONS="detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1"
export LSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/lsan"
export TSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/tsan:log_path=${BASE_SCRATCH_DIR}/sanitizer-output/tsan"
export UBSAN_OPTIONS="suppressions=${BASE_ROOT_DIR}/test/sanitizer_suppressions/ubsan:print_stacktrace=1:halt_on_error=1:report_error_type=1"

# Snapshot the relevant environment so it can be passed into the container.
env | grep -E '^(BITCOIN_CONFIG|BASE_|QEMU_|CCACHE_|WINEDEBUG|LC_ALL|BOOST_TEST_RANDOM|CONFIG_SHELL|(ASAN|LSAN|TSAN|UBSAN)_OPTIONS|TEST_PREVIOUS_RELEASES|PREVIOUS_RELEASES_DIR)' | tee /tmp/env

# Extra capabilities some configurations require inside Docker.
if [[ $HOST = *-mingw32 ]]; then
  DOCKER_ADMIN="--cap-add SYS_ADMIN"
elif [[ $BITCOIN_CONFIG = *--with-sanitizers=*address* ]]; then # If ran with (ASan + LSan), Docker needs access to ptrace (https://github.com/google/sanitizers/issues/764)
  DOCKER_ADMIN="--cap-add SYS_PTRACE"
fi

export P_CI_DIR="$PWD"

# Either spin up a Docker container (default) or run directly on the host;
# DOCKER_EXEC abstracts over the two so later steps are identical.
if [ -z "$DANGER_RUN_CI_ON_HOST" ]; then
  echo "Creating $DOCKER_NAME_TAG container to run in"
  ${CI_RETRY_EXE} docker pull "$DOCKER_NAME_TAG"

  DOCKER_ID=$(docker run $DOCKER_ADMIN -idt \
    --mount type=bind,src=$BASE_ROOT_DIR,dst=/ro_base,readonly \
    --mount type=bind,src=$CCACHE_DIR,dst=$CCACHE_DIR \
    --mount type=bind,src=$DEPENDS_DIR,dst=$DEPENDS_DIR \
    --mount type=bind,src=$PREVIOUS_RELEASES_DIR,dst=$PREVIOUS_RELEASES_DIR \
    -w $BASE_ROOT_DIR \
    --env-file /tmp/env \
    --name $CONTAINER_NAME \
    $DOCKER_NAME_TAG)

  DOCKER_EXEC () {
    docker exec $DOCKER_ID bash -c "export PATH=$BASE_SCRATCH_DIR/bins/:\$PATH && cd $P_CI_DIR && $*"
  }
else
  echo "Running on host system without docker wrapper"
  DOCKER_EXEC () {
    bash -c "export PATH=$BASE_SCRATCH_DIR/bins/:\$PATH && cd $P_CI_DIR && $*"
  }
fi

if [ -n "$DPKG_ADD_ARCH" ]; then
  DOCKER_EXEC dpkg --add-architecture "$DPKG_ADD_ARCH"
fi

# Install the build dependencies with the appropriate package manager.
if [[ $DOCKER_NAME_TAG == centos* ]]; then
  ${CI_RETRY_EXE} DOCKER_EXEC yum -y install epel-release
  ${CI_RETRY_EXE} DOCKER_EXEC yum -y install $DOCKER_PACKAGES $PACKAGES
elif [ "$CI_USE_APT_INSTALL" != "no" ]; then
  ${CI_RETRY_EXE} DOCKER_EXEC apt-get update
  ${CI_RETRY_EXE} DOCKER_EXEC apt-get install --no-install-recommends --no-upgrade -y $PACKAGES $DOCKER_PACKAGES
fi

# Log basic machine information for debugging CI runs.
if [ "$TRAVIS_OS_NAME" == "osx" ]; then
  top -l 1 -s 0 | awk ' /PhysMem/ {print}'
  echo "Number of CPUs: $(sysctl -n hw.logicalcpu)"
else
  DOCKER_EXEC free -m -h
  DOCKER_EXEC echo "Number of CPUs \(nproc\):" \$\(nproc\)
  DOCKER_EXEC echo $(lscpu | grep Endian)
  DOCKER_EXEC echo "Free disk space:"
  DOCKER_EXEC df -h
fi

# Fuzz-seed corpus is only needed when fuzz tests will run.
if [ ! -d ${DIR_QA_ASSETS} ]; then
  if [ "$RUN_FUZZ_TESTS" = "true" ]; then
    DOCKER_EXEC git clone https://github.com/shirecoin-core/qa-assets ${DIR_QA_ASSETS}
  fi
fi
export DIR_FUZZ_IN=${DIR_QA_ASSETS}/fuzz_seed_corpus/

DOCKER_EXEC mkdir -p "${BASE_SCRATCH_DIR}/sanitizer-output/"

# The source tree is mounted read-only; copy it to a writable location.
if [ -z "$DANGER_RUN_CI_ON_HOST" ]; then
  echo "Create $BASE_ROOT_DIR"
  DOCKER_EXEC rsync -a /ro_base/ $BASE_ROOT_DIR
fi

if [ "$USE_BUSY_BOX" = "true" ]; then
  echo "Setup to use BusyBox utils"
  DOCKER_EXEC mkdir -p $BASE_SCRATCH_DIR/bins/
  # tar excluded for now because it requires passing in the exact archive type in ./depends (fixed in later BusyBox version)
  # find excluded for now because it does not recognize the -delete option in ./depends (fixed in later BusyBox version)
  # ar excluded for now because it does not recognize the -q option in ./depends (unknown if fixed)
  # shellcheck disable=SC1010
  DOCKER_EXEC for util in \$\(busybox --list \| grep -v "^ar$" \| grep -v "^tar$" \| grep -v "^find$"\)\; do ln -s \$\(command -v busybox\) $BASE_SCRATCH_DIR/bins/\$util\; done
  # Print BusyBox version
  DOCKER_EXEC patch --help
fi
|
#! /bin/bash
# Brings up an L2TP/IPsec VPN connection to a Meraki appliance using
# credentials, secrets, and routes stored under ~/.config/meraki-vpn/default/.

# Target VPN server, username, and password are kept in files.
export VPNHOST=$(cat ~/.config/meraki-vpn/default/hostname)
export USERNAME=$(cat ~/.config/meraki-vpn/default/username)
export PASSWORD=$(cat ~/.config/meraki-vpn/default/password)

# Render the config templates with the values exported above.
mkdir -p /tmp/ipsec
envsubst < ipsec.conf > /tmp/ipsec/ipsec.conf
envsubst < xl2tpd.conf > /tmp/ipsec/xl2tpd.conf
envsubst < options.l2tpd.client > /tmp/ipsec/options.l2tpd.client

# Install the rendered configs into their system locations.
sudo cp ~/.config/meraki-vpn/default/ipsec.secrets /etc/ipsec.secrets
sudo cp /tmp/ipsec/ipsec.conf /etc/ipsec.conf
sudo cp /tmp/ipsec/xl2tpd.conf /etc/xl2tpd/xl2tpd.conf
sudo cp /tmp/ipsec/options.l2tpd.client /etc/ppp/options.l2tpd.client

sudo mkdir -p /var/run/xl2tpd
sudo touch /var/run/xl2tpd/l2tp-control
sudo systemctl restart strongswan
sudo systemctl restart xl2tpd
sleep 1

# Bring up the IPsec tunnel, then ask xl2tpd to connect the L2TP session.
sudo ipsec up meraki-vpn
sudo sh -c 'echo "c meraki-vpn" > /var/run/xl2tpd/l2tp-control'
sleep 2 # Wait for the device to be created.

# Poll for the ppp0 interface to appear (up to 4 attempts, 1s apart).
for n in $(seq 4); do ifconfig ppp0; res=$?; if [ $res -ne 0 ]; then sleep 1; else break; fi; done
sleep 4

# Add the configured static routes (comment lines skipped) through the VPN device.
for ROUTE in $(cat ~/.config/meraki-vpn/default/routes | grep -v "^#")
do
  sudo ip route add $ROUTE dev ppp0
done

rm -rf /tmp/ipsec
import test from 'tape'
import factory from '../lib/factory'
// Every factory() call must produce a fully independent DOM realm:
// fresh Element classes (hence distinct prototype ownerDocuments) and a
// fresh window object (defaultView).
test('factory creates Element classes with different ownerDocuments', function (t) {
  const firstDoc = factory().Element.prototype.ownerDocument
  const secondDoc = factory().Element.prototype.ownerDocument
  t.plan(1)
  t.notEqual(firstDoc, secondDoc)
})
test('factory creates rfds with different windows', function (t) {
  const firstWindow = factory().defaultView
  const secondWindow = factory().defaultView
  t.plan(1)
  t.notEqual(firstWindow, secondWindow)
})
|
<reponame>ArthurFDLR/LowLevel_NeuralNet<gh_stars>1-10
import numpy as np
import time
def Input(expr, op, args, **kwargs):
    """Fetch the named scalar/ndarray input from kwargs as float data.

    Raises Exception when the input is missing or has an unsupported type.
    """
    if op.name not in kwargs:
        raise Exception("%s: missing input" % expr)
    value = kwargs[op.name]
    if isinstance(value, (int, float)):
        return float(value)
    if hasattr(value, "shape"):
        return value.astype(float)
    raise Exception("%s: Input must be float or int or ndarray: %s" % (expr, value))
def Input2d(expr, op, args, **kwargs):
    """Fetch a batch of images from kwargs and convert layout NHWC -> NCHW.

    The input must be a 4-d ndarray shaped
    (batch, height, width, in_channels) per op.parameters.
    """
    if op.name not in kwargs:
        raise Exception("%s: missing input" % expr)
    imgs = kwargs[op.name]
    if not hasattr(imgs, "shape"):
        raise Exception("%s: Input must be ndarray: %s" % (expr, imgs))
    params = op.parameters
    expected = (params["height"], params["width"], params["in_channels"])
    if len(imgs.shape) != 4 or tuple(imgs.shape[1:]) != expected:
        raise Exception("%s: Invalid input size: %s" % (expr, imgs.shape))
    # NHWC => NCHW
    return imgs.astype(float).transpose(0, 3, 1, 2)
def Const(expr, op, args, **kwargs):
    """Return the constant stored in the op's parameters, unchanged."""
    value = op.parameters["value"]
    return value
def Neg(expr, op, args, **kwargs):
    """Unary negation of the single operand (scalar or ndarray)."""
    operand = args[0]
    return -operand
def Add(expr, op, args, **kwargs):
    """Elementwise addition of two scalars or two same-shaped ndarrays.

    Mixing a scalar with an ndarray is rejected.
    """
    a, b = args[0], args[1]
    a_is_nd = hasattr(a, "shape")
    b_is_nd = hasattr(b, "shape")
    if a_is_nd != b_is_nd:
        raise Exception("%s: cannot mix scalar and ndarray" % expr)
    if a_is_nd and a.shape != b.shape:
        raise Exception("%s: size mismatch: %s+%s" % (expr, a.shape, b.shape))
    return a + b
def Sub(expr, op, args, **kwargs):
    """Elementwise subtraction of two scalars or two same-shaped ndarrays.

    Mixing a scalar with an ndarray is rejected.
    """
    a, b = args[0], args[1]
    a_is_nd = hasattr(a, "shape")
    b_is_nd = hasattr(b, "shape")
    if a_is_nd != b_is_nd:
        raise Exception("%s: cannot mix scalar and ndarray" % expr)
    if a_is_nd and a.shape != b.shape:
        raise Exception("%s: size mismatch: %s-%s" % (expr, a.shape, b.shape))
    return a - b
def Mul(expr, op, args, **kwargs):
    """Scalar multiplication, or matrix multiplication of two 2-d ndarrays.

    If either operand lacks a shape, plain `*` applies (scalar broadcast);
    otherwise both must be 2-d with compatible inner dimensions.
    """
    a, b = args[0], args[1]
    if not (hasattr(a, "shape") and hasattr(b, "shape")):
        return a * b
    if len(a.shape) != 2 or len(b.shape) != 2:
        raise Exception("%s: matmul only: %s*%s" % (expr, a.shape, b.shape))
    if a.shape[1] != b.shape[0]:
        raise Exception("%s: size mismatch: %s*%s" % (expr, a.shape, b.shape))
    return np.matmul(a, b)
# Custom
def Pow(expr, op, args, **kwargs):
    """Raise args[0] to the integer power args[1].

    Scalars use `**`; a 2-d ndarray base uses matrix power. The exponent
    must be a scalar with an integral value.
    """
    a = args[0]
    b = args[1]
    # An ndarray exponent, or a scalar with a fractional part, is rejected.
    # BUG FIX: the original formatted `b.shape` here, which raised
    # AttributeError (masking the intended Exception) whenever `b` was a
    # scalar non-integer; report the offending value itself instead.
    if hasattr(b, "shape") or b % 1.0:
        raise Exception("%s: power type mismatch: %s" % (expr, b))
    if not hasattr(a, "shape"):
        return a ** b
    if len(a.shape) != 2:
        raise Exception("%s: numpy.dot() only: %s" % (expr, a.shape))
    return np.linalg.matrix_power(np.array(a), int(b))
def Flatten(expr, op, args, **kwargs):
    """Collapse all trailing dimensions of args[0] into one: (N, ...) -> (N, -1)."""
    x = args[0]
    if not hasattr(x, "shape"):
        # BUG FIX: the original message referenced the undefined name `imgs`,
        # so this error path raised NameError instead of the intended Exception.
        raise Exception("%s: ndarray only: %s" % (expr, x))
    return x.reshape((x.shape[0], -1))
def ReLU(expr, op, args, **kwargs):
    """Rectified linear unit: zero out non-positive entries, keep the rest."""
    value = args[0]
    positive_mask = value > 0
    return value * positive_mask
def Linear(expr, op, args, **kwargs):
    """Fully-connected layer: y = x @ weight.T + bias.

    x is (batch, in_features); weight is (out_features, in_features);
    bias is (out_features,). Returns (batch, out_features).
    """
    x = args[0]
    if not hasattr(x, "shape"):
        raise Exception("%s: ndarray only: %s" % (expr, x))
    params = op.parameters
    if "weight" not in params or "bias" not in params:
        raise Exception("%s: missing weight or bias" % expr)
    weight = params["weight"]
    bias = params["bias"]
    if not hasattr(weight, "shape") or not hasattr(bias, "shape"):
        raise Exception("%s: ndarray only for weight or bias" % expr)
    in_features = params["in_features"]
    out_features = params["out_features"]
    shapes_ok = (
        len(x.shape) == 2
        and x.shape[1] == in_features
        and weight.shape == (out_features, in_features)
        and bias.shape == (out_features,)
    )
    if not shapes_ok:
        raise Exception(
            "%s: size mismatch: %s*%s+%s" % (expr, weight.shape, x.shape, bias.shape)
        )
    product = np.einsum("ni,oi->no", x, weight, optimize="optimal")
    return product + bias.reshape((1, out_features))
def MaxPool2d(expr, op, args, **kwargs):
    """Non-overlapping 2-d max pooling over an NCHW ndarray.

    Only supports kernel_size == stride, with H and W divisible by stride;
    implemented by reshaping each spatial axis into (tiles, stride) and
    reducing over the per-tile axes.
    """
    x = args[0]
    if not hasattr(x, "shape"):
        raise Exception("%s: ndarray only: %s" % (expr, x))
    stride = op.parameters["stride"]
    if op.parameters["kernel_size"] != stride:
        raise Exception("%s: kernel_size != stride" % expr)
    if len(x.shape) != 4 or x.shape[2] % stride or x.shape[3] % stride:
        raise Exception("%s: size mismatch: %s" % (expr, x.shape))
    n, c, h, w = x.shape
    tiled = x.reshape((n, c, h // stride, stride, w // stride, stride))
    return np.nanmax(tiled, axis=(3, 5))
def Conv2d(expr, op, args, **kwargs):
    """2-d convolution over an NCHW ndarray with optional zero padding.

    args[0]: input of shape (N, in_channels, H, W).
    op.parameters: weight (out_channels, in_channels, k, k),
    bias (out_channels,), in_channels, out_channels, kernel_size, padding.
    Returns (N, out_channels, H + 2*padding - k + 1, W + 2*padding - k + 1).
    """
    x = args[0]
    if not hasattr(x, "shape"):
        raise Exception("%s: ndarray only: %s" % (expr, x))
    if "weight" not in op.parameters or "bias" not in op.parameters:
        raise Exception("%s: missing weight or bias" % expr)
    weight = op.parameters["weight"]
    bias = op.parameters["bias"]
    in_channels = op.parameters["in_channels"]
    out_channels = op.parameters["out_channels"]
    kernel_size = op.parameters["kernel_size"]
    padding = op.parameters["padding"]
    if any(
        [
            len(x.shape) != 4,
            x.shape[1] != in_channels,
            weight.shape != (out_channels, in_channels, kernel_size, kernel_size),
            bias.shape != (out_channels,),
        ]
    ):
        raise Exception("%s: size mismatch: %s" % (expr, x.shape))
    if padding != 0:
        tmp = np.zeros(
            (x.shape[0], x.shape[1], x.shape[2] + 2 * padding, x.shape[3] + 2 * padding)
        )
        # BUG FIX: the original wrote x into tmp[:, :, 1:-2, 1:-2], whose
        # extent is wrong for every padding value (broadcast ValueError);
        # center the input by the actual padding amount on each side.
        tmp[:, :, padding:-padding, padding:-padding] = x
        x = tmp
    # Sliding-window view (N, C, H', W', k, k) over the (padded) input,
    # built without copying via stride tricks; read-only for safety.
    conv_shape = x.shape[:2] + (
        x.shape[2] + 1 - kernel_size,
        x.shape[3] + 1 - kernel_size,
        kernel_size,
        kernel_size,
    )
    conv_strides = x.strides + x.strides[2:]
    conv = np.lib.stride_tricks.as_strided(
        x, shape=conv_shape, strides=conv_strides, writeable=False
    )
    return np.einsum(
        "nihwyx,oiyx->nohw", conv, weight, optimize="optimal"
    ) + bias.reshape((1, out_channels, 1, 1))
class Eval:
    """Callable interpreter for a linear program of expression nodes.

    Each expression's `op.op_type` names a function defined at module level
    (Input, Add, Conv2d, ...). Results are memoized by expression id; the
    value of the last expression in the program is returned.
    """
    def __init__(self, program):
        # program: topologically-ordered list of expression nodes
        # (every node's inputs appear earlier in the list).
        self.program = program
    def __call__(self, **kwargs):
        start = time.time()
        values = {}
        for expr in self.program:
            # Gather already-computed operand values by id.
            args = [values[ex.id] for ex in expr.inputs]
            # Dispatch on the op type to the module-level function of that name.
            if expr.op.op_type not in globals():
                raise Exception("%s: not implemented" % expr)
            values[expr.id] = globals()[expr.op.op_type](expr, expr.op, args, **kwargs)
            # print("numpy op", expr.op.op_type, "time %.2f" % (time.time()-start))
        res = values[self.program[-1].id]
        t = time.time() - start
        # Only report evaluations slow enough to matter.
        if t > 0.1:
            print("numpy time %.2f" % t)
        return res
class Builder:
    """Accumulates expression nodes in evaluation order and builds an Eval."""

    def __init__(self):
        # Expressions in append order; consumed by Eval as-is.
        self.program = []

    def append(self, expr):
        """Add one expression node to the end of the program."""
        self.program.append(expr)

    def build(self):
        """Freeze the accumulated program into a callable evaluator."""
        return Eval(self.program)
|
require 'spec_helper'

# Catalog compilation tests for the nfs::idmapd::client class.
describe 'nfs::idmapd::client' do
  context 'with default parameters' do
    it { is_expected.to compile.with_all_deps }
    it { is_expected.to create_class('nfs::idmapd::client') }
    it { is_expected.to create_class('nfs::idmapd::config') }
    # The exec rewrites /etc/request-key.conf so id_resolver keys are handled
    # by nfsidmap; the `unless` guard keeps the rewrite idempotent.
    it do
      is_expected.to create_exec('enable_nfsidmap_request_key').with( {
        :unless => "/usr/bin/grep -v '#' /etc/request-key.conf | grep -q 'nfsidmap -t 600'",
        :command => "/usr/bin/sed -r -i '/^create[[:space:]]+id_resolver[[:space:]]/d' /etc/request-key.conf;/usr/bin/sed -i '/^negate/i create\tid_resolver\t*\t*\t\t/usr/sbin/nfsidmap -t 600 %k %d' /etc/request-key.conf"
      } )
    end
  end
end
|
/*
* Copyright 2017 HugeGraph Authors
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.baidu.hugegraph.manager;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import org.apache.commons.io.FileUtils;
import com.baidu.hugegraph.api.API;
import com.baidu.hugegraph.base.Printer;
import com.baidu.hugegraph.base.ToolClient;
import com.baidu.hugegraph.cmd.SubCommands;
import com.baidu.hugegraph.driver.TraverserManager;
import com.baidu.hugegraph.exception.ToolsException;
import com.baidu.hugegraph.structure.constant.HugeType;
import com.baidu.hugegraph.structure.graph.Edge;
import com.baidu.hugegraph.structure.graph.Edges;
import com.baidu.hugegraph.structure.graph.Shard;
import com.baidu.hugegraph.structure.graph.Vertex;
import com.baidu.hugegraph.structure.graph.Vertices;
import com.baidu.hugegraph.structure.schema.EdgeLabel;
import com.baidu.hugegraph.structure.schema.IndexLabel;
import com.baidu.hugegraph.structure.schema.PropertyKey;
import com.baidu.hugegraph.structure.schema.VertexLabel;
import com.baidu.hugegraph.util.E;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
/**
 * Backs up a HugeGraph instance.
 *
 * Schema elements (property keys and vertex/edge/index labels) are fetched
 * in full and written directly. Graph data (vertices/edges) is first split
 * into shards, which are then backed up by asynchronous tasks; shards that
 * time out, exceed the backend record limit, or fail for other reasons are
 * recorded in per-type log files for later reporting.
 */
public class BackupManager extends BackupRestoreBaseManager {

    // Suffixes of the shard bookkeeping files written under logDir().
    private static final String SHARDS_SUFFIX = "_shards";
    private static final String ALL_SHARDS = "_all" + SHARDS_SUFFIX;
    private static final String TIMEOUT_SHARDS = "_timeout" + SHARDS_SUFFIX;
    private static final String LIMIT_EXCEED_SHARDS = "_limit_exceed" + SHARDS_SUFFIX;
    private static final String FAILED_SHARDS = "_failed" + SHARDS_SUFFIX;

    public static final int BACKUP_DEFAULT_TIMEOUT = 120;

    private static final String BACKEND = "backend";
    // Backends that cannot serve paged queries; see initPage().
    private static final Set<String> BACKENDS_NO_PAGING =
                         ImmutableSet.of("memory");
    private static final String PAGE_NONE = "";

    // Each worker thread gets a stable numeric suffix for its data files,
    // so concurrent shard tasks never write to the same file.
    private static final AtomicInteger nextId = new AtomicInteger(0);
    private static final ThreadLocal<Integer> suffix =
                         ThreadLocal.withInitial(nextId::getAndIncrement);

    private long splitSize;
    private String backend;

    public BackupManager(ToolClient.ConnectionInfo info) {
        super(info, "backup");
        // Cache the backend name to decide on paging support later.
        this.backend = this.client.graphs().getGraph(this.graph()).get(BACKEND);
    }

    public void init(SubCommands.Backup backup) {
        super.init(backup);
        this.removeShardsFilesIfExists();
        this.ensureDirectoryExist(true);
        long splitSize = backup.splitSize();
        E.checkArgument(splitSize >= 1024 * 1024,
                        "Split size must >= 1M, but got %s", splitSize);
        this.splitSize(splitSize);
    }

    public void splitSize(long splitSize) {
        this.splitSize = splitSize;
    }

    public long splitSize() {
        return this.splitSize;
    }

    /**
     * Back up each requested type, then wait for async tasks and print a
     * summary. Unknown types indicate a programming error.
     */
    public void backup(List<HugeType> types) {
        this.startTimer();
        for (HugeType type : types) {
            switch (type) {
                case VERTEX:
                    this.backupVertices();
                    break;
                case EDGE:
                    this.backupEdges();
                    break;
                case PROPERTY_KEY:
                    this.backupPropertyKeys();
                    break;
                case VERTEX_LABEL:
                    this.backupVertexLabels();
                    break;
                case EDGE_LABEL:
                    this.backupEdgeLabels();
                    break;
                case INDEX_LABEL:
                    this.backupIndexLabels();
                    break;
                default:
                    throw new AssertionError(String.format(
                              "Bad backup type: %s", type));
            }
        }
        this.shutdown(this.type());
        this.printSummary();
    }

    protected void backupVertices() {
        Printer.print("Vertices backup started");
        Printer.printInBackward("Vertices has been backup: ");
        // Split the vertex space into shards, log them all, then back up
        // each shard asynchronously.
        List<Shard> shards = retry(() ->
                             this.client.traverser().vertexShards(splitSize()),
                             "querying shards of vertices");
        this.writeShards(this.allShardsLog(HugeType.VERTEX), shards);
        for (Shard shard : shards) {
            this.backupVertexShardAsync(shard);
        }
        this.awaitTasks();
        this.postProcessFailedShard(HugeType.VERTEX);
        Printer.print("%d", this.vertexCounter.get());
        Printer.print("Vertices backup finished: %d",
                      this.vertexCounter.get());
    }

    protected void backupEdges() {
        Printer.print("Edges backup started");
        Printer.printInBackward("Edges has been backup: ");
        List<Shard> shards = retry(() ->
                             this.client.traverser().edgeShards(splitSize()),
                             "querying shards of edges");
        this.writeShards(this.allShardsLog(HugeType.EDGE), shards);
        for (Shard shard : shards) {
            this.backupEdgeShardAsync(shard);
        }
        this.awaitTasks();
        this.postProcessFailedShard(HugeType.EDGE);
        Printer.print("%d", this.edgeCounter.get());
        Printer.print("Edges backup finished: %d", this.edgeCounter.get());
    }

    protected void backupPropertyKeys() {
        Printer.print("Property key backup started");
        List<PropertyKey> pks = this.client.schema().getPropertyKeys();
        this.propertyKeyCounter.getAndAdd(pks.size());
        this.backup(HugeType.PROPERTY_KEY, pks);
        Printer.print("Property key backup finished: %d",
                      this.propertyKeyCounter.get());
    }

    protected void backupVertexLabels() {
        Printer.print("Vertex label backup started");
        List<VertexLabel> vls = this.client.schema().getVertexLabels();
        this.vertexLabelCounter.getAndAdd(vls.size());
        this.backup(HugeType.VERTEX_LABEL, vls);
        Printer.print("Vertex label backup finished: %d",
                      this.vertexLabelCounter.get());
    }

    protected void backupEdgeLabels() {
        Printer.print("Edge label backup started");
        List<EdgeLabel> els = this.client.schema().getEdgeLabels();
        this.edgeLabelCounter.getAndAdd(els.size());
        this.backup(HugeType.EDGE_LABEL, els);
        Printer.print("Edge label backup finished: %d",
                      this.edgeLabelCounter.get());
    }

    protected void backupIndexLabels() {
        Printer.print("Index label backup started");
        List<IndexLabel> ils = this.client.schema().getIndexLabels();
        this.indexLabelCounter.getAndAdd(ils.size());
        this.backup(HugeType.INDEX_LABEL, ils);
        Printer.print("Index label backup finished: %d",
                      this.indexLabelCounter.get());
    }

    private void backupVertexShardAsync(Shard shard) {
        // Submit to the task pool; any throwable is logged as a failed shard
        // instead of killing the worker.
        this.submit(() -> {
            try {
                backupVertexShard(shard);
            } catch (Throwable e) {
                this.logExceptionWithShard(e, HugeType.VERTEX, shard);
            }
        });
    }

    private void backupEdgeShardAsync(Shard shard) {
        this.submit(() -> {
            try {
                backupEdgeShard(shard);
            } catch (Throwable e) {
                this.logExceptionWithShard(e, HugeType.EDGE, shard);
            }
        });
    }

    /**
     * Back up one vertex shard, page by page when the backend supports
     * paging (page == null means the backend pages are disabled).
     */
    private void backupVertexShard(Shard shard) {
        String desc = String.format("backing up vertices[shard:%s]", shard);
        Vertices vertices = null;
        String page = this.initPage();
        TraverserManager g = client.traverser();
        do {
            String p = page;
            try {
                if (page == null) {
                    vertices = retry(() -> g.vertices(shard), desc);
                } else {
                    vertices = retry(() -> g.vertices(shard, p), desc);
                }
            } catch (ToolsException e) {
                this.exceptionHandler(e, HugeType.VERTEX, shard);
            }
            if (vertices == null) {
                return;
            }
            List<Vertex> vertexList = vertices.results();
            if (vertexList == null || vertexList.isEmpty()) {
                return;
            }
            this.backup(HugeType.VERTEX, suffix.get(), vertexList);
            this.vertexCounter.getAndAdd(vertexList.size());
            Printer.printInBackward(this.vertexCounter.get());
        } while ((page = vertices.page()) != null);
    }

    private void backupEdgeShard(Shard shard) {
        String desc = String.format("backing up edges[shard %s]", shard);
        Edges edges = null;
        String page = this.initPage();
        TraverserManager g = client.traverser();
        do {
            try {
                String p = page;
                if (page == null) {
                    edges = retry(() -> g.edges(shard), desc);
                } else {
                    edges = retry(() -> g.edges(shard, p), desc);
                }
            } catch (ToolsException e) {
                this.exceptionHandler(e, HugeType.EDGE, shard);
            }
            if (edges == null) {
                return;
            }
            List<Edge> edgeList = edges.results();
            if (edgeList == null || edgeList.isEmpty()) {
                return;
            }
            this.backup(HugeType.EDGE, suffix.get(), edgeList);
            this.edgeCounter.getAndAdd(edgeList.size());
            Printer.printInBackward(this.edgeCounter.get());
        } while ((page = edges.page()) != null);
    }

    // Schema elements: write the whole list to a single per-type file.
    private void backup(HugeType type, List<?> list) {
        String file = type.string();
        this.write(file, type, list);
    }

    // Graph data: write to a per-thread numbered file, in BATCH-sized chunks.
    private void backup(HugeType type, int number, List<?> list) {
        String file = type.string() + number;
        int size = list.size();
        for (int start = 0; start < size; start += BATCH) {
            int end = Math.min(start + BATCH, size);
            this.write(file, type, list.subList(start, end));
        }
    }

    // null disables paging entirely; "" requests the first page.
    private String initPage() {
        return BACKENDS_NO_PAGING.contains(this.backend) ? null : PAGE_NONE;
    }

    /**
     * Classify a shard failure by its message/cause and record it in the
     * matching log file (limit-exceeded, timeout, or generic failure).
     */
    private void exceptionHandler(ToolsException e, HugeType type,
                                  Shard shard) {
        String message = e.getMessage();
        switch (type) {
            case VERTEX:
                E.checkState(message.contains("backing up vertices"),
                             "Unexpected exception %s", e);
                break;
            case EDGE:
                E.checkState(message.contains("backing up edges"),
                             "Unexpected exception %s", e);
                break;
            default:
                throw new AssertionError(String.format(
                          "Only VERTEX or EDGE exception is expected, " +
                          "but got '%s' exception", type));
        }
        if (isLimitExceedException(e)) {
            this.logLimitExceedShard(type, shard);
        } else if (isTimeoutException(e)) {
            this.logTimeoutShard(type, shard);
        } else {
            this.logExceptionWithShard(e, type, shard);
        }
    }

    private void logTimeoutShard(HugeType type, Shard shard) {
        String file = type.string() + TIMEOUT_SHARDS;
        this.writeShard(Paths.get(this.logDir(), file).toString(), shard);
    }

    private void logLimitExceedShard(HugeType type, Shard shard) {
        String file = type.string() + LIMIT_EXCEED_SHARDS;
        this.writeShard(Paths.get(this.logDir(), file).toString(), shard);
    }

    // Append the shard and its stack trace to the per-type failure log.
    private void logExceptionWithShard(Object e, HugeType type, Shard shard) {
        String fileName = type.string() + FAILED_SHARDS;
        String filePath = Paths.get(this.logDir(), fileName).toString();
        try (FileWriter writer = new FileWriter(filePath, true)) {
            writer.write(shard.toString() + "\n");
            writer.write(exceptionStackTrace(e) + "\n");
        } catch (IOException e1) {
            Printer.print("Failed to write shard '%s' with exception '%s'",
                          shard, e);
        }
    }

    private void postProcessFailedShard(HugeType type) {
        this.processTimeoutShards(type);
        this.processLimitExceedShards(type);
    }

    // Print advice when some shards timed out during this backup run.
    private void processTimeoutShards(HugeType type) {
        Path path = Paths.get(this.logDir(), type.string() + TIMEOUT_SHARDS);
        File shardFile = path.toFile();
        if (!shardFile.exists() || shardFile.isDirectory()) {
            return;
        }
        Printer.print("Timeout occurs when backup %s shards in file '%s', " +
                      "try to use global option --timeout to increase " +
                      "connection timeout(default is 120s for backup) or use " +
                      "option --split-size to decrease split size",
                      type, shardFile);
    }

    private void processLimitExceedShards(HugeType type) {
        Path path = Paths.get(this.logDir(),
                              type.string() + LIMIT_EXCEED_SHARDS);
        File shardFile = path.toFile();
        if (!shardFile.exists() || shardFile.isDirectory()) {
            return;
        }
        Printer.print("Limit exceed occurs when backup %s shards in file '%s'",
                      type, shardFile);
    }

    // Parse a shards log file (one JSON list per line) back into Shard objects.
    private List<Shard> readShards(File file) {
        E.checkArgument(file.exists() && file.isFile() && file.canRead(),
                        "Need to specify a readable filter file rather than:" +
                        " %s", file.toString());
        List<Shard> shards = new ArrayList<>();
        try (InputStream is = new FileInputStream(file);
             InputStreamReader isr = new InputStreamReader(is, API.CHARSET);
             BufferedReader reader = new BufferedReader(isr)) {
            String line;
            while ((line = reader.readLine()) != null) {
                shards.addAll(this.readList("shards", Shard.class, line));
            }
        } catch (IOException e) {
            throw new ToolsException("IOException occur while reading %s",
                                     e, file.getName());
        }
        return shards;
    }

    private void writeShard(String file, Shard shard) {
        this.writeShards(file, ImmutableList.of(shard));
    }

    private void writeShards(String file, List<Shard> shards) {
        this.writeLog(file, "shards", shards);
    }

    // Serialize the list as {"<type>": [...]} under a per-file lock; the
    // whole record is buffered first so the file write is a single call.
    private void writeLog(String file, String type, List<?> list) {
        Lock lock = locks.lock(file);
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream(LBUF_SIZE);
             FileOutputStream fos = new FileOutputStream(file, false)) {
            String key = String.format("{\"%s\": ", type);
            baos.write(key.getBytes(API.CHARSET));
            this.client.mapper().writeValue(baos, list);
            baos.write("}\n".getBytes(API.CHARSET));
            fos.write(baos.toByteArray());
        } catch (Exception e) {
            Printer.print("Failed to serialize %s: %s", type, e);
        } finally {
            lock.unlock();
        }
    }

    private String allShardsLog(HugeType type) {
        String shardsFile = type.string() + ALL_SHARDS;
        return Paths.get(this.logDir(), shardsFile).toString();
    }

    // Clean up shard bookkeeping files left over from a previous run.
    private void removeShardsFilesIfExists() {
        File logDir = new File(this.logDir());
        E.checkArgument(logDir.exists() && logDir.isDirectory(),
                        "The log directory '%s' not exists or is file",
                        logDir);
        for (File file : logDir.listFiles()) {
            if (file.getName().endsWith(SHARDS_SUFFIX)) {
                try {
                    FileUtils.forceDelete(file);
                } catch (IOException e) {
                    throw new ToolsException("Failed to delete shard file " +
                                             "'%s'", file);
                }
            }
        }
    }

    private static boolean isTimeoutException(ToolsException e) {
        return e.getCause() != null && e.getCause().getCause() != null &&
               e.getCause().getCause().getMessage().contains("Read timed out");
    }

    private static boolean isLimitExceedException(ToolsException e) {
        return e.getCause() != null &&
               e.getCause().getMessage().contains("Too many records");
    }

    // Compact stack-trace formatting: message, direct cause, then frames.
    private static String exceptionStackTrace(Object e) {
        if (!(e instanceof Throwable)) {
            return e.toString();
        }
        Throwable t = (Throwable) e;
        StringBuilder sb = new StringBuilder();
        sb.append(t.getMessage()).append("\n");
        if (t.getCause() != null) {
            sb.append(t.getCause().toString()).append("\n");
        }
        for (StackTraceElement element : t.getStackTrace()) {
            sb.append(element).append("\n");
        }
        return sb.toString();
    }
}
|
package services
import java.time.{Duration, LocalTime}
import domains.Lap
import org.scalatestplus.play.PlaySpec
import utils.JsonSerializers
class LapServiceSpec extends PlaySpec {

  // System under test; assumed stateless, so one shared instance is used.
  val service = new LapService()

  "LapService" must {
    "parse a lap log text to Lap" in {
      // Log line format (tab-separated):
      // timestamp \t "<driverId> - <driverName>" \t lapNumber \t lapTime \t avgSpeed
      val result = service.parse("01:59:30.100\t052 - Joao\t1\t30:00.190\t100.10")
      result.timestamp.toString mustBe "01:59:30.100"
      result.driverId mustBe "052"
      result.driverName mustBe "Joao"
      result.lapNumber mustBe 1
      JsonSerializers.durationToTime(result.lapTime) mustBe "00:30:00.190"
      result.averageSpeed mustBe 100.10
    }

    "group laps by driver" in {
      // Two laps for driver "01" and one for "02" should yield two drivers
      // with the matching per-driver lap counts.
      val laps = List(
        Lap(LocalTime.now(), "01", "ronaldo", 1, Duration.ZERO, 0),
        Lap(LocalTime.now(), "02", "joao", 1, Duration.ZERO, 0),
        Lap(LocalTime.now(), "01", "ronaldo", 2, Duration.ZERO, 0)
      )
      val result = service.drivers(laps)
      result.length mustBe 2
      val driver01 = result.find(_.id == "01")
      driver01.isDefined mustBe true
      driver01.get.laps.length mustBe 2
      val driver02 = result.find(_.id == "02")
      driver02.isDefined mustBe true
      driver02.get.laps.length mustBe 1
    }
  }
}
|
#!/bin/bash
# install mongodb via helm 3
# `upgrade --install` is idempotent: it installs the release "test-mongodb"
# from the stable/mongodb chart on first run and upgrades it in place on
# subsequent runs, applying overrides from values.yaml in the current dir.
helm --namespace default upgrade --install test-mongodb stable/mongodb -f values.yaml
|
The purpose of a hash algorithm is to map data of any size to a fixed-length string of characters. This is often used to validate the integrity of a file or piece of data: hashing is deterministic, so the same input always produces the same result, while even a small change to the input yields a very different hash. Hash algorithms are not a form of encryption — they are one-way functions, so the original data cannot be recovered ("decrypted") from the hash value.
#!/usr/bin/env bash
# Usage:
#   ./hack/release/bump_version.sh 0.8.0 0.8.1
# Rewrites the old version string to the new one in version/version.go and
# the example deployment manifests, then removes the sed backup files.
# NOTE(review): the version strings are used as unescaped sed regexes, so
# the dots match any character — harmless for typical semver bumps, but
# confirm before relying on it for unusual versions.
oldv=$1
newv=$2
echo "old version: ${oldv}, new version: ${newv}"
# version.go carries a "+git" development suffix that is dropped on release.
sed -i.bak -e "s/${oldv}+git/${newv}/g" version/version.go
sed -i.bak -e "s/${oldv}/${newv}/g" example/deployment.yaml
sed -i.bak -e "s/${oldv}/${newv}/g" example/etcd-backup-operator/deployment.yaml
sed -i.bak -e "s/${oldv}/${newv}/g" example/etcd-restore-operator/deployment.yaml
rm version/version.go.bak
rm example/deployment.yaml.bak
rm example/etcd-backup-operator/deployment.yaml.bak
rm example/etcd-restore-operator/deployment.yaml.bak
|
<reponame>risq/radio
import transceiver from '../../src/transceiver';
import Channel from '../../src/channel';

// Mocha + chai + sinon spec for the transceiver facade.
// NOTE(review): `describe`/`it`/`expect`/`spy`/`sinon` are assumed to be
// provided as globals by the test runner setup — confirm in the test config.

// Saved so setPromise tests can restore the original constructor afterwards.
const OriginalPromiseConstructor = transceiver.Promise;
const FakePromiseConstructor = () => {};
const channel = transceiver.channel('test');
const data = {
  hello: 'world'
};
// Shared stub callback; reset before each test in the top-level beforeEach.
const cb = sinon.spy(() => {
  return data;
});
const name = 'name';
const event = 'event';

describe('transceiver', () => {
  beforeEach(() => {
    cb.reset();
  });
  describe('.channel(String name)', () => {
    beforeEach(() => {
      spy(transceiver, 'channel');
    });
    it('should have been run once', () => {
      transceiver.channel('test');
      expect(transceiver.channel).to.have.been.calledOnce;
    });
    it('should have returned a channel instance', () => {
      transceiver.channel('test');
      expect(transceiver.channel).to.have.returned(sinon.match.instanceOf(Channel));
    });
    it('should have thrown an error if channel name is missing', () => {
      expect(() => {
        transceiver.channel();
      }).to.always.throw(Error);
    });
    it('should have thrown an error if channel name is not a string', () => {
      expect(() => {
        transceiver.channel(4);
      }).to.always.throw(Error);
    });
  });
  describe('.setPromise(Promise promise)', () => {
    beforeEach(() => {
      spy(transceiver, 'setPromise');
    });
    afterEach(() => {
      // Restore the real Promise constructor so later tests are unaffected.
      transceiver.setPromise(OriginalPromiseConstructor);
    });
    it('should have been run once', () => {
      transceiver.setPromise(null);
      expect(transceiver.setPromise).to.have.been.calledOnce;
    });
    it('should have changed promise constructor of each channel', () => {
      transceiver.channel(name);
      transceiver.setPromise(FakePromiseConstructor);
      expect(transceiver.channel(name).Promise).to.equals(FakePromiseConstructor);
    });
  });
  // Each shorthand on the facade should forward its arguments verbatim to
  // the same-named method on the named channel.
  describe('Shorthands', () => {
    describe('.request(String channelName, ...args)', () => {
      it('should have called channel.request(...args) with given arguments', () => {
        spy(channel, 'request');
        transceiver.reply('test', name, cb);
        transceiver.request('test', name);
        expect(channel.request).to.have.been.calledWithExactly(name);
      });
    });
    describe('.reply(String channelName, ...args)', () => {
      it('should have called channel.reply(...args) with given arguments', () => {
        spy(channel, 'reply');
        transceiver.reply('test', name, cb);
        expect(channel.reply).to.have.been.calledWithExactly(name, cb);
      });
    });
    describe('.replyPromise(String channelName, ...args)', () => {
      it('should have called channel.replyPromise(...args) with given arguments', () => {
        spy(channel, 'replyPromise');
        transceiver.replyPromise('test', name, cb);
        expect(channel.replyPromise).to.have.been.calledWithExactly(name, cb);
      });
    });
    describe('.all(String channelName, ...args)', () => {
      it('should have called channel.all(...args) with given arguments', () => {
        spy(channel, 'all');
        transceiver.all('test', []);
        expect(channel.all).to.have.been.calledWithExactly([]);
      });
    });
    describe('.race(String channelName, ...args)', () => {
      it('should have called channel.race(...args) with given arguments', () => {
        spy(channel, 'race');
        transceiver.race('test', []);
        expect(channel.race).to.have.been.calledWithExactly([]);
      });
    });
    describe('.requestArray(String channelName, ...args)', () => {
      it('should have called channel.requestArray(...args) with given arguments', () => {
        spy(channel, 'requestArray');
        transceiver.requestArray('test', []);
        expect(channel.requestArray).to.have.been.calledWithExactly([]);
      });
    });
    describe('.requestProps(String channelName, ...args)', () => {
      it('should have called channel.requestProps(...args) with given arguments', () => {
        spy(channel, 'requestProps');
        transceiver.requestProps('test', []);
        expect(channel.requestProps).to.have.been.calledWithExactly([]);
      });
    });
    describe('.emit(String channelName, ...args)', () => {
      it('should have called channel.emit(...args) with given arguments', () => {
        spy(channel, 'emit');
        transceiver.emit('test', event);
        expect(channel.emit).to.have.been.calledWithExactly(event);
      });
    });
    describe('.on(String channelName, ...args)', () => {
      it('should have called channel.on(...args) with given arguments', () => {
        spy(channel, 'on');
        transceiver.on('test', event, cb);
        expect(channel.on).to.have.been.calledWithExactly(event, cb);
      });
    });
    describe('.once(String channelName, ...args)', () => {
      it('should have called channel.once(...args) with given arguments', () => {
        spy(channel, 'once');
        transceiver.once('test', event, cb);
        expect(channel.once).to.have.been.calledWithExactly(event, cb);
      });
    });
    describe('.off(String channelName, ...args)', () => {
      it('should have called channel.off(...args) with given arguments', () => {
        spy(channel, 'off');
        transceiver.off('test', event, cb);
        expect(channel.off).to.have.been.calledWithExactly(event, cb);
      });
    });
    describe('.reset(String channelName)', () => {
      it('should have called channel.reset()', () => {
        spy(channel, 'reset');
        transceiver.reset('test');
        expect(channel.reset).to.have.been.called;
      });
    });
  });
});
|
<reponame>sgkandale/garbage-lb
import React from 'react'
import { Typography } from '@mui/material'
export default function Test() {
return <Typography variant="body1" >
Test
</Typography>
} |
package no.mnemonic.commons.utilities;
import org.junit.Test;
import java.util.concurrent.atomic.LongAdder;
import static org.junit.Assert.*;
/**
 * Unit tests for ObjectUtils null-handling helpers:
 * notNull (assert non-null), ifNull (default value),
 * ifNotNull (convert when present) and ifNotNullDo (consume when present).
 */
public class ObjectUtilsTest {

  @Test
  public void notNullReturnsValueOnNotNull() throws Exception {
    Object value = new Object();
    assertEquals(value, ObjectUtils.notNull(value, "notThrown"));
    assertEquals(value, ObjectUtils.notNull(value, new IllegalArgumentException()));
  }

  @Test
  public void notNullWithMessageThrowsExceptionOnNull() {
    // The message variant must propagate the given message on failure.
    try {
      ObjectUtils.notNull(null, "test");
      fail();
    } catch (Exception e) {
      assertEquals("test", e.getMessage());
    }
  }

  @Test(expected = IllegalArgumentException.class)
  public void notNullWithExceptionThrowsExceptionOnNull() throws Exception {
    ObjectUtils.notNull(null, new IllegalArgumentException());
  }

  @Test(expected = IllegalArgumentException.class)
  public void notNullFailsWithNullException() throws Exception {
    // Passing a null exception argument is itself an illegal argument.
    ObjectUtils.notNull(new Object(), (Exception) null);
  }

  @Test
  public void ifNullReturnsValueOnNotNull() {
    Object value = new Object();
    assertEquals(value, ObjectUtils.ifNull(value, "defaultValue"));
    assertEquals(value, ObjectUtils.ifNull(value, () -> "defaultValue"));
  }

  @Test
  public void ifNullReturnsDefaultValueOnNull() {
    assertEquals("defaultValue", ObjectUtils.ifNull(null, "defaultValue"));
    assertEquals("defaultValue", ObjectUtils.ifNull(null, () -> "defaultValue"));
  }

  @Test(expected = IllegalArgumentException.class)
  public void ifNullWithoutSupplierThrowsException() {
    ObjectUtils.ifNull(42, null);
  }

  @Test
  public void ifNotNullReturnsConvertedValue() {
    assertEquals("convertedValue", ObjectUtils.ifNotNull(42, (v) -> "convertedValue"));
    assertEquals("convertedValue", ObjectUtils.ifNotNull(42, (v) -> "convertedValue", "nullValue"));
  }

  @Test
  public void ifNotNullReturnsNullValue() {
    // Without a fallback the result is null; with one, the fallback is used.
    assertNull(ObjectUtils.ifNotNull(null, (v) -> "convertedValue"));
    assertEquals("nullValue", ObjectUtils.ifNotNull(null, (v) -> "convertedValue", "nullValue"));
  }

  @Test(expected = IllegalArgumentException.class)
  public void ifNotNullWithoutConverterThrowsException() {
    ObjectUtils.ifNotNull(42, null);
  }

  @Test(expected = IllegalArgumentException.class)
  public void ifNotNullWithNullValueWithoutConverterThrowsException() {
    ObjectUtils.ifNotNull(42, null, "nullValue");
  }

  @Test(expected = IllegalArgumentException.class)
  public void ifNotNullDoWithNullConsumerThrowsException() {
    ObjectUtils.ifNotNullDo(42, null);
  }

  @Test
  public void ifNotNullDoWithConsumer() {
    LongAdder adder = new LongAdder();
    ObjectUtils.ifNotNullDo(42L, adder::add);
    assertEquals(42, adder.longValue());
  }

  @Test
  public void ifNotNullDoWithNullValue() {
    // A null value means the consumer must never run.
    LongAdder adder = new LongAdder();
    ObjectUtils.ifNotNullDo(null, adder::add);
    assertEquals(0, adder.longValue());
  }
}
|
import { BaseTranslator, TranslateCtx } from 'ai18n-type';
import * as _fs from 'fs';
import * as _prompts from 'prompts';
/**
 * Translator backend that relies on a human: either prompting for each
 * translation one by one, or round-tripping all pending texts through an
 * external translation engine via a temporary file.
 */
export default class ManualTranslator extends BaseTranslator {
  // Held as instance fields (instead of used directly) so tests can mock them.
  protected prompts = _prompts;
  protected fs = _fs;

  /**
   * Platform-assisted translation: dump all pending texts (one per line) to
   * a temp file, let the user run the file through a translation engine and
   * paste the results back into the same file, then read the results back
   * and restore the masked non-Chinese fragments.
   */
  protected async translatePlatform(ctx: TranslateCtx) {
    let msg = '';
    const filename = 'translate-manual.txt';
    const mappers: Map<string, string>[] = [];
    ctx.list.forEach(d => {
      // NOTE(review): entries map to result lines by index, which assumes
      // uglify.text contains no newline — confirm upstream guarantees this.
      msg += d.uglify.text + '\n';
      mappers.push(d.uglify.mapper);
    });
    this.fs.writeFileSync(filename, msg, { encoding: 'utf-8' });
    const answer = await this.prompts({
      type: 'confirm',
      name: 'confirm',
      // BUG FIX: the generated file name was not interpolated into the
      // prompt (the placeholder was left un-expanded), so the user was
      // never told which file to hand to the translation engine.
      message: `已生成 ${filename}, 将此文件复制到翻译引擎处理,然后将翻译结果回填覆盖。按 y 继续。`,
    });
    if (answer.confirm) {
      const content = this.fs.readFileSync(filename, { encoding: 'utf-8' });
      content.split('\n').forEach((message, i) => {
        const mapper = mappers[i];
        if (!mapper) return;
        // Replace each masking marker with the original non-Chinese fragment.
        for (const [marker, nonCnStr] of mapper.entries()) {
          message = message.replace(marker, nonCnStr);
        }
        ctx.list[i].result = { message };
      });
    }
    this.fs.unlinkSync(filename);
  }

  /** Manual translation: prompt the user for each text, one at a time. */
  protected async translateManual(ctx: TranslateCtx) {
    for (const t of ctx.list) {
      const answer = await this.prompts({
        type: 'text',
        name: 'message',
        message: `[${t.from}->${t.to}] ${t.text}`,
      });
      t.result = { message: answer.message };
    }
  }

  /** Entry point: let the user choose manual vs platform-assisted mode. */
  async translate(ctx: TranslateCtx) {
    const s = await this.prompts({
      type: 'select',
      name: 'transType',
      message: `共 ${ctx.list.length} 个待翻译文案,请选择翻译方式`,
      choices: [
        { title: '手动', value: 'manual' },
        { title: '翻译平台', description: '由翻译平台处理', value: 'platform' },
      ],
    });
    if (s.transType === 'manual') return this.translateManual(ctx);
    else if (s.transType === 'platform') return this.translatePlatform(ctx);
  }
}
|
<gh_stars>0
/**
* 登录检查中间件
* 用于在获取数据时标记登录与否
* 无论登录与否,都会调用下一个中间件!
*/
const jwt = require('jsonwebtoken')
const { webSecret } = require('../config')
/**
 * Mark the request state as logged-in or not from the Authorization header.
 * Always calls the next middleware, whether or not the token is valid.
 *
 * Fix: the original `await jwt.verify(token, secret, callback)` mixed await
 * with the callback API — verify returns undefined in callback mode, and any
 * error thrown by `next()` ended up in an unawaited async callback instead of
 * propagating through the middleware chain. The synchronous form below keeps
 * the exact same state outcomes while letting `next()` errors propagate.
 */
const checklogin = async (ctx, next) => {
  // Extract the token from "Bearer <token>"; a missing header yields undefined.
  const token = String(ctx.request.headers.authorization || ' ').split(' ')[1]
  try {
    // Throws on a missing, malformed, invalid or expired token.
    const userInfo = jwt.verify(token, webSecret)
    ctx.state.user = userInfo || {}
    ctx.state.user.haslogin = true
  } catch (err) {
    // Invalid token: continue as an anonymous user.
    ctx.state.user = {}
    ctx.state.user.haslogin = false
  }
  await next()
}
module.exports = checklogin
|
# Launch training on GPU 1 only (CUDA_VISIBLE_DEVICES pins the device).
# Presumably Mask R-CNN with a VoVNet-57 FPN backbone, judging from the
# config file name — confirm against the configs directory.
# Trailing KEY VALUE pairs are config overrides: batch of 2 images,
# a single dataloader worker.
CUDA_VISIBLE_DEVICES=1 python ./tools/train_net_vov.py \
--config-file configs/vovnet/mask_rcnn_V_57_FPN_1x_3dce.yaml \
--num-gpus 1 SOLVER.IMS_PER_BATCH 2 \
DATALOADER.NUM_WORKERS 1
#VIS_PERIOD 10
<filename>web-utils/src/main/java/elasta/webutils/impl/JsonArrayRequestHandlerImpl.java
package elasta.webutils.impl;
import elasta.eventbus.SimpleEventBus;
import elasta.webutils.*;
import elasta.webutils.model.UriAndHttpMethodPair;
import io.vertx.core.eventbus.Message;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.RoutingContext;
import java.util.Objects;
/**
* Created by sohan on 5/10/2017.
*/
final public class JsonArrayRequestHandlerImpl implements JsonArrayRequestHandler {

    /** Generic handler this implementation delegates every request to. */
    private final RequestHandler requestHandler;

    /**
     * Wires a {@link RequestHandlerImpl} with the JSON-array specific converter.
     * Collaborators are validated eagerly so a misconfigured wiring fails at
     * construction time rather than on the first request. (The
     * {@code java.util.Objects} import was present but unused before.)
     *
     * @param jsonArrayRequestConverter   converts the routing context into a request
     * @param uriToEventAddressTranslator maps request URIs to event-bus addresses
     * @param responseGenerator           builds the response from the event-bus reply
     * @param eventBus                    bus used to dispatch the converted request
     * @throws NullPointerException if any collaborator is {@code null}
     */
    public JsonArrayRequestHandlerImpl(
        JsonArrayRequestConverter jsonArrayRequestConverter,
        UriToEventAddressTranslator uriToEventAddressTranslator,
        ResponseGenerator responseGenerator,
        SimpleEventBus eventBus
    ) {
        Objects.requireNonNull(jsonArrayRequestConverter, "jsonArrayRequestConverter");
        Objects.requireNonNull(uriToEventAddressTranslator, "uriToEventAddressTranslator");
        Objects.requireNonNull(responseGenerator, "responseGenerator");
        Objects.requireNonNull(eventBus, "eventBus");
        this.requestHandler = new RequestHandlerImpl(
            jsonArrayRequestConverter,
            uriToEventAddressTranslator,
            responseGenerator,
            eventBus
        );
    }

    /** Delegates handling of the routing context to the wrapped handler. */
    @Override
    public void handle(RoutingContext context) {
        requestHandler.handle(context);
    }
}
|
<reponame>noear/solon_demo
package jobapp.controller;
import org.noear.solon.extend.cron4j.Cron4j;
import java.util.Date;
@Cron4j(cron5x = "*/1 * * * *")
public class Cron4jRun2 implements Runnable {
    /** Demo cron job: fires every minute and logs a timestamped message. */
    @Override
    public void run() {
        final Date firedAt = new Date();
        System.out.println("我是定时任务: Cron4jRun2(*/1 * * * *) -- " + firedAt);
    }
}
|
/*
*
*/
package net.community.chest.swing.component.label;
import javax.swing.Icon;
import javax.swing.JLabel;
import net.community.chest.awt.attributes.Iconable;
import net.community.chest.awt.attributes.Textable;
import net.community.chest.swing.component.scroll.HorizontalPolicy;
import net.community.chest.swing.component.scroll.ScrolledComponent;
import net.community.chest.swing.component.scroll.VerticalPolicy;
/**
* <P>Copyright GPLv2</P>
*
* @param <C> Type of {@link JLabel} being scrolled
* @author <NAME>.
* @since Apr 1, 2009 9:43:14 AM
*/
public class ScrollableLabel<C extends JLabel> extends ScrolledComponent<C>
        // NOTE !!! we do not declare FontControl, Tooltiped, Backgrounded, etc. since these are implemented by the scroll pane itself
        implements Textable, Iconable {
    private static final long serialVersionUID = 970917226146274773L;

    /**
     * Full constructor: wraps the given label in a scroll pane with the
     * supplied scrollbar policies.
     */
    public ScrollableLabel (Class<C> vc, C view, VerticalPolicy vp, HorizontalPolicy hp)
    {
        super(vc, view, vp, hp);
    }

    /** Wraps {@code view} using as-needed scrollbar policies. */
    public ScrollableLabel (Class<C> vc, C view)
    {
        this(vc, view, VerticalPolicy.BYNEED, HorizontalPolicy.BYNEED);
    }

    /** No initial view; one may be assigned later by the scroll pane. */
    public ScrollableLabel (Class<C> vc, VerticalPolicy vp, HorizontalPolicy hp)
    {
        this(vc, null, vp, hp);
    }

    public ScrollableLabel (Class<C> vc)
    {
        this(vc, null);
    }

    /** Derives the view class from the instance when one is supplied. */
    @SuppressWarnings("unchecked")
    public ScrollableLabel (C view, VerticalPolicy vp, HorizontalPolicy hp)
    {
        this((view == null) ? null : (Class<C>) view.getClass(), vp, hp);
    }

    public ScrollableLabel (C view)
    {
        this(view, VerticalPolicy.BYNEED, HorizontalPolicy.BYNEED);
    }

    /*
     * @see net.community.chest.awt.attributes.Iconable#getIcon()
     */
    @Override
    public Icon getIcon ()
    {
        final JLabel label = getAssignedValue();
        if (label == null)
            return null;
        return label.getIcon();
    }

    /*
     * @see net.community.chest.awt.attributes.Iconable#setIcon(javax.swing.Icon)
     */
    @Override
    public void setIcon (Icon i)
    {
        final JLabel label = getAssignedValue();
        // Silently ignored when no label has been assigned yet.
        if (label == null)
            return;
        label.setIcon(i);
    }

    /*
     * @see net.community.chest.awt.attributes.Textable#getText()
     */
    @Override
    public String getText ()
    {
        final JLabel label = getAssignedValue();
        if (label == null)
            return null;
        return label.getText();
    }

    /*
     * @see net.community.chest.awt.attributes.Textable#setText(java.lang.String)
     */
    @Override
    public void setText (String t)
    {
        final JLabel label = getAssignedValue();
        // Silently ignored when no label has been assigned yet.
        if (label == null)
            return;
        label.setText(t);
    }
}
|
import React from 'react';
import { mount } from 'enzyme';
import renderer from 'react-test-renderer';
import Column from '.';
// Shared fixture: a <Column> spanning 12 mobile units with one child span.
const element = <Column mobile={12}><span>Subcomponent</span></Column>;
// NOTE(review): this mounted instance is shared by all tests below, and the
// last test mutates it via setProps — the tests are order-dependent.
const component = mount(element);
describe('Column', () => {
  // Snapshot guard against unintended markup changes.
  it('should render a column div', () => {
    const tree = renderer
      .create(element)
      .toJSON();
    expect(tree).toMatchSnapshot();
  });
  it('should render children properly', () => {
    const child = component.find('span');
    expect(child).toHaveLength(1);
  });
  it('should have a mobile prop', () => {
    expect(component.prop('mobile')).toBe(12);
  });
  // 'auto' is the default since no align prop was supplied in the fixture.
  it('should render with default alignment', () => {
    expect(component.prop('align')).toBe('auto');
  });
  // NOTE(review): 'aligment' typo in the test name — a rename would be safe
  // but is out of scope for a comments-only pass.
  it('should render with custom aligment', () => {
    component.setProps({ align: 'center' });
    expect(component.prop('align')).toBe('center');
  });
});
|
import DataGrid from './DataGrid';
import SimpleDataGrid from './SimpleDataGrid';
import { COLUMN_TYPES } from './constants';
import { ColumnDefinition } from './types';
export { DataGrid, SimpleDataGrid, COLUMN_TYPES };
export type { ColumnDefinition };
|
package cn.zqgx.moniter.center.server.portal.core.annotaion;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Documented
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface ZqgxTaskMethod {
    /** Identifier of the task. */
    String taskId();
    /** Human-readable task name. */
    String taskName();
    // NOTE(review): presumably an enabled/disabled flag, inferred from the
    // name only — confirm against the annotation processor.
    boolean taskSwitch();
    /** Optional free-form description; empty when omitted. */
    String detail() default "";
}
|
<reponame>dwimberger/censo
Template.Settings.helpers({
  /** Id of the currently logged-in user. */
  myId: function() {
    return Meteor.userId();
  },
  /** Bot name exposed via public Meteor settings. */
  botName: function() {
    return Meteor.settings.public.botName;
  },
  /** Telegram username when the account has the telegram service linked. */
  telegramId: function() {
    const user = Meteor.user();
    if (user && user.services && user.services.telegram) {
      return user.services.telegram.username;
    }
  },
  /** All app languages as [{ code, labels }] entries for the selector. */
  languages() {
    const obj = TAPi18n.getLanguages();
    const languages = [];
    for (const key in obj) {
      if (key) languages.push({ code: key, labels: obj[key] });
    }
    // Fix: the original `if (languages) return languages;` guard was dead
    // code — an array literal is always truthy.
    return languages;
  },
  /** Display name of the currently active language. */
  currentLanguage() {
    const currentLanguageCode = TAPi18n.getLanguage();
    const appLanguages = TAPi18n.getLanguages();
    for (const code in appLanguages) {
      if (code === currentLanguageCode) return appLanguages[code].name;
    }
  },
  /** 'selected' attribute helper for the language <option> elements. */
  selectedLanguage(lang) {
    return (lang === TAPi18n.getLanguage()) ? 'selected' : '';
  }
});
Template.Settings.events({
  // Persist the newly selected UI language, then load the matching
  // DataTables locale file before switching TAPi18n over.
  'change #language': function (event) {
    event.preventDefault();
    const selected = $("#language").val();
    Meteor.call('updateUserLanguage', selected);
    if (Meteor.isClient) {
      $.getJSON(selected + '.table.i18n.json', (data) => {
        $.fn.dataTable.defaults.oLanguage = data;
        TAPi18n.setLanguage(selected);
      });
    }
  }
});
|
# -----------------------------------------------------------------------------
#
# Package : hpack.js
# Version : 2.1.6
# Source repo : https://github.com/indutny/hpack.js
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=hpack.js
PACKAGE_VERSION=2.1.6
PACKAGE_URL=https://github.com/indutny/hpack.js

# Install build/runtime prerequisites and node tooling.
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn

OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`

# Clone at the pinned tag; report and exit 0 on failure so the harness
# records the result instead of aborting the batch.
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
  echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
  echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
  echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
  exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
PACKAGE_VERSION=$(jq -r ".version" package.json)

# run the test command from test.sh
# BUG FIX: the original `if ! npm install && npm audit fix && ...` negated
# only `npm install` (`!` binds to the first pipeline), so the failure branch
# ran only when install failed AND the audit commands succeeded. Group the
# whole chain in a subshell so `!` applies to its combined result.
if ! (npm install && npm audit fix && npm audit fix --force); then
  echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
  echo "$PACKAGE_URL $PACKAGE_NAME"
  echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
  exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
if ! npm test; then
  echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
  echo "$PACKAGE_URL $PACKAGE_NAME"
  echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
  exit 0
else
  echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
  echo "$PACKAGE_URL $PACKAGE_NAME"
  echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
  exit 0
fi
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.