text
stringlengths 1
1.05M
|
|---|
<reponame>gemini133/mango
/*
MANGO Multimedia Development Platform
Copyright (C) 2012-2020 Twilight Finland 3D Oy Ltd. All rights reserved.
*/
#include <mango/core/endian.hpp>
#include <mango/image/compression.hpp>
// https://www.khronos.org/registry/OpenGL/extensions/3DFX/3DFX_texture_compression_FXT1.txt
namespace
{
using namespace mango;
// Raw 128-bit FXT1 block. The block type lives in the top three bits
// of the last byte (bits 125..127 of the little-endian 128-bit word).
struct BlockFXT
{
    u8 data[16];

    // Returns the 3-bit block mode used to select the decoder.
    u32 getMode() const
    {
        // mode is stored in the 3 last bits of FXT block
        return data[15] >> 5;
    }
};
// CC_HI block layout: 32 x 3-bit texel indices followed by two RGB555
// endpoints and a 2-bit mode field.
// NOTE(review): the bit-field packing assumes a little-endian compiler
// layout matching the on-disk format — confirm on other targets.
struct BlockHI
{
    u8 indices[12];
    u32 blue0 : 5;
    u32 green0 : 5;
    u32 red0 : 5;
    u32 blue1 : 5;
    u32 green1 : 5;
    u32 red1 : 5;
    u32 mode : 2;
};
// CC_CHROMA block layout: 32 x 2-bit texel indices followed by four
// independent RGB555 colors, one unused bit and a 3-bit mode field.
struct BlockCHROMA
{
    u8 indices[8];
    u64 blue0 : 5;
    u64 green0 : 5;
    u64 red0 : 5;
    u64 blue1 : 5;
    u64 green1 : 5;
    u64 red1 : 5;
    u64 blue2 : 5;
    u64 green2 : 5;
    u64 red2 : 5;
    u64 blue3 : 5;
    u64 green3 : 5;
    u64 red3 : 5;
    u64 unused : 1;
    u64 mode : 3;
};
// CC_MIXED block layout: 32 x 2-bit texel indices, four RGB555 endpoint
// colors (pair 0/1 for the left half, 2/3 for the right), an alpha flag
// selecting opaque vs punch-through decoding, green LSB bits for
// endpoints 1 and 3, and a 1-bit mode field.
struct BlockMIXED
{
    u8 indices[8];
    u64 blue0 : 5;
    u64 green0 : 5;
    u64 red0 : 5;
    u64 blue1 : 5;
    u64 green1 : 5;
    u64 red1 : 5;
    u64 blue2 : 5;
    u64 green2 : 5;
    u64 red2 : 5;
    u64 blue3 : 5;
    u64 green3 : 5;
    u64 red3 : 5;
    u64 alpha : 1;
    u64 lsb1 : 1;
    u64 lsb3 : 1;
    u64 mode : 1;
};
// CC_ALPHA block layout: 32 x 2-bit texel indices, three colors with
// 5-bit alpha each, and a lerp flag that chooses between three flat
// colors (plus transparent black) and interpolated palettes.
struct BlockALPHA
{
    u8 indices[8];
    u64 blue0 : 5;
    u64 green0 : 5;
    u64 red0 : 5;
    u64 blue1 : 5;
    u64 green1 : 5;
    u64 red1 : 5;
    u64 blue2 : 5;
    u64 green2 : 5;
    u64 red2 : 5;
    u64 alpha0 : 5;
    u64 alpha1 : 5;
    u64 alpha2 : 5;
    u64 lerp : 1;
    u64 mode : 3;
};
// Widen a 5-bit channel to 8 bits by replicating the top bits into the
// low bits; maps 0 -> 0 and 31 -> 255 exactly.
inline u32 expand5to8(u32 v)
{
    // The shifted halves occupy disjoint bit ranges, so + equals |.
    return (v << 3) + (v >> 2);
}
// Widen a 6-bit channel to 8 bits by replicating the top bits into the
// low bits; maps 0 -> 0 and 63 -> 255 exactly.
inline u32 expand6to8(u32 v)
{
    // The shifted halves occupy disjoint bit ranges, so + equals |.
    return (v << 2) + (v >> 4);
}
// Decode one CC_HI block (modes 00x) into an 8x4 BGRA pixel tile.
// A single RGB555 endpoint pair spans seven evenly spaced palette
// entries shared by both 4x4 halves; index 7 selects "transparent
// black", whose alpha comes from alphaMask (0xff for RGB surfaces).
void decode_hi(u8* out, size_t stride, const BlockHI& block, u8 alphaMask)
{
    // Expand the RGB555 endpoints to 8 bits per channel.
    u32 b0 = expand5to8(block.blue0);
    u32 g0 = expand5to8(block.green0);
    u32 r0 = expand5to8(block.red0);
    u32 b1 = expand5to8(block.blue1);
    u32 g1 = expand5to8(block.green1);
    u32 r1 = expand5to8(block.red1);

    // Palette: the endpoints plus five interpolants (rounded via +3).
    ColorBGRA color[8];
    color[0] = ColorBGRA(b0, g0, r0, 0xff);
    color[1] = ColorBGRA((5 * b0 + 1 * b1 + 3) / 6, (5 * g0 + 1 * g1 + 3) / 6, (5 * r0 + 1 * r1 + 3) / 6, 0xff);
    color[2] = ColorBGRA((4 * b0 + 2 * b1 + 3) / 6, (4 * g0 + 2 * g1 + 3) / 6, (4 * r0 + 2 * r1 + 3) / 6, 0xff);
    color[3] = ColorBGRA((3 * b0 + 3 * b1 + 3) / 6, (3 * g0 + 3 * g1 + 3) / 6, (3 * r0 + 3 * r1 + 3) / 6, 0xff);
    color[4] = ColorBGRA((2 * b0 + 4 * b1 + 3) / 6, (2 * g0 + 4 * g1 + 3) / 6, (2 * r0 + 4 * r1 + 3) / 6, 0xff);
    color[5] = ColorBGRA((1 * b0 + 5 * b1 + 3) / 6, (1 * g0 + 5 * g1 + 3) / 6, (1 * r0 + 5 * r1 + 3) / 6, 0xff);
    color[6] = ColorBGRA(b1, g1, r1, 0xff);
    color[7] = ColorBGRA(0, 0, 0, alphaMask); // punch-through slot

    // 3 bits per texel: the left 4x4 half reads from byte 0, the right
    // half from byte 6; each output row consumes 12 bits of its stream.
    u64 indices0 = uload64le(block.indices + 0);
    u64 indices1 = uload64le(block.indices + 6);

    for (int y = 0; y < 4; ++y)
    {
        ColorBGRA* dest = reinterpret_cast<ColorBGRA *>(out + y * stride);

        // left 4x4 block
        dest[0] = color[(indices0 >> 0) & 7];
        dest[1] = color[(indices0 >> 3) & 7];
        dest[2] = color[(indices0 >> 6) & 7];
        dest[3] = color[(indices0 >> 9) & 7];
        indices0 >>= 12;

        // right 4x4 block
        dest[4] = color[(indices1 >> 0) & 7];
        dest[5] = color[(indices1 >> 3) & 7];
        dest[6] = color[(indices1 >> 6) & 7];
        dest[7] = color[(indices1 >> 9) & 7];
        indices1 >>= 12;
    }
}
// Decode one CC_CHROMA block (mode 010) into an 8x4 BGRA pixel tile.
// Four flat RGB555 colors form the palette; every texel is fully opaque,
// so the alphaMask parameter is intentionally unused here (it is kept
// for signature symmetry with the other block decoders).
void decode_chroma(u8* out, size_t stride, const BlockCHROMA& block, u8 alphaMask)
{
    // Expand all four RGB555 palette entries to 8 bits per channel.
    u32 b0 = expand5to8(block.blue0);
    u32 g0 = expand5to8(block.green0);
    u32 r0 = expand5to8(block.red0);
    u32 b1 = expand5to8(block.blue1);
    u32 g1 = expand5to8(block.green1);
    u32 r1 = expand5to8(block.red1);
    u32 b2 = expand5to8(block.blue2);
    u32 g2 = expand5to8(block.green2);
    u32 r2 = expand5to8(block.red2);
    u32 b3 = expand5to8(block.blue3);
    u32 g3 = expand5to8(block.green3);
    u32 r3 = expand5to8(block.red3);

    ColorBGRA color[4];
    color[0] = ColorBGRA(b0, g0, r0, 0xff);
    color[1] = ColorBGRA(b1, g1, r1, 0xff);
    color[2] = ColorBGRA(b2, g2, r2, 0xff);
    color[3] = ColorBGRA(b3, g3, r3, 0xff);

    // 2 bits per texel; left half in the first 4 bytes, right half in
    // the next 4, one byte per output row per half.
    u32 indices0 = uload32le(block.indices + 0);
    u32 indices1 = uload32le(block.indices + 4);

    for (int y = 0; y < 4; ++y)
    {
        ColorBGRA* dest = reinterpret_cast<ColorBGRA *>(out + y * stride);

        // left 4x4 block
        dest[0] = color[(indices0 >> 0) & 3];
        dest[1] = color[(indices0 >> 2) & 3];
        dest[2] = color[(indices0 >> 4) & 3];
        dest[3] = color[(indices0 >> 6) & 3];
        indices0 >>= 8;

        // right 4x4 block
        dest[4] = color[(indices1 >> 0) & 3];
        dest[5] = color[(indices1 >> 2) & 3];
        dest[6] = color[(indices1 >> 4) & 3];
        dest[7] = color[(indices1 >> 6) & 3];
        indices1 >>= 8;
    }
}
// Decode one CC_ALPHA block (mode 011) into an 8x4 BGRA pixel tile.
// When lerp is clear, both halves share three flat RGBA colors plus a
// transparent-black slot; when set, each half gets a 4-entry palette
// interpolated towards color1.
// NOTE(review): OR-ing the expanded alpha with alphaMask forces alpha
// to 0xff for the RGB surface variant (callers pass 0xff there, 0 for
// RGBA) — confirm this matches the intended FXT1-on-RGB behavior.
void decode_alpha(u8* out, size_t stride, const BlockALPHA& block, u8 alphaMask)
{
    // Expand the three RGBA5555 palette colors to 8 bits per channel.
    u32 b0 = expand5to8(block.blue0);
    u32 g0 = expand5to8(block.green0);
    u32 r0 = expand5to8(block.red0);
    u32 a0 = expand5to8(block.alpha0) | alphaMask;
    u32 b1 = expand5to8(block.blue1);
    u32 g1 = expand5to8(block.green1);
    u32 r1 = expand5to8(block.red1);
    u32 a1 = expand5to8(block.alpha1) | alphaMask;
    u32 b2 = expand5to8(block.blue2);
    u32 g2 = expand5to8(block.green2);
    u32 r2 = expand5to8(block.red2);
    u32 a2 = expand5to8(block.alpha2) | alphaMask;

    ColorBGRA color[8];

    if (!block.lerp)
    {
        // Flat variant: both halves use the same 3 colors + transparent.
        // colors for left 4x4 block
        color[0] = ColorBGRA(b0, g0, r0, a0);
        color[1] = ColorBGRA(b1, g1, r1, a1);
        color[2] = ColorBGRA(b2, g2, r2, a2);
        color[3] = ColorBGRA(0, 0, 0, alphaMask);

        // colors for right 4x4 block
        color[4] = color[0];
        color[5] = color[1];
        color[6] = color[2];
        color[7] = color[3];
    }
    else
    {
        // Interpolated variant: left half blends color0 -> color1,
        // right half blends color2 -> color1 (+1 rounding).
        // colors for left 4x4 block
        color[0] = ColorBGRA(b0, g0, r0, a0);
        color[1] = ColorBGRA((2 * b0 + b1 + 1) / 3, (2 * g0 + g1 + 1) / 3, (2 * r0 + r1 + 1) / 3, (2 * a0 + a1 + 1) / 3);
        color[2] = ColorBGRA((b0 + 2 * b1 + 1) / 3, (g0 + 2 * g1 + 1) / 3, (r0 + 2 * r1 + 1) / 3, (a0 + 2 * a1 + 1) / 3);
        color[3] = ColorBGRA(b1, g1, r1, a1);

        // colors for right 4x4 block
        color[4] = ColorBGRA(b2, g2, r2, a2);
        color[5] = ColorBGRA((2 * b2 + b1 + 1) / 3, (2 * g2 + g1 + 1) / 3, (2 * r2 + r1 + 1) / 3, (2 * a2 + a1 + 1) / 3);
        color[6] = ColorBGRA((b2 + 2 * b1 + 1) / 3, (g2 + 2 * g1 + 1) / 3, (r2 + 2 * r1 + 1) / 3, (a2 + 2 * a1 + 1) / 3);
        color[7] = ColorBGRA(b1, g1, r1, a1);
    }

    // 2 bits per texel; the left half indexes palette entries 0..3 and
    // the right half entries 4..7.
    u32 indices0 = uload32le(block.indices + 0);
    u32 indices1 = uload32le(block.indices + 4);

    for (int y = 0; y < 4; ++y)
    {
        ColorBGRA* dest = reinterpret_cast<ColorBGRA *>(out + y * stride);

        // left 4x4 block
        dest[0] = color[0 + ((indices0 >> 0) & 3)];
        dest[1] = color[0 + ((indices0 >> 2) & 3)];
        dest[2] = color[0 + ((indices0 >> 4) & 3)];
        dest[3] = color[0 + ((indices0 >> 6) & 3)];
        indices0 >>= 8;

        // right 4x4 block
        dest[4] = color[4 + ((indices1 >> 0) & 3)];
        dest[5] = color[4 + ((indices1 >> 2) & 3)];
        dest[6] = color[4 + ((indices1 >> 4) & 3)];
        dest[7] = color[4 + ((indices1 >> 6) & 3)];
        indices1 >>= 8;
    }
}
// Decode one CC_MIXED block (modes 1xx) into an 8x4 BGRA pixel tile.
// Each 4x4 half has its own endpoint pair (colors 0/1 left, 2/3 right).
// The alpha bit selects between four opaque interpolants per half and
// three interpolants plus a transparent-black slot (DXT1-style).
//
// Fixes over the previous version:
//  - the right half's interpolants were stored into color[1]/color[2],
//    clobbering the left palette and leaving color[5]/color[6]
//    uninitialized; they now go to color[5]/color[6];
//  - in the opaque variant green0/green2 carry a derived 6th LSB
//    ((green << 1) | bit, a 6-bit value) and are now expanded with
//    expand6to8 — expand5to8 on a 6-bit input overflows 8 bits.
void decode_mixed(u8* out, size_t stride, const BlockMIXED& block, u8 alphaMask)
{
    ColorBGRA color[8];

    if (!block.alpha)
    {
        // Opaque variant: the green LSBs of colors 0 and 2 are
        // reconstructed from lsb1/lsb3 XORed with sign bits stored in
        // the index stream.
        u32 bit01 = (block.indices[0] >> 1) & 1;
        u32 bit33 = (block.indices[4] >> 1) & 1;

        u32 b0 = expand5to8(block.blue0);
        u32 g0 = expand6to8(u32(block.green0 << 1) | u32(block.lsb1 ^ bit01));
        u32 r0 = expand5to8(block.red0);
        u32 b1 = expand5to8(block.blue1);
        u32 g1 = expand6to8(u32((block.green1 << 1) | block.lsb1));
        u32 r1 = expand5to8(block.red1);
        u32 b2 = expand5to8(block.blue2);
        u32 g2 = expand6to8(u32((block.green2 << 1) | (block.lsb3 ^ bit33)));
        u32 r2 = expand5to8(block.red2);
        u32 b3 = expand5to8(block.blue3);
        u32 g3 = expand6to8(u32((block.green3 << 1) | block.lsb3));
        u32 r3 = expand5to8(block.red3);

        // colors for left 4x4 block
        color[0] = ColorBGRA(b0, g0, r0, 0xff);
        color[1] = ColorBGRA((2 * b0 + b1 + 1) / 3, (2 * g0 + g1 + 1) / 3, (2 * r0 + r1 + 1) / 3, 0xff);
        color[2] = ColorBGRA((b0 + 2 * b1 + 1) / 3, (g0 + 2 * g1 + 1) / 3, (r0 + 2 * r1 + 1) / 3, 0xff);
        color[3] = ColorBGRA(b1, g1, r1, 0xff);

        // colors for right 4x4 block
        color[4] = ColorBGRA(b2, g2, r2, 0xff);
        color[5] = ColorBGRA((2 * b2 + b3 + 1) / 3, (2 * g2 + g3 + 1) / 3, (2 * r2 + r3 + 1) / 3, 0xff);
        color[6] = ColorBGRA((b2 + 2 * b3 + 1) / 3, (g2 + 2 * g3 + 1) / 3, (r2 + 2 * r3 + 1) / 3, 0xff);
        color[7] = ColorBGRA(b3, g3, r3, 0xff);
    }
    else
    {
        // Punch-through variant: 3 colors + transparent black per half;
        // greens of colors 1 and 3 get their stored LSB appended.
        u32 b0 = expand5to8(block.blue0);
        u32 g0 = expand5to8(block.green0);
        u32 r0 = expand5to8(block.red0);
        u32 b1 = expand5to8(block.blue1);
        u32 g1 = expand6to8(u32((block.green1 << 1) | block.lsb1));
        u32 r1 = expand5to8(block.red1);
        u32 b2 = expand5to8(block.blue2);
        u32 g2 = expand5to8(block.green2);
        u32 r2 = expand5to8(block.red2);
        u32 b3 = expand5to8(block.blue3);
        u32 g3 = expand6to8(u32((block.green3 << 1) | block.lsb3));
        u32 r3 = expand5to8(block.red3);

        // colors for left 4x4 block
        color[0] = ColorBGRA(b0, g0, r0, 0xff);
        color[1] = ColorBGRA((b0 + b1) / 2, (g0 + g1) / 2, (r0 + r1) / 2, 0xff);
        color[2] = ColorBGRA(b1, g1, r1, 0xff);
        color[3] = ColorBGRA(0, 0, 0, alphaMask);

        // colors for right 4x4 block
        color[4] = ColorBGRA(b2, g2, r2, 0xff);
        color[5] = ColorBGRA((b2 + b3) / 2, (g2 + g3) / 2, (r2 + r3) / 2, 0xff);
        color[6] = ColorBGRA(b3, g3, r3, 0xff);
        color[7] = ColorBGRA(0, 0, 0, alphaMask);
    }

    // 2 bits per texel; the left half indexes palette entries 0..3 and
    // the right half entries 4..7.
    u32 indices0 = uload32le(block.indices + 0);
    u32 indices1 = uload32le(block.indices + 4);

    for (int y = 0; y < 4; ++y)
    {
        ColorBGRA* dest = reinterpret_cast<ColorBGRA *>(out + y * stride);

        // left 4x4 block
        dest[0] = color[0 + ((indices0 >> 0) & 3)];
        dest[1] = color[0 + ((indices0 >> 2) & 3)];
        dest[2] = color[0 + ((indices0 >> 4) & 3)];
        dest[3] = color[0 + ((indices0 >> 6) & 3)];
        indices0 >>= 8;

        // right 4x4 block
        dest[4] = color[4 + ((indices1 >> 0) & 3)];
        dest[5] = color[4 + ((indices1 >> 2) & 3)];
        dest[6] = color[4 + ((indices1 >> 4) & 3)];
        dest[7] = color[4 + ((indices1 >> 6) & 3)];
        indices1 >>= 8;
    }
}
// Dispatch a single FXT1 block to the decoder for its 3-bit mode.
// Output is an 8x4 tile of BGRA pixels; alphaMask controls how
// "transparent" palette slots are written (0xff = force opaque).
void decode_fxt1(u8* out, size_t stride, const BlockFXT& block, u8 alphaMask)
{
    u32 mode = block.getMode();
    switch (mode)
    {
        case 0:
        case 1:
            // 00x
            decode_hi(out, stride, reinterpret_cast<const BlockHI &>(block), alphaMask);
            break;
        case 2:
            // 010
            decode_chroma(out, stride, reinterpret_cast<const BlockCHROMA &>(block), alphaMask);
            break;
        case 3:
            // 011
            decode_alpha(out, stride, reinterpret_cast<const BlockALPHA &>(block), alphaMask);
            break;
        default:
            // 1xx
            decode_mixed(out, stride, reinterpret_cast<const BlockMIXED &>(block), alphaMask);
            break;
    }
}
} // namespace
namespace mango
{
// Decode one FXT1 block for an RGB surface: alphaMask 0xff forces the
// "transparent" palette slots to decode as fully opaque.
void decode_block_fxt1_rgb(const TextureCompressionInfo& info, u8* out, const u8* in, size_t stride)
{
    MANGO_UNREFERENCED(info);
    const BlockFXT& block = *reinterpret_cast<const BlockFXT *>(in);
    decode_fxt1(out, stride, block, 0xff);
}
// Decode one FXT1 block for an RGBA surface: alphaMask 0 preserves the
// encoded transparency (punch-through texels decode with alpha 0).
void decode_block_fxt1_rgba(const TextureCompressionInfo& info, u8* out, const u8* in, size_t stride)
{
    MANGO_UNREFERENCED(info);
    const BlockFXT& block = *reinterpret_cast<const BlockFXT *>(in);
    decode_fxt1(out, stride, block, 0);
}
} // namespace mango
|
// imports
import { actionCreators as userActions } from "./user";
// actions
const SET_BUCKET = "SET_BUCKET";
const LIKE_BUCKET = "LIKE_BUCKET";
const UNLIKE_BUCKET = "UNLIKE_BUCKET";
const POST_BUCKET = "POST_BUCKET";

// action creators

/** Stores the bucket feed fetched from the API. */
const setBucket = bucket => ({
    type: SET_BUCKET,
    bucket
});

/** Optimistically flags a bucket as liked in local state. */
const doLikeBucket = bucketId => ({
    type: LIKE_BUCKET,
    bucketId
});

/** Optimistically clears the liked flag in local state. */
const doUnLikeBucket = bucketId => ({
    type: UNLIKE_BUCKET,
    bucketId
});

/** Signals that a bucket was posted by the given user. */
const postbucket = username => ({
    type: POST_BUCKET,
    username
});
// API actions
// Thunk: fetch every bucket for the signed-in user and store the
// parsed JSON via SET_BUCKET.
function getBucket() {
    return (dispatch, getState) => {
        // Pull the JWT token out of the user slice of the store.
        const { user: { token } } = getState();
        fetch("/buckets/", {
            method: "GET",
            headers: {
                Authorization: `JWT ${token}`
            }
        })
            .then(response => response.json())
            .then(json => dispatch(setBucket(json)));
    };
}
// Thunk: optimistically mark the bucket liked, then notify the API.
// On 401 the session is dead, so log out; on any other failure revert
// the optimistic update.
// Fixed: the rollback previously dispatched the unlikeBucket *thunk*,
// which fired a DELETE /unlikes/ request for a like that never reached
// the server. It now dispatches the plain doUnLikeBucket action,
// mirroring how unlikeBucket rolls back with doLikeBucket.
function likeBucket(bucketId) {
    return (dispatch, getState) => {
        dispatch(doLikeBucket(bucketId));
        const { user: { token } } = getState();
        fetch(`/buckets/${bucketId}/likes/`, {
            method: "POST",
            headers: {
                Authorization: `JWT ${token}`
            }
        })
            .then(response => {
                if (response.status === 401) {
                    dispatch(userActions.logout());
                } else if (!response.ok) {
                    // Local rollback only — no network call.
                    dispatch(doUnLikeBucket(bucketId));
                }
            });
    };
}
// Thunk: optimistically clear the liked flag, then notify the API.
// Logs out on 401; restores the liked state if the request otherwise
// fails.
function unlikeBucket(bucketId) {
    return (dispatch, getState) => {
        dispatch(doUnLikeBucket(bucketId));
        const { user: { token } } = getState();
        fetch(`/buckets/${bucketId}/unlikes/`, {
            method: "DELETE",
            headers: {
                Authorization: `JWT ${token}`
            }
        }).then(response => {
            if (response.status === 401) {
                dispatch(userActions.logout());
            } else if (!response.ok) {
                // Server rejected the unlike: put the like back locally.
                dispatch(doLikeBucket(bucketId));
            }
        });
    };
}
// Thunk: create a new bucket on the server.
// Fixed: the success path called postbucket() with no argument, so the
// POST_BUCKET action lost its username payload; it now forwards it.
// NOTE(review): unlike the other thunks this request sends no JWT
// Authorization header, and gating success on json.token is unusual for
// a bucket POST — confirm the API contract.
function postBucket(username, file, location, caption, tags) {
    return dispatch => {
        fetch("/buckets/post/", {
            method: "POST",
            headers: {
                "Content-Type": "application/json"
            },
            body: JSON.stringify({
                username,
                file,
                location,
                caption,
                tags
            })
        })
            .then(response => response.json())
            .then(json => {
                if (json.token) {
                    dispatch(postbucket(username));
                }
            })
            .catch(err => console.log(err));
    };
}
// initial state
// The bucket feed lives under `state.bucket` once fetched; nothing is
// loaded at startup.
const initialState = {
};
// reducer
// Root reducer for the bucket slice: routes each action type to its
// dedicated pure helper; unknown actions leave state untouched.
function reducer(state = initialState, action) {
    const handlers = {
        [SET_BUCKET]: applySetBucket,
        [LIKE_BUCKET]: applyLikeBucket,
        [UNLIKE_BUCKET]: applyUnLikeBucket,
        [POST_BUCKET]: applyPostBucket,
    };
    const handler = handlers[action.type];
    return handler ? handler(state, action) : state;
}
// reducer functions
// Pure helper: replace the stored bucket feed with the one carried by
// the SET_BUCKET action.
function applySetBucket(state, action) {
    return Object.assign({}, state, { bucket: action.bucket });
}
// Pure helper: mark the bucket matching action.bucketId as liked and
// bump its like counter; every other bucket is returned untouched.
function applyLikeBucket(state, action) {
    const updated = state.bucket.map(item =>
        item.id === action.bucketId
            ? { ...item, is_liked: true, like_count: item.like_count + 1 }
            : item
    );
    return { ...state, bucket: updated };
}
// Pure helper: clear the liked flag on the bucket matching
// action.bucketId and decrement its like counter.
function applyUnLikeBucket(state, action) {
    const updated = state.bucket.map(item =>
        item.id === action.bucketId
            ? { ...item, is_liked: false, like_count: item.like_count - 1 }
            : item
    );
    return { ...state, bucket: updated };
}
// Pure helper for POST_BUCKET.
// Fixed: the previous version only destructured its inputs and fell off
// the end, returning undefined — a Redux reducer must always return the
// state. No bucket-state change is modelled for a successful post yet
// (the fresh feed arrives via SET_BUCKET after a refetch), so the
// previous state is returned unchanged.
function applyPostBucket(state, action) {
    return state;
}
// exports
// Public thunks; the plain action creators above stay module-private.
const actionCreators = {
    getBucket,
    likeBucket,
    unlikeBucket,
    postBucket,
};
export { actionCreators};
// export reducer by default
export default reducer;
|
import math
import queue as Q
from time import process_time
from vx.com.py.proximity.Proximity import *
class KNNFE:
    """K-nearest-neighbour front end: for each cluster, finds the
    ``nneighbors`` closest other clusters by centroid proximity."""

    def __init__(self):
        pass

    @staticmethod
    def execute(nneighbors, clusters, X, proxtype):
        """Return, for every cluster i, a list of up to ``nneighbors``
        ``[j, distance]`` pairs in increasing-distance order.

        ``X.proximity_row_ij(ci, cj, proxtype)`` supplies the pairwise
        distance between centroids; it is assumed symmetric.
        """
        start = process_time()
        count = len(clusters)
        # One priority queue per cluster, ordered by (distance, index).
        queues = [Q.PriorityQueue() for _ in range(count)]
        for i in range(count):
            ci = clusters[i].centroid
            for j in range(i + 1, count):
                d = X.proximity_row_ij(ci, clusters[j].centroid, proxtype)
                # Distance is symmetric: record the pair in both queues.
                queues[i].put((d, j))
                queues[j].put((d, i))
        nearest = [[] for _ in range(count)]
        for i in range(count):
            q = queues[i]
            # Drain in distance order until enough neighbours are kept.
            while not q.empty():
                d, j = q.get()
                nearest[i].append([j, d])
                if len(nearest[i]) == nneighbors:
                    break
        del queues
        end = process_time()
        print ("time KNNFE: {:.5f}".format(end-start))
        return nearest
|
#!/bin/bash
# Abort on the first failing command.
set -e

# create minion_backend for Mojolicious integration
# Runs as the superuser configured by the official postgres image via
# POSTGRES_USER/POSTGRES_DB. ON_ERROR_STOP makes psql exit non-zero on
# the first SQL error so `set -e` aborts the script.
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB"<<-EOSQL
CREATE USER minion;
CREATE DATABASE minion_backend;
GRANT ALL PRIVILEGES ON DATABASE minion_backend TO minion;
EOSQL
echo "Mojolicious Minion backend database created"
|
// Wraps one media record (photo or video) from the photographer data.
// Fixed: every getter previously read a field the constructor never set
// (e.g. `this._image` instead of `this._imgMedia`), so all getters
// returned undefined. Getters now return the fields the constructor
// actually assigns.
class Media {
    constructor(medias) {
        this._imgMedia = medias.image
        this._mediaId = medias.id
        this._mediaPhotographer = medias.photographerId
        this._mediaTitle = medias.title
        this._mediaLikes = medias.likes
        this._mediaDate = medias.date
        this._mediaPrice = medias.price
        this._mediaVideo = medias.video
    }

    // Image file name (undefined for video-only media).
    get imgMedia() {
        return this._imgMedia
    }

    // Video file name (undefined for image-only media).
    get mediaVideo() {
        return this._mediaVideo
    }

    get mediaId() {
        return this._mediaId
    }

    get mediaPhotographer() {
        return this._mediaPhotographer
    }

    get mediaTitle() {
        return this._mediaTitle
    }

    get mediaLikes() {
        return this._mediaLikes
    }

    get mediaDate() {
        return this._mediaDate
    }

    get mediaPrice() {
        return this._mediaPrice
    }
}
|
class PlayersService {
    // NestJS-style logger scoped to this service's class name.
    private readonly logger = new Logger(PlayersService.name);

    /**
     * Creates a new player from the given DTO.
     * Currently only logs the creation; the persistence steps are not
     * implemented in this chunk.
     */
    async createPlayer(createPlayerDTO: CreatePlayerDTO) {
        // Create player logic
        this.logPlayerCreation(createPlayerDTO); // Call the custom logging method
        // Additional player creation logic
    }

    /** Logs the name and details of a newly created player. */
    private logPlayerCreation(createPlayerDTO: CreatePlayerDTO) {
        // Extract relevant information from createPlayerDTO
        const { playerName, playerDetails } = createPlayerDTO;
        // Log player creation details
        this.logger.log(`Player created - Name: ${playerName}, Details: ${playerDetails}`);
    }
}
|
@app.route('/customers/<int:customer_id>', methods=['POST'])
def save_customer_data(customer_id):
    """Create a customer record from the JSON request body.

    Expects a JSON object with 'name' and 'address' keys; a missing key
    raises KeyError (surfacing as a 500). NOTE(review): consider
    validating the payload and returning 400 instead.
    """
    data = request.get_json()
    # NOTE(review): posting an id that already exists will fail at
    # commit time with an integrity error — confirm whether an upsert
    # (db.session.merge) was intended.
    customer = Customer(id=customer_id, name=data['name'], address=data['address'])
    db.session.add(customer)
    db.session.commit()
    return jsonify({'message': 'Customer data successfully saved!'}), 201
|
package controllers;
import play.mvc.*;
import views.html.home.*;
import views.html.*;
import views.html.home.home;
public class HomeController extends Controller {

    /**
     * Renders the home page for the given user and company.
     * The parameter names are significant: Play's router binds request
     * parameters to action arguments by name.
     */
    public Result index(String user, String company) {
        return ok(home.render(user, company));
    }
}
|
// Linear search: reports whether `searchValue` occurs in `myArray`.
// Deliberately uses loose equality (==) to match the original
// behaviour, so e.g. searchArray([3], "3") is a hit.
function searchArray(myArray, searchValue) {
    for (const element of myArray) {
        // Loose comparison against the requested value.
        if (element == searchValue) {
            return true;
        }
    }
    // Exhausted the array without a match.
    return false;
}
// Driver Code
// Demo: search a small array and report the outcome on the console.
let myArray = [1,2,3,4,5];
let searchValue = 3;
let result = searchArray(myArray, searchValue);
// Print whether the value was present.
if(result)
console.log("Element Found");
else
console.log("Element not Found");
|
<gh_stars>0
import React from 'react';
// Presentational SVG icon: a white bar centred on a blue circle.
// NOTE(review): the component is named "ArrowDown" but the drawing (see
// the "icon_Group_del" group id) is a delete/minus glyph — confirm
// which is intended.
export default class ArrowDown extends React.Component {
    render() {
        // Size comes from the parent via props.
        // NOTE(review): `color` is destructured but never used — the
        // circle fill is hard-coded to #1793E6.
        const { width, height, color } = this.props;
        return (
            <svg width={width} height={height} viewBox="0 0 140 140" version="1.1" >
                <g id="Icons" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
                    <g transform="translate(-4048.000000, -1059.000000)" fillRule="nonzero" id="icon_Group_del">
                        <g transform="translate(4048.000000, 1059.000000)">
                            <circle id="椭圆_2" fill="#1793E6" cx="70" cy="70" r="70"></circle>
                            <rect id="矩形_6" fill="#FFFFFF" x="30" y="60" width="80" height="20" rx="1"></rect>
                        </g>
                    </g>
                </g>
            </svg>
        )
    }
}
|
import random
def random_permutation(input_array):
    """Return a uniformly random permutation of ``input_array``.

    Fixed: the previous version removed elements from the caller's list
    while building the permutation, leaving it empty on return. The
    selection loop now operates on a private copy (keeping the same
    random.choice draw sequence), so the input is left untouched.

    :param input_array: sequence of elements to permute (not modified)
    :return: new list containing the elements in random order
    """
    remaining = list(input_array)
    perm = []
    while len(remaining) > 0:
        el = random.choice(remaining)
        perm.append(el)
        remaining.remove(el)
    return perm
|
from typing import List, Tuple
class CyclicDependencyError(Exception):
    """Raised when the dependency graph contains a cycle, i.e. no valid
    topological ordering of the tasks exists."""
    pass
def determine_execution_order(dependency_pairs: List[Tuple[int, int]]) -> List[int]:
    """Topologically order tasks given (before, after) dependency pairs.

    Implements Kahn's algorithm: repeatedly emit a task with no
    remaining incoming edges, decrementing the in-degree of its
    successors. Raises CyclicDependencyError if a cycle prevents a
    complete ordering.
    """
    successors = {}
    incoming = {}
    for before, after in dependency_pairs:
        # Register both endpoints on first sight.
        successors.setdefault(before, [])
        incoming.setdefault(before, 0)
        successors.setdefault(after, [])
        incoming.setdefault(after, 0)
        successors[before].append(after)
        incoming[after] += 1

    # Seed with every task that has no prerequisites; the list doubles
    # as both the work queue (scanned via `cursor`) and the result.
    order = [task for task in successors if incoming[task] == 0]
    cursor = 0
    while cursor < len(order):
        current = order[cursor]
        cursor += 1
        for nxt in successors[current]:
            incoming[nxt] -= 1
            if incoming[nxt] == 0:
                order.append(nxt)

    # Any task left unplaced is stuck on a cycle.
    if len(order) != len(successors):
        raise CyclicDependencyError("Cyclic dependency detected")
    return order
|
#!/bin/bash
# Benchmark driver: builds PyTorch at a pinned commit and at its parent
# (HEAD~1), runs the batchnorm3d-channels-last profiling script against
# both builds, then collates the results into a markdown report.
PYTORCH=/opt/pytorch/pytorch          # PyTorch checkout inside the container
WORKSPACE=/workspace                  # scratch area for the benchmark repo
DESTINATION=/docker                   # where results are exported
PROFILE_PYTHON_DIR=batchnorm3d-channels-last
# Clear preset version strings so setup.py derives them from git.
unset PYTORCH_VERSION
unset PYTORCH_BUILD_VERSION
CURRENT_COMMIT='7317dba25c9b0cee7e2b46c0b32587107265e1c5'

# Build PyTorch in-place (develop mode) from the $PYTORCH checkout.
function build {
pushd $PYTORCH;
git submodule update --init --recursive;
pip install -r requirements.txt;
# Uninstall repeatedly in case several torch installs are stacked.
for i in `seq 5`; do pip uninstall torch -y; python setup.py clean; done;
CUDA_HOME="/usr/local/cuda" CMAKE_PREFIX_PATH="$(dirname $(which conda))/../" \
NCCL_INCLUDE_DIR="/usr/include/" NCCL_LIB_DIR="/usr/lib/" USE_SYSTEM_NCCL=1 USE_OPENCV=1 \
TORCH_CUDA_ARCH_LIST='7.0 8.0' python setup.py develop;
popd;
}

# Build ccache from source and shim cc/c++/gcc/g++/nvcc through it so the
# second PyTorch build reuses compilation results from the first.
# NOTE(review): this function shadows the `ccache` binary name within
# this script — calls after this point must use the full path.
function ccache {
apt-get update;
apt-get install -y cmake;
mkdir -p ~/ccache;
pushd ~/ccache;
rm -rf ccache;
git clone https://github.com/ccache/ccache.git;
mkdir -p ccache/build;
pushd ccache/build;
cmake -DCMAKE_INSTALL_PREFIX=${HOME}/ccache \
-DENABLE_TESTING=OFF -DZSTD_FROM_INTERNET=ON ..;
make -j$(nproc) install;
popd;
popd;
mkdir -p ~/ccache/lib;
mkdir -p ~/ccache/cuda;
# Symlink compiler names through ccache and put them first on PATH.
ln -s ~/ccache/bin/ccache ~/ccache/lib/cc;
ln -s ~/ccache/bin/ccache ~/ccache/lib/c++;
ln -s ~/ccache/bin/ccache ~/ccache/lib/gcc;
ln -s ~/ccache/bin/ccache ~/ccache/lib/g++;
ln -s ~/ccache/bin/ccache ~/ccache/cuda/nvcc;
~/ccache/bin/ccache -M 25Gi;
export PATH=~/ccache/lib:$PATH;
export CUDA_NVCC_EXECUTABLE=~/ccache/cuda/nvcc;
which gcc;
}

# --- main flow -------------------------------------------------------
ccache
# Build and profile the pinned commit.
cd $PYTORCH
git remote add gh https://github.com/xwang233/pytorch
git fetch gh $CURRENT_COMMIT
git checkout $CURRENT_COMMIT
build
mkdir -p $WORKSPACE
cd $WORKSPACE
git clone --recursive https://github.com/xwang233/code-snippet.git
cd code-snippet/$PROFILE_PYTHON_DIR
python s.py
# Rebuild at the parent commit and profile again for comparison.
cd $PYTORCH
git checkout HEAD~1
build
cd $WORKSPACE
cd code-snippet/$PROFILE_PYTHON_DIR
python s.py
# Collect both result sets into a timestamped directory and render the
# markdown comparison.
tmpdir=`date +%s`
mkdir -p $tmpdir
cd $tmpdir
mv ../res* .
ln -s ../parse.py .
python parse.py
ln -s *.md readme.md
cd ..
# copy this $tmpdir to save the benchmark results
# cp -r $tmpdir $DESTINATION
cd ..
cp -r $PROFILE_PYTHON_DIR $DESTINATION
|
#!/bin/sh
# Restart-on-change supervisor for the API service binary: runs the
# service, waits for the binary file to change (filewatch, 3s poll),
# then kills and relaunches it.
# Fixed: variable expansions are now quoted so paths containing spaces
# or glob characters cannot break the test/kill commands.
SVC=./bin/apisvc

# Block until the binary exists (it may still be building).
while [ ! -f "$SVC" ]; do sleep 1; done

while true; do
    "$SVC" &
    PID=$!
    # Returns when the binary on disk is modified.
    filewatch -t 3 -filenames "$SVC"
    kill "$PID"
    echo "$(date '+%Y/%m/%d %H:%M:%S') APIService: killed"
done
|
<reponame>GaganSD/Air-Pollution-Visualizer<filename>aqmaps/src/main/java/uk/ac/ed/inf/aqmaps/DroneBackend.java<gh_stars>0
/*
* Written by <NAME>
* for Informatics Large Practical coursework 2
* Student number: s1854008
*/
package uk.ac.ed.inf.aqmaps;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.io.IOException;
import com.mapbox.geojson.Feature;
import com.mapbox.geojson.FeatureCollection;
import com.mapbox.geojson.GeoJson;
import com.mapbox.geojson.Geometry;
import com.mapbox.geojson.LineString;
import com.mapbox.geojson.Point;
import java.util.*;
import org.jgrapht.alg.tour.*;
import org.jgrapht.graph.*;
public class DroneBackend implements DroneBackendTemplate {
// Length (in degrees) of a single drone move.
public static double DIST_DRONE_CAN_MOVE = 0.0003;
// private instance variables with getters and setters
// provided at the bottom
private Drone currDrone;                  // the drone being simulated
private Point startPositions;             // take-off / return point
private ArrayList<SensorData> sensorsList;
private int seed;                         // RNG seed for sensor dropping
private final String RGB = "rgb-string";
private ArrayList<SensorData> orgSensorsList; // untouched copy of sensors
private int nodeCount;
// Sensors in the order produced by the TSP solver.
private ArrayList<SensorData> shortestPathSensors = new ArrayList<SensorData>();
private ArrayList<WordsData> wordsDataList;   // what3words data per sensor
private ArrayList<Integer> directions = new ArrayList<>(); // move angles (deg)
private ArrayList<String> locationStrings = new ArrayList<String>();
private ArrayList<Point> droneRoute;      // final generated way-points
private NoFlyZone noFlyZone;
private int stepsCompleted = 0;           // moves used by the current route
private GeoJson geoJsonFeatures;
private int sensorsNotVisitedCount = 0;
private boolean visited = false;
/**
 * Wires the backend together and resolves each sensor's coordinates
 * from its what3words record.
 *
 * NOTE(review): assumes currDrone.getSensors() and
 * currDrone.getW3Words() are parallel lists (index i refers to the
 * same sensor) — confirm upstream.
 */
public DroneBackend(Drone drone, Point startPositions, ArrayList<WordsData> wordsDataList,
ArrayList<SensorData> sensorsList, NoFlyZone noFlyZone, int seed, ArrayList<SensorData> orgSensorsList) {
    this.startPositions = startPositions;
    this.wordsDataList = wordsDataList;
    this.sensorsList = sensorsList;
    this.noFlyZone = noFlyZone;
    this.currDrone = drone;
    this.seed = seed;
    this.orgSensorsList = orgSensorsList;

    // populate the sensors location with the w3words given to the drone backend.
    for (int i = 0; i < currDrone.getSensors().size(); i++) {
        var correspondingWord = currDrone.getW3Words().get(i);
        var sensorLocation = correspondingWord.getCoordinates().getPoint();
        // in-place
        currDrone.getSensors().get(i).setWhere(sensorLocation);
    }
}
/**
 * Fluent builder for {@link DroneBackend}: collects the drone, its
 * environment data and the RNG seed before constructing the backend.
 */
public static class Builder {
    private Point startPositions;
    private ArrayList<WordsData> wordsDataList;
    private ArrayList<SensorData> sensorsList;
    private ArrayList<SensorData> orgSensorsList;
    private NoFlyZone noFlyZone;
    private Drone currDrone;
    private int seed;

    public Builder() {
    }

    /** Attaches the drone whose route will be simulated. */
    public Builder connectDrone(Drone drone) {
        this.currDrone = drone;
        return this;
    }

    /** Sets the take-off (and return) coordinates. */
    public Builder setStartPosition(Double startlng, Double startlat) {
        this.startPositions = Point.fromLngLat(startlng, startlat);
        return this;
    }

    /** Supplies the sensors the drone must visit. */
    public Builder setSensorsList(ArrayList<SensorData> sensorsList) {
        this.sensorsList = sensorsList;
        return this;
    }

    /** Supplies the what3words records matching the sensors. */
    public Builder setW3WordsData(ArrayList<WordsData> wordsList) {
        this.wordsDataList = wordsList;
        return this;
    }

    /** Supplies the restricted airspace polygons. */
    public Builder setNoFlyZones(NoFlyZone zones) {
        this.noFlyZone = zones;
        return this;
    }

    /** Seeds the RNG used when a route exceeds the move budget. */
    public Builder setSeed(int seed) {
        this.seed = seed;
        return this;
    }

    /** Keeps an untouched shallow copy of the sensor list for reporting. */
    @SuppressWarnings("unchecked")
    public Builder copySensors(ArrayList<SensorData> originalSensors) {
        this.orgSensorsList = (ArrayList<SensorData>) originalSensors.clone();
        return this;
    }

    /** Assembles the configured {@link DroneBackend}. */
    public DroneBackend build() {
        return new DroneBackend(currDrone, startPositions, wordsDataList, sensorsList,
                noFlyZone, seed, orgSensorsList);
    }
}
// -----------------------------------------------------------------------
// -----------------------------------------------------------------------
// Main methods that can be accessed.
/**
* in-place function that generates the route to sensors. Updates the
* droneRoute.
*
* @throws IOException
* @throws InterruptedException
*/
@Override
public void generateRoute(String algorithm) throws IOException, InterruptedException {
    try {
        Random randomGenerator = new Random(this.seed);
        var routePoints = this.generateSensorsToVisit(algorithm);

        // if the generated route is greater than allowed limit
        // remove some sensors.
        while (stepsCompleted > Drone.MAX_MOVES) {
            var i = randomGenerator.nextInt(wordsDataList.size());
            // Randomly delete nodes. Get sensors.
            // NOTE(review): this assumes wordsDataList and sensorsList
            // are parallel (index i refers to the same sensor) — confirm.
            wordsDataList.remove(i);
            sensorsList.remove(i);
            stepsCompleted = 0;
            // generate route points again!
            routePoints = this.generateSensorsToVisit(algorithm);
        }
        this.droneRoute = routePoints;

        // We now have the drone routes, so we convert it to features
        this.setGeoJsonFeatures(convertToFeatures());
    } catch (Exception e) {
        // Any failure here is unrecoverable for the app: report and exit.
        System.out.println("Something went wrong while simulating the routes!");
        e.printStackTrace();
        System.out.println("\n\nThe app will now exit.");
        System.exit(1);
    }
}
// -----------------------------------------------------------------------
// -----------------------------------------------------------------------
// Private methods used as helper functions belong here
/**
* @param TSPalgorithm - String
* @return an array of points of sensors to visit
*/
/**
 * Computes an ordered list of way-points visiting every sensor and
 * returning to the start position.
 *
 * Fixes over the previous version:
 *  - every switch case now ends with {@code break}: the old cases fell
 *    through, so whatever algorithm was requested, the tour was always
 *    overwritten by the later cases and finally by the default
 *    (Christofides) tour;
 *  - the "GreedyHeuristic" case instantiated RandomTourTSP (copy-paste
 *    error) and now uses GreedyHeuristicTSP;
 *  - a leftover debug println ("aa") was removed.
 *
 * @param algorithm name of the TSP heuristic to use
 * @return the points the drone should fly through, in order
 */
private ArrayList<Point> generateSensorsToVisit(String algorithm) {
    // NOTE(review): this local intentionally shadows the sensorsList
    // field; it accumulates route way-points, not sensors.
    ArrayList<Point> sensorsList = new ArrayList<Point>();
    ArrayList<SensorData> routeList;

    // Build the entire graph with vertices as sensors and edges calculated
    var graph = buildGraph(currDrone.getSensors());

    // These algorithms may fail for several reasons (imperfect matching,
    // graph unsuitable for the algorithm, ...). The application takes
    // safety checks and falls back to a greedy / nearest-neighbour
    // heuristic in the catch block below.
    try {
        switch (algorithm) {
            case "Greedy": {
                var chosenTSPAlgorithm = new GreedyHeuristicTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "HeldKarp": {
                var chosenTSPAlgorithm = new HeldKarpTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "NearestInsertion": {
                var chosenTSPAlgorithm = new NearestInsertionHeuristicTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "NearestNeighbor": {
                var chosenTSPAlgorithm = new NearestNeighborHeuristicTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "PalmerHamiltonian": {
                var chosenTSPAlgorithm = new PalmerHamiltonianCycle<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "RandomTour": {
                var chosenTSPAlgorithm = new RandomTourTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "GreedyHeuristic": {
                var chosenTSPAlgorithm = new GreedyHeuristicTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            case "TwoOptHeuristic": {
                var chosenTSPAlgorithm = new TwoOptHeuristicTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
            default: {
                var chosenTSPAlgorithm = new ChristofidesThreeHalvesApproxMetricTSP<SensorData, DefaultEdge>();
                var tour = chosenTSPAlgorithm.getTour(graph);
                routeList = (ArrayList<SensorData>) tour.getVertexList();
                break;
            }
        }
    } catch (Exception e) {
        if (algorithm.equals("Greedy")) {
            System.out.println("The chosen algorithm failed on this set of sensors with an error" + e
                    + ". \nA NearestNeighborHeuristicTSP is implemented as a backup.");
            var chosenTSPAlgorithm = new NearestNeighborHeuristicTSP<SensorData, DefaultEdge>();
            var tour = chosenTSPAlgorithm.getTour(graph);
            routeList = (ArrayList<SensorData>) tour.getVertexList();
        } else {
            System.out.println("The chosen algorithm failed on this set of sensors with an error" + e
                    + ". \nA Greedy heuristic is implemented as a backup.");
            var chosenTSPAlgorithm = new GreedyHeuristicTSP<SensorData, DefaultEdge>();
            var tour = chosenTSPAlgorithm.getTour(graph);
            routeList = (ArrayList<SensorData>) tour.getVertexList();
        }
    }

    this.shortestPathSensors = routeList;
    var routeSize = this.shortestPathSensors.size();

    // Add drone's current point. As we would like to come back to the original point.
    sensorsList.add(currDrone.getCurrentLocation());

    // Here we adjust the paths given to us by the algorithm as we used Euclidean
    // distance between the sensors & didn't account the no fly zones earlier.
    // Here we also consider the confinement region given to us
    for (int i = 0; i < routeSize; i++) {
        var currSensor = this.shortestPathSensors.get(i);
        var locs = buildValidRoute(currSensor.getWhere());
        sensorsList.addAll(locs);
        locationStrings.add(currSensor.getLocation());
        if (i == this.shortestPathSensors.size() - 1) {
            // Close the loop: fly back to the start position.
            locs = buildValidRoute(startPositions);
            sensorsList.addAll(locs);
            locationStrings.add(null);
        }
    }
    return sensorsList;
}
/**
* Helper method.
*
* This method builds a valid route from the drone's current
* location to the location given as the argument.
*
* @param Point - Location the drone should go to.
* @return
*/
private ArrayList<Point> buildValidRoute(Point reachState) {
    // get currDrone location
    var currDroneLoc = currDrone.getCurrentLocation();
    var nextLoc = reachState;

    // set lat & long
    var currLng = currDroneLoc.longitude();
    var currLat = currDroneLoc.latitude();
    var nextLng = nextLoc.longitude();
    var nextLat = nextLoc.latitude();

    // get the direction to move to reach towards the final state
    double towards = getPossibleMove(reachState);
    var power = 0;   // alternates the sign of the angle adjustment
    var times = 1;   // widens the adjustment each retry
    var pointsRoutes = new ArrayList<Point>();
    Point currPoint;

    // while the two points are not in range
    var distance = Point2D.distance(currLng, currLat, nextLng, nextLat);

    // we keep appending new points until the distance between the point (sensor's
    // location)
    // and the drone's location is within the detector's range.
    while (distance > Drone.DETECTOR_RANGE) {
        // the angle should be a multiple of 10 or else the
        // drone won't be able to move this way.
        while (towards % 10 != 0) {
            // we update our angle until this is satisfied.
            // NOTE(review): this rounds towards down to a multiple of 10,
            // after which the loop condition normally fails — the inner
            // validRoute adjustment keeps the angle a multiple of 10, so
            // this loop effectively runs once per outer iteration.
            towards = towards + (0 - towards % 10);
            currDroneLoc = currDrone.getCurrentLocation();
            currPoint = newLoc(currDroneLoc, towards);
            if (validRoute(currDroneLoc, currPoint)) {
                // valid route checks if the new point isn't in the no flyzone
                var increment = -(Math.pow(-1, power)) * times * 10;
                towards = (towards + increment) % 360;
            }
        }

        // angle shouldn't be negative.
        // change to its corresponding positive angle.
        // NOTE(review): 360 - towards for negative towards yields values
        // above 360 (e.g. -10 -> 370); confirm whether 360 + towards was
        // intended.
        if (towards < 0) {
            towards = 360 - towards;
        }

        // get a new valid location at the angle.
        currDroneLoc = currDrone.getCurrentLocation();
        currPoint = newLoc(currDroneLoc, towards);

        // check if the new location's route is valid and by checking against
        // no fly zone.
        // check if the location has already been visited.
        var allClear = validRoute(currDroneLoc, currPoint) || pointsRoutes.contains(currPoint);
        while (allClear) {
            // one of the above is true. change it!
            // Sweep angles outward alternating sides: +10, -20, +30, ...
            var increment = -(Math.pow(-1, power)) * times;
            power += 1;
            times += 1;
            //
            increment = increment * 10; // angle should be multiple of 10
            towards = (towards + increment) % 360; // should be less than 360
            currDroneLoc = currDrone.getCurrentLocation();
            currPoint = newLoc(currDroneLoc, towards);
            // check again!
            allClear = validRoute(currDroneLoc, currPoint) || pointsRoutes.contains(currPoint);
            // continue if its still not valid.
        }

        // negative angle check.
        if (towards < 0) {
            towards = 360 - towards;
        }

        // reset!
        power = 0;
        times = 1;

        // angle should be a multiple of 10, floating point numbers aren't!
        var towardsInt = (int) towards;

        // append the next move!
        directions.add(towardsInt);
        pointsRoutes.add(currPoint);

        // update the drone's location with the new valid location
        currDrone.updateCurrentLocation(currPoint);
        currDroneLoc = currDrone.getCurrentLocation();
        nextLoc = reachState;

        // update variables & calculate distance
        currLng = currDroneLoc.longitude();
        currLat = currDroneLoc.latitude();
        nextLng = nextLoc.longitude();
        nextLat = nextLoc.latitude();
        distance = Point2D.distance(currLng, currLat, nextLng, nextLat);

        // increment counter
        // increment
        if (distance > Drone.DETECTOR_RANGE) {
            locationStrings.add(null);
        }
        stepsCompleted++;

        // get the next angle the drone should possiblty turn to reach the state.
        towards = getPossibleMove(reachState);
    }
    return pointsRoutes;
}
/**
 * Given a point, this method gives the direction the drone must move
 * towards to reach the point. The angle is measured in degrees from east,
 * counter-clockwise (matching the sin/cos convention used by newLoc), and
 * is normalised into [0, 360).
 *
 * BUG FIX: the original only handled diffLat > 0 and diffLat < 0, so a
 * target due west (diffLat == 0, diffLng < 0) wrongly produced 0 degrees
 * (east). atan2 handles every quadrant, including the axes.
 *
 * @param p2Loc the location to head towards
 * @return angle in degrees in [0, 360)
 */
private double getPossibleMove(Point p2Loc) {
    // vector from the drone's current location to the target
    var droneCurrLocation = currDrone.getCurrentLocation();
    double diffLng = p2Loc.longitude() - droneCurrLocation.longitude();
    double diffLat = p2Loc.latitude() - droneCurrLocation.latitude();
    // atan2 returns degrees in (-180, 180]; shift negatives into [0, 360).
    // For diffLat > 0 and diffLat < 0 this matches the previous behaviour
    // (360 - (-deg) % 360 == deg + 360 for deg in (-180, 0)).
    double angle = Math.toDegrees(Math.atan2(diffLat, diffLng));
    if (angle < 0) {
        angle += 360;
    }
    return angle;
}
/**
 * Answers: if the drone moves in a straight line from currLoc to
 * possibleNextLoc, does that segment cross a forbidden boundary? A boundary
 * is either one of the four sides of the confinement rectangle or any edge
 * of a no-fly-zone polygon.
 *
 * @param currLoc the drone's current position
 * @param possibleNextLoc the candidate next position
 * @return true when the segment crosses a forbidden boundary
 */
private Boolean validRoute(Point currLoc, Point possibleNextLoc) {
    var x1 = currLoc.longitude();
    var y1 = currLoc.latitude();
    var x2 = possibleNextLoc.longitude();
    var y2 = possibleNextLoc.latitude();
    var crosses = false;
    // Walk the confinement rectangle corner to corner and test each side
    // against the proposed segment.
    Point[] corners = { NoFlyZone.NORTHEAST, NoFlyZone.SOUTHEAST, NoFlyZone.SOUTHWEST,
            NoFlyZone.NORTHWEST };
    for (int i = 0; i < corners.length; i++) {
        var cornerA = corners[i];
        var cornerB = corners[(i + 1) % corners.length];
        crosses = crosses || Line2D.linesIntersect(cornerA.longitude(), cornerA.latitude(),
                cornerB.longitude(), cornerB.latitude(), x1, y1, x2, y2);
    }
    // Then test every edge of every no-fly-zone polygon the same way.
    for (var currForbiddenRegion : noFlyZone.noFlyZone) {
        var currZone = new ArrayList<Point>();
        currZone.addAll(currForbiddenRegion.coordinates().get(0));
        for (int i = 0; i < currZone.size() - 1; i++) {
            var edgeStart = currZone.get(i);
            var edgeEnd = currZone.get(i + 1);
            crosses = crosses || Line2D.linesIntersect(edgeStart.longitude(), edgeStart.latitude(),
                    edgeEnd.longitude(), edgeEnd.latitude(), x1, y1, x2, y2);
        }
    }
    return crosses;
}
/**
 * Computes the point one drone-move away from the given location, heading
 * in the direction of the given angle (degrees measured from east,
 * counter-clockwise: cos drives longitude, sin drives latitude).
 *
 * @param droneCurrLocation the drone's current location
 * @param angle heading in degrees
 * @return the location reached after one move at that heading
 */
private Point newLoc(Point droneCurrLocation, double angle) {
    // convert the heading to radians once, then project the fixed move
    // length onto each axis
    var radians = angle * (Math.PI / 180);
    var newLongitude = droneCurrLocation.longitude() + Math.cos(radians) * DIST_DRONE_CAN_MOVE;
    var newLatitude = droneCurrLocation.latitude() + Math.sin(radians) * DIST_DRONE_CAN_MOVE;
    return Point.fromLngLat(newLongitude, newLatitude);
}
/**
 * Builds a complete undirected weighted graph over the given sensors. Edge
 * weight is the straight-line distance between the two sensors, or
 * Double.MAX_VALUE when the line between them crosses a no-fly zone (which
 * effectively disconnects the pair for shortest-path purposes).
 *
 * @param arrSensors the sensors that become the graph's vertices
 * @return a DefaultUndirectedWeightedGraph over the sensors
 */
private DefaultUndirectedWeightedGraph<SensorData, DefaultEdge> buildGraph(ArrayList<SensorData> arrSensors) {
    var graph = new DefaultUndirectedWeightedGraph<SensorData, DefaultEdge>(DefaultEdge.class);
    int count = 0;
    // every sensor becomes a vertex
    for (var sensor : arrSensors) {
        graph.addVertex(sensor);
    }
    // O(N^2 * M*P): visit every ordered pair of distinct sensors
    for (var sensorFrom : arrSensors) {
        for (var sensorTo : arrSensors) {
            if (sensorFrom.equals(sensorTo)) {
                continue;
            }
            var fromLoc = sensorFrom.getWhere();
            var toLoc = sensorTo.getWhere();
            count++;
            graph.addEdge(sensorFrom, sensorTo);
            // illegalMove is the O(M*P) no-fly-zone crossing check
            if (noFlyZone.illegalMove(fromLoc, toLoc)) {
                // path crosses a no-fly zone: weight it out of consideration
                graph.setEdgeWeight(sensorFrom, sensorTo, Double.MAX_VALUE);
            } else {
                // legal path: weight is the euclidean distance
                var dist = Point2D.distance(fromLoc.longitude(), fromLoc.latitude(),
                        toLoc.longitude(), toLoc.latitude());
                graph.setEdgeWeight(sensorFrom, sensorTo, dist);
            }
        }
    }
    this.setNodeCount(count);
    return graph;
}
/**
 * Converts the travelled route and the sensor locations into one GeoJson
 * feature collection: the route as a black LineString plus a marker feature
 * per sensor (coloured by reading when visited, gray when not reached).
 *
 * NOTE(review): neither IOException nor InterruptedException is thrown by
 * the visible body; presumably the throws clause is kept for caller
 * compatibility — confirm before removing.
 *
 * @return GeoJson object. converts the route & the sensor locations to GeoJson
 *         object.
 */
private GeoJson convertToFeatures() throws IOException, InterruptedException {
// sensors the drone actually reached; used by the gray-marker pass below
var visitedSensors = new HashSet<SensorData>();
// Add drone's path as features.
var features = new ArrayList<Feature>();
Feature travelledRoute = Feature.fromGeometry(LineString.fromLngLats(droneRoute));
travelledRoute.addStringProperty("fill", "#000000");
features.add(travelledRoute);
// for each sensor, append it to our features after adding properties (if our
// drone visits that sensor)
for (var currSensor : getshortestPathSensors()) {
var currSensorLoc = currSensor.getWhere();
var currSensorReading = currSensor.getReading();
var currSensorBattery = currSensor.getBattery();
var currSensorLocStr = currSensor.getLocation();
// see if drone visits this sensor at all: scan every point of the route
for (var currDroneLoc : droneRoute) {
var currLng = currDroneLoc.longitude();
var currLat = currDroneLoc.latitude();
var nextLng = currSensorLoc.longitude();
var nextLat = currSensorLoc.latitude();
var droneSensorDistance = Point2D.distance(currLng, currLat, nextLng, nextLat);
// NOTE(review): the proximity test uses DIST_DRONE_CAN_MOVE here while
// route building uses Drone.DETECTOR_RANGE — confirm the two thresholds
// are intentionally different.
if (droneSensorDistance < DIST_DRONE_CAN_MOVE) {
// Drone goes near the sensor, so it means we can collect its reading.
// So add it to our features.
visitedSensors.add(currSensor);
var sensorGeometry = (Geometry) currSensorLoc;
var sensorFeature = Feature.fromGeometry(sensorGeometry);
// Use appendProperties method to appriopiately add properties.
sensorFeature = appendProperties(currSensorReading, currSensorLocStr, currSensorBattery,
sensorFeature, true);
features.add(sensorFeature);
// since we know that drone visited this sensor, we can stop the search.
break;
} else {
continue;
}
}
}
// here we add gray markers to sensors that the drone couldn't
// reach towards. We do this by keeping a hashset of all the sensors visited
// and adding gray color to sensors not in the hashset.
for (var currSensor : orgSensorsList) {
var currSensorLoc = currSensor.getWhere();
var currSensorReading = currSensor.getReading();
var currSensorBattery = currSensor.getBattery();
var currSensorLocStr = currSensor.getLocation();
// O(1) retrieval (hashmap)
boolean contains = visitedSensors.contains(currSensor);
if (!contains) {
// count the miss and emit a gray marker for the unreached sensor
setSensorsNotVisitedCount(getSensorsNotVisitedCount() + 1);
var sensorLocGeometry = (Geometry) currSensorLoc;
var sensorLocFeature = Feature.fromGeometry(sensorLocGeometry);
// Mark as gray
sensorLocFeature = appendProperties(currSensorReading, currSensorLocStr, currSensorBattery,
sensorLocFeature, false);
features.add(sensorLocFeature);
}
}
// convert to feature collection, cast it to GeoJson and return it.
var featuresColl = (GeoJson) FeatureCollection.fromFeatures(features);
return featuresColl;
}
/**
 * Helper method used by convertToFeatures() to attach the marker properties
 * for one sensor feature.
 *
 * Colour bands: readings in [0, 128) get a lighthouse symbol with a green
 * shade; readings in [128, 256) get a danger symbol with a gold-to-red
 * shade; a low battery or an unusable reading gets a black cross; an
 * unvisited sensor gets only a gray marker.
 *
 * @param reading current reading of the sensor (numeric string, or "NaN"/"null")
 * @param loc the sensor's location string
 * @param battery remaining battery of the sensor
 * @param currFeature the feature to decorate
 * @param visited whether the drone actually reached this sensor
 * @return the decorated feature
 */
private Feature appendProperties(String reading, String loc, double battery, Feature currFeature, boolean visited) {
    // if location isn't visited, then we add a gray marker and nothing else
    if (!visited) {
        currFeature.addStringProperty(this.RGB, Properties.GRAY);
        currFeature.addStringProperty("marker-color", Properties.GRAY);
        return currFeature;
    }
    // drone visited the sensor: add the common properties
    currFeature.addStringProperty("marker-size", "medium");
    currFeature.addStringProperty("location", loc);
    // Battery is low or the reading is unusable, so the reading is
    // untrustworthy: mark the sensor with a black cross.
    // BUG FIX: the original compared strings with '==', which tests
    // reference identity in Java and fails for non-interned strings (e.g.
    // values parsed from a server response); use equals() and guard
    // against null so Double.parseDouble below cannot NPE.
    if (battery <= 10 || reading == null || "NaN".equals(reading) || "null".equals(reading)) {
        currFeature.addStringProperty("marker-symbol", Properties.MS_CROSS);
        currFeature.addStringProperty("marker-color", Properties.BLACK);
        currFeature.addStringProperty(this.RGB, Properties.BLACK);
        return currFeature;
    }
    // No problem with reading or battery: colour by the reading band.
    var currReading = Double.parseDouble(reading);
    if (currReading >= 0 && currReading < 128) {
        // low pollution: lighthouse symbol with a green shade
        currFeature.addStringProperty("marker-symbol", Properties.MS_LIGHTHOUSE);
        if (currReading >= 0 && currReading < 32) {
            currFeature.addStringProperty("marker-color", Properties.GREEN);
            currFeature.addStringProperty(this.RGB, Properties.GREEN);
        }
        if (currReading >= 32 && currReading < 64) {
            currFeature.addStringProperty(this.RGB, Properties.MEDIUM_GREEN);
            currFeature.addStringProperty("marker-color", Properties.MEDIUM_GREEN);
        }
        if (currReading >= 64 && currReading < 96) {
            currFeature.addStringProperty(this.RGB, Properties.LIGHT_GREEN);
            currFeature.addStringProperty("marker-color", Properties.LIGHT_GREEN);
        }
        if (currReading >= 96 && currReading < 128) {
            currFeature.addStringProperty(this.RGB, Properties.LIME_GREEN);
            currFeature.addStringProperty("marker-color", Properties.LIME_GREEN);
        }
    }
    if (currReading >= 128 && currReading < 256) {
        // high pollution: danger symbol with a gold-to-red shade
        currFeature.addStringProperty("marker-symbol", Properties.MS_DANGER);
        if (currReading >= 128 && currReading < 160) {
            currFeature.addStringProperty(this.RGB, Properties.GOLD);
            currFeature.addStringProperty("marker-color", Properties.GOLD);
        }
        if (currReading >= 160 && currReading < 192) {
            currFeature.addStringProperty(this.RGB, Properties.ORANGE);
            currFeature.addStringProperty("marker-color", Properties.ORANGE);
        }
        if (currReading >= 192 && currReading < 224) {
            currFeature.addStringProperty(this.RGB, Properties.RED_ORANGE);
            currFeature.addStringProperty("marker-color", Properties.RED_ORANGE);
        }
        if (currReading >= 224 && currReading < 256) {
            currFeature.addStringProperty(this.RGB, Properties.RED);
            currFeature.addStringProperty("marker-color", Properties.RED);
        }
    }
    // readings outside [0, 256) fall through with no symbol/colour set,
    // matching the original behaviour
    return currFeature;
}
// -------------------------------------------------------------------
// -------------------------------------------------------------------
// Standard Getters and setters methods for private instance variables.
/**
 * @return the sensors ordered along the computed shortest path
 */
public ArrayList<SensorData> getshortestPathSensors() {
return this.shortestPathSensors;
}
/**
 * @param shortestPathSensors the shortest-path sensor ordering to set
 */
public void setshortestPathSensors(ArrayList<SensorData> shortestPathSensors) {
this.shortestPathSensors = shortestPathSensors;
}
/**
 * @return the sensor location strings (with null placeholders for moves
 *         that read no sensor)
 */
public ArrayList<String> getlocationStrings() {
return this.locationStrings;
}
/**
 * @param locationStrings the locationStrings to set
 */
public void setLocationStrings(ArrayList<String> locationStrings) {
this.locationStrings = locationStrings;
}
/**
 * @return the visited flag
 */
public boolean getisVisited() {
return this.visited;
}
/**
 * @param visited the visited flag to set
 */
public void setVisited(boolean visited) {
this.visited = visited;
}
/**
 * @return the points of the drone's travelled route
 */
public ArrayList<Point> getdroneRoute() {
return droneRoute;
}
/**
 * @param droneRoute the droneRoute to set
 */
public void setdroneRoute(ArrayList<Point> droneRoute) {
this.droneRoute = droneRoute;
}
/**
 * @return the number of moves the drone has completed
 */
public int getStepsTaken() {
return this.stepsCompleted;
}
/**
 * @return the headings (multiples of 10 degrees) chosen for each move
 */
public ArrayList<Integer> getDirections() {
return this.directions;
}
/**
 * @return the geoJsonFeatures
 */
public GeoJson getGeoJsonFeatures() {
return geoJsonFeatures;
}
/**
 * @param geoJsonFeatures the geoJsonFeatures to set
 */
public void setGeoJsonFeatures(GeoJson geoJsonFeatures) {
this.geoJsonFeatures = geoJsonFeatures;
}
/**
 * @return the number of sensors the drone failed to reach
 */
public int getSensorsNotVisitedCount() {
return sensorsNotVisitedCount;
}
/**
 * @param sensorsNotVisitedCount update number of sensors that weren't visited
 */
public void setSensorsNotVisitedCount(int sensorsNotVisitedCount) {
this.sensorsNotVisitedCount = sensorsNotVisitedCount;
}
/**
 * @return the nodeCount (set by buildGraph from its edge-pair counter)
 */
public int getNodeCount() {
return nodeCount;
}
/**
 * @param nodeCount the nodeCount to set
 */
public void setNodeCount(int nodeCount) {
this.nodeCount = nodeCount;
}
}
|
require 'spec_helper'
include Spec::Example
# Verifies that localised "pending" keywords are exposed as methods on the
# example group once Pending.translate_pending_keywords has run.
describe Pending do
describe 'in portuguese for example' do
before(:each) do
# map the canonical "pending" keyword to its pipe-separated Portuguese aliases
@keywords = { "pending" => 'pendente|pendencia'}
stub_language!('pt', @keywords)
end
it "should have the translate pending method" do
Pending.translate_pending_keywords
# each alias should now exist as a method in the current scope
name_methods = methods.to_symbols
[:pendente, :pendencia].each do |translated_pending_method|
name_methods.should include(translated_pending_method)
end
end
end
describe 'in spanish for example' do
before(:each) do
# same check with a second language to prove translation is language-driven
@keywords = { "pending" => 'spec_pendente|pendenciaa'}
stub_language!('es', @keywords)
end
it "should have the translate pending method" do
Pending.translate_pending_keywords
name_methods = methods.to_symbols
[:spec_pendente, :pendenciaa].each do |translated_pending_method|
name_methods.should include(translated_pending_method)
end
end
end
end
|
<reponame>smagill/opensphere-desktop
package io.opensphere.csvcommon.detect.datetime.algorithm;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import io.opensphere.core.common.configuration.date.DateFormat;
import io.opensphere.core.common.configuration.date.DateFormat.Type;
import io.opensphere.core.util.DateTimeFormats;
import io.opensphere.core.util.collections.New;
import io.opensphere.csvcommon.common.datetime.DateColumnResults;
import io.opensphere.csvcommon.detect.ValueWithConfidence;
import io.opensphere.csvcommon.detect.datetime.model.PotentialColumn;
import io.opensphere.csvcommon.detect.datetime.model.SuccessfulFormat;
import io.opensphere.csvcommon.detect.datetime.util.DateDataGenerator;
/**
* Tests the DateRater class.
*
*/
@SuppressWarnings({ "PMD.GodClass", "boxing" })
public class DateRaterTest
{
/**
 * Our day-only format (e.g. 2020-1-7).
 */
private static final String ourDateFormat = "yyyy-M-d";
/**
 * Our timestamp (day plus time-of-day) format (e.g. 2020-1-7 13:45:00).
 */
private static final String ourTimestampFormat = "yyyy-M-d HH:mm:ss";
/**
 * Tests one day column and two time columns that share the same day column:
 * the rater should pair the date column with the first time column as the
 * composite up time and with the second time column as the composite down
 * time.
 *
 * @throws ParseException Bad parse.
 */
@Test
public void testDayTimeUpTimeDown() throws ParseException
{
    DateRater rater = new DateRater();
    // one date column (index 1) plus three candidate time columns
    PotentialColumn dayColumn = new PotentialColumn();
    dayColumn.setColumnIndex(1);
    PotentialColumn upTimeColumn = new PotentialColumn();
    upTimeColumn.setColumnIndex(2);
    PotentialColumn downTimeColumn = new PotentialColumn();
    downTimeColumn.setColumnIndex(4);
    PotentialColumn spareTimeColumn = new PotentialColumn();
    spareTimeColumn.setColumnIndex(3);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(dayColumn.getColumnIndex(), dayColumn);
    potentials.put(upTimeColumn.getColumnIndex(), upTimeColumn);
    potentials.put(downTimeColumn.getColumnIndex(), downTimeColumn);
    potentials.put(spareTimeColumn.getColumnIndex(), spareTimeColumn);
    DateFormat dayFormat = new DateFormat();
    dayFormat.setType(Type.DATE);
    dayFormat.setSdf(ourDateFormat);
    DateFormat upTimeFormat = new DateFormat();
    upTimeFormat.setType(Type.TIME);
    upTimeFormat.setSdf("'z'HHmmss");
    DateFormat downTimeFormat = new DateFormat();
    downTimeFormat.setType(Type.TIME);
    downTimeFormat.setSdf("'z'HHmmss");
    List<List<String>> rows = DateDataGenerator.generateDayTimeUpTimeDown(dayFormat, upTimeFormat, downTimeFormat);
    // register each format as fully successful on its column
    SuccessfulFormat fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(dayFormat);
    dayColumn.getFormats().clear();
    dayColumn.getFormats().put(dayFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(upTimeFormat);
    upTimeColumn.getFormats().clear();
    upTimeColumn.getFormats().put(upTimeFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(downTimeFormat);
    downTimeColumn.getFormats().clear();
    downTimeColumn.getFormats().put(downTimeFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(upTimeFormat);
    spareTimeColumn.getFormats().clear();
    spareTimeColumn.getFormats().put(upTimeFormat.getSdf(), fmt);
    ValueWithConfidence<DateColumnResults> result = rater.rateAndPick(potentials, rows);
    DateColumnResults picked = result.getValue();
    // up time: composite of the date column (1) and the first time column (2)
    assertEquals(Type.TIMESTAMP, picked.getUpTimeColumn().getDateColumnType());
    assertEquals(dayFormat.getSdf(), picked.getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, picked.getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(upTimeFormat.getSdf(), picked.getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(2, picked.getUpTimeColumn().getSecondaryColumnIndex());
    // down time: composite of the date column (1) and the second time column (4)
    assertEquals(Type.TIMESTAMP, picked.getDownTimeColumn().getDateColumnType());
    assertEquals(dayFormat.getSdf(), picked.getDownTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, picked.getDownTimeColumn().getPrimaryColumnIndex());
    assertEquals(downTimeFormat.getSdf(), picked.getDownTimeColumn().getSecondaryColumnFormat());
    assertEquals(4, picked.getDownTimeColumn().getSecondaryColumnIndex());
}
/**
 * Tests all known formats for two time columns.
 *
 * NOTE(review): in this method both formats are ourTimestampFormat, which
 * always contains "y", so only the first branch below ever executes; the
 * else-if branches look like leftovers from a loop over multiple formats —
 * confirm before removing.
 */
@Test
public void testDoubleDate()
{
DateRater rater = new DateRater();
// two independent timestamp columns at indices 8 and 9
PotentialColumn dateColumn1 = new PotentialColumn();
dateColumn1.setColumnIndex(8);
PotentialColumn dateColumn2 = new PotentialColumn();
dateColumn2.setColumnIndex(9);
Map<Integer, PotentialColumn> potentials = New.map();
potentials.put(dateColumn1.getColumnIndex(), dateColumn1);
potentials.put(dateColumn2.getColumnIndex(), dateColumn2);
DateFormat dateFormat1 = new DateFormat();
dateFormat1.setSdf(ourTimestampFormat);
DateFormat dateFormat2 = new DateFormat();
dateFormat2.setSdf(ourTimestampFormat);
List<List<String>> data = DateDataGenerator.generateDoubleDate(dateFormat1, dateFormat2);
// register each format as fully successful on its column
SuccessfulFormat successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(dateFormat1);
dateColumn1.getFormats().clear();
dateColumn1.getFormats().put(dateFormat1.getSdf(), successfulFormat);
successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(dateFormat2);
dateColumn2.getFormats().clear();
dateColumn2.getFormats().put(dateFormat2.getSdf(), successfulFormat);
ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
// both formats carry a year: expect a full up/down timestamp pair
if (dateFormat1.getSdf().contains("y") && dateFormat2.getSdf().contains("y"))
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(8, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
assertEquals(Type.TIMESTAMP, value.getValue().getDownTimeColumn().getDateColumnType());
assertEquals(dateFormat2.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
assertEquals(9, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());
}
// only the second format has a year: only an up time should be picked
else if (!dateFormat1.getSdf().contains("y") && dateFormat2.getSdf().contains("y"))
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat2.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(9, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
assertNull(value.getValue().getDownTimeColumn());
}
// only the first format has a year: mirror of the previous case
else if (!dateFormat2.getSdf().contains("y") && dateFormat1.getSdf().contains("y"))
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(8, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
assertNull(value.getValue().getDownTimeColumn());
}
// neither has a year: nothing should be picked
else
{
assertNull(value.getValue().getUpTimeColumn());
}
}
/**
 * Tests two time columns with a single up time and a composite down time.
 *
 * NOTE(review): dateFormat1 is ourTimestampFormat, which always contains
 * "y", so the else branch below is currently dead — confirm before removing.
 */
@Test
public void testDoubleDate3()
{
DateRater rater = new DateRater();
// column 0: full timestamp; columns 1 and 2: separate date + time pair
PotentialColumn dateColumn1 = new PotentialColumn();
dateColumn1.setColumnIndex(0);
PotentialColumn dateColumn2 = new PotentialColumn();
dateColumn2.setColumnIndex(1);
PotentialColumn timeColumn2 = new PotentialColumn();
timeColumn2.setColumnIndex(2);
Map<Integer, PotentialColumn> potentials = New.map();
potentials.put(dateColumn1.getColumnIndex(), dateColumn1);
potentials.put(dateColumn2.getColumnIndex(), dateColumn2);
potentials.put(timeColumn2.getColumnIndex(), timeColumn2);
DateFormat dateFormat1 = new DateFormat();
dateFormat1.setSdf(ourTimestampFormat);
DateFormat dateFormat2 = new DateFormat();
dateFormat2.setSdf(ourDateFormat);
dateFormat2.setType(Type.DATE);
DateFormat timeFormat2 = new DateFormat();
timeFormat2.setSdf("HH:mm:ss");
timeFormat2.setType(Type.TIME);
List<List<String>> data = DateDataGenerator.generateDoubleDate(dateFormat1, dateFormat2, timeFormat2);
// register each format as fully successful on its column
SuccessfulFormat successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(dateFormat1);
dateColumn1.getFormats().clear();
dateColumn1.getFormats().put(dateFormat1.getSdf(), successfulFormat);
successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(dateFormat2);
dateColumn2.getFormats().clear();
dateColumn2.getFormats().put(dateFormat2.getSdf(), successfulFormat);
successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(timeFormat2);
timeColumn2.getFormats().clear();
timeColumn2.getFormats().put(timeFormat2.getSdf(), successfulFormat);
ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
// timestamp column carries a year: single up time plus composite down time
if (dateFormat1.getSdf().contains("y"))
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
assertEquals(Type.TIMESTAMP, value.getValue().getDownTimeColumn().getDateColumnType());
assertEquals(dateFormat2.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
assertEquals(1, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
assertEquals(timeFormat2.getSdf(), value.getValue().getDownTimeColumn().getSecondaryColumnFormat());
assertEquals(2, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());
}
// no year on the timestamp column: the date+time pair becomes the up time
else
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat2.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(timeFormat2.getSdf(), value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
assertEquals(2, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
}
}
/**
 * Tests two time columns with two times each of which are represented by
 * two individual date and time columns: the rater should pair columns
 * (1, 4) into the composite up time and (2, 5) into the composite down
 * time.
 */
@Test
public void testDoubleDate4()
{
    DateRater rater = new DateRater();
    PotentialColumn upDateColumn = new PotentialColumn();
    upDateColumn.setColumnIndex(1);
    PotentialColumn upTimeColumn = new PotentialColumn();
    upTimeColumn.setColumnIndex(4);
    PotentialColumn downDateColumn = new PotentialColumn();
    downDateColumn.setColumnIndex(2);
    PotentialColumn downTimeColumn = new PotentialColumn();
    downTimeColumn.setColumnIndex(5);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(upDateColumn.getColumnIndex(), upDateColumn);
    potentials.put(upTimeColumn.getColumnIndex(), upTimeColumn);
    potentials.put(downDateColumn.getColumnIndex(), downDateColumn);
    potentials.put(downTimeColumn.getColumnIndex(), downTimeColumn);
    DateFormat upDateFormat = new DateFormat(Type.DATE, ourDateFormat);
    DateFormat upTimeFormat = new DateFormat(Type.TIME, "HH:mm:ss");
    DateFormat downDateFormat = new DateFormat(Type.DATE, ourDateFormat);
    DateFormat downTimeFormat = new DateFormat(Type.TIME, "HH:mm:ss");
    List<List<String>> rows = DateDataGenerator.generateDoubleDate(upDateFormat, upTimeFormat, downDateFormat, downTimeFormat);
    // register each format as fully successful on its own column
    SuccessfulFormat fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(upDateFormat);
    upDateColumn.getFormats().clear();
    upDateColumn.getFormats().put(upDateFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(upTimeFormat);
    upTimeColumn.getFormats().clear();
    upTimeColumn.getFormats().put(upTimeFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(downDateFormat);
    downDateColumn.getFormats().clear();
    downDateColumn.getFormats().put(downDateFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(downTimeFormat);
    downTimeColumn.getFormats().clear();
    downTimeColumn.getFormats().put(downTimeFormat.getSdf(), fmt);
    ValueWithConfidence<DateColumnResults> result = rater.rateAndPick(potentials, rows);
    DateColumnResults picked = result.getValue();
    // up time is the composite of columns 1 (date) and 4 (time)
    assertEquals(Type.TIMESTAMP, picked.getUpTimeColumn().getDateColumnType());
    assertEquals(upDateFormat.getSdf(), picked.getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, picked.getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(upTimeFormat.getSdf(), picked.getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(4, picked.getUpTimeColumn().getSecondaryColumnIndex());
    // down time is the composite of columns 2 (date) and 5 (time)
    assertEquals(Type.TIMESTAMP, picked.getDownTimeColumn().getDateColumnType());
    assertEquals(downDateFormat.getSdf(), picked.getDownTimeColumn().getPrimaryColumnFormat());
    assertEquals(2, picked.getDownTimeColumn().getPrimaryColumnIndex());
    assertEquals(downTimeFormat.getSdf(), picked.getDownTimeColumn().getSecondaryColumnFormat());
    assertEquals(5, picked.getDownTimeColumn().getSecondaryColumnIndex());
}
/**
 * Tests the case where mock data with a time column and a sparse (20%
 * populate) end time column whose end times, either don't exist, they match
 * the time column or they are later than the time column.
 *
 * NOTE(review): both formats are ourTimestampFormat, which always contains
 * "y", so only the first branch below ever executes — the else-if branches
 * appear to be leftovers from a loop over multiple formats.
 */
@Test
public void testDownTimeTime()
{
DateRater rater = new DateRater();
// full timestamp column at index 4, sparse end-time column at index 9
PotentialColumn dateColumn1 = new PotentialColumn();
dateColumn1.setColumnIndex(4);
PotentialColumn dateColumn2 = new PotentialColumn();
dateColumn2.setColumnIndex(9);
Map<Integer, PotentialColumn> potentials = New.map();
potentials.put(dateColumn1.getColumnIndex(), dateColumn1);
potentials.put(dateColumn2.getColumnIndex(), dateColumn2);
DateFormat dateFormat1 = new DateFormat(Type.TIMESTAMP, ourTimestampFormat);
DateFormat dateFormat2 = new DateFormat(Type.TIMESTAMP, ourTimestampFormat);
List<List<String>> data = DateDataGenerator.generateDownTimeTime(dateFormat1, dateFormat2);
// register each format as fully successful on its column
SuccessfulFormat successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(dateFormat1);
dateColumn1.getFormats().clear();
dateColumn1.getFormats().put(dateFormat1.getSdf(), successfulFormat);
successfulFormat = new SuccessfulFormat();
successfulFormat.setNumberOfSuccesses(data.size());
successfulFormat.setFormat(dateFormat2);
dateColumn2.getFormats().clear();
dateColumn2.getFormats().put(dateFormat2.getSdf(), successfulFormat);
ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
// both formats carry a year: expect a full up/down timestamp pair
if (dateFormat1.getSdf().contains("y") && dateFormat2.getSdf().contains("y"))
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(4, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
assertEquals(Type.TIMESTAMP, value.getValue().getDownTimeColumn().getDateColumnType());
assertEquals(dateFormat2.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
assertEquals(9, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());
}
// only the sparse column has a year: nothing usable should be picked
else if (!dateFormat1.getSdf().contains("y") && dateFormat2.getSdf().contains("y"))
{
assertNull("Fail for format " + dateFormat1.getSdf() + " and " + dateFormat2.getSdf(),
value.getValue().getUpTimeColumn());
}
// only the main column has a year: an up time but no down time
else if (!dateFormat2.getSdf().contains("y") && dateFormat1.getSdf().contains("y"))
{
assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
assertEquals(4, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
assertNull(value.getValue().getDownTimeColumn());
}
// neither has a year: nothing should be picked
else
{
assertNull(value.getValue().getUpTimeColumn());
}
}
/**
 * Tests the HHmmss time format combined with a yyyyMMdd date column: the
 * two columns should merge into a single composite up-time timestamp.
 */
@Test
public void testHHmmss()
{
    // time column (index 9) in HHmmss and date column (index 8) in yyyyMMdd
    DateFormat timeFormat = new DateFormat();
    timeFormat.setSdf("HHmmss");
    timeFormat.setType(Type.TIME);
    DateFormat dayFormat = new DateFormat();
    dayFormat.setSdf("yyyyMMdd");
    dayFormat.setType(Type.DATE);
    DateRater rater = new DateRater();
    PotentialColumn timeColumn = new PotentialColumn();
    timeColumn.setColumnIndex(9);
    PotentialColumn dayColumn = new PotentialColumn();
    dayColumn.setColumnIndex(8);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(timeColumn.getColumnIndex(), timeColumn);
    potentials.put(dayColumn.getColumnIndex(), dayColumn);
    List<List<String>> rows = DateDataGenerator.generateHHmmss();
    // register each format as fully successful on its column
    SuccessfulFormat fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(timeFormat);
    timeColumn.getFormats().clear();
    timeColumn.getFormats().put(timeFormat.getSdf(), fmt);
    fmt = new SuccessfulFormat();
    fmt.setNumberOfSuccesses(rows.size());
    fmt.setFormat(dayFormat);
    dayColumn.getFormats().clear();
    dayColumn.getFormats().put(dayFormat.getSdf(), fmt);
    ValueWithConfidence<DateColumnResults> result = rater.rateAndPick(potentials, rows);
    DateColumnResults picked = result.getValue();
    // expect a composite timestamp: date column primary, time column secondary
    assertEquals(Type.TIMESTAMP, picked.getUpTimeColumn().getDateColumnType());
    assertEquals(dayFormat.getSdf(), picked.getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(8, picked.getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(timeFormat.getSdf(), picked.getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(9, picked.getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(picked.getDownTimeColumn());
}
/**
 * Tests two time columns with end time first.
 *
 * <p>Columns 4 and 7 each carry a full timestamp generated in reverse order
 * (end time before start time). The expected result depends on whether the
 * generated patterns contain a year ("y"): year-less formats are expected to
 * be rejected for the corresponding column.</p>
 */
@Test
public void testReverseDoubleDate2()
{
    DateRater rater = new DateRater();
    PotentialColumn dateColumn1 = new PotentialColumn();
    dateColumn1.setColumnIndex(4);
    PotentialColumn dateColumn2 = new PotentialColumn();
    dateColumn2.setColumnIndex(7);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(dateColumn1.getColumnIndex(), dateColumn1);
    potentials.put(dateColumn2.getColumnIndex(), dateColumn2);
    DateFormat dateFormat1 = new DateFormat(Type.TIMESTAMP, ourTimestampFormat);
    DateFormat dateFormat2 = new DateFormat(Type.TIMESTAMP, ourTimestampFormat);
    List<List<String>> data = DateDataGenerator.generateReverseDoubleDate(dateFormat1, dateFormat2);
    // Mark every row as successfully parsed for both columns.
    SuccessfulFormat successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(dateFormat1);
    dateColumn1.getFormats().clear();
    dateColumn1.getFormats().put(dateFormat1.getSdf(), successfulFormat);
    successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(dateFormat2);
    dateColumn2.getFormats().clear();
    dateColumn2.getFormats().put(dateFormat2.getSdf(), successfulFormat);
    ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
    if (dateFormat1.getSdf().contains("y") && dateFormat2.getSdf().contains("y"))
    {
        // Both columns usable: column 4 is the (later) down time, column 7 the up time.
        assertEquals(Type.TIMESTAMP, value.getValue().getDownTimeColumn().getDateColumnType());
        assertEquals(dateFormat1.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
        assertEquals(4, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
        assertEquals(-1, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());
        assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
        assertEquals(dateFormat2.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
        assertEquals(7, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
        assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    }
    else if (!dateFormat1.getSdf().contains("y") && dateFormat2.getSdf().contains("y"))
    {
        // Only column 7 has a year, so it alone is picked (as up time).
        assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
        assertEquals(dateFormat2.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
        assertEquals(7, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
        assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
        assertNull(value.getValue().getDownTimeColumn());
    }
    else if (!dateFormat2.getSdf().contains("y") && dateFormat1.getSdf().contains("y"))
    {
        // Only column 4 has a year, so it alone is picked (as up time).
        assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
        assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
        assertEquals(4, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
        assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
        assertNull(value.getValue().getDownTimeColumn());
    }
    else
    {
        // Neither format carries a year: nothing should be selected.
        assertNull(value.getValue().getUpTimeColumn());
    }
}
/**
 * Tests two time columns with a composite up time and a single column for
 * down time.
 *
 * <p>Columns 3 (date) and 4 (time) together form the up time; column 7 is a
 * single-timestamp down time. If the column-7 format lacks a year it is
 * expected to be dropped while the composite up time is still selected.</p>
 */
@Test
public void testReverseDoubleDate3()
{
    DateRater rater = new DateRater();
    PotentialColumn dateColumn1 = new PotentialColumn();
    dateColumn1.setColumnIndex(3);
    PotentialColumn timeColumn1 = new PotentialColumn();
    timeColumn1.setColumnIndex(4);
    PotentialColumn dateColumn2 = new PotentialColumn();
    dateColumn2.setColumnIndex(7);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(dateColumn1.getColumnIndex(), dateColumn1);
    potentials.put(timeColumn1.getColumnIndex(), timeColumn1);
    potentials.put(dateColumn2.getColumnIndex(), dateColumn2);
    DateFormat dateFormat1 = new DateFormat(Type.DATE, ourDateFormat);
    DateFormat timeFormat1 = new DateFormat(Type.TIME, "HH:mm:ss");
    DateFormat dateFormat2 = new DateFormat(Type.TIMESTAMP, ourTimestampFormat);
    List<List<String>> data = DateDataGenerator.generateReverseDoubleDate(dateFormat1, timeFormat1, dateFormat2);
    // Mark every row as successfully parsed for all three columns.
    SuccessfulFormat successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(dateFormat1);
    dateColumn1.getFormats().clear();
    dateColumn1.getFormats().put(dateFormat1.getSdf(), successfulFormat);
    successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(timeFormat1);
    timeColumn1.getFormats().clear();
    timeColumn1.getFormats().put(timeFormat1.getSdf(), successfulFormat);
    successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(dateFormat2);
    dateColumn2.getFormats().clear();
    dateColumn2.getFormats().put(dateFormat2.getSdf(), successfulFormat);
    ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
    if (dateFormat2.getSdf().contains("y"))
    {
        // Composite up time from columns 3+4, down time from column 7.
        assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
        assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
        assertEquals(3, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
        assertEquals(timeFormat1.getSdf(), value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
        assertEquals(4, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
        assertEquals(Type.TIMESTAMP, value.getValue().getDownTimeColumn().getDateColumnType());
        assertEquals(dateFormat2.getSdf(), value.getValue().getDownTimeColumn().getPrimaryColumnFormat());
        assertEquals(7, value.getValue().getDownTimeColumn().getPrimaryColumnIndex());
        assertEquals(-1, value.getValue().getDownTimeColumn().getSecondaryColumnIndex());
    }
    else
    {
        // Year-less column 7 is ignored; only the composite up time remains.
        assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
        assertEquals(dateFormat1.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
        assertEquals(3, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
        assertEquals(timeFormat1.getSdf(), value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
        assertEquals(4, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    }
}
/**
 * Tests a single date consisting of two columns.
 *
 * <p>A DATE column (index 0) and a TIME column (index 1) should be merged
 * by the rater into one composite TIMESTAMP up-time result, with no
 * down-time column.</p>
 */
@Test
public void testSingleCompoundDate()
{
    DateFormat datePart = new DateFormat(Type.DATE, ourDateFormat);
    DateFormat timePart = new DateFormat(Type.TIME, "HH:mm:ss");
    List<List<String>> rows = DateDataGenerator.generateSingleCompoundDate(datePart, timePart);

    // Date half of the compound value lives in column 0.
    PotentialColumn datePotential = new PotentialColumn();
    datePotential.setColumnIndex(0);
    SuccessfulFormat dateSuccess = new SuccessfulFormat();
    dateSuccess.setNumberOfSuccesses(rows.size());
    dateSuccess.setFormat(datePart);
    datePotential.getFormats().clear();
    datePotential.getFormats().put(datePart.getSdf(), dateSuccess);

    // Time half lives in column 1.
    PotentialColumn timePotential = new PotentialColumn();
    timePotential.setColumnIndex(1);
    SuccessfulFormat timeSuccess = new SuccessfulFormat();
    timeSuccess.setNumberOfSuccesses(rows.size());
    timeSuccess.setFormat(timePart);
    timePotential.getFormats().clear();
    timePotential.getFormats().put(timePart.getSdf(), timeSuccess);

    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(datePotential.getColumnIndex(), datePotential);
    potentials.put(timePotential.getColumnIndex(), timePotential);

    ValueWithConfidence<DateColumnResults> result = new DateRater().rateAndPick(potentials, rows);
    DateColumnResults picked = result.getValue();
    assertEquals(Type.TIMESTAMP, picked.getUpTimeColumn().getDateColumnType());
    assertEquals(datePart.getSdf(), picked.getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(0, picked.getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(timePart.getSdf(), picked.getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(1, picked.getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(picked.getDownTimeColumn());
}
/**
 * Tests all known formats for a single date column.
 *
 * <p>Column 1 carries one randomly generated timestamp format. If the
 * pattern contains a year it must be selected as the up-time column;
 * year-less patterns must be rejected entirely.</p>
 */
@Test
public void testSingleDate()
{
    DateRater rater = new DateRater();
    PotentialColumn column = new PotentialColumn();
    column.setColumnIndex(1);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(column.getColumnIndex(), column);
    DateFormat format = new DateFormat(Type.TIMESTAMP, ourTimestampFormat);
    List<List<String>> data = DateDataGenerator.generateSingleDate(format);
    // Every generated row parses successfully under the chosen format.
    SuccessfulFormat successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(format);
    column.getFormats().clear();
    column.getFormats().put(format.getSdf(), successfulFormat);
    ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
    if (format.getSdf().contains("y"))
    {
        assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
        assertEquals(format.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
        assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
        assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
        assertNull(value.getValue().getDownTimeColumn());
    }
    else
    {
        // Formats without a year are not usable as a date column.
        assertNull(value.getValue().getUpTimeColumn());
    }
}
/**
 * Tests a Single date with a very long decimal seconds string.
 */
@Test
public void testSingleDateLotsOfDecimalSeconds()
{
    // Timestamp pattern with millisecond precision; the generated data
    // carries extra decimal-second digits beyond the pattern.
    DateFormat tsFormat = new DateFormat();
    tsFormat.setSdf("yyyy:MM:d::HH:mm:ss.SSS");
    tsFormat.setType(Type.TIMESTAMP);

    List<List<String>> rows = DateDataGenerator.generateSingleDateLotsOfDecimalSeconds();
    SuccessfulFormat success = new SuccessfulFormat();
    success.setNumberOfSuccesses(rows.size());
    success.setFormat(tsFormat);

    PotentialColumn potential = new PotentialColumn();
    potential.setColumnIndex(1);
    potential.getFormats().clear();
    potential.getFormats().put(tsFormat.getSdf(), success);

    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(potential.getColumnIndex(), potential);

    ValueWithConfidence<DateColumnResults> result = new DateRater().rateAndPick(potentials, rows);
    DateColumnResults picked = result.getValue();
    assertEquals(Type.TIMESTAMP, picked.getUpTimeColumn().getDateColumnType());
    assertEquals(tsFormat.getSdf(), picked.getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, picked.getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(-1, picked.getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(picked.getDownTimeColumn());
}
/**
 * Tests the yyyyMMdd format.
 *
 * <p>A single DATE-typed column (index 1) should be picked as the up-time
 * column with type DATE and no secondary column.</p>
 */
@Test
public void testSingleDateyyyyMMdd()
{
    DateFormat format = new DateFormat();
    format.setSdf("yyyyMMdd");
    format.setType(Type.DATE);
    DateRater rater = new DateRater();
    PotentialColumn column = new PotentialColumn();
    column.setColumnIndex(1);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(column.getColumnIndex(), column);
    List<List<String>> data = DateDataGenerator.generateSingleDateyyyyMMdd();
    // Every generated row parses successfully under the format.
    SuccessfulFormat successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(format);
    column.getFormats().clear();
    column.getFormats().put(format.getSdf(), successfulFormat);
    ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
    assertEquals(Type.DATE, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals(format.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(1, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());
}
/**
 * Tests the yyyy-MM-dd HH:mm:ss format.
 *
 * <p>A single TIMESTAMP column (index 0) using the shared
 * {@code DateTimeFormats.DATE_TIME_FORMAT} pattern should be picked as the
 * up-time column with no secondary column.</p>
 */
@Test
public void testYearMonthDayTime()
{
    DateFormat format = new DateFormat();
    format.setSdf(DateTimeFormats.DATE_TIME_FORMAT);
    format.setType(Type.TIMESTAMP);
    DateRater rater = new DateRater();
    PotentialColumn column = new PotentialColumn();
    column.setColumnIndex(0);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(column.getColumnIndex(), column);
    List<List<String>> data = DateDataGenerator.generateYearMonthDayTime();
    // Every generated row parses successfully under the format.
    SuccessfulFormat successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(format);
    column.getFormats().clear();
    column.getFormats().put(format.getSdf(), successfulFormat);
    ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals(format.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(-1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());
}
/**
 * Tests the zHHmmss.SS format.
 *
 * <p>Column 1 holds a time with a literal 'z' prefix and fractional
 * seconds; column 0 holds a yyyyMMdd date. The rater should combine them
 * into one composite TIMESTAMP up-time column (date primary, time
 * secondary) and leave down time unset.</p>
 */
@Test
public void testZ()
{
    // The 'z' is quoted in the SDF pattern so it is matched literally,
    // not as a time-zone token.
    DateFormat format = new DateFormat();
    format.setSdf("'z'HHmmss.SS");
    format.setType(Type.TIME);
    DateFormat dateFormat = new DateFormat();
    dateFormat.setSdf("yyyyMMdd");
    dateFormat.setType(Type.DATE);
    DateRater rater = new DateRater();
    PotentialColumn column = new PotentialColumn();
    column.setColumnIndex(1);
    PotentialColumn dateColumn = new PotentialColumn();
    dateColumn.setColumnIndex(0);
    Map<Integer, PotentialColumn> potentials = New.map();
    potentials.put(column.getColumnIndex(), column);
    potentials.put(dateColumn.getColumnIndex(), dateColumn);
    List<List<String>> data = DateDataGenerator.generateSingleCompoundDate(dateFormat, format);
    // Record a full parse-success count for each column's format.
    SuccessfulFormat successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(format);
    column.getFormats().clear();
    column.getFormats().put(format.getSdf(), successfulFormat);
    successfulFormat = new SuccessfulFormat();
    successfulFormat.setNumberOfSuccesses(data.size());
    successfulFormat.setFormat(dateFormat);
    dateColumn.getFormats().clear();
    dateColumn.getFormats().put(dateFormat.getSdf(), successfulFormat);
    ValueWithConfidence<DateColumnResults> value = rater.rateAndPick(potentials, data);
    assertEquals(Type.TIMESTAMP, value.getValue().getUpTimeColumn().getDateColumnType());
    assertEquals(dateFormat.getSdf(), value.getValue().getUpTimeColumn().getPrimaryColumnFormat());
    assertEquals(0, value.getValue().getUpTimeColumn().getPrimaryColumnIndex());
    assertEquals(format.getSdf(), value.getValue().getUpTimeColumn().getSecondaryColumnFormat());
    assertEquals(1, value.getValue().getUpTimeColumn().getSecondaryColumnIndex());
    assertNull(value.getValue().getDownTimeColumn());
}
}
|
#!/bin/bash
#
# Copyright 2017 the Velero contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
# --verify only reports diffs (-d); the default mode rewrites files in place (-w).
if [[ ${1:-} == '--verify' ]]; then
    # List file diffs that need formatting updates
    MODE='-d'
    ACTION='Verifying'
else
    # Write formatting updates to files
    MODE='-w'
    ACTION='Updating'
fi
if ! command -v goimports > /dev/null; then
    echo 'goimports is missing - please run "go get golang.org/x/tools/cmd/goimports"'
    exit 1
fi
# All Go sources except vendored, generated, mock, and site files.
files="$(find . -type f -name '*.go' -not -path './.go/*' -not -path './vendor/*' -not -path './site/*' -not -path '*/generated/*' -not -name 'zz_generated*' -not -path '*/mocks/*')"
# Pass 1: gofmt with simplification (-s). Non-empty output means a file
# needs (or received) changes.
echo "${ACTION} gofmt"
for file in ${files}; do
    output=$(gofmt "${MODE}" -s "${file}")
    if [[ -n "${output}" ]]; then
        VERIFY_FMT_FAILED=1
        echo "${output}"
    fi
done
if [[ -n "${VERIFY_FMT_FAILED:-}" ]]; then
    echo "${ACTION} gofmt - failed! Please run 'make update'."
else
    echo "${ACTION} gofmt - done!"
fi
# Pass 2: goimports, grouping the project's own packages separately (-local).
echo "${ACTION} goimports"
for file in ${files}; do
    output=$(goimports "${MODE}" -local github.com/adi-bhardwaj/velero-modified "${file}")
    if [[ -n "${output}" ]]; then
        VERIFY_IMPORTS_FAILED=1
        echo "${output}"
    fi
done
if [[ -n "${VERIFY_IMPORTS_FAILED:-}" ]]; then
    echo "${ACTION} goimports - failed! Please run 'make update'."
else
    echo "${ACTION} goimports - done!"
fi
# Fail the build if either pass found problems.
if [[ -n "${VERIFY_FMT_FAILED:-}" || -n "${VERIFY_IMPORTS_FAILED:-}" ]]; then
    exit 1
fi
|
#!/usr/bin/env bash
set -euo pipefail
# Run from the repository root regardless of where the script is invoked.
DIR="$(cd "$(dirname "${0}")/.." && pwd)"
cd "${DIR}"
# check_env NAME VALUE — assert that environment variable NAME equals VALUE.
check_env() {
  echo "Checking that ${1}=${2}"
  if [ "${!1}" != "${2}" ]; then
    echo "Expected '${1}' to be '${2}' but was '${!1}'" >&2
    exit 1
  fi
}
# check_which /abs/path/to/tool — assert the tool resolves to that exact path.
check_which() {
  check_command_output "${1}" command -v "$(basename "${1}")"
}
# check_command_output EXPECTED CMD [ARGS...] — assert CMD prints exactly EXPECTED.
check_command_output() {
  echo "Checking that '${*:2}' results in '${1}'"
  command_output="$("${@:2}")"
  if [ "${command_output}" != "${1}" ]; then
    echo "Expected: '${1}' Got: '${command_output}'" >&2
    exit 1
  fi
}
# check_command_output_file FILE CMD [ARGS...] — assert that the sorted
# output of CMD matches the sorted contents of FILE; exits non-zero on diff.
check_command_output_file() {
  tmp_file1="$(mktemp)"
  tmp_file2="$(mktemp)"
  # A single EXIT trap must cover both files: the original registered two
  # `trap ... EXIT` handlers, and the second replaced the first, leaking
  # tmp_file1 on every run.
  trap 'rm -rf "${tmp_file1}" "${tmp_file2}"' EXIT
  echo "Checking that '${*:2}' results in the contents of '${1}'"
  sort < "${1}" > "${tmp_file1}"
  "${@:2}" | sort > "${tmp_file2}"
  if ! diff "${tmp_file1}" "${tmp_file2}"; then
    echo "Diff detected" >&2
    exit 1
  fi
}
# check_command_success CMD [ARGS...] — assert CMD exits with status 0.
check_command_success() {
  echo "Checking that '${*}' is successful"
  if ! "${@}"; then
    echo "Expected '${*}' to be successful but had error" >&2
    exit 1
  fi
}
# check_dir_not_exists DIR — assert that directory DIR is absent.
check_dir_not_exists() {
  echo "Checking that '${1}' does not exist"
  if [ -d "${1}" ]; then
    echo "Expected '${1}' to not exist" >&2
    exit 1
  fi
}
# Pin the expected tool versions baked into the image as env variables.
check_env GOGO_PROTOBUF_VERSION 1.2.1
check_env GOLANG_PROTOBUF_VERSION 1.3.1
check_env GRPC_VERSION 1.19.1
check_env GRPC_GATEWAY_VERSION 1.8.5
check_env GRPC_WEB_VERSION 1.0.4
check_env PROTOBUF_VERSION 3.6.1
check_env TWIRP_VERSION 5.7.0
check_env YARPC_VERSION 1.37.3
check_env PROTOTOOL_PROTOC_BIN_PATH /usr/bin/protoc
check_env PROTOTOOL_PROTOC_WKT_PATH /usr/include
# Verify protoc itself and the installed well-known types.
check_command_output "libprotoc 3.6.1" protoc --version
check_command_output_file etc/wkt.txt find /usr/include -type f
# Every expected compiler/plugin binary must resolve to its canonical path.
check_which /usr/bin/protoc
check_which /usr/bin/grpc_cpp_plugin
check_which /usr/bin/grpc_csharp_plugin
check_which /usr/bin/grpc_node_plugin
check_which /usr/bin/grpc_objective_c_plugin
check_which /usr/bin/grpc_php_plugin
check_which /usr/bin/grpc_python_plugin
check_which /usr/bin/grpc_ruby_plugin
check_which /usr/local/bin/protoc-gen-go
check_which /usr/local/bin/protoc-gen-gofast
check_which /usr/local/bin/protoc-gen-gogo
check_which /usr/local/bin/protoc-gen-gogofast
check_which /usr/local/bin/protoc-gen-gogofaster
check_which /usr/local/bin/protoc-gen-gogoslick
check_which /usr/local/bin/protoc-gen-grpc-gateway
check_which /usr/local/bin/protoc-gen-grpc-web
check_which /usr/local/bin/protoc-gen-swagger
check_which /usr/local/bin/protoc-gen-twirp
check_which /usr/local/bin/protoc-gen-twirp_python
check_which /usr/local/bin/protoc-gen-yarpc-go
check_which /usr/local/bin/prototool
# Smoke-test protoc and the full prototool workflow on the bundled protos.
check_command_success protoc -o /dev/null $(find proto -name '*.proto')
check_command_success rm -rf gen
check_command_success prototool compile proto
check_command_success prototool lint proto
check_command_success prototool format -l proto
check_command_success prototool generate proto
check_command_success rm -rf gen
# The image build must not leave a package cache behind.
check_dir_not_exists /root/.cache
|
#! /bin/sh
# Drop files that should not count toward line totals (reads stdin).
filterfilelist() {
    grep -vE '(framebuffer_font|core/queue.h)'
}
# Print per-file line counts in ascending order for the given argument.
# If the argument is a directory its source files are listed first; the
# "total" summary line is tagged with the original argument.
lines() {
    target=$1
    if [ -d "${target}" ]; then
        target=$(makefilelist "${target}")
    fi
    # Word splitting of the list is intentional — keep $target unquoted.
    wc -l $target | sort -n | sed 's;total$;'"$1"' - &;'
}
# Emit a single-line, space-separated list of .c/.h/.S files under a
# directory, minus the excluded paths.
makefilelist() {
    directory=$1
    find "${directory}" -type f -name '*.[chS]' | filterfilelist | xargs
}
# Entry point: with no arguments count the current tree, otherwise count
# each argument (file or directory) separately.
if [ $# -eq 0 ]; then
    lines .
else
    for arg in "$@"; do
        lines $arg
    done
fi
|
package main
import (
"encoding/json"
"gitee.com/ddkwork/libraryGo/log"
"gitee.com/ddkwork/libraryGo/net/clientAndServer/server"
"net"
)
// Login is the payload decoded from an incoming packet body.
//
// NOTE(review): `head` is unexported, so encoding/json ignores it during
// Unmarshal, and it is never assigned in this file — confirm it is needed.
type Login struct {
	head     string
	Name     string
	Password string
}
// main starts a TCP server on localhost:9999 and echoes a reply for every
// packet whose body decodes as a Login JSON object.
func main() {
	s := server.New()
	if !s.ListenAndServer(net.JoinHostPort("localhost", "9999")) {
		return
	}
	// Receive loop: each packet is a fixed-length type header followed by
	// a JSON body.
	go func() {
		for {
			receive := s.Receive()
			// Header length is the byte length of the literal "type1" (5).
			packetHeadLen := len("type1")
			head := receive.Bytes()[:packetHeadLen]
			body := receive.Bytes()[packetHeadLen:]
			log.Default.Json(string(head), string(body))
			l := new(Login)
			if err := json.Unmarshal(body, l); err == nil {
				log.Default.Struct(l)
				// NOTE(review): reply text has typos ("replay", "i am
				// receive") — confirm clients do not match it before fixing.
				s.Replay("server replay: i am receive your message")
			}
		}
	}()
	// Block forever; the goroutine above does all the work.
	select {}
}
|
# Create (or refresh the timestamp of) the lock file, then launch the
# vehicle process from the current directory.
touch files.lock
./startVehicle
|
#!/bin/sh
# Usage guard: a pull-policy argument is mandatory.
# NOTE(review): "SINTAX" is a typo for "SYNTAX" in the usage message.
if [ "$#" = "0" ]; then
    echo "SINTAX: $0 <POLICY=IfNotPresent|Always>"
    exit 1
fi
# Desired imagePullPolicy value applied to every manifest below.
POLICY=$1
# update_policy FILE — rewrite the imagePullPolicy value in FILE in place.
# Fixes: quoted all expansions (the original's unquoted $FILEPATH broke on
# paths with spaces) and dropped the useless `cat | sed`.
# NOTE(review): `\w\+` is a GNU sed extension — verify on non-GNU platforms.
update_policy()
{
    FILEPATH=$1
    sed "s/imagePullPolicy: \w\+/imagePullPolicy: $POLICY/" "$FILEPATH" > ./tmp
    mv ./tmp "$FILEPATH"
}
# Apply the new policy to every known Hadoop and Spark manifest.
update_policy "hadoop_primary.yaml"
update_policy "hadoop_worker_csd.yaml"
update_policy "hadoop_worker_host.yaml"
update_policy "spark_primary.yaml"
update_policy "spark_worker_csd.yaml"
update_policy "spark_worker_host.yaml"
echo "Done!"
|
import unittest
from actors.actor_target import ActorTarget
from actors.projectile import Projectile
from world.area_builder import AreaBuilder
class ProjectileCollisionTests(unittest.TestCase):
    """Collision behaviour of projectiles travelling toward each other."""

    def test_two_projectiles_destroy_each_other(self):
        """Two opposing projectiles in adjacent cells must both be removed."""
        marker = "*"
        flying_left = Projectile((-1, 0), marker)
        flying_right = Projectile((1, 0), marker)
        battlefield = (AreaBuilder()
                       .rectangle(10, 10)
                       .with_actor(flying_left, 5, 5)
                       .with_actor(flying_right, 4, 5)
                       .to_area())
        # One tick: the projectiles move into each other and collide.
        battlefield = battlefield.update()
        target = ActorTarget(marker)
        self.assertFalse(battlefield.print_to(target).found(), "Both projectiles were not destroyed.")
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
|
def reverse_words(sentence):
    """Return ``sentence`` with the order of its space-separated words reversed.

    Splitting is done on single spaces, so runs of multiple spaces survive
    as empty "words" and the exact spacing is preserved in the result.
    """
    return " ".join(reversed(sentence.split(" ")))
# Demo: prints "world! Hello".
reversed_sentence = reverse_words("Hello world!")
print(reversed_sentence)
|
<filename>src/icons/Info.tsx
import * as React from 'react'
// "Info" icon on a 512x512 viewBox: a dot, the outer circular ring, and a
// glyph stroke. All props are spread onto the root <svg> so callers can set
// width/height, fill, className, etc.
const Info = props => (
  <svg viewBox="0 0 512 512" {...props}>
    <circle cx={256} cy={378.5} r={25} />
    <path d="M256 0C114.516 0 0 114.497 0 256c0 141.484 114.497 256 256 256 141.484 0 256-114.497 256-256C512 114.516 397.503 0 256 0zm0 472c-119.377 0-216-96.607-216-216 0-119.377 96.607-216 216-216 119.377 0 216 96.607 216 216 0 119.377-96.607 216-216 216z" />
    <path d="M256 128.5c-44.112 0-80 35.888-80 80 0 11.046 8.954 20 20 20s20-8.954 20-20c0-22.056 17.944-40 40-40s40 17.944 40 40-17.944 40-40 40c-11.046 0-20 8.954-20 20v50c0 11.046 8.954 20 20 20s20-8.954 20-20v-32.531c34.466-8.903 60-40.26 60-77.469 0-44.112-35.888-80-80-80z" />
  </svg>
)
export default Info
|
<filename>src/main/java/com/example/demo/controller/UserController.java
package com.example.demo.controller;
import com.example.demo.api.UserService;
import com.example.demo.po.User;
import com.example.demo.dto.UserDTO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
* @author yuanxin
* @create 2020/11/11 11:25
*/
@RestController
@RequestMapping("/User")
public class UserController {
    /** Service layer used for all user persistence operations. */
    final UserService userService;

    /**
     * Constructor injection of the user service.
     *
     * @param userService the user persistence service
     */
    @Autowired
    public UserController(UserService userService) {
        this.userService = userService;
    }

    /** Returns all users. */
    @RequestMapping(value = "/getAllUser", method = RequestMethod.GET)
    public List<User> findAll() {
        return userService.findAll();
    }

    /** Returns all users wrapped in a DTO with an OK status and message. */
    @RequestMapping(value = "/getAllUserDto", method = RequestMethod.GET)
    public UserDTO dto() {
        List<User> user = userService.findAll();
        return new UserDTO(HttpStatus.OK, user, "succeed");
    }

    /** Looks up a single user by uid. */
    @RequestMapping(value = "/getUserByUid", method = RequestMethod.GET)
    public User getOneById(
            @RequestParam("id") int id) {
        return userService.getUserByUserUid(id);
    }

    /** Returns only the user name for the given uid. */
    @RequestMapping(value = "/getUserNameByUid", method = RequestMethod.GET)
    public String getUserName(
            @RequestParam("id") int uid) {
        return userService.getUserNameByUid(uid);
    }

    /** Looks up a single user by user name. */
    @RequestMapping(value = "/getUserByUserName", method = RequestMethod.GET)
    public User getUserByUserName(
            @RequestParam("username") String userName) {
        return userService.getUserByUserName(userName);
    }

    /** Deletes the user with the given uid; returns the affected row count. */
    @RequestMapping(value = "/deleteUserByUid", method = RequestMethod.DELETE)
    public long delete(
            @RequestParam("id") int uid) {
        return userService.deleteOneByUid(uid);
    }

    /**
     * Partially updates a user via request parameters.
     * Empty parameters leave the corresponding field unchanged; returns 0
     * when the uid does not exist.
     */
    @RequestMapping(value = "/updateUserByRequestParam", method = RequestMethod.PUT)
    public long updateUserByRequestParam(
            @RequestParam(value = "username", required = false, defaultValue = "") String userName,
            @RequestParam(value = "password", required = false, defaultValue = "") String password,
            @RequestParam("id") int uid) {
        User user = userService.getUserByUserUid(uid);
        if (user != null) {
            if (!"".equals(userName)) {
                user.setUserName(userName);
            }
            if (!"".equals(password)) {
                user.setPassword(password);
            }
            return userService.updateUserByUid(user);
        }
        return 0;
    }

    /**
     * Partially updates a user from a JSON body.
     * Empty fields keep their stored values; returns 0 when the uid is
     * unset/unknown or when both fields are empty.
     */
    @RequestMapping(value = "/updateUserByRequestBody", method = RequestMethod.PUT)
    public long updateUserByRequestBody(
            @RequestBody User user) {
        if (user.getUid() != 0) {
            User u = userService.getUserByUserUid(user.getUid());
            if (u != null) {
                if ("".equals(user.getUserName()) && "".equals(user.getPassword())) {
                    return 0;
                }
                if (!"".equals(user.getUserName())) {
                    u.setUserName(user.getUserName());
                }
                if (!"".equals(user.getPassword())) {
                    u.setPassword(user.getPassword());
                }
                return userService.updateUserByUid(u);
            }
        }
        return 0;
    }

    /** Inserts one user from a JSON body; both fields must be non-empty. */
    @RequestMapping(value = "/addNewUserByRequestBody", method = RequestMethod.POST)
    public long addNewUserByRequestBody(
            @RequestBody User user
    ) {
        if (!"".equals(user.getPassword()) && !"".equals(user.getUserName())) {
            return userService.insertNewUser(user);
        }
        return 0;
    }

    /** Inserts one user built from required request parameters. */
    @RequestMapping(value = "/addNewUserByRequestParam", method = RequestMethod.POST)
    public long addNewUserByRequestParam(
            @RequestParam(value = "username") String userName,
            @RequestParam(value = "password") String password
    ) {
        User user = new User();
        user.setUserName(userName);
        user.setPassword(password);
        return userService.insertNewUser(user);
    }

    /** Bulk insert; entries with an empty name or password are skipped. */
    @RequestMapping(value = "/insetMultiUser", method = RequestMethod.POST)
    public long insetMultiUser(
            @RequestBody List<User> list) {
        list.removeIf(it -> "".equals(it.getUserName()) || "".equals(it.getPassword()));
        if (list.size() != 0) {
            return userService.insertMultiUsers(list);
        }
        return 0;
    }

    /** Bulk delete by uid; non-positive uids are skipped. */
    @RequestMapping(value = "/deleteMultiUserByUid", method = RequestMethod.DELETE)
    public long deleteMultiUserByUid(
            @RequestBody List<Long> list) {
        list.removeIf(it -> it <= 0);
        if (list.size() != 0) {
            return userService.deleteMultiUsersByUid(list);
        }
        return 0;
    }

    /**
     * Bulk partial update. Entries with a non-positive uid or with both
     * fields empty are skipped; empty fields are back-filled from the
     * stored user before the batch update.
     */
    @RequestMapping(value = "/updateMultiUser", method = RequestMethod.PUT)
    public long updateMultiUser(
            @RequestBody List<User> list) {
        list.removeIf(
                it ->
                        it.getUid() <= 0 ||
                                ("".equals(it.getUserName()) && "".equals(it.getPassword()))
        );
        if (list.size() != 0) {
            for (User it : list
            ) {
                User u = userService.getUserByUserUid(it.getUid());
                if (u != null) {
                    if ("".equals(it.getUserName())) {
                        it.setUserName(u.getUserName());
                    }
                    if ("".equals(it.getPassword())) {
                        it.setPassword(u.getPassword());
                    }
                }
            }
            return userService.updateMultiUser(list);
        }
        return 0;
    }
}
|
function sortByProperty(arr, propertyName) {
return arr.sort((a, b) => {
if (a[propertyName] < b[propertyName]){
return -1
}
else if (a[propertyName] > b[propertyName]){
return 1
}
else {
return 0
}
});
}
// Demo: sorts by name, so Alice comes first.
const sortedArray = sortByProperty([{name: "John", age: 10}, {name: "Alice", age: 9}], 'name');
console.log(sortedArray);
|
#!/bin/bash
# Copyright 2019 Kyoto University (Hirofumi Inaguma)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
# Decoding configuration. All variables below are defaults that can be
# overridden on the command line via utils/parse_options.sh.
model=
model1=
model2=
model3=
model_bwd=
gpu=
stdout=false
n_threads=1
### path to save preproecssed data
data=/n/work2/inaguma/corpus/tedlium2
unit=
metric=edit_distance
batch_size=1
beam_width=10
min_len_ratio=0.0
max_len_ratio=1.0
length_penalty=0.0
length_norm=false
coverage_penalty=0.0
coverage_threshold=0.0
gnmt_decoding=false
eos_threshold=1.0
lm=
lm_second=
lm_bwd=
lm_weight=0.3
lm_second_weight=0.3
lm_bwd_weight=0.3
ctc_weight=0.0 # 1.0 for joint CTC-attention means decoding with CTC
softmax_smoothing=1.0
resolving_unk=false
fwd_bwd_attention=false
bwd_attention=false
reverse_lm_rescoring=false
asr_state_carry_over=false
lm_state_carry_over=true
n_average=10 # for Transformer
oracle=false
block_sync=false # for MoChA
block_size=40 # for MoChA
mma_delay_threshold=-1
# Kaldi-style environment and option parsing.
. ./cmd.sh
. ./path.sh
. utils/parse_options.sh
set -e
set -u
set -o pipefail
# Empty ${gpu} means CPU decoding; otherwise count comma-separated GPU ids.
if [ -z ${gpu} ]; then
    # CPU
    n_gpus=0
    export OMP_NUM_THREADS=${n_threads}
else
    n_gpus=$(echo ${gpu} | tr "," "\n" | wc -l)
fi
# Decode each evaluation set, writing results to a directory whose name
# encodes every non-default decoding option (so runs never collide).
for set in dev_sp test_sp; do
    recog_dir=$(dirname ${model})/decode_${set}_beam${beam_width}_lp${length_penalty}_cp${coverage_penalty}_${min_len_ratio}_${max_len_ratio}
    if [ ! -z ${unit} ]; then
        recog_dir=${recog_dir}_${unit}
    fi
    if [ ${length_norm} = true ]; then
        recog_dir=${recog_dir}_norm
    fi
    if [ ${metric} != 'edit_distance' ]; then
        recog_dir=${recog_dir}_${metric}
    fi
    if [ ! -z ${lm} ] && [ ${lm_weight} != 0 ]; then
        recog_dir=${recog_dir}_lm${lm_weight}
    fi
    if [ ! -z ${lm_second} ] && [ ${lm_second_weight} != 0 ]; then
        recog_dir=${recog_dir}_rescore${lm_second_weight}
    fi
    if [ ! -z ${lm_bwd} ] && [ ${lm_bwd_weight} != 0 ]; then
        recog_dir=${recog_dir}_bwd${lm_bwd_weight}
    fi
    if [ ${ctc_weight} != 0.0 ]; then
        recog_dir=${recog_dir}_ctc${ctc_weight}
    fi
    if [ ${softmax_smoothing} != 1.0 ]; then
        recog_dir=${recog_dir}_smooth${softmax_smoothing}
    fi
    if [ ${gnmt_decoding} = true ]; then
        recog_dir=${recog_dir}_gnmt
    fi
    if [ ${resolving_unk} = true ]; then
        recog_dir=${recog_dir}_resolvingOOV
    fi
    if [ ${fwd_bwd_attention} = true ]; then
        recog_dir=${recog_dir}_fwdbwd
    fi
    if [ ${bwd_attention} = true ]; then
        recog_dir=${recog_dir}_bwd
    fi
    if [ ${reverse_lm_rescoring} = true ]; then
        recog_dir=${recog_dir}_revLM
    fi
    if [ ${asr_state_carry_over} = true ]; then
        recog_dir=${recog_dir}_ASRcarryover
    fi
    if [ ${block_sync} = true ]; then
        recog_dir=${recog_dir}_blocksync${block_size}
    fi
    if [ ${n_average} != 1 ]; then
        recog_dir=${recog_dir}_average${n_average}
    fi
    if [ ! -z ${lm} ] && [ ${lm_weight} != 0 ] && [ ${lm_state_carry_over} = true ]; then
        recog_dir=${recog_dir}_LMcarryover
    fi
    if [ ${oracle} = true ]; then
        recog_dir=${recog_dir}_oracle
    fi
    if [ ${mma_delay_threshold} != -1 ]; then
        recog_dir=${recog_dir}_epswait${mma_delay_threshold}
    fi
    if [ ! -z ${model3} ]; then
        recog_dir=${recog_dir}_ensemble4
    elif [ ! -z ${model2} ]; then
        recog_dir=${recog_dir}_ensemble3
    elif [ ! -z ${model1} ]; then
        recog_dir=${recog_dir}_ensemble2
    fi
    mkdir -p ${recog_dir}
    recog_set=${data}/dataset/${set}_wpbpe10000.tsv
    # Run the actual decoding; abort the whole script on failure.
    CUDA_VISIBLE_DEVICES=${gpu} ${NEURALSP_ROOT}/neural_sp/bin/asr/eval.py \
        --recog_n_gpus ${n_gpus} \
        --recog_sets ${recog_set} \
        --recog_dir ${recog_dir} \
        --recog_unit ${unit} \
        --recog_metric ${metric} \
        --recog_model ${model} ${model1} ${model2} ${model3} \
        --recog_model_bwd ${model_bwd} \
        --recog_batch_size ${batch_size} \
        --recog_beam_width ${beam_width} \
        --recog_max_len_ratio ${max_len_ratio} \
        --recog_min_len_ratio ${min_len_ratio} \
        --recog_length_penalty ${length_penalty} \
        --recog_length_norm ${length_norm} \
        --recog_coverage_penalty ${coverage_penalty} \
        --recog_coverage_threshold ${coverage_threshold} \
        --recog_gnmt_decoding ${gnmt_decoding} \
        --recog_eos_threshold ${eos_threshold} \
        --recog_lm ${lm} \
        --recog_lm_second ${lm_second} \
        --recog_lm_bwd ${lm_bwd} \
        --recog_lm_weight ${lm_weight} \
        --recog_lm_second_weight ${lm_second_weight} \
        --recog_lm_bwd_weight ${lm_bwd_weight} \
        --recog_ctc_weight ${ctc_weight} \
        --recog_softmax_smoothing ${softmax_smoothing} \
        --recog_resolving_unk ${resolving_unk} \
        --recog_fwd_bwd_attention ${fwd_bwd_attention} \
        --recog_bwd_attention ${bwd_attention} \
        --recog_reverse_lm_rescoring ${reverse_lm_rescoring} \
        --recog_asr_state_carry_over ${asr_state_carry_over} \
        --recog_lm_state_carry_over ${lm_state_carry_over} \
        --recog_block_sync ${block_sync} \
        --recog_block_sync_size ${block_size} \
        --recog_n_average ${n_average} \
        --recog_oracle ${oracle} \
        --recog_mma_delay_threshold ${mma_delay_threshold} \
        --recog_stdout ${stdout} || exit 1;
    # Score with sclite after stripping <unk> tokens from both sides.
    if [ ${metric} = 'edit_distance' ]; then
        # remove <unk>
        cat ${recog_dir}/ref.trn | sed 's:<unk>::g' > ${recog_dir}/ref.trn.filt
        cat ${recog_dir}/hyp.trn | sed 's:<unk>::g' > ${recog_dir}/hyp.trn.filt
        echo ${set}
        sclite -r ${recog_dir}/ref.trn.filt trn -h ${recog_dir}/hyp.trn.filt trn -i rm -o all stdout > ${recog_dir}/result.txt
        grep -e Avg -e SPKR -m 2 ${recog_dir}/result.txt > ${recog_dir}/RESULTS
        cat ${recog_dir}/RESULTS
    fi
done
|
package lifecycle
import (
"context"
"os"
"github.com/go-logr/logr"
"github.com/openshift/special-resource-operator/pkg/clients"
"github.com/openshift/special-resource-operator/pkg/storage"
"github.com/openshift/special-resource-operator/pkg/utils"
appsv1 "k8s.io/api/apps/v1"
v1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/types"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/log/zap"
)
//go:generate mockgen -source=lifecycle.go -package=lifecycle -destination=mock_lifecycle_api.go
// Lifecycle tracks the pods backing DaemonSets/Deployments so their
// lifecycle state can be recorded in a ConfigMap.
type Lifecycle interface {
	// GetPodFromDaemonSet returns the pods selected by the named DaemonSet.
	GetPodFromDaemonSet(context.Context, types.NamespacedName) *v1.PodList
	// GetPodFromDeployment returns the pods selected by the named Deployment.
	GetPodFromDeployment(context.Context, types.NamespacedName) *v1.PodList
	// UpdateDaemonSetPods records each pod of the object's DaemonSet in the
	// lifecycle ConfigMap.
	UpdateDaemonSetPods(context.Context, client.Object) error
}

// lifecycle is the default Lifecycle implementation backed by the cluster
// client and the storage package.
type lifecycle struct {
	kubeClient clients.ClientsInterface
	log        logr.Logger
	storage    storage.Storage
}

// New builds a Lifecycle using the given cluster client and storage.
// The logger is a dev-mode zap logger named "lifecycle".
func New(kubeClient clients.ClientsInterface, storage storage.Storage) Lifecycle {
	return &lifecycle{
		kubeClient: kubeClient,
		log:        zap.New(zap.UseDevMode(true)).WithName(utils.Print("lifecycle", utils.Green)),
		storage:    storage,
	}
}
// GetPodFromDaemonSet returns the pods belonging to the DaemonSet identified
// by key. On any Get error (including NotFound) the error is logged and an
// empty PodList is returned, so callers never receive nil.
func (l *lifecycle) GetPodFromDaemonSet(ctx context.Context, key types.NamespacedName) *v1.PodList {
	ds := &appsv1.DaemonSet{}
	// The previous guard `apierrors.IsNotFound(err) || err != nil` was
	// redundant: IsNotFound(err) implies err != nil, so `err != nil` alone
	// is equivalent.
	if err := l.kubeClient.Get(ctx, key, ds); err != nil {
		utils.WarnOnError(err)
		return &v1.PodList{}
	}
	return l.getPodListForUpperObject(ctx, ds.Spec.Selector.MatchLabels, key.Namespace)
}
// GetPodFromDeployment returns the pods belonging to the Deployment named by
// key, or an empty PodList if the Deployment cannot be fetched (the error is
// logged, never returned).
func (l *lifecycle) GetPodFromDeployment(ctx context.Context, key types.NamespacedName) *v1.PodList {
	dp := &appsv1.Deployment{}
	err := l.kubeClient.Get(ctx, key, dp)
	// NOTE(review): IsNotFound(err) implies err != nil, so the first clause
	// is redundant; `err != nil` alone is equivalent.
	if apierrors.IsNotFound(err) || err != nil {
		utils.WarnOnError(err)
		return &v1.PodList{}
	}
	return l.getPodListForUpperObject(ctx, dp.Spec.Selector.MatchLabels, key.Namespace)
}
// getPodListForUpperObject lists the pods in namespace ns that match the
// given label selector. A List error is only logged; the (possibly empty)
// PodList is returned either way.
func (l *lifecycle) getPodListForUpperObject(ctx context.Context, matchLabels map[string]string, ns string) *v1.PodList {
	podList := &v1.PodList{}

	listOpts := []client.ListOption{
		client.InNamespace(ns),
		client.MatchingLabels(matchLabels),
	}

	if err := l.kubeClient.List(ctx, podList, listOpts...); err != nil {
		utils.WarnOnError(err)
	}

	return podList
}
// UpdateDaemonSetPods records every pod of the DaemonSet described by obj in
// the "special-resource-lifecycle" ConfigMap (in the operator namespace),
// keyed by an FNV64a hash of the pod's namespace+name. Returns the first
// error encountered; hashing errors abort immediately.
func (l *lifecycle) UpdateDaemonSetPods(ctx context.Context, obj client.Object) error {
	l.log.Info("UpdateDaemonSetPods")
	key := types.NamespacedName{
		Namespace: obj.GetNamespace(),
		Name:      obj.GetName(),
	}
	// Location of the ConfigMap that tracks lifecycle state.
	ins := types.NamespacedName{
		Namespace: os.Getenv("OPERATOR_NAMESPACE"),
		Name:      "special-resource-lifecycle",
	}
	pl := l.GetPodFromDaemonSet(ctx, key)
	for _, pod := range pl.Items {
		hs, err := utils.FNV64a(pod.GetNamespace() + pod.GetName())
		if err != nil {
			return err
		}
		// NOTE(review): the stored value is the literal string "*v1.Pod";
		// presumably a type marker/placeholder rather than real state —
		// confirm intent before relying on it.
		value := "*v1.Pod"
		l.log.Info(pod.GetName(), "hs", hs, "value", value)
		err = l.storage.UpdateConfigMapEntry(ctx, hs, value, ins)
		if err != nil {
			utils.WarnOnError(err)
			return err
		}
	}
	return nil
}
|
#!/usr/bin/env sh
# shellcheck disable=SC2005,2188
# Qinglong task metadata below is parsed by the panel, not executed by sh
# (the heredoc is attached to no command, so it is discarded at runtime).
<<'COMMENT'
cron: 16 */2 * * *
new Env('签到依赖');
COMMENT
# Shared helpers; get_some_path presumably sets SCR_PATH / REPO_PATH used
# later — confirm against utils_env.sh.
. utils_env.sh
get_some_path
# Dependency manifests for each ecosystem installed by this script.
alpine_pkgs="bash curl gcc git jq libffi-dev make musl-dev openssl-dev perl perl-app-cpanminus perl-dev py3-pip python3 python3-dev wget"
py_reqs="bs4 cryptography dateparser feedparser peewee pyaes pyppeteer requests rsa schedule tomli"
js_pkgs="@iarna/toml axios cron-parser crypto-js got"
pl_mods="File::Slurp JSON5 TOML::Dumper"
# Retry wrapper around a package-manager command.
#   $1 - success flag: 0 = success when the count in $3 is > 0,
#        1 = success when the count in $3 is 0
#   $2 - human-readable command description (echoed in progress messages)
#   $3 - a `grep -c`-style count produced by running the command
# NOTE(review): $3 is a command substitution evaluated ONCE at the call
# site, so every loop iteration re-checks the same result — the "retry"
# never actually re-runs the installation command. Confirm/fix at call sites.
install() {
	count=0
	flag=$1
	while true; do
		echo ".......... $2 begin .........."
		result=$3
		# Collapse the count to a boolean-ish flag.
		if [ "$result" -gt 0 ]; then
			flag=0
		else
			flag=1
		fi
		if [ $flag -eq "$1" ]; then
			echo "---------- $2 succeed ----------"
			break
		else
			count=$((count + 1))
			# Give up after 6 attempts.
			if [ $count -eq 6 ]; then
				echo "!! 自动安装失败,请尝试进入容器后执行 $2 !!"
				break
			fi
			echo ".......... retry in 5 seconds .........."
			sleep 5
		fi
	done
}
# Install each required Alpine package that `apk info` does not already list.
install_alpine_pkgs() {
	apk update
	# Surrounding spaces let the \s-delimited expr match whole package names.
	apk_info=" $(apk info) "
	for i in $alpine_pkgs; do
		if expr "$apk_info" : ".*\s${i}\s.*" >/dev/null; then
			echo "$i 已安装"
		else
			# Success criterion: at least one "OK" line from `apk add`.
			install 0 "apk add $i" "$(apk add --no-cache "$i" | grep -c 'OK')"
		fi
	done
}
# Install each required Python package that pip does not already list.
install_py_reqs() {
	pip3 install --upgrade pip
	pip3_freeze="$(pip3 freeze)"
	for i in $py_reqs; do
		# NOTE(review): substring match — e.g. "rsa" would also match
		# a package merely containing "rsa" in its name.
		if expr "$pip3_freeze" : ".*${i}" >/dev/null; then
			echo "$i 已安装"
		else
			install 0 "pip3 install $i" "$(pip3 install "$i" | grep -c 'Successfully')"
		fi
	done
}
# Prepare the working directory's package.json, then `npm install`.
# Two environments are handled: a plain script dir (copy the repo's
# package.json) or a qinglong /ql/scripts dir (back up the existing npm/pnpm
# files once, then merge the repo's package.json into the backup).
install_js_pkgs_initial() {
	if [ -d "${SCR_PATH}/Oreomeow_checkinpanel_master" ]; then
		cd "${SCR_PATH}/Oreomeow_checkinpanel_master" &&
			cp "${REPO_PATH}/Oreomeow_checkinpanel_master/package.json" "${SCR_PATH}/Oreomeow_checkinpanel_master/package.json"
	elif [ -d "/ql/scripts" ] && [ ! -f "/ql/scripts/package.bak.json" ]; then
		# First run only: the .bak files double as a "already merged" marker.
		cd /ql/scripts || exit
		rm -rf node_modules
		rm -rf .pnpm-store
		mv package-lock.json package-lock.bak.json
		mv package.json package.bak.json
		mv pnpm-lock.yaml pnpm-lock.bak.yaml
		# install flag 1: success when the grep count of empty/ERR lines is 0.
		install 1 "npm install -g package-merge" "$(echo "$(npm install -g package-merge && npm ls -g package-merge)" | grep -cE '(empty)|ERR')" &&
			export NODE_PATH="/usr/local/lib/node_modules" &&
			node -e \
				"const merge = require('package-merge');
const fs = require('fs');
const dst = fs.readFileSync('/ql/repo/Oreomeow_checkinpanel_master/package.json');
const src = fs.readFileSync('/ql/scripts/package.bak.json');
fs.writeFile('/ql/scripts/package.json', merge(dst, src), function (err) {
if (err) {
console.log(err);
}
console.log('package.json merged successfully!');
});"
	fi
	npm install
}
# Verify a single npm package and (re)install it when missing or broken.
#   $1 - npm package name
install_js_pkgs_each() {
	# Run `npm ls` once and inspect its output twice; the previous version
	# invoked `npm ls "$1"` twice, doubling the cost of every check.
	ls_output=$(npm ls "$1")
	is_empty=$(echo "$ls_output" | grep empty)
	has_err=$(echo "$ls_output" | grep ERR)
	if [ "$is_empty" = "" ] && [ "$has_err" = "" ]; then
		echo "$1 已正确安装"
	elif [ "$has_err" != "" ]; then
		# Broken install: purge it; it will be retried on the next run.
		uninstall_js_pkgs "$1"
	else
		# Missing: install, then re-list; success when no empty/ERR lines.
		install 1 "npm install $1" "$(echo "$(npm install --force "$1" && npm ls --force "$1")" | grep -cE '(empty)|ERR')"
	fi
}
# Force-remove a broken npm package so a later run can reinstall it cleanly.
#   $1 - npm package name
uninstall_js_pkgs() {
	npm uninstall "$1"
	rm -rf "$(pwd)"/node_modules/"$1"
	# NOTE(review): lodash is wiped regardless of $1 — presumably to clear a
	# recurring corrupted dependency; confirm this is intentional.
	rm -rf /usr/local/lib/node_modules/lodash/*
	npm cache clear --force
}
# Set up package.json, then verify/install every JS dependency.
install_js_pkgs_all() {
	install_js_pkgs_initial
	for i in $js_pkgs; do
		install_js_pkgs_each "$i"
	done
	# Show the resulting top-level dependency tree for the log.
	npm ls --depth 0
}
# Install App::cpm (with several fallbacks) and then every required Perl
# module that is not already resolvable via perldoc.
install_pl_mods() {
	if command -v cpm >/dev/null 2>&1; then
		echo "App::cpm 已安装"
	else
		# install flag 1: success when cpanm output contains no "FAIL".
		install 1 "cpanm -fn App::cpm" "$(cpanm -fn App::cpm | grep -c "FAIL")"
		# Fallback chain: local ./cpm, repo copy, then CDN download.
		if ! command -v cpm >/dev/null 2>&1; then
			if [ -f ./cpm ]; then
				chmod +x cpm && ./cpm --version
			else
				cp -f /ql/repo/Oreomeow_checkinpanel_master/cpm ./ && chmod +x cpm && ./cpm --version
				if [ ! -f ./cpm ]; then
					curl -fsSL https://cdn.jsdelivr.net/gh/Oreomeow/checkinpanel/cpm >cpm && chmod +x cpm && ./cpm --version
				fi
			fi
		fi
	fi
	for i in $pl_mods; do
		# perldoc -l prints the module's path when it is installed.
		if [ -f "$(perldoc -l "$i")" ]; then
			echo "$i 已安装"
		else
			install 1 "cpm install -g $i" "$(cpm install -g "$i" | grep -c "FAIL")"
		fi
	done
}
# Run all installers in dependency order.
install_alpine_pkgs
install_py_reqs
install_js_pkgs_all
install_pl_mods
|
<reponame>ubuntudroid/Rialto
package com.stylingandroid.rialto.app;
import android.graphics.Typeface;
import android.os.Bundle;
import android.text.style.CharacterStyle;
import android.text.style.StyleSpan;
import android.text.style.UnderlineSpan;
import android.widget.TextView;
import com.stylingandroid.rialto.format.SpannableFormatterKt;
import kotlin.jvm.functions.Function0;
public class MainActivity extends RialtoActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Register span factories for the annotation names used in string
        // resources; each factory produces the CharacterStyle Rialto applies
        // when formatting. The "bold" factory uses the verbose anonymous-class
        // form deliberately (the others show the lambda/method-ref forms).
        registerSpanFactory("format", "bold", new Function0<CharacterStyle>() {
            @Override
            public CharacterStyle invoke() {
                return new StyleSpan(Typeface.BOLD);
            }
        });
        registerSpanFactory("format", "italic", () -> new StyleSpan(Typeface.ITALIC) );
        // NOTE(review): "bold_underline" is registered twice, presumably so
        // that both a bold span AND an underline span are applied to the same
        // annotation. Confirm registerSpanFactory accumulates factories rather
        // than letting the second registration replace the first.
        registerSpanFactory("format", "bold_underline", () -> new StyleSpan(Typeface.BOLD));
        registerSpanFactory("format", "bold_underline", UnderlineSpan::new);
        setContentView(R.layout.activity_main);
        TextView textView = findViewById(R.id.format_string);
        // Format the annotated string resource and render the spannable.
        textView.setText(SpannableFormatterKt.getFormattedText(getResources(), R.string.formatted_italic, "formatted"));
    }
}
|
<reponame>DivyeshPadamani/Investmentclub
import { Injectable } from '@angular/core';
import { Router, CanActivate } from '@angular/router';
import { UserService } from '../services/user.service';
import { Observable } from 'rxjs/Observable';
@Injectable()
export class AuthenticationGuard implements CanActivate {
    constructor(public user: UserService, public router: Router) { }

    /**
     * Allows route activation only for logged-in users.
     *
     * If the user service has already loaded, resolves synchronously.
     * Otherwise polls every 100 ms until it loads, then emits the login
     * state. Unauthenticated users are redirected to /login.
     */
    canActivate(): boolean | Observable<boolean> {
        if (this.user.isLoaded()) {
            return !!this.user.isLoggedIn();
        }
        return new Observable<boolean>((observer) => {
            const interval = setInterval(() => {
                if (this.user.isLoaded()) {
                    // BUG FIX: the redirect previously fired when the user
                    // WAS logged in; it must fire when the user is NOT
                    // logged in, sending them to the login page.
                    if (!this.user.isLoggedIn()) {
                        this.router.navigate(['/login']);
                    }
                    observer.next(!!this.user.isLoggedIn());
                    observer.complete();
                    clearInterval(interval);
                }
            }, 100);
        });
    }
}
|
#!/usr/bin/env bash
set -e

# Packages under test; simapp and CLI test packages are excluded from
# coverage. (The duplicate `set -e` that followed this assignment was
# removed — one is enough.)
PKGS=$(go list ./... | grep -v '/simapp' | grep -v '/cli_test')

echo "mode: atomic" > coverage.txt
# NOTE: PKGS is a plain whitespace-separated string, not a bash array, so
# iterate with an unquoted $PKGS (the previous ${PKGS[@]} only worked by
# accident on a scalar variable).
for pkg in $PKGS; do
  go test -v -timeout 30m -race -coverprofile=profile.out -covermode=atomic -tags='ledger test_ledger_mock' "$pkg"
  if [ -f profile.out ]; then
    # Drop the per-package "mode:" header so coverage.txt keeps exactly one.
    tail -n +2 profile.out >> coverage.txt;
    rm profile.out
  fi
done
|
# $FreeBSD$
# Basic success path: a command that finishes well within the timeout.
atf_test_case nominal
nominal_head()
{
	atf_set "descr" "Basic tests on timeout(1) utility"
}
nominal_body()
{
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    timeout 5 true
}
# Duration suffixes: d (days), h (hours), m (minutes), s (seconds) must all
# be accepted.
atf_test_case time_unit
time_unit_head()
{
	atf_set "descr" "Test parsing the default time unit"
}
time_unit_body()
{
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    timeout 1d true
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    timeout 1h true
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    timeout 1m true
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    timeout 1s true
}
# A duration of 0 disables the timeout entirely.
atf_test_case no_timeout
no_timeout_head()
{
	atf_set "descr" "Test disabled timeout"
}
no_timeout_body()
{
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    timeout 0 true
}
# Exit-status contract of timeout(1): pass-through, 124 on timeout, and the
# --preserve-status / signal-option variants.
atf_test_case exit_numbers
exit_numbers_head()
{
	atf_set "descr" "Test exit numbers"
}
exit_numbers_body()
{
	# A command that exits on its own propagates its status unchanged.
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:2 \
	    -x timeout 5 sh -c \'exit 2\'
	# A timed-out command yields 124 by default.
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:124 \
	    timeout .1 sleep 1
	# With --preserve-status the exit should be 128 + SIGTERM, i.e. 143.
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:143 \
	    timeout --preserve-status .1 sleep 10
	# Custom signal (-s1) plus kill-after (-k1) still reports 124.
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:124 \
	    timeout -s1 -k1 .1 sleep 10
	# Ignored SIGCHLD in the parent must not confuse timeout.
	atf_check \
	    -o empty \
	    -e empty \
	    -s exit:0 \
	    -x sh -c 'trap "" CHLD; exec timeout 10 true'
}
# Regression test for coreutils bug#9098: timeout must still kill its child
# when it inherits an unrelated background child at startup.
atf_test_case with_a_child
with_a_child_head()
{
	atf_set "descr" "When starting with a child (coreutils bug#9098)"
}
with_a_child_body()
{
	out=$(sleep .1 & exec timeout .5 sh -c 'sleep 2; echo foo')
	status=$?
	# atf_fail requires a reason argument; calling it bare is itself an
	# error in atf-sh and would obscure the real failure.
	test "$out" = "" && test $status = 124 || atf_fail "timeout did not kill its child (out='$out' status=$status)"
}
# Invalid durations must be rejected with exit 125 and a diagnostic.
atf_test_case invalid_timeout
invalid_timeout_head()
{
	atf_set "descr" "Invalid timeout"
}
invalid_timeout_body()
{
	# Non-numeric duration.
	atf_check \
	    -o empty \
	    -e inline:"timeout: invalid duration\n" \
	    -s exit:125 \
	    timeout invalid sleep 0
	# Invalid --kill-after value.
	atf_check \
	    -o empty \
	    -e inline:"timeout: invalid duration\n" \
	    -s exit:125 \
	    timeout --kill-after=invalid 1 sleep 0
	# Unit suffixes are lowercase only; 'D' is invalid.
	atf_check \
	    -o empty \
	    -e inline:"timeout: invalid duration\n" \
	    -s exit:125 \
	    timeout 42D sleep 0
	# Overflowing duration.
	atf_check \
	    -o empty \
	    -e inline:"timeout: invalid duration\n" \
	    -s exit:125 \
	    timeout 999999999999999999999999999999999999999999999999999999999999d sleep 0
	# Scientific notation is not accepted.
	atf_check \
	    -o empty \
	    -e inline:"timeout: invalid duration\n" \
	    -s exit:125 \
	    timeout 2.34e+5d sleep 0
}
# Unknown signal names are rejected with exit 125.
atf_test_case invalid_signal
invalid_signal_head()
{
	atf_set "descr" "Invalid signal"
}
invalid_signal_body()
{
	atf_check \
	    -o empty \
	    -e inline:"timeout: invalid signal\n" \
	    -s exit:125 \
	    timeout --signal=invalid 1 sleep 0
}
# A command that exists but cannot be executed exits 126.
atf_test_case invalid_command
invalid_command_head()
{
	atf_set "descr" "Invalid command"
}
invalid_command_body()
{
	atf_check \
	    -o empty \
	    -e inline:"timeout: exec(.): Permission denied\n" \
	    -s exit:126 \
	    timeout 10 .
}
# A command that does not exist exits 127.
atf_test_case no_such_command
no_such_command_head()
{
	atf_set "descr" "No such command"
}
no_such_command_body()
{
	atf_check \
	    -o empty \
	    -e inline:"timeout: exec(enoexists): No such file or directory\n" \
	    -s exit:127 \
	    timeout 10 enoexists
}
# Register all test cases with the ATF runner.
atf_init_test_cases()
{
	atf_add_test_case nominal
	atf_add_test_case time_unit
	atf_add_test_case no_timeout
	atf_add_test_case exit_numbers
	atf_add_test_case with_a_child
	atf_add_test_case invalid_timeout
	atf_add_test_case invalid_signal
	atf_add_test_case invalid_command
	atf_add_test_case no_such_command
}
|
import unittest
import numpy as np
from pymesh import generate_box_mesh
from pymesh.wires import WireNetwork
from pymesh.wires import Tiler
from pymesh.wires import Parameters
from WireTestCase import WireTestCase
class TilerTest(WireTestCase):
    """Tests for pymesh.wires.Tiler covering bbox, guide-mesh and
    mixed-pattern tiling of a 2x2x2 grid of unit cells."""

    def test_tile_with_bbox(self):
        """Tile the brick5 pattern into an explicit 2x2x2 guide bbox."""
        wire_network = self.get_brick5()
        params = Parameters(wire_network, 0.5)
        bbox_min = np.zeros(3)
        bbox_max = np.ones(3) * 5 * 2
        reps = np.ones(3) * 2
        tiler = Tiler(wire_network)
        tiler.tile_with_guide_bbox(bbox_min, bbox_max, reps, params)

        tiled_wire_network = tiler.wire_network
        # 8 cells -> 8x the edges; 12 duplicate vertices on shared cell
        # boundaries collapse.
        self.assertEqual(8 * wire_network.num_edges,
                tiled_wire_network.num_edges)
        self.assertEqual(8 * wire_network.num_vertices - 12,
                tiled_wire_network.num_vertices)
        self.assert_array_equal(
                [bbox_min, bbox_max], tiled_wire_network.bbox)
        self.assertTrue(tiled_wire_network.has_attribute("thickness"))
        self.assertTrue(tiled_wire_network.has_attribute("vertex_offset"))
        self.assert_array_equal(
                np.ones(tiled_wire_network.num_vertices) * 0.5,
                tiled_wire_network.get_attribute("thickness").ravel())

    def test_tile_with_guide_mesh(self):
        """Tile the brick5 pattern into a subdivided hex box guide mesh."""
        wire_network = self.get_brick5()
        params = Parameters(wire_network, 0.5)
        mesh = generate_box_mesh(np.zeros(3), np.ones(3), subdiv_order=1,
                using_simplex=False)
        # Fix: the tiler was constructed twice here; one construction
        # suffices and the duplicate was dead code.
        tiler = Tiler(wire_network)
        tiler.tile_with_guide_mesh(mesh, params)

        tiled_wire_network = tiler.wire_network
        self.assertEqual(8 * wire_network.num_edges,
                tiled_wire_network.num_edges)
        self.assertEqual(8 * wire_network.num_vertices - 12,
                tiled_wire_network.num_vertices)
        self.assert_array_equal(
                mesh.bbox, tiled_wire_network.bbox)
        self.assertTrue(tiled_wire_network.has_attribute("thickness"))
        self.assertTrue(tiled_wire_network.has_attribute("vertex_offset"))
        self.assert_array_equal(
                np.ones(tiled_wire_network.num_vertices) * 0.5,
                tiled_wire_network.get_attribute("thickness").ravel())

    def test_tile_with_mixed_patterns(self):
        """Tile two different patterns selected per-cell via a pattern_id
        mesh attribute, with per-cell dof attributes."""
        wire_networks = [
                self.get_brick5(),
                self.get_cross_3D() ]
        params = [Parameters(wire_networks[0], 0.1),
                Parameters(wire_networks[1], 0.0)]
        params[0].load_default_isotropic_parameters()
        params[1].load_default_isotropic_parameters()
        max_num_dofs = max(params[0].num_dofs, params[1].num_dofs)
        # One pattern id per cell of the 2x2x2 guide mesh.
        pattern_id = np.array([0, 1, 1, 0, 1, 0, 0, 1])
        mesh = generate_box_mesh(np.zeros(3), np.ones(3), subdiv_order=1,
                using_simplex=False)
        mesh.add_attribute("pattern_id")
        mesh.set_attribute("pattern_id", pattern_id)
        # dof_i attributes: each cell takes its pattern's i-th dof, or 0 if
        # the pattern has fewer than i+1 dofs.
        for i in range(max_num_dofs):
            dof_name = "dof_{}".format(i)
            dof = np.array([params[j].dofs[i]
                if i < params[j].num_dofs else 0
                for j in pattern_id ])
            mesh.add_attribute(dof_name)
            mesh.set_attribute(dof_name, dof)

        tiler = Tiler(wire_networks)
        tiler.tile_with_mixed_patterns(mesh)

        tiled_wire_network = tiler.wire_network
        self.assert_array_equal(
                mesh.bbox, tiled_wire_network.bbox)
        # Each pattern appears in 4 of the 8 cells; 4*3 duplicate vertices
        # collapse on shared boundaries.
        self.assertEqual(
                (wire_networks[0].num_edges +
                    wire_networks[1].num_edges) * 4,
                tiled_wire_network.num_edges)
        self.assertEqual(
                (wire_networks[0].num_vertices +
                    wire_networks[1].num_vertices) * 4 - 4 * 3,
                tiled_wire_network.num_vertices)
|
<gh_stars>10-100
package io.opensphere.core.dialog.alertviewer;
import java.text.SimpleDateFormat;
import java.util.Date;
import io.opensphere.core.dialog.alertviewer.event.Type;
import io.opensphere.core.util.DateTimeFormats;
import net.jcip.annotations.Immutable;
/** Immutable value object describing a single alert shown in the viewer. */
@Immutable
class Alert
{
    /** Shared timestamp formatter; SimpleDateFormat is not thread-safe, so
     * all access is serialized on this instance. */
    private static final SimpleDateFormat TIME_FORMAT = new SimpleDateFormat(DateTimeFormats.TIME_FORMAT);

    /** Severity of the alert. */
    private final Type myLevel;

    /** Human-readable alert text. */
    private final String myMessage;

    /** Creation time in epoch milliseconds, captured at construction. */
    private final long myTime;

    /** Whether the alert viewer dialog should be raised for this alert. */
    private final boolean myMakeVisible;

    /**
     * Creates an alert stamped with the current system time.
     *
     * @param level severity of the alert
     * @param message the alert text
     * @param makeVisible whether the dialog should be made visible
     */
    public Alert(Type level, String message, boolean makeVisible)
    {
        myLevel = level;
        myMessage = message;
        myTime = System.currentTimeMillis();
        myMakeVisible = makeVisible;
    }

    /**
     * Returns the severity.
     *
     * @return the level
     */
    public Type getLevel()
    {
        return myLevel;
    }

    /**
     * Returns the alert text.
     *
     * @return the message
     */
    public String getMessage()
    {
        return myMessage;
    }

    /**
     * Returns the creation time in epoch milliseconds.
     *
     * @return the time
     */
    public long getTime()
    {
        return myTime;
    }

    /**
     * Returns whether the dialog should be made visible.
     *
     * @return the makeVisible flag
     */
    public boolean isMakeVisible()
    {
        return myMakeVisible;
    }

    @Override
    public String toString()
    {
        StringBuilder buffer = new StringBuilder(64);
        buffer.append(format(new Date(myTime)))
                .append(' ')
                .append(myLevel)
                .append(" - ")
                .append(myMessage);
        return buffer.toString();
    }

    /**
     * Renders a date using the shared time format.
     *
     * @param date the date to render
     * @return the formatted timestamp
     */
    private String format(Date date)
    {
        // Serialize access to the non-thread-safe shared formatter.
        synchronized (TIME_FORMAT)
        {
            return TIME_FORMAT.format(date);
        }
    }
}
|
<filename>src/aiortc/codecs/keypointcodec.py
import audioop
import fractions
import numpy as np
import os
from typing import List, Optional, Tuple
from ..jitterbuffer import JitterFrame
from .base import Decoder, Encoder
from .keypoints_pb2 import KeypointInfo
from ..mediastreams import KeypointsFrame
SCALE_FACTOR = 256//2
NUM_KP = 10
NUM_JACOBIAN_BITS = int(os.environ.get('JACOBIAN_BITS', -1))
INDEX_BITS = 16
DUMMY_PTS = 5
""" custom codec that uses the protobuf module
to generically serialize and de-serialize
keypoints and associated information
(might warrant further optimization, once
we settle on final data format)
"""
def keypoint_dict_to_struct(keypoint_dict):
    """ parse a keypoint dictionary form into a keypoint info structure """
    keypoint_info_struct = KeypointInfo()
    # keypoints and jacobians are optional; copy whichever are present.
    if 'keypoints' in keypoint_dict:
        for k in keypoint_dict['keypoints']:
            # Each keypoint is an (x, y) pair.
            keypoint = keypoint_info_struct.keypoints.add()
            keypoint.xloc = k[0]
            keypoint.yloc = k[1]
    if 'jacobians' in keypoint_dict:
        for j in keypoint_dict['jacobians']:
            # Each jacobian is a 2x2 matrix stored element-wise.
            jacobian = keypoint_info_struct.jacobians.add()
            jacobian.d11 = j[0][0]
            jacobian.d12 = j[0][1]
            jacobian.d21 = j[1][0]
            jacobian.d22 = j[1][1]
    # pts (presentation timestamp) and frame index are always required.
    keypoint_info_struct.pts = keypoint_dict['pts']
    keypoint_info_struct.index = keypoint_dict['index']
    return keypoint_info_struct
def keypoint_struct_to_dict(keypoint_info_struct):
    """ parse a keypoint info structure into dictionary form """
    keypoint_dict = {}
    # Rebuild the keypoint array as an (N, 2) numpy array if present.
    if len(keypoint_info_struct.keypoints) > 0:
        kp_array = []
        keypoints = keypoint_info_struct.keypoints
        for k in keypoints:
            kp_array.append(np.array([k.xloc, k.yloc]))
        keypoint_dict['keypoints'] = np.array(kp_array)
    # Rebuild the jacobians as an (N, 2, 2) numpy array if present.
    if len(keypoint_info_struct.jacobians) > 0:
        jacobian_array = []
        jacobians = keypoint_info_struct.jacobians
        for j in jacobians:
            jacobian_array.append(np.array([[j.d11, j.d12], [j.d21, j.d22]]))
        keypoint_dict['jacobians'] = np.array(jacobian_array)
    keypoint_dict['pts'] = keypoint_info_struct.pts
    keypoint_dict['index'] = keypoint_info_struct.index
    return keypoint_dict
""" compute the bin corresponding to the jacobian value
based on the Huffman dictionary for the desired
number of bins/bits
"""
def jacobian_to_bin(value, num_bins):
    """Quantize one jacobian entry to a (sign_bit, bin_number) pair.

    The top three bins cover (2, 2.5], (2.5, 3] and (3, inf); the remaining
    ``num_bins - 3`` bins split [0, 2] uniformly. The sign bit is 1 for
    strictly positive values.
    """
    sign_bit = int(value > 0)
    magnitude = abs(value)
    # Walk the thresholds from the top down, exactly as the Huffman
    # dictionary defines them.
    if magnitude > 3:
        return sign_bit, num_bins - 1
    if magnitude > 2.5:
        return sign_bit, num_bins - 2
    if magnitude > 2:
        return sign_bit, num_bins - 3
    return sign_bit, int(magnitude / 2.0 * (num_bins - 3))
""" compute the approximate jacobian from the bin number
based on the Huffman dictionary for the desired
number of bins/bits
"""
def bin_to_jacobian(bin_num, num_bins):
    """Recover the approximate jacobian magnitude for a bin number.

    Inverse of ``jacobian_to_bin``: the top three bins decode to the
    fixed representatives 2.25, 2.75 and 3; lower bins decode to the
    midpoint of their uniform sub-interval of [0, 2].
    """
    top = num_bins - 1
    if bin_num == top:
        return 3
    if bin_num == top - 1:
        return 2.75
    if bin_num == top - 2:
        return 2.25
    # Uniform bins over [0, 2]: return the bin midpoint.
    num_intervals = num_bins - 3
    return (2.0 / num_intervals) * (bin_num + 0.5)
""" custom encoding for keypoint data using lossless
8-bit encoding for keypoint locations and lossy
Huffman binning/encoding of jacobians
"""
def custom_encode(keypoint_dict):
    """Bit-pack a keypoint dict: INDEX_BITS of frame index, then 8+8 bits
    per keypoint (x, y), then NUM_JACOBIAN_BITS per jacobian element
    (1 sign bit + Huffman bin number). Returns the packed big-endian bytes.

    Only called when NUM_JACOBIAN_BITS != -1 (custom codec enabled); the
    total bit count is assumed to be byte-aligned.
    """
    binary_str = ""
    num_bins = 2 ** (NUM_JACOBIAN_BITS - 1)
    bit_format = f'0{NUM_JACOBIAN_BITS - 1}b'
    index = keypoint_dict['index']
    index_bit_format = f'0{INDEX_BITS}b'
    binary_str += f'{index:{index_bit_format}}'
    for k in keypoint_dict['keypoints']:
        # Map coordinates from [-1, 1) to [0, 255] for lossless 8-bit storage.
        x = round(k[0] * SCALE_FACTOR + SCALE_FACTOR)
        y = round(k[1] * SCALE_FACTOR + SCALE_FACTOR)
        binary_str += f'{x:08b}'
        binary_str += f'{y:08b}'
    for j in keypoint_dict['jacobians']:
        # Row-major 2x2 flattening; each element is quantized lossily.
        flattened_jacobians = j.flatten()
        for element in flattened_jacobians:
            sign, binary = jacobian_to_bin(element, num_bins)
            binary_str += f'{sign}{binary:{bit_format}}'
    return int(binary_str, 2).to_bytes(len(binary_str) // 8, byteorder='big')
""" custom decoding for keypoint data using lossless
decoding for 8-bit keypoint locations and lossy
decoding of jacobians based on Huffman bins
"""
def custom_decode(serialized_data):
    """Inverse of custom_encode: unpack index, keypoints and jacobians from
    the bit-packed byte string produced by custom_encode.

    Layout: INDEX_BITS of index, then 2*NUM_KP 8-bit coordinate words,
    then NUM_JACOBIAN_BITS words for the jacobian elements (4 per 2x2).
    """
    num_bins = 2**(NUM_JACOBIAN_BITS - 1)
    # Expand the bytes back into a '0'/'1' string for sequential parsing.
    bitstring = ''.join(format(byte, '08b') for byte in serialized_data)
    keypoint_dict = {'jacobians': [], 'keypoints': []}
    num_read_so_far = 0
    x, y = 0, 0
    kp_locations = []
    jacobians = []
    index = int(bitstring[:INDEX_BITS], 2)
    keypoint_dict['index'] = index
    bitstring = bitstring[INDEX_BITS:]
    while len(bitstring) > 0:
        # First 2*NUM_KP words are 8-bit coordinates; the rest are
        # NUM_JACOBIAN_BITS jacobian words.
        num_bits = NUM_JACOBIAN_BITS if num_read_so_far >= 2*NUM_KP else 8
        word = bitstring[:num_bits]
        bitstring = bitstring[num_bits:]
        # Sign/bin split is only meaningful for jacobian words.
        sign = -1 if word[0] == '0' else 1
        bin_number = int(word[1:num_bits], 2)
        num_read_so_far += 1
        if num_read_so_far <= 2 * NUM_KP:
            # Undo the [0, 255] -> [-1, 1) coordinate mapping.
            value = ((int(word, 2) - SCALE_FACTOR) / float(SCALE_FACTOR))
            if num_read_so_far % 2 == 0:
                # Every second coordinate completes an (x, y) keypoint.
                kp_locations.append(value)
                keypoint_dict['keypoints'].append(np.array(kp_locations))
                kp_locations = []
            else:
                kp_locations.append(value)
        else:
            value = sign * bin_to_jacobian(bin_number, num_bins)
            if num_read_so_far % 4 == 0:
                # Every fourth element completes one 2x2 jacobian.
                jacobians.append(value)
                jacobians = np.array(jacobians).reshape((2, 2))
                keypoint_dict['jacobians'].append(jacobians)
                jacobians = []
            else:
                jacobians.append(value)
    keypoint_dict['jacobians'] = np.array(keypoint_dict['jacobians'])
    keypoint_dict['keypoints'] = np.array(keypoint_dict['keypoints'])
    return keypoint_dict
class KeypointsDecoder(Decoder):
    """Decoder for keypoint frames: protobuf deserialization by default, or
    the custom bit-packed format when JACOBIAN_BITS is set in the env."""
    @staticmethod
    def _convert(data: bytes, width: int) -> bytes:
        # Required by the Decoder interface; unused for keypoint data.
        pass # pragma: no cover
    def decode(self, encoded_frame: JitterFrame) -> List[KeypointsFrame]:
        """Decode one jitter-buffer frame into a single KeypointsFrame."""
        keypoint_str = encoded_frame.data
        if NUM_JACOBIAN_BITS == -1:
            # Protobuf path: full-precision serialization.
            keypoint_info_struct = KeypointInfo()
            keypoint_info_struct.ParseFromString(keypoint_str)
            assert(keypoint_info_struct.IsInitialized())
            keypoint_dict = keypoint_struct_to_dict(keypoint_info_struct)
        else:
            # Custom path: pts is not transmitted, so a dummy is substituted.
            keypoint_dict = custom_decode(keypoint_str)
            keypoint_dict['pts'] = DUMMY_PTS
        frame = KeypointsFrame(keypoint_dict, keypoint_dict['pts'], keypoint_dict['index'])
        return [frame]
class KeypointsEncoder(Encoder):
    """Encoder for keypoint frames: protobuf serialization by default, or
    the custom bit-packed format when JACOBIAN_BITS is set in the env."""
    @staticmethod
    def _convert(data: bytes, width: int) -> bytes:
        # Required by the Encoder interface; unused for keypoint data.
        pass # pragma: no cover
    def __init__(self) -> None:
        pass
    def encode(
        self, frame, force_keyframe: bool = False
    ) -> Tuple[List[bytes], int]:
        """Serialize one KeypointsFrame; returns ([payload], timestamp).

        force_keyframe is accepted for interface compatibility but has no
        effect on keypoint data.
        """
        timestamp = frame.pts
        keypoint_dict = frame.data
        # pts/index are folded into the payload so the decoder can restore them.
        keypoint_dict['pts'] = frame.pts
        keypoint_dict['index'] = frame.index
        if NUM_JACOBIAN_BITS == -1:
            keypoint_info_struct = keypoint_dict_to_struct(keypoint_dict)
            assert(keypoint_info_struct.IsInitialized())
            data = keypoint_info_struct.SerializeToString()
        else:
            data = custom_encode(keypoint_dict)
        return [data], timestamp
|
import random
import pandas as pd
import requests
import us
from can_tools.scrapers import variables
from can_tools.scrapers.base import ALL_STATES_PLUS_TERRITORIES, CMU
from can_tools.scrapers.official.base import FederalDashboard
def _lookup(location):
if location == "US":
return 0
return int(us.states.lookup(location).fips)
class CDCStateVaccine(FederalDashboard):
    """Scraper for state-level vaccine metrics from the CDC COVID data
    tracker's vaccination_data AJAX endpoint."""
    has_location = True
    location_type = "state"
    source = "https://covid.cdc.gov/covid-data-tracker/#vaccinations"
    source_name = "Centers for Disease Control and Prevention"
    provider = "cdc"
    # Maps CDC payload column names to CAN variable definitions.
    variables = {
        "Doses_Distributed": variables.TOTAL_VACCINE_DISTRIBUTED,
        "Administered_Dose1_Recip": variables.INITIATING_VACCINATIONS_ALL,
        "Series_Complete_Yes": variables.FULLY_VACCINATED_ALL,
        "Doses_Administered": variables.TOTAL_DOSES_ADMINISTERED_ALL,
    }
    def fetch(self, test=False):
        """Fetch the raw vaccination JSON payload from the CDC tracker."""
        fetcher_url = (
            "https://covid.cdc.gov/covid-data-tracker/COVIDData/"
            "getAjaxData?id=vaccination_data"
        )
        response = requests.get(fetcher_url)
        return response.json()
    def _filter_rows(self, df):
        # Keep only rows whose Location is a state/territory abbreviation
        # (drops the "US" aggregate and other non-state rows).
        state_abbr_list = [x.abbr for x in ALL_STATES_PLUS_TERRITORIES]
        return df.loc[df["Location"].isin(state_abbr_list), :]
    def normalize(self, data):
        """Convert the fetched JSON into the long-format variable frame."""
        # Read data in
        df = pd.DataFrame.from_records(data["vaccination_data"])
        # Set date
        df["dt"] = pd.to_datetime(df["Date"])
        # Only keep states and set fips codes
        df = self._filter_rows(df)
        df.loc[:, "location"] = df["Location"].map(_lookup)
        return self._reshape_variables(df, self.variables)
class CDCUSAVaccine(CDCStateVaccine):
    """National (US-level) variant of the CDC vaccine scraper."""

    location_type = "nation"

    def _filter_rows(self, df):
        # Keep only the national aggregate row.
        return df.loc[df["Location"] == "US"]
def one_time_backfill_usa_vaccine(
    csv_path="/home/sglyon/Downloads/trends_in_number_of_covid19_vaccinations_in_the_us.csv",
):
    """One-off backfill of national vaccine data from a CDC "trends" CSV export.

    Args:
        csv_path: path to the downloaded CDC trends CSV. The default is the
            original author's local download location (kept for backward
            compatibility); pass an explicit path anywhere else.

    Returns:
        A long-format DataFrame of initiating/fully-vaccinated cumulative
        counts at the national level (location 0).
    """
    df = pd.read_csv(csv_path, skiprows=2)
    # Keep only administered-basis national rows.
    filtered = df.loc[(df["Date Type"] == "Admin") & (df["Program"] == "US")]
    variable_map = {
        "People with at least One Dose Cumulative": variables.INITIATING_VACCINATIONS_ALL,
        "People Fully Vaccinated Cumulative": variables.FULLY_VACCINATED_ALL,
    }
    # Reuse the scraper's reshaping helpers rather than re-implementing them.
    d = CDCUSAVaccine()
    cols = list(variable_map.keys()) + ["Date"]
    df = (
        filtered.loc[:, cols]
        .assign(location=0)
        .pipe(
            d._rename_or_add_date_and_location,
            location_column="location",
            date_column="Date",
        )
        .pipe(d._reshape_variables, variable_map)
    )
    return df
|
<filename>src/actions/place-on-triangle/PlaceOnTriangle.ts<gh_stars>10-100
import Phaser from 'phaser'
export default class PlaceOnTriangle extends Phaser.Scene
{
    preload()
    {
        this.load.image('ball', '/assets/sprites/chunk.png')
    }

    create()
    {
        // Build the triangle, then distribute 64 ball sprites evenly
        // along its perimeter.
        const triangle = this.createTriangle('right')

        // `const` instead of `var`: block scoping matches the ES6 class
        // style used throughout this file.
        const group = this.add.group()
        group.createMultiple({ key: 'ball', frameQuantity: 64 })

        Phaser.Actions.PlaceOnTriangle(group.getChildren(), triangle)
    }

    private createTriangle(type: string)
    {
        switch (type)
        {
            case 'right':
                return Phaser.Geom.Triangle.BuildRight(200, 400, 300, 200)

            // Unknown types intentionally fall through to equilateral.
            default:
            case 'equilateral':
                return Phaser.Geom.Triangle.BuildEquilateral(400, 100, 380)
        }
    }
}
|
#!/bin/bash
# Are we running as root?
if [[ $EUID -ne 0 ]]; then
	echo "This script must be run as root. Did you leave out sudo?"
	# Exit non-zero so callers can detect the failed privilege check
	# (a bare `exit` previously returned 0, signalling success).
	exit 1
fi
# Generate a 10-year self-signed certificate for $DOMAIN in the current
# directory, producing $DOMAIN.key and $DOMAIN.crt. A temporary key
# password ("test") is used and stripped before signing.
function getSelfSignSSL() {
	openssl genrsa -des3 -passout pass:test -out "$DOMAIN.key" 1024
	echo "Create server certificate signing request..."
	SUBJECT="/C=US/ST=Mars/L=iTranswarp/O=iTranswarp/OU=iTranswarp/CN=$DOMAIN"
	# Quote $SUBJECT and $DOMAIN expansions so a value containing spaces or
	# glob characters cannot be word-split by the shell.
	openssl req -passin pass:test -new -subj "$SUBJECT" -key "$DOMAIN.key" -out "$DOMAIN.csr"
	echo "Remove password..."
	mv "$DOMAIN.key" "$DOMAIN.origin.key"
	openssl rsa -passin pass:test -in "$DOMAIN.origin.key" -out "$DOMAIN.key"
	echo "Sign SSL certificate..."
	openssl x509 -req -days 3650 -in "$DOMAIN.csr" -signkey "$DOMAIN.key" -out "$DOMAIN.crt"
}
# Domain input (when using a domain, add the A record first).
echo "使用域名请先增加A解析"
DOMAIN="{{domain}}"
# Certificate mode: 1 = self-signed, 2 = Let's Encrypt (acme.sh).
ssl="{{ssl}}"
filepath=$(pwd)
# Environment installation.
apt-get install -y apache2 certbot openssl python git
a2enmod ssl rewrite proxy proxy_http
a2ensite default-ssl.conf
service apache2 stop
# Obtain the certificate according to the selected mode.
if [[ "$ssl" = "1" ]]; then
getSelfSignSSL
cert=${filepath}"/$DOMAIN.crt"
privkey=${filepath}"/$DOMAIN.key"
elif [[ "$ssl" = "2" ]]; then
acme.sh --issue -d $DOMAIN --standalone
acme.sh --install-cert -d $DOMAIN \
--cert-file /opt/$DOMAIN.crt \
--key-file /opt/$DOMAIN.key \
--ca-file /opt/ca.crt
cert="/opt/$DOMAIN.crt"
privkey="/opt/$DOMAIN.key"
else
echo "ssl选择错误"
exit
fi
# Write the Apache vhosts for ports 80 and 443.
# The heredoc delimiter is quoted so ${APACHE_LOG_DIR} reaches the config
# file literally: it is an Apache envvar (set in /etc/apache2/envvars), not
# a shell variable here — the unquoted EOF expanded it to an empty string
# and wrote broken log paths like "/error.log" into the config.
cat > /etc/apache2/sites-enabled/000-default.conf <<'EOF'
<VirtualHost *:80>
    ServerAdmin webmaster@localhost
    DocumentRoot /var/www/html
    <Directory /var/www/html>
        Options Indexes FollowSymLinks MultiViews
        AllowOverride All
        Order allow,deny
        allow from all
    </Directory>
    ErrorLog ${APACHE_LOG_DIR}/error.log
    CustomLog ${APACHE_LOG_DIR}/access.log combined
</VirtualHost>
EOF
# SSL vhost, written in three parts: static header (literal heredoc),
# the certificate paths (echoed so $cert/$privkey DO expand), and the
# static footer. Both heredoc delimiters are quoted so ${APACHE_LOG_DIR}
# stays literal for Apache instead of expanding to empty in this shell.
cat > /etc/apache2/sites-enabled/default-ssl.conf <<'EOF'
<IfModule mod_ssl.c>
    <VirtualHost _default_:443>
        ServerAdmin webmaster@localhost
        DocumentRoot /var/www/html
        ErrorLog ${APACHE_LOG_DIR}/error.log
        CustomLog ${APACHE_LOG_DIR}/access.log combined
        <Directory /var/www/html>
            Options Indexes FollowSymLinks
            AllowOverride All
            Require all granted
        </Directory>
        SSLEngine on
        SSLProxyEngine On
        SSLProxyVerify none
        SSLProxyCheckPeerCN off
        SSLProxyCheckPeerName off
EOF
echo "        SSLCertificateFile $cert" >>/etc/apache2/sites-enabled/default-ssl.conf
echo "        SSLCertificateKeyFile $privkey" >>/etc/apache2/sites-enabled/default-ssl.conf
cat >> /etc/apache2/sites-enabled/default-ssl.conf <<'EOF'
        <FilesMatch "\.(cgi|shtml|phtml|php)$">
            SSLOptions +StdEnvVars
        </FilesMatch>
        <Directory /usr/lib/cgi-bin>
            SSLOptions +StdEnvVars
        </Directory>
    </VirtualHost>
</IfModule>
EOF
# Generate the mod_rewrite redirector rules from the Cobalt Strike C2
# profile and install them as the web root's .htaccess.
git clone https://github.com/Tycx2ry/cs2modrewrite.git
cd cs2modrewrite
c2profile={{c2_profile}}
cs2ServerIP={{cs2_server_ip}}
redirect={{redirect}}
python cs2modrewrite.py -i $c2profile -c $cs2ServerIP -r $redirect > /var/www/html/.htaccess
service apache2 start
# service apache2 force-reload
|
<reponame>yichao0803/crthcrp
package com.bjgoodwill.dao.base;
import java.util.List;
import org.apache.ibatis.annotations.Param;
import com.bjgoodwill.entity.TStudent;
/**
 * Base MyBatis mapper for TStudent persistence operations.
 *
 * @author Zhangyichao
 */
public interface TStudentBaseMapper {
    /** Inserts a student row; returns the mapper's int result (typically the affected row count). */
    int insertTStudent(TStudent object);
    /** Updates a student row; returns the mapper's int result (typically the affected row count). */
    int updateTStudent(TStudent object);
    /** Queries students using the example object as criteria. */
    List<TStudent> queryTStudent(TStudent object);
    /** Like queryTStudent but returns a single (first) match. */
    TStudent queryTStudentLimit1(TStudent object);
}
|
#!/bin/bash -l
#PBS -l walltime=23:59:00,nodes=1:ppn=24:gpus=2,mem=16gb
#PBS -m abe
#PBS -N 120018129_pgml_source
#PBS -o 120018129_pgml_source.stdout
#PBS -q k40
# Load cluster-specific environment (provided alongside this script;
# contents not visible here).
source takeme_source.sh
# Activate the conda environment for the training run.
source activate mtl_env
# Train the source model for identifier 120018129 (matches the job name).
python train_source_model.py 120018129
|
#!/bin/bash -e
# Train and evaluate DQA matching models on synthetic diagram features,
# writing models, logs and loss reports under the experiment directory.
DIAGRAMS="data/dqa/diagrams.json"
DIAGRAM_FEATURES="data/dqa/diagram_features_synthetic.json"
OUT_DIR="experiments/dqa/output/"
EXPERIMENT_NAME="distances_learned"
EXPERIMENT_DIR="$OUT_DIR/$EXPERIMENT_NAME/"
MATCHING_MODEL="$EXPERIMENT_DIR/matching_model.ser"
INDEPENDENT_MODEL="$EXPERIMENT_DIR/independent_model.ser"
BINARY_MATCHING_MODEL="$EXPERIMENT_DIR/binary_matching_model.ser"
mkdir -p $EXPERIMENT_DIR
echo "Training binary_matching model..."
sbt "run-main org.allenai.dqa.matching.TrainMatchingCli --binaryFactors --diagrams $DIAGRAMS --diagramFeatures $DIAGRAM_FEATURES --modelOut $BINARY_MATCHING_MODEL" > $EXPERIMENT_DIR/binary_matching_train_log.txt
echo "Testing binary_matching model..."
sbt "run-main org.allenai.dqa.matching.TestMatchingCli --diagrams $DIAGRAMS --diagramFeatures $DIAGRAM_FEATURES --model $BINARY_MATCHING_MODEL --lossJson $EXPERIMENT_DIR/binary_matching_loss.json" > $EXPERIMENT_DIR/binary_matching_test_log.txt
python experiments/dqa/scripts/visualize_loss.py $EXPERIMENT_DIR/binary_matching_loss.json $EXPERIMENT_DIR/binary_matching_loss.html
# NOTE(review): the matching and independent runs below are commented out,
# so the following echo lines report work that is not actually performed.
echo "Training matching model..."
# sbt "run-main org.allenai.dqa.matching.TrainMatchingCli --diagrams $DIAGRAMS --diagramFeatures $DIAGRAM_FEATURES --modelOut $MATCHING_MODEL" > $EXPERIMENT_DIR/matching_train_log.txt
echo "Testing matching model..."
# sbt "run-main org.allenai.dqa.matching.TestMatchingCli --diagrams $DIAGRAMS --diagramFeatures $DIAGRAM_FEATURES --model $MATCHING_MODEL --lossJson $EXPERIMENT_DIR/matching_loss.json" > $EXPERIMENT_DIR/matching_test_log.txt
echo "Training independent model..."
# sbt "run-main org.allenai.dqa.matching.TrainMatchingCli --matchIndependent --diagrams $DIAGRAMS --diagramFeatures $DIAGRAM_FEATURES --modelOut $INDEPENDENT_MODEL" > $EXPERIMENT_DIR/independent_train_log.txt
echo "Testing independent model..."
# sbt "run-main org.allenai.dqa.matching.TestMatchingCli --diagrams $DIAGRAMS --diagramFeatures $DIAGRAM_FEATURES --model $INDEPENDENT_MODEL --lossJson $EXPERIMENT_DIR/independent_loss.json" > $EXPERIMENT_DIR/independent_test_log.txt
|
#! /usr/bin/env bash
# Download pinned versions of kind, kubectl, kustomize and
# integration-tester into a caller-supplied install directory.
set -o errexit
set -o nounset
set -o pipefail
# Pinned tool versions installed by this script.
readonly KUSTOMIZE_VERS="v3.8.6"
readonly KUBECTL_VERS="v1.19.2"
readonly KIND_VERS="v0.9.0"
readonly INTEGRATION_TESTER_VERS="5.0.0"
readonly PROGNAME=$(basename $0)
# The curl binary can be overridden via $CURL (e.g. for proxies).
readonly CURL=${CURL:-curl}
# Google storage is case sensitive, so we need to lowercase the OS.
readonly OS=$(uname | tr '[:upper:]' '[:lower:]')
# Print a one-line usage synopsis.
usage() {
    echo "Usage: $PROGNAME INSTALLDIR"
}
# download URL TARGET — fetch URL to TARGET with a progress bar,
# following redirects.
download() {
    local -r url="$1"
    local -r target="$2"
    echo Downloading "$target" from "$url"
    ${CURL} --progress-bar --location --output "$target" "$url"
}
# Exactly one argument (the install directory) is required; create it and
# resolve to an absolute path.
case "$#" in
  "1")
    mkdir -p "$1"
    readonly DESTDIR=$(cd "$1" && pwd)
    ;;
  *)
    usage
    exit 64
    ;;
esac
# kind: single static binary.
download \
    "https://github.com/kubernetes-sigs/kind/releases/download/${KIND_VERS}/kind-${OS}-amd64" \
    "${DESTDIR}/kind"
chmod +x "${DESTDIR}/kind"
# kubectl: single static binary.
download \
    "https://storage.googleapis.com/kubernetes-release/release/${KUBECTL_VERS}/bin/${OS}/amd64/kubectl" \
    "${DESTDIR}/kubectl"
chmod +x "${DESTDIR}/kubectl"
# kustomize: shipped as a tarball; extract only the binary.
download \
    "https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2F${KUSTOMIZE_VERS}/kustomize_${KUSTOMIZE_VERS}_${OS}_amd64.tar.gz" \
    "${DESTDIR}/kustomize.tgz"
tar -C "${DESTDIR}" -xf "${DESTDIR}/kustomize.tgz" kustomize
rm "${DESTDIR}/kustomize.tgz"
# integration-tester: shipped as a tarball.
download \
    "https://github.com/projectcontour/integration-tester/releases/download/v${INTEGRATION_TESTER_VERS}/integration-tester_${INTEGRATION_TESTER_VERS}_${OS}_x86_64.tar.gz" \
    "${DESTDIR}/integration-tester.tgz"
tar -C "${DESTDIR}" -xf "${DESTDIR}/integration-tester.tgz"
rm "${DESTDIR}/integration-tester.tgz"
|
// Set up Google Maps
var map;           // google.maps.Map instance, created in initializeMap()
var numTracks = 0; // running count of polylines drawn (used as checkbox index)
var numRoutes = 0; // running count of route groups (selects the stroke color)
// http://en.wikipedia.org/wiki/Web_colors
var strokeColors = [
    '#ff0000', // red
    '#00ff00', // lime
    '#0000ff', // blue
    '#800000', // maroon
    '#000000', // black
    '#800080', // purple
    '#ff00ff', // fuchsia
    '#808000', // olive
    '#ffffff', // white
    '#00ffff', // aqua
];
var paths = [];  // one google.maps.Polyline per track, indexed by track number
var allLat = []; // latitudes of every point of every track (heatmap input)
var allLon = []; // longitudes parallel to allLat
var heatmap;     // google.maps.visualization.HeatmapLayer, created lazily
// Create the map centered on (lat_center, lon_center) — globals provided
// elsewhere (presumably by the page template; confirm against caller).
function initializeMap() {
    var options = {
        center: new google.maps.LatLng(lat_center, lon_center),
        zoom: 8
    };
    map = new google.maps.Map(document.getElementById("map-canvas"), options);
}
//google.maps.event.addDomListener(window, 'load', initialize);
// Build a heatmap layer from every point collected so far and show it.
function drawHeatmap() {
    console.log('Drawing heatmap ' + allLat.length);
    var points = [];
    for (var i = 0; i < allLat.length; i++) {
        points.push(new google.maps.LatLng(allLat[i], allLon[i]));
    }
    heatmap = new google.maps.visualization.HeatmapLayer({
        data: new google.maps.MVCArray(points)
    });
    heatmap.setMap(map);
}
// Show/hide the heatmap, creating it on first use.
function toggleHeatmap() {
    if (typeof heatmap === 'undefined') {
        drawHeatmap();
    } else {
        heatmap.setMap(heatmap.getMap() ? null : map);
    }
}
// Load GPS
// Fetch a JSON file shaped {route: {segment: {split: {lat: [...], lon: [...]}}}}
// and draw every split as a polyline while building the checkbox side panel.
function loadGPSTracks(f) {
    $.getJSON(f, function(route_segment_split_gps) {
        $.each(route_segment_split_gps, function(route, segment_split_gps) {
            // 'var' added: routeDiv/segmentDiv were implicit globals.
            var routeDiv = $('<div class="routeDiv">' + route + '</div>')
            console.log(route);
            $.each(segment_split_gps, function (segment, split_gps) {
                console.log('\t' + segment);
                var segmentDiv = $('<div class="segmentDiv">' + segment + '</div>')
                $.each(split_gps, function(split, gps) {
                    // Skip splits with no coordinate data.
                    if (!('lat' in gps))
                        return;
                    console.log('\t\t' + split + ': ' + gps['lat'].length);
                    var lat = gps['lat'];
                    var lon = gps['lon'];
                    // NOTE push.apply(a, b) could fail for long b?
                    allLat.push.apply(allLat, lat);
                    allLon.push.apply(allLon, lon);
                    drawGPSTrack(lat, lon, [route, segment, split]);
                    segmentDiv.append('<div class="trackPanel"><input type="checkbox" checked="true" onchange="toggleTrack(' + numTracks + ')" /><span style="color:' + strokeColors[numRoutes % strokeColors.length] + '">' + split + '</span></div>');
                    numTracks++;
                });
                routeDiv.append(segmentDiv);
            });
            addTrackPanel(routeDiv);
            numRoutes++;
        });
        //drawHeatmap();
        $('#checkAll').click(function() {
            if ($(this).html() == 'Check All') {
                $('input:checkbox').prop('checked', true);
                $(this).html('Uncheck All');
            } else {
                // Was removeProp('checked'): jQuery documents that removeProp
                // must not be used on native properties such as 'checked' --
                // once removed, the property cannot be set again. Use
                // prop('checked', false) to uncheck instead.
                $('input:checkbox').prop('checked', false);
                $(this).html('Check All');
            }
            $('input:checkbox').trigger('change');
        });
    });
}
// Append a route's panel of checkboxes to the side bar.
function addTrackPanel(routeDiv) {
    var container = $('#trackPanels');
    container.append(routeDiv);
}
// Show/hide the polyline for track number ind.
function toggleTrack(ind) {
    var target = paths[ind].getMap() ? null : map;
    paths[ind].setMap(target);
}
// Draw one polyline for a track and register its click handler.
// pathInfo is [route, segment, split]; joined and shown in #pathInfo on click.
function drawGPSTrack(lat, lon, pathInfo) {
    var coords = [];
    $.each(lat, function(ind, val) {
        coords.push(new google.maps.LatLng(val, lon[ind]));
    });
    var path = new google.maps.Polyline({
        path: coords,
        geodesic: true,
        // Wrap around the palette: bare strokeColors[numRoutes] yields
        // undefined once there are more routes than colors, and disagrees
        // with the modulo indexing the panel labels use.
        strokeColor: strokeColors[numRoutes % strokeColors.length],
        strokeOpacity: 0.7,
        strokeWeight: 3
    });
    path.setMap(map);
    paths.push(path);
    google.maps.event.addDomListener(path, 'click', function() {
        $('#pathInfo').html(pathInfo.join(' / '));
    });
}
// Onload: build the map, then fetch and render the tracks.
$(function() {
    initializeMap();
    loadGPSTracks('gps_tracks.json');
});
|
#!/bin/bash

# Resolve the directory containing this script, following symlinks.
DIR="$(dirname "$(readlink -f "$0")")"

# Set common options for the 802.15.4 echo-server sample build.
export SAMPLE_DIR=samples/net/sockets/echo_server
export PATCHES="${PATCHES:-} ieee802154_rf2xx_size_check.patch wdt_sam_watchdog_callback_check.patch"
export OVERLAYS=overlay-802154.conf
# Disable the shell-related config options for this build.
export EXTRA_DEFINES="-DCONFIG_SHELL=n -DCONFIG_NET_SHELL=n -DCONFIG_NET_L2_IEEE802154_SHELL=n -DCONFIG_NET_SHELL_DYN_CMD_COMPLETION=n "

$DIR/docker_build_sample.sh
|
import { Observable } from 'rxjs';
import { FullAppData } from '@openchannel/angular-common-components/src/lib/common-components';
/**
 * Abstraction over the app search backend used by the UI.
 */
export abstract class AppsSearchService {
    /**
     * Load the default set of apps.
     * @param existsAppIDs IDs of apps already present — presumably excluded
     *                     from the result; confirm against implementations.
     */
    abstract loadDefaultApps(existsAppIDs: string[]): Observable<FullAppData[]>;

    /**
     * Search apps by free text.
     * @param existsAppIDs apps already present (note: unlike loadDefaultApps
     *                     this takes full FullAppData objects, not IDs)
     * @param searchText the user's query string
     */
    abstract appsSearch(existsAppIDs: FullAppData[], searchText: string): Observable<FullAppData[]>;
}
|
//
// Ryu
//
// Copyright (C) 2017 <NAME>
// All Rights Reserved.
//
// See the LICENSE file for details about the license covering
// this source code file.
//
#include "rect.h"
namespace ryu::core {
rect::rect(
int32_t left,
int32_t top,
int32_t width,
int32_t height) : _top(top),
_left(left),
_width(width),
_height(height) {
}
rect::rect(const SDL_Rect& sdl_rect) {
_left = sdl_rect.x;
_top = sdl_rect.y;
_width = sdl_rect.w;
_height = sdl_rect.h;
}
int32_t rect::top() const {
return _top;
}
void rect::top(int32_t y) {
_top = y;
}
int32_t rect::left() const {
return _left;
}
void rect::left(int32_t x) {
_left = x;
}
void rect::width(int32_t w) {
_width = w;
}
int32_t rect::width() const {
return _width;
}
int32_t rect::right() const {
return _left + _width;
}
int32_t rect::height() const {
return _height;
}
void rect::height(int32_t h) {
_height = h;
}
int32_t rect::bottom() const {
return _top + _height;
}
rect& rect::pos(int32_t left, int32_t top) {
_left = left;
_top = top;
return *this;
}
void rect::inflate(int32_t dx, int32_t dy) {
_left -= dx;
_width += dx;
_top -= dy;
_height += dy;
}
void rect::deflate(int32_t dx, int32_t dy) {
_left += dx;
_width -= dx;
_top += dy;
_height -= dy;
}
bool rect::contains(const rect& rect) const {
return rect.right() < right()
&& rect.left() > left()
&& rect.top() > top()
&& rect.bottom() < bottom();
}
bool rect::intersects(const rect& rect) const {
return left() <= rect.right()
&& right() >= rect.left()
&& top() >= rect.bottom()
&& bottom() <= rect.top();
}
rect& rect::size(int32_t width, int32_t height) {
_width = width;
_height = height;
return *this;
}
bool rect::contains(int32_t x, int32_t y) const {
return x < right() && x > left() && y < bottom() && y > top();
}
rect rect::center_inside(const rect& target, bool scale) {
int32_t width = _width;
int32_t height = _height;
int32_t aspect_ratio = width / height;
int32_t target_width = target.width();
int32_t target_height = target.height();
int32_t target_aspect_ratio = target_width / target_height;
int32_t new_height = height;
int32_t new_width = width;
int32_t xstart = _left;
int32_t ystart = _top;
if (target_aspect_ratio > aspect_ratio) {
if (scale) {
new_width = target_width * (1 / (target_height / height));
xstart = _left - ((new_width / 2) - (width / 2));
} else {
new_height = (int) (width / target_width * target_height);
ystart = ((height - new_height) / 2) + _top;
}
} else if (target_aspect_ratio < aspect_ratio) {
if (scale) {
new_height = target_height * (1 / (target_width / width));
ystart = _top - ((new_height / 2) - (height / 2));
} else {
new_width = (height / target_height * target_width);
xstart = ((width - new_width) / 2) + _left;
}
}
return rect(xstart, ystart, new_width, new_height);
}
}
|
# Pre-commit hook fragment: run JSHint over the staged JavaScript files.
# Relies on the gitflow_* helpers, HOOKS_DIR and GREY being defined by the
# sourcing hook framework (this file is sourced, hence the bare 'return').
if [ $(gitflow_count_commit_files js) -eq 0 ] ; then
    return 0
fi

JSHINT="jshint"

h1 "JSHint module"
ERROR=0
for file in $(gitflow_commit_files js); do
    # First pass only detects whether any 'error' line is present...
    if $JSHINT --config="$HOOKS_DIR"/jshint.json $file 2>&1 | grep 'error' >/dev/null ; then
        gitflow_fail $file
        # ...second pass prints the findings indented, dropping the two
        # trailing summary lines.
        $JSHINT --config="$HOOKS_DIR"/jshint.json $file | sed "s/^/ ${GREY}--> /" | sed '$ d' | sed '$ d'
        ERROR=1
    else
        gitflow_ok $file
    fi
done
return $ERROR
|
<reponame>nartc/tnc<filename>src/layouts/index.tsx
import CssBaseline from "@material-ui/core/CssBaseline";
import { ThemeProvider } from "@material-ui/core/styles";
import { ReplaceComponentRendererArgs } from "gatsby";
import React, { FC, ReactElement, useMemo } from "react";
import Togglers from "../components/togglers";
import { LanguageChangerProvider } from "../contexts/language-changer-context";
import {
ThemeChangerProvider,
useThemeChangerContext,
} from "../contexts/theme-changer-context";
import useLanguageChange from "../utils/hooks/useLanguageChange";
import buildTheme from "../utils/mui-theme";
type LayoutProps = {
    // The page element rendered by Gatsby's replaceComponentRenderer.
    children: ReactElement<ReplaceComponentRendererArgs["props"]>;
};

// Inner layout: applies the MUI theme selected via the theme-changer context,
// reacts to language changes, and renders the togglers above the page.
const Layout: FC<LayoutProps> = ({ children }) => {
    const { theme } = useThemeChangerContext();
    // usePreferredColorScheme(setTheme);
    // Rebuild the MUI theme object only when the selected theme changes.
    const muiTheme = useMemo(() => buildTheme(theme), [theme]);
    useLanguageChange(children.props);
    return (
        <ThemeProvider theme={muiTheme}>
            <CssBaseline />
            <Togglers />
            {children}
        </ThemeProvider>
    );
};
// Outer layout: derives the initial language from the page key and mounts
// the theme and language providers around the inner Layout.
const LayoutWithThemeChanger: FC = ({ children }) => {
    const pageKey = (children as any).key;
    // Pages under /vi are Vietnamese; everything else defaults to English.
    const initialLang = pageKey && pageKey.includes("/vi") ? "vi" : "en";
    return (
        <ThemeChangerProvider>
            <LanguageChangerProvider initialLang={initialLang}>
                <Layout>{children as any}</Layout>
            </LanguageChangerProvider>
        </ThemeChangerProvider>
    );
};
export default LayoutWithThemeChanger;
|
import os
from dotenv import load_dotenv
import requests
class WeatherFetcher:
    """Fetches current weather for a city from the OpenWeatherMap API."""

    def __init__(self, city):
        # City name used in the API query (e.g. "Madrid").
        self.city = city

    def fetch_weather(self):
        """Return the current-weather JSON dict for ``self.city``, or None.

        Reads OPENWEATHERMAP_API_KEY from the environment (a .env file is
        loaded first).  Returns None for any non-200 response or network
        failure, matching the original None-on-failure contract.
        """
        load_dotenv()  # Load environment variables from .env file
        api_key = os.getenv("OPENWEATHERMAP_API_KEY")  # API key from the environment
        base_url = "http://api.openweathermap.org/data/2.5/weather"
        # Let requests build the query string so the city name is URL-encoded
        # (the previous f-string broke for cities containing spaces/accents).
        params = {
            "q": self.city,
            "units": "metric",
            "lang": "sp",
            "APPID": api_key,
        }
        try:
            # A timeout keeps the call from hanging forever on a dead link.
            response = requests.get(base_url, params=params, timeout=10)
        except requests.RequestException:
            return None  # Network-level failure: same None contract as HTTP errors
        if response.status_code == 200:  # Check if the request was successful
            return response.json()  # JSON response with the weather information
        return None  # Non-200 response
|
<reponame>dubizzle/helios<gh_stars>0
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.helios.servicescommon.coordination;
import com.google.common.base.Predicate;
import com.google.common.base.Suppliers;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractIdleService;
import com.fasterxml.jackson.core.type.TypeReference;
import com.spotify.helios.servicescommon.DefaultReactor;
import com.spotify.helios.servicescommon.PersistentAtomicReference;
import com.spotify.helios.servicescommon.Reactor;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import static com.google.common.collect.Lists.reverse;
import static com.spotify.helios.servicescommon.coordination.ZooKeeperOperations.check;
import static com.spotify.helios.servicescommon.coordination.ZooKeeperOperations.delete;
/**
 * Removes ZooKeeper nodes in the background, surviving restarts.
 *
 * <p>Paths submitted via {@link #remove(String)} are journalled to an on-disk
 * "front" list, drained by a reactor task into an on-disk "back" list, and
 * each backed path is deleted once {@code predicate} approves its node.
 * Persisting both lists means pending removals are retried after a process
 * restart.
 */
public class ZooKeeperPersistentNodeRemover extends AbstractIdleService {

  private static final Logger log = LoggerFactory.getLogger(ZooKeeperPersistentNodeRemover.class);

  private static final boolean DEFAULT_RECURSIVE = false;

  // Reactor retry interval while work is pending.
  private static final long RETRY_INTERVAL_MILLIS = 5000;

  public static final TypeReference<List<String>> PATHS_TYPE =
      new TypeReference<List<String>>() {};
  private static final List<String> EMPTY_PATHS = Collections.emptyList();

  private final ZooKeeperClientProvider provider;
  private final Reactor reactor;

  // Incoming paths, journalled before being drained to 'back'.
  private final PersistentAtomicReference<List<String>> front;
  // Paths whose nodes are pending deletion.
  private final PersistentAtomicReference<List<String>> back;
  private final Predicate<Node> predicate;

  private final boolean recursive;

  // Guards mutation of 'front' (writers in remove() and the reactor).
  private final Object lock = new Object() {};

  public ZooKeeperPersistentNodeRemover(final String name, final ZooKeeperClientProvider provider,
                                        final Path stateFile, final Predicate<Node> predicate)
      throws IOException, InterruptedException {
    this(name, provider, stateFile, predicate, DEFAULT_RECURSIVE);
  }

  /**
   * @param name      reactor name
   * @param provider  source of ZooKeeper clients
   * @param stateFile base path for the persisted ".front"/".back" lists
   * @param predicate decides whether an inspected node should be removed
   * @param recursive when true, delete the node's subtree as well
   */
  public ZooKeeperPersistentNodeRemover(final String name, final ZooKeeperClientProvider provider,
                                        final Path stateFile, final Predicate<Node> predicate,
                                        final boolean recursive)
      throws IOException, InterruptedException {
    this.provider = provider;
    this.predicate = predicate;
    this.front = PersistentAtomicReference.create(stateFile.toString() + ".front", PATHS_TYPE,
                                                  Suppliers.ofInstance(EMPTY_PATHS));
    this.back = PersistentAtomicReference.create(stateFile.toString() + ".back", PATHS_TYPE,
                                                 Suppliers.ofInstance(EMPTY_PATHS));
    this.reactor = new DefaultReactor(name, new Update(), RETRY_INTERVAL_MILLIS);
    this.recursive = recursive;
  }

  /**
   * Schedules {@code path} for removal. Blocks (retrying every second) until
   * the path has been durably journalled, then signals the reactor.
   */
  public void remove(final String path) throws InterruptedException {
    while (true) {
      try {
        synchronized (lock) {
          final Set<String> mutable = Sets.newHashSet(front.get());
          mutable.add(path);
          front.set(ImmutableList.copyOf(mutable));
        }
        break;
      } catch (IOException e) {
        log.error("Error updating front", e);
        Thread.sleep(1000);
      }
    }
    reactor.signal();
  }

  public static ZooKeeperPersistentNodeRemover create(final String name,
                                                      final ZooKeeperClientProvider provider,
                                                      final Path stateFile,
                                                      final Predicate<Node> predicate)
      throws IOException, InterruptedException {
    return new ZooKeeperPersistentNodeRemover(name, provider, stateFile, predicate);
  }

  public static ZooKeeperPersistentNodeRemover create(final String name,
                                                      final ZooKeeperClientProvider provider,
                                                      final Path stateFile,
                                                      final Predicate<Node> predicate,
                                                      final boolean recursive)
      throws IOException, InterruptedException {
    return new ZooKeeperPersistentNodeRemover(name, provider, stateFile, predicate, recursive);
  }

  @Override
  protected void startUp() throws Exception {
    reactor.startAsync().awaitRunning();
  }

  @Override
  protected void shutDown() throws Exception {
    reactor.stopAsync().awaitTerminated();
  }

  /** Reactor callback: drains 'front' into 'back', then processes 'back'. */
  private class Update implements Reactor.Callback {

    @Override
    public void run(final boolean timeout) throws InterruptedException {
      // Drain the front to the backlog
      final Set<String> backPaths = Sets.newHashSet(back.get());
      synchronized (lock) {
        if (!front.get().isEmpty()) {
          final List<String> frontPaths = front.get();
          backPaths.addAll(frontPaths);
          try {
            // Persist 'back' before clearing 'front' so a crash between the
            // two writes cannot lose a path (it may be retried, not dropped).
            back.set(ImmutableList.copyOf(backPaths));
            front.set(EMPTY_PATHS);
          } catch (IOException e) {
            log.error("Error draining front", e);
            throw Throwables.propagate(e);
          }
        }
      }

      // Remove all nodes in the backlog
      final Set<String> newBackPaths = Sets.newHashSet(backPaths);
      final ZooKeeperClient client = provider.get("persistent_remover");
      for (final String path : backPaths) {
        Node node = null;
        try {
          node = client.getNode(path);
        } catch (KeeperException.NoNodeException ignore) {
          // we're done here
          newBackPaths.remove(path);
        } catch (KeeperException.ConnectionLossException e) {
          // Connection loss aborts the whole pass; the reactor retries later.
          log.warn("ZooKeeper connection lost while inspecting node: {}", path);
          throw Throwables.propagate(e);
        } catch (KeeperException e) {
          log.error("Failed inspecting node: {}", path);
        }

        if (node != null) {
          try {
            final boolean remove;
            try {
              remove = evaluate(node);
            } catch (Exception e) {
              log.error("Condition threw exception for node: {}", e, path);
              continue;
            }
            if (remove) {
              final List<String> nodes = Lists.newArrayList();
              if (recursive) {
                // Children first (reverse listing) so deletes succeed.
                nodes.addAll(reverse(client.listRecursive(path)));
              } else {
                nodes.add(path);
              }
              // Version check guards against deleting a node that changed
              // since it was inspected.
              client.transaction(check(path, node.getStat().getVersion()),
                                 delete(nodes));
              // we're done here
              newBackPaths.remove(path);
              log.debug("Removed node: {}", path);
            }
          } catch (KeeperException.BadVersionException | KeeperException.NoNodeException ignore) {
            // we're done here
            newBackPaths.remove(path);
          } catch (KeeperException.ConnectionLossException e) {
            log.warn("ZooKeeper connection lost while removing node: {}", path);
            throw Throwables.propagate(e);
          } catch (KeeperException e) {
            log.error("Failed removing node: {}", path, e);
          }
        }
      }

      try {
        final ImmutableList<String> newBackPathsList = ImmutableList.copyOf(newBackPaths);
        if (!back.get().equals(newBackPathsList)) {
          back.set(newBackPathsList);
        }
      } catch (IOException e) {
        log.error("Error writing back", e);
        throw Throwables.propagate(e);
      }
    }
  }

  @SuppressWarnings("ConstantConditions")
  private boolean evaluate(final Node node) {
    return predicate.apply(node);
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.backward_codecs.lucene92;
import org.apache.lucene.codecs.KnnVectorsFormat;
import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat;
/** Implements the Lucene 9.2 index format for backwards compat testing */
/** Implements the Lucene 9.2 index format for backwards compat testing */
public class Lucene92RWCodec extends Lucene92Codec {

  /** Read-write HNSW format used for every field. */
  private final KnnVectorsFormat defaultKnnVectorsFormat =
      new Lucene92RWHnswVectorsFormat(
          Lucene92HnswVectorsFormat.DEFAULT_MAX_CONN,
          Lucene92HnswVectorsFormat.DEFAULT_BEAM_WIDTH);

  /** Per-field wrapper that always resolves to the default format above. */
  private final KnnVectorsFormat knnVectorsFormat =
      new PerFieldKnnVectorsFormat() {
        @Override
        public KnnVectorsFormat getKnnVectorsFormatForField(String field) {
          return defaultKnnVectorsFormat;
        }
      };

  /** Instantiates a new codec. */
  public Lucene92RWCodec() {}

  @Override
  public final KnnVectorsFormat knnVectorsFormat() {
    return knnVectorsFormat;
  }
}
|
<gh_stars>0
package ltd.dolink.arch.treenode;
import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
/**
 * Tree node wrapping an expandable/selectable payload, delegating the
 * expand/select state to the payload itself.
 */
public class SimpleTreeNode<T extends Expandable & Selectable> implements TreeNode<T> {
  @IntRange(from = 0)
  private final int depth;

  @NonNull private final T data;
  private final TreeNode<T> parent;
  private final List<TreeNode<T>> children;

  /**
   * Creates a node at {@code depth} wrapping {@code data}.
   *
   * @param depth  zero-based depth; a root (null parent) must have depth >= 0,
   *               a child must be strictly deeper than its parent
   * @param data   payload, never null
   * @param parent parent node, or null for a root
   */
  @SuppressWarnings("unchecked")
  public SimpleTreeNode(@IntRange(from = 0) int depth, @NonNull T data, TreeNode<?> parent) {
    Objects.requireNonNull(data);
    this.depth = depth;
    this.data = data;
    this.parent = (TreeNode<T>) parent;
    if (Objects.isNull(parent)) {
      if (depth < 0) {
        // Message fixed: previously "depth must >= 0, current dept %s:"
        // ("dept" typo and stray trailing colon).
        throw new IllegalArgumentException(
            String.format("depth must be >= 0, current depth: %s", depth));
      }
    } else {
      if (depth <= parent.getDepth()) {
        // Message fixed: it previously claimed "dept must < parent dept"
        // while the check actually requires depth to be GREATER than the
        // parent's depth.
        throw new IllegalArgumentException(
            String.format(
                "depth must be > parent depth, current depth:%s, parent depth:%s",
                depth, parent.getDepth()));
      }
    }
    this.children = new LinkedList<>();
  }

  // Expand/select state is delegated to the wrapped payload.
  @Override
  public boolean canExpand() {
    return getData().canExpand();
  }

  @Override
  public boolean isExpanded() {
    return getData().isExpanded();
  }

  @Override
  public void setExpand(boolean expand) {
    getData().setExpand(expand);
  }

  @Override
  public boolean canSelect() {
    return getData().canSelect();
  }

  @Override
  public boolean isSelected() {
    return getData().isSelected();
  }

  @Override
  public void setSelect(boolean select) {
    getData().setSelect(select);
  }

  @IntRange(from = 0)
  @Override
  public int getDepth() {
    return depth;
  }

  @Override
  public int getChildCount() {
    return getChildren().size();
  }

  @Override
  public TreeNode<T> getParent() {
    return parent;
  }

  /** Live, mutable child list (callers add/remove directly). */
  @Override
  public List<TreeNode<T>> getChildren() {
    return children;
  }

  @NonNull
  @Override
  public T getData() {
    return data;
  }

  @Override
  public String toString() {
    return getClass().getSimpleName() + "@" + getData();
  }
}
|
// Render a static greeting into the #content element.
const contentEl = document.getElementById('content')!;
contentEl.innerHTML = `
Hello World
`;
|
<gh_stars>0
import { arrayByClass } from '../shared/helpers.mjs';

// Switches: clicking a .switch element toggles its 'off' class.
let switches = arrayByClass('switch');
// forEach instead of map: the callback is run purely for its side effect,
// so building and discarding a result array was wasteful and misleading.
switches.forEach(switchEl => {
    switchEl.addEventListener('click', (e) => {
        e.currentTarget.classList.toggle('off');
    });
});
|
#!/bin/bash
# Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0

source env.sh

# Run the full test suite inside the test node, logging to /data/logs.
docker-compose exec daml-testnode sh -c "cd /data; ./test-all.sh > logs/test-all.log"
# Copy the logs out of the uploader container.
# NOTE(review): the tests run in daml-testnode but logs are copied from
# data-uploader — presumably both mount the same /data volume; verify.
docker cp data-uploader:/data/logs .
|
#!/usr/bin/env bash
set -euo pipefail

# Slashing tests
# For testing a particular branch of Geth repo (usually, on Circle CI)
# Usage: ci_test_exit.sh checkout <branch_of_geth_repo_to_test>
# For testing the local Geth dir (usually, for manual testing)
# Usage: ci_test_exit.sh local <location_of_local_geth_dir>

export TS_NODE_FILES=true

if [ "${1}" == "checkout" ]; then
    # Test master by default.
    BRANCH_TO_TEST=${2:-"master"}
    echo "Checking out geth at branch ${BRANCH_TO_TEST}..."
    # Quotes added so a branch name with unusual characters stays one argument.
    ../../node_modules/.bin/mocha -r ts-node/register src/e2e-tests/slashing_tests.ts --branch "${BRANCH_TO_TEST}"
elif [ "${1}" == "local" ]; then
    export GETH_DIR="${2}"
    echo "Testing using local geth dir ${GETH_DIR}..."
    # Quotes added so paths containing spaces work.
    ../../node_modules/.bin/mocha -r ts-node/register src/e2e-tests/slashing_tests.ts --localgeth "${GETH_DIR}"
else
    # Previously an unknown mode fell through silently and exited 0,
    # making CI look green without running anything.
    echo "Unknown mode '${1}'; expected 'checkout' or 'local'" >&2
    exit 1
fi
|
# Azure Pipelines step: clone sdx-platform into a clean directory under the
# agent's build directory, authenticating with the pipeline access token.
echo $AGENT_BUILDDIRECTORY
SDX_PATH=$AGENT_BUILDDIRECTORY/sdx
echo $SDX_PATH
# Remove any leftovers from a previous run before cloning.
rm -rf $SDX_PATH
git -c http.https://office.visualstudio.com.extraheader="AUTHORIZATION: bearer $SYSTEM_ACCESSTOKEN" clone https://office.visualstudio.com/ISS/_git/sdx-platform $SDX_PATH
|
<reponame>indragiek/WWDC-2014
//
// INDAboutViewController.h
// <NAME>
//
// Created by <NAME> on 2014-04-09.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#import "INDStaticCollectionViewController.h"

/**
 * View controller for the about view.
 *
 * Inherits all behavior from INDStaticCollectionViewController; this header
 * adds no members of its own.
 */
@interface INDAboutViewController : INDStaticCollectionViewController
@end
|
#!/bin/bash

# Print the usage/help text and exit with the status code given as $1.
function show_help_and_exit()
{
    echo "Usage ${SCRIPT} [options]"
    echo "    options with (*) must be provided"
    echo "    -h -?          : get this help"
    echo "    -a <True|False>: specify if autu-recover is allowed (default: True)"
    echo "    -b <master_id> : specify name of k8s master group used in k8s inventory, format: k8s_vms{msetnumber}_{servernumber}"
    echo "    -c <testcases> : specify test cases to execute (default: none, executed all matched)"
    echo "    -d <dut name>  : specify DUT name (default: DUT name associated with testbed in testbed file)"
    echo "    -e <parameters>: specify extra parameter(s) (default: none)"
    echo "    -f <tb file>   : specify testbed file (default testbed.csv)"
    echo "    -i <inventory> : specify inventory name"
    echo "    -k <file log>  : specify file log level: error|warning|info|debug (default debug)"
    echo "    -l <cli log>   : specify cli log level: error|warning|info|debug (default warning)"
    echo "    -m <method>    : specify test method group|individual|debug (default group)"
    echo "    -n <testbed>   : specify testbed name (*)"
    echo "    -o             : omit the file logs"
    echo "    -O             : run tests in input order rather than alphabetical order"
    echo "    -p <path>      : specify log path (default: logs)"
    echo "    -q <n>         : test will stop after <n> failures (default: not stop on failure)"
    echo "    -r             : retain individual file log for suceeded tests (default: remove)"
    echo "    -s <tests>     : specify list of tests to skip (default: none)"
    echo "    -t <topology>  : specify toplogy: t0|t1|any|combo like t0,any (*)"
    echo "    -u             : bypass util group"
    echo "    -x             : print commands and their arguments as they are executed"

    exit $1
}
# Derive DUT_NAME from the testbed CSV (10th column of the matching row)
# when it was not supplied with -d.
function get_dut_from_testbed_file() {
    if [[ -z ${DUT_NAME} ]]; then
        # grep reads the file directly (was a useless use of cat); quoting
        # added so file names/testbed names with spaces do not word-split.
        # NOTE(review): the pattern is a prefix match — a testbed name that
        # is a prefix of another could match the wrong row; confirm names
        # are prefix-free or anchor with a trailing comma.
        LINE=$(grep "^$TESTBED_NAME" "$TESTBED_FILE")
        IFS=',' read -ra ARRAY <<< "$LINE"
        DUT_NAME=${ARRAY[9]}
    fi
}
# Validate the required command line parameters; on any failure print what
# is missing, then show the help text and exit with a distinct code.
function validate_parameters()
{
    RET=0
    [[ -z ${DUT_NAME} ]] && { echo "DUT name (-d) is not set.."; RET=1; }
    [[ -z ${TESTBED_NAME} ]] && { echo "Testbed name (-n) is not set.."; RET=2; }
    [[ -z ${TOPOLOGY} && -z ${TEST_CASES} ]] && { echo "Neither TOPOLOGY (-t) nor test case list (-c) is set.."; RET=3; }

    if [[ ${RET} != 0 ]]; then
        show_help_and_exit ${RET}
    fi
}
# Initialize path variables and defaults for every test option; most of
# these can then be overridden by command line flags (see show_help_and_exit).
function setup_environment()
{
    SCRIPT=$0
    FULL_PATH=$(realpath ${SCRIPT})
    SCRIPT_PATH=$(dirname ${FULL_PATH})
    # Repository root (the tests directory's parent).
    BASE_PATH=$(dirname ${SCRIPT_PATH})
    LOG_PATH="logs"

    AUTO_RECOVER="True"
    BYPASS_UTIL="False"
    CLI_LOG_LEVEL='warning'
    EXTRA_PARAMETERS=""
    FILE_LOG_LEVEL='debug'
    INVENTORY="${BASE_PATH}/ansible/lab,${BASE_PATH}/ansible/veos"
    KUBE_MASTER_ID="unset"
    OMIT_FILE_LOG="False"
    RETAIN_SUCCESS_LOG="False"
    SKIP_SCRIPTS=""
    # Folders that never contain directly runnable pytest scripts
    # ("k8s" is re-enabled when a master is selected with -b).
    SKIP_FOLDERS="ptftests acstests saitests scripts k8s"
    TESTBED_FILE="${BASE_PATH}/ansible/testbed.csv"
    TEST_CASES=""
    TEST_INPUT_ORDER="False"
    TEST_METHOD='group'
    TEST_MAX_FAIL=0

    # Point ansible at the repo's config, modules and connection plugins.
    export ANSIBLE_CONFIG=${BASE_PATH}/ansible
    export ANSIBLE_LIBRARY=${BASE_PATH}/ansible/library/
    export ANSIBLE_CONNECTION_PLUGINS=${BASE_PATH}/ansible/plugins/connection
}
# Expand TEST_CASES, apply the skip lists, and assemble all pytest option
# strings used by the run_* functions.
# NOTE(review): the inline snippets use Python 2 print syntax, so 'python'
# must resolve to a Python 2 interpreter here — confirm on the host.
function setup_test_options()
{
    # If a test script is explicitly specified in pytest command line, then use `--ignore` to ignore it will not work
    # Below logic is to ensure that SKIP_FOLDERS and SKIP_SCRIPTS take precedence over the specified TEST_CASES.
    # If a test script is in both ${TEST_CASES} and ${SKIP_SCRIPTS}, the script will not be executed. This design is
    # for the scenario of specifying test scripts using pattern like `subfolder/test_*.py`. The pattern will be
    # expanded to matched test scripts by bash. Among the expanded scripts, we may want to skip a few. Then we can
    # explicitly specify the script to be skipped.
    ignores=$(python -c "print '|'.join('''$SKIP_FOLDERS'''.split())")
    if [[ -z ${TEST_CASES} ]]; then
        # When TEST_CASES is not specified, find all the possible scripts, ignore the scripts under $SKIP_FOLDERS
        all_scripts=$(find ./ -name 'test_*.py' | sed s:^./:: | grep -vE "^(${ignores})")
    else
        # When TEST_CASES is specified, ignore the scripts under $SKIP_FOLDERS
        all_scripts=""
        for test_script in ${TEST_CASES}; do
            all_scripts="${all_scripts} $(echo ${test_script} | sed s:^./:: | grep -vE "^(${ignores})")"
        done
    fi
    # Ignore the scripts specified in $SKIP_SCRIPTS
    if [[ x"${TEST_INPUT_ORDER}" == x"True" ]]; then
        TEST_CASES=$(python -c "print '\n'.join([testcase for testcase in list('''$all_scripts'''.split()) if testcase not in set('''$SKIP_SCRIPTS'''.split())])")
    else
        TEST_CASES=$(python -c "print '\n'.join(set('''$all_scripts'''.split()) - set('''$SKIP_SCRIPTS'''.split()))" | sort)
    fi

    # Options shared by the pretest/posttest and main test invocations.
    PYTEST_COMMON_OPTS="--inventory ${INVENTORY} \
                        --host-pattern ${DUT_NAME} \
                        --testbed ${TESTBED_NAME} \
                        --testbed_file ${TESTBED_FILE} \
                        --log-cli-level ${CLI_LOG_LEVEL} \
                        --log-file-level ${FILE_LOG_LEVEL} \
                        --kube_master ${KUBE_MASTER_ID} \
                        --showlocals \
                        --assert plain \
                        --show-capture no \
                        -rav"

    if [[ x"${AUTO_RECOVER}" == x"True" ]]; then
        PYTEST_COMMON_OPTS="${PYTEST_COMMON_OPTS} --allow_recover"
    fi

    for skip in ${SKIP_SCRIPTS} ${SKIP_FOLDERS}; do
        PYTEST_COMMON_OPTS="${PYTEST_COMMON_OPTS} --ignore=${skip}"
    done

    # Start from a clean log directory for each run.
    if [[ -d ${LOG_PATH} ]]; then
        rm -rf ${LOG_PATH}
    fi

    if [[ x"${OMIT_FILE_LOG}" == x"True" ]]; then
        PRET_LOGGING_OPTIONS=""
        POST_LOGGING_OPTIONS=""
        TEST_LOGGING_OPTIONS=""
    else
        mkdir -p ${LOG_PATH}

        PRET_LOGGING_OPTIONS="--junit-xml=${LOG_PATH}/pretest.xml --log-file=${LOG_PATH}/pretest.log"
        POST_LOGGING_OPTIONS="--junit-xml=${LOG_PATH}/posttest.xml --log-file=${LOG_PATH}/posttest.log"
        TEST_LOGGING_OPTIONS="--junit-xml=${LOG_PATH}/tr.xml --log-file=${LOG_PATH}/test.log"
    fi

    UTIL_TOPOLOGY_OPTIONS="--topology util"
    if [[ -z ${TOPOLOGY} ]]; then
        TEST_TOPOLOGY_OPTIONS=""
    else
        TEST_TOPOLOGY_OPTIONS="--topology ${TOPOLOGY}"
    fi

    PYTEST_UTIL_OPTS=${PYTEST_COMMON_OPTS}
    # Max failure only applicable to the test session. Not the preparation and cleanup session.
    if [[ ${TEST_MAX_FAIL} != 0 ]]; then
        PYTEST_COMMON_OPTS="${PYTEST_COMMON_OPTS} --maxfail=${TEST_MAX_FAIL}"
    fi
}
# 'debug' test method: only dump every derived setting; runs no tests.
function run_debug_tests()
{
    echo "=== Show test settings ==="
    echo "SCRIPT:                ${SCRIPT}"
    echo "FULL_PATH:             ${FULL_PATH}"
    echo "SCRIPT_PATH:           ${SCRIPT_PATH}"
    echo "BASE_PATH:             ${BASE_PATH}"

    echo "ANSIBLE_CONFIG:        ${ANSIBLE_CONFIG}"
    echo "ANSIBLE_LIBRARY:       ${ANSIBLE_LIBRARY}"
    echo "AUTO_RECOVER:          ${AUTO_RECOVER}"
    echo "BYPASS_UTIL:           ${BYPASS_UTIL}"
    echo "CLI_LOG_LEVEL:         ${CLI_LOG_LEVEL}"
    echo "EXTRA_PARAMETERS:      ${EXTRA_PARAMETERS}"
    echo "FILE_LOG_LEVEL:        ${FILE_LOG_LEVEL}"
    echo "INVENTORY:             ${INVENTORY}"
    echo "LOG_PATH:              ${LOG_PATH}"
    echo "OMIT_FILE_LOG:         ${OMIT_FILE_LOG}"
    echo "RETAIN_SUCCESS_LOG:    ${RETAIN_SUCCESS_LOG}"
    echo "SKIP_SCRIPTS:          ${SKIP_SCRIPTS}"
    echo "SKIP_FOLDERS:          ${SKIP_FOLDERS}"
    echo "TEST_CASES:            ${TEST_CASES}"
    echo "TEST_INPUT_ORDER:      ${TEST_INPUT_ORDER}"
    echo "TEST_MAX_FAIL:         ${TEST_MAX_FAIL}"
    echo "TEST_METHOD:           ${TEST_METHOD}"
    echo "TESTBED_FILE:          ${TESTBED_FILE}"
    echo "TEST_LOGGING_OPTIONS:  ${TEST_LOGGING_OPTIONS}"
    echo "TEST_TOPOLOGY_OPTIONS: ${TEST_TOPOLOGY_OPTIONS}"
    echo "PRET_LOGGING_OPTIONS:  ${PRET_LOGGING_OPTIONS}"
    echo "POST_LOGGING_OPTIONS:  ${POST_LOGGING_OPTIONS}"
    echo "UTIL_TOPOLOGY_OPTIONS: ${UTIL_TOPOLOGY_OPTIONS}"

    echo "PYTEST_COMMON_OPTS:    ${PYTEST_COMMON_OPTS}"
}
# Run the scripts marked 'pretest' to ready the DUT before the test session.
function prepare_dut()
{
    echo "=== Preparing DUT for subsequent tests ==="
    pytest ${PYTEST_UTIL_OPTS} ${PRET_LOGGING_OPTIONS} ${UTIL_TOPOLOGY_OPTIONS} ${EXTRA_PARAMETERS} -m pretest
    # Give some delay for the newly announced routes to propagate.
    sleep 120
}
# Run the scripts marked 'posttest' to restore the DUT after the test session.
function cleanup_dut()
{
    echo "=== Cleaning up DUT after tests ==="
    pytest ${PYTEST_UTIL_OPTS} ${POST_LOGGING_OPTIONS} ${UTIL_TOPOLOGY_OPTIONS} ${EXTRA_PARAMETERS} -m posttest
}
# Run every selected test case in a single pytest session.
function run_group_tests()
{
    echo "=== Running tests in groups ==="
    pytest ${TEST_CASES} ${PYTEST_COMMON_OPTS} ${TEST_LOGGING_OPTIONS} ${TEST_TOPOLOGY_OPTIONS} ${EXTRA_PARAMETERS}
}
# Run each selected test case in its own pytest session, one log/xml file
# per script. Returns 1 if any script failed (stopping early when -q is set).
function run_individual_tests()
{
    EXIT_CODE=0

    echo "=== Running tests individually ==="

    for test_script in ${TEST_CASES}; do
        if [[ x"${OMIT_FILE_LOG}" != x"True" ]]; then
            # Mirror the script's directory structure under LOG_PATH.
            test_dir=$(dirname ${test_script})
            script_name=$(basename ${test_script})
            test_name=${script_name%.py}
            if [[ ${test_dir} != "." ]]; then
                mkdir -p ${LOG_PATH}/${test_dir}
            fi
            TEST_LOGGING_OPTIONS="--log-file ${LOG_PATH}/${test_dir}/${test_name}.log --junitxml=${LOG_PATH}/${test_dir}/${test_name}.xml"
        fi
        pytest ${test_script} ${PYTEST_COMMON_OPTS} ${TEST_LOGGING_OPTIONS} ${TEST_TOPOLOGY_OPTIONS} ${EXTRA_PARAMETERS}
        ret_code=$?

        # If test passed, no need to keep its log.
        if [ ${ret_code} -eq 0 ]; then
            if [[ x"${OMIT_FILE_LOG}" != x"True" && x"${RETAIN_SUCCESS_LOG}" == x"False" ]]; then
                rm -f ${LOG_PATH}/${test_dir}/${test_name}.log
            fi
        else
            EXIT_CODE=1
            # With -q (max fail) set, stop at the first failing script.
            if [[ ${TEST_MAX_FAIL} != 0 ]]; then
                return ${EXIT_CODE}
            fi
        fi
    done

    return ${EXIT_CODE}
}
setup_environment

# Parse command line options (meanings documented in show_help_and_exit).
while getopts "h?a:b:c:d:e:f:i:k:l:m:n:oOp:q:rs:t:ux" opt; do
    case ${opt} in
        h|\? )
            show_help_and_exit 0
            ;;
        a )
            AUTO_RECOVER=${OPTARG}
            ;;
        b )
            # Selecting a k8s master also re-enables the k8s test folder.
            KUBE_MASTER_ID=${OPTARG}
            SKIP_FOLDERS=${SKIP_FOLDERS//k8s/}
            ;;
        c )
            TEST_CASES="${TEST_CASES} ${OPTARG}"
            ;;
        d )
            DUT_NAME=${OPTARG}
            ;;
        e )
            EXTRA_PARAMETERS="${EXTRA_PARAMETERS} ${OPTARG}"
            ;;
        f )
            TESTBED_FILE=${OPTARG}
            ;;
        i )
            INVENTORY=${OPTARG}
            ;;
        k )
            FILE_LOG_LEVEL=${OPTARG}
            ;;
        l )
            CLI_LOG_LEVEL=${OPTARG}
            ;;
        m )
            TEST_METHOD=${OPTARG}
            ;;
        n )
            TESTBED_NAME=${OPTARG}
            ;;
        o )
            OMIT_FILE_LOG="True"
            ;;
        O )
            TEST_INPUT_ORDER="True"
            ;;
        p )
            LOG_PATH=${OPTARG}
            ;;
        q )
            TEST_MAX_FAIL=${OPTARG}
            ;;
        r )
            RETAIN_SUCCESS_LOG="True"
            ;;
        s )
            SKIP_SCRIPTS="${SKIP_SCRIPTS} ${OPTARG}"
            ;;
        t )
            TOPOLOGY=${OPTARG}
            ;;
        u )
            BYPASS_UTIL="True"
            ;;
        x )
            set -x
            ;;
    esac
done

get_dut_from_testbed_file

# The 'debug' method only prints settings, so parameters need not be valid.
if [[ x"${TEST_METHOD}" != x"debug" ]]; then
    validate_parameters
fi
setup_test_options

if [[ x"${TEST_METHOD}" != x"debug" && x"${BYPASS_UTIL}" == x"False" ]]; then
    prepare_dut
fi
RC=0
# Dispatch to run_group_tests / run_individual_tests / run_debug_tests.
run_${TEST_METHOD}_tests || RC=$?
if [[ x"${TEST_METHOD}" != x"debug" && x"${BYPASS_UTIL}" == x"False" ]]; then
    cleanup_dut
fi
exit ${RC}
|
<gh_stars>1-10
var Boom = require('boom');
var Hapi = require('hapi');
module.exports = function() {
var server = new Hapi.Server({ debug: false });
server.connection({
host: 'localhost',
port: 3232
});
server.route([
{
method: 'POST',
path: '/api/v2/user/verify-password',
handler: function(request, reply) {
var payload = request.payload;
if ( payload.uid === 'webmaker' && payload.password === 'password' ) {
return reply({
user: {
username: 'webmaker',
id: '1',
email: '<EMAIL>'
}
})
.type('application/json');
}
if ( payload.uid === 'invalidResponse' ) {
return reply('not json');
}
reply(Boom.unauthorized('Invalid username/email or password'));
}
},
{
method: 'POST',
path: '/api/v2/user/request-reset-code',
handler: function(request, reply) {
var payload = request.payload;
if ( payload.uid === 'webmaker') {
return reply({
status: 'created'
})
.type('application/json');
}
if ( payload.uid === 'invalidResponse' ) {
return reply('not json');
}
reply(Boom.badImplementation('Login API failure'));
}
},
{
method: 'POST',
path: '/api/v2/user/reset-password',
handler: function(request, reply) {
var payload = request.payload;
if ( payload.uid === 'webmaker' ) {
if ( payload.resetCode !== 'resetCode' ) {
return reply(Boom.unauthorized('invalid code'));
}
return reply({
status: 'success'
})
.type('application/json');
}
if ( payload.uid === 'badRequest' ) {
return reply(Boom.badRequest('bad request'));
}
if ( payload.uid === 'invalidResponse' ) {
return reply('not json');
}
reply(Boom.badImplementation('Login API failure'));
}
},
{
method: 'POST',
path: '/api/v2/user/create',
handler: function(request, reply) {
var payload = request.payload;
if ( payload.user.username === 'webmaker') {
return reply({
user: {
username: 'webmaker',
email: '<EMAIL>',
prefLocale: payload.user.prefLocale || 'en-US'
}
})
.type('application/json');
}
if ( payload.user.username === 'invalidResponse' ) {
return reply('not json');
}
if ( payload.user.username === 'jsonError' ) {
return reply({
error: 'LoginAPI error'
}).code(200);
}
if ( payload.user.username === 'weakpass' ) {
return reply()
.code(400);
}
reply(Boom.badImplementation('login API failure'));
}
},
{
method: 'GET',
path: '/user/id/{id}',
handler: function(request, reply) {
var id = request.params.id;
if ( id === '1') {
return reply({
user: {
username: 'test',
id: '1',
email: '<EMAIL>'
}
})
.type('application/json');
}
if ( id === 'jsonError' ) {
return reply({
error: 'Login API error'
});
}
reply(Boom.badImplementation('login API failure'));
}
},
{
method: 'post',
path: '/api/v2/user/request',
handler: function(request, reply) {
var username = request.payload.uid;
if ( username === 'test' ) {
return reply({
status: 'Login Token Sent'
});
}
reply(Boom.badImplementation('Login Database error'));
}
},
{
method: 'post',
path: '/api/v2/user/authenticateToken',
handler: function(request, reply) {
var username = request.payload.uid;
var token = request.payload.token;
if ( username === 'test' ) {
if ( token === '<PASSWORD>' ) {
return reply(true);
}
}
reply(Boom.unauthorized('invalid username/password combination'));
}
},
{
method: 'post',
path: '/api/v2/user/enable-passwords',
handler: function(request, reply) {
var username = request.payload.uid;
var password = request.payload.password;
if ( username === 'test' ) {
if ( password === '<PASSWORD>' ) {
// success
return reply({
user: {
username: 'test'
}
});
}
}
reply(Boom.badImplementation('Error setting password'));
}
},
{
method: 'post',
path: '/api/v2/user/exists',
handler: function(request, reply) {
if ( request.payload.uid === 'test' ) {
return reply({
exists: true,
usePasswordLogin: true
});
}
reply(Boom.notFound('user does not exist'));
}
}
]);
return server;
};
|
<filename>src/Components/DashboardCompanyProfileCompletion.js
import React from "react";
import { Link } from "@reach/router";
import { X } from "react-feather";
export default function DashboardCompanyProfileCompletion({
profilePercentage,
}) {
return (
<div className="dashboard__company__container__top">
<div className="dashboard__company__container__top__close">
<X size={20} color="currentColor" />
</div>
<div className="dashboard__company__container__top__percentage">
{profilePercentage}%
</div>
<div className="dashboard__company__container__top__content">
<div className="dashboard__company__container__top__content__heading">
{profilePercentage == 100
? "Thanks for completing your profile."
: "Your profile is not complete click below to complete."}
</div>
<Link
to="/home-company/profile"
className="header__nav__btn btn__primary"
style={{
height: 35,
fontSize: 13,
background: "#71797E",
boxShadow: "none",
}}
>
Personal
</Link>
</div>
</div>
);
}
|
package config
import (
"bytes"
"errors"
"net"
"os"
"strings"
"testing"
"time"
"encoding/json"
"github.com/eugene-fedorenko/prebid-server/openrtb_ext"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
)
// TestExternalCacheURLValidate runs ExternalCache.validate() over a table of
// scheme/host/path combinations and checks that each produces the expected
// number of validation errors.
func TestExternalCacheURLValidate(t *testing.T) {
testCases := []struct {
desc string
data ExternalCache
expErrors int
}{
{
desc: "With http://",
data: ExternalCache{Host: "http://www.google.com", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "Without http://",
data: ExternalCache{Host: "www.google.com", Path: "/path/v1"},
expErrors: 0,
},
{
desc: "No scheme but '//' prefix",
data: ExternalCache{Host: "//www.google.com", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "// appears twice",
data: ExternalCache{Host: "//www.google.com//", Path: "path/v1"},
expErrors: 1,
},
{
desc: "Host has an only // value",
data: ExternalCache{Host: "//", Path: "path/v1"},
expErrors: 1,
},
{
desc: "only scheme host, valid path",
data: ExternalCache{Host: "http://", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "No host, path only",
data: ExternalCache{Host: "", Path: "path/v1"},
expErrors: 1,
},
{
desc: "No host, nor path",
data: ExternalCache{Host: "", Path: ""},
expErrors: 0,
},
{
desc: "Invalid http at the end",
data: ExternalCache{Host: "www.google.com", Path: "http://"},
expErrors: 1,
},
{
desc: "Host has an unknown scheme",
data: ExternalCache{Host: "unknownscheme://host", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "Wrong colon side in scheme",
data: ExternalCache{Host: "http//:www.appnexus.com", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "Missing '/' in scheme",
data: ExternalCache{Host: "http:/www.appnexus.com", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "host with scheme, no path",
data: ExternalCache{Host: "http://www.appnexus.com", Path: ""},
expErrors: 1,
},
{
desc: "scheme, no host nor path",
data: ExternalCache{Host: "http://", Path: ""},
expErrors: 1,
},
{
desc: "Scheme Invalid",
data: ExternalCache{Scheme: "invalid", Host: "www.google.com", Path: "/path/v1"},
expErrors: 1,
},
{
desc: "Scheme HTTP",
data: ExternalCache{Scheme: "http", Host: "www.google.com", Path: "/path/v1"},
expErrors: 0,
},
{
desc: "Scheme HTTPS",
data: ExternalCache{Scheme: "https", Host: "www.google.com", Path: "/path/v1"},
expErrors: 0,
},
}
// validate() appends onto the supplied slice; start each case from empty.
for _, test := range testCases {
errs := test.data.validate([]error{})
assert.Equal(t, test.expErrors, len(errs), "Test case threw unexpected number of errors. Desc: %s errMsg = %v \n", test.desc, errs)
}
}
// TestDefaults builds a config from an empty source and spot-checks that the
// compiled-in defaults survive the trip through viper and New().
func TestDefaults(t *testing.T) {
v := viper.New()
SetupViper(v, "")
cfg, err := New(v)
assert.NoError(t, err, "Setting up config should work but it doesn't")
cmpInts(t, "port", cfg.Port, 8000)
cmpInts(t, "admin_port", cfg.AdminPort, 6060)
cmpInts(t, "auction_timeouts_ms.max", int(cfg.AuctionTimeouts.Max), 0)
cmpInts(t, "max_request_size", int(cfg.MaxRequestSize), 1024*256)
cmpInts(t, "host_cookie.ttl_days", int(cfg.HostCookie.TTL), 90)
cmpInts(t, "host_cookie.max_cookie_size_bytes", cfg.HostCookie.MaxCookieSizeBytes, 0)
cmpStrings(t, "datacache.type", cfg.DataCache.Type, "dummy")
cmpStrings(t, "adapters.pubmatic.endpoint", cfg.Adapters[string(openrtb_ext.BidderPubmatic)].Endpoint, "https://hbopenbid.pubmatic.com/translator?source=prebid-server")
cmpInts(t, "currency_converter.fetch_interval_seconds", cfg.CurrencyConverter.FetchIntervalSeconds, 1800)
cmpStrings(t, "currency_converter.fetch_url", cfg.CurrencyConverter.FetchURL, "https://cdn.jsdelivr.net/gh/prebid/currency-file@1/latest.json")
cmpBools(t, "account_required", cfg.AccountRequired, false)
cmpInts(t, "metrics.influxdb.collection_rate_seconds", cfg.Metrics.Influxdb.MetricSendInterval, 20)
cmpBools(t, "account_adapter_details", cfg.Metrics.Disabled.AccountAdapterDetails, false)
cmpBools(t, "adapter_connections_metrics", cfg.Metrics.Disabled.AdapterConnectionMetrics, true)
cmpStrings(t, "certificates_file", cfg.PemCertsFile, "")
cmpBools(t, "stored_requests.filesystem.enabled", false, cfg.StoredRequests.Files.Enabled)
cmpStrings(t, "stored_requests.filesystem.directorypath", "./stored_requests/data/by_id", cfg.StoredRequests.Files.Path)
cmpBools(t, "auto_gen_source_tid", cfg.AutoGenSourceTID, true)
}
// fullConfig is a YAML document that populates every major config section;
// TestFullConfig asserts each value survives unmarshalling.
var fullConfig = []byte(`
gdpr:
host_vendor_id: 15
usersync_if_ambiguous: true
non_standard_publishers: ["siteID","fake-site-id","appID","agltb3B1Yi1pbmNyDAsSA0FwcBiJkfIUDA"]
ccpa:
enforce: true
lmt:
enforce: true
host_cookie:
cookie_name: userid
family: prebid
domain: cookies.prebid.org
opt_out_url: http://prebid.org/optout
opt_in_url: http://prebid.org/optin
max_cookie_size_bytes: 32768
external_url: http://prebid-server.prebid.org/
host: prebid-server.prebid.org
port: 1234
admin_port: 5678
auction_timeouts_ms:
max: 123
default: 50
cache:
scheme: http
host: prebidcache.net
query: uuid=%PBS_CACHE_UUID%
external_cache:
scheme: https
host: www.externalprebidcache.net
path: /endpoints/cache
http_client:
max_connections_per_host: 10
max_idle_connections: 500
max_idle_connections_per_host: 20
idle_connection_timeout_seconds: 30
http_client_cache:
max_connections_per_host: 5
max_idle_connections: 1
max_idle_connections_per_host: 2
idle_connection_timeout_seconds: 3
currency_converter:
fetch_url: https://currency.prebid.org
fetch_interval_seconds: 1800
recaptcha_secret: <PASSWORD>
metrics:
influxdb:
host: upstream:8232
database: metricsdb
username: admin
password: <PASSWORD>
metric_send_interval: 30
disabled_metrics:
account_adapter_details: true
adapter_connections_metrics: true
datacache:
type: postgres
filename: /usr/db/db.db
cache_size: 10000000
ttl_seconds: 3600
adapters:
appnexus:
endpoint: http://ib.adnxs.com/some/endpoint
extra_info: "{\"native\":\"http://www.native.org/endpoint\",\"video\":\"http://www.video.org/endpoint\"}"
audienceNetwork:
endpoint: http://facebook.com/pbs
usersync_url: http://facebook.com/ortb/prebid-s2s
platform_id: abcdefgh1234
app_secret: 987abc
ix:
endpoint: http://ixtest.com/api
rubicon:
endpoint: http://rubitest.com/api
usersync_url: http://pixel.rubiconproject.com/sync.php?p=prebid
xapi:
username: rubiuser
password: <PASSWORD>
brightroll:
usersync_url: http://test-bh.ybp.yahoo.com/sync/appnexuspbs?gdpr={{.GDPR}}&euconsent={{.GDPRConsent}}&us_privacy={{.USPrivacy}}&url=%s
endpoint: http://test-bid.ybp.yahoo.com/bid/appnexuspbs
adkerneladn:
usersync_url: https://tag.adkernel.com/syncr?gdpr={{.GDPR}}&gdpr_consent={{.GDPRConsent}}&r=
blacklisted_apps: ["spamAppID","sketchy-app-id"]
account_required: true
auto_gen_source_tid: false
certificates_file: /etc/ssl/cert.pem
request_validation:
ipv4_private_networks: ["1.1.1.0/24"]
ipv6_private_networks: ["1111::/16", "2222::/16"]
`)
// adapterExtraInfoConfig focuses on per-adapter settings, including the
// free-form JSON extra_info field; used by TestUnmarshalAdapterExtraInfo.
var adapterExtraInfoConfig = []byte(`
adapters:
appnexus:
endpoint: http://ib.adnxs.com/some/endpoint
usersync_url: http://adnxs.com/sync.php?p=prebid
platform_id: appNexus
xapi:
username: appuser
password: <PASSWORD>
tracker: anxsTrack
disabled: true
extra_info: "{\"native\":\"http://www.native.org/endpoint\",\"video\":\"http://www.video.org/endpoint\"}"
audienceNetwork:
endpoint: http://facebook.com/pbs
usersync_url: http://facebook.com/ortb/prebid-s2s
platform_id: abcdefgh1234
ix:
endpoint: http://ixtest.com/api
rubicon:
endpoint: http://rubitest.com/api
usersync_url: http://pixel.rubiconproject.com/sync.php?p=prebid
xapi:
username: rubiuser
password: <PASSWORD>
brightroll:
usersync_url: http://test-bh.ybp.yahoo.com/sync/appnexuspbs?gdpr={{.GDPR}}&euconsent={{.GDPRConsent}}&url=%s
endpoint: http://test-bid.ybp.yahoo.com/bid/appnexuspbs
adkerneladn:
usersync_url: https://tag.adkernel.com/syncr?gdpr={{.GDPR}}&gdpr_consent={{.GDPRConsent}}&r=
blacklisted_apps: ["spamAppID","sketchy-app-id"]
`)
// invalidAdapterEndpointConfig has an appnexus endpoint with no scheme;
// New() is expected to reject it (see TestInvalidAdapterEndpointConfig).
var invalidAdapterEndpointConfig = []byte(`
adapters:
appnexus:
endpoint: ib.adnxs.com/some/endpoint
audienceNetwork:
endpoint: http://facebook.com/pbs
usersync_url: http://facebook.com/ortb/prebid-s2s
platform_id: abcdefgh1234
brightroll:
usersync_url: http://http://test-bh.ybp.yahoo.com/sync/appnexuspbs?gdpr={{.GDPR}}&euconsent={{.GDPRConsent}}&url=%s
adkerneladn:
usersync_url: https://tag.adkernel.com/syncr?gdpr={{.GDPR}}&gdpr_consent={{.GDPRConsent}}&r=
`)
// invalidUserSyncURLConfig has malformed usersync URLs; New() is expected to
// reject it (see TestInvalidAdapterUserSyncURLConfig).
var invalidUserSyncURLConfig = []byte(`
adapters:
appnexus:
endpoint: http://ib.adnxs.com/some/endpoint
audienceNetwork:
endpoint: http://facebook.com/pbs
usersync_url: http://facebook.com/ortb/prebid-s2s
platform_id: abcdefgh1234
brightroll:
usersync_url: http//test-bh.ybp.yahoo.com/sync/appnexuspbs?gdpr={{.GDPR}}&euconsent={{.GDPRConsent}}&url=%s
adkerneladn:
usersync_url: http:\\tag.adkernel.com/syncr?gdpr={{.GDPR}}&gdpr_consent={{.GDPRConsent}}&r=
`)
// oldStoredRequestsConfig uses the deprecated flat stored_requests layout,
// exercised by the migrateConfig tests.
var oldStoredRequestsConfig = []byte(`
stored_requests:
filesystem: true
directorypath: "/somepath"
`)
// cmpStrings asserts that two string values match, tagging any failure
// message with the config key being checked.
func cmpStrings(t *testing.T, key string, left string, right string) {
	t.Helper()
	assert.Equal(t, left, right, "%s: %s != %s", key, left, right)
}
// cmpInts asserts that two int values match, tagging any failure message
// with the config key being checked.
func cmpInts(t *testing.T, key string, left int, right int) {
	t.Helper()
	assert.Equal(t, left, right, "%s: %d != %d", key, left, right)
}
// cmpBools asserts that two bool values match, tagging any failure message
// with the config key being checked.
func cmpBools(t *testing.T, key string, left bool, right bool) {
	t.Helper()
	assert.Equal(t, left, right, "%s: %t != %t", key, left, right)
}
// TestFullConfig unmarshals the fullConfig YAML fixture and asserts every
// configured value — top-level settings, GDPR/CCPA/LMT, cache URLs, HTTP
// client tuning, metrics, datacache, and per-adapter settings — lands in
// the expected Configuration field.
func TestFullConfig(t *testing.T) {
v := viper.New()
SetupViper(v, "")
v.SetConfigType("yaml")
v.ReadConfig(bytes.NewBuffer(fullConfig))
cfg, err := New(v)
assert.NoError(t, err, "Setting up config should work but it doesn't")
cmpStrings(t, "cookie domain", cfg.HostCookie.Domain, "cookies.prebid.org")
cmpStrings(t, "cookie name", cfg.HostCookie.CookieName, "userid")
cmpStrings(t, "cookie family", cfg.HostCookie.Family, "prebid")
cmpStrings(t, "opt out", cfg.HostCookie.OptOutURL, "http://prebid.org/optout")
cmpStrings(t, "opt in", cfg.HostCookie.OptInURL, "http://prebid.org/optin")
cmpStrings(t, "external url", cfg.ExternalURL, "http://prebid-server.prebid.org/")
cmpStrings(t, "host", cfg.Host, "prebid-server.prebid.org")
cmpInts(t, "port", cfg.Port, 1234)
cmpInts(t, "admin_port", cfg.AdminPort, 5678)
cmpInts(t, "auction_timeouts_ms.default", int(cfg.AuctionTimeouts.Default), 50)
cmpInts(t, "auction_timeouts_ms.max", int(cfg.AuctionTimeouts.Max), 123)
cmpStrings(t, "cache.scheme", cfg.CacheURL.Scheme, "http")
cmpStrings(t, "cache.host", cfg.CacheURL.Host, "prebidcache.net")
cmpStrings(t, "cache.query", cfg.CacheURL.Query, "uuid=%PBS_CACHE_UUID%")
cmpStrings(t, "external_cache.scheme", cfg.ExtCacheURL.Scheme, "https")
cmpStrings(t, "external_cache.host", cfg.ExtCacheURL.Host, "www.externalprebidcache.net")
cmpStrings(t, "external_cache.path", cfg.ExtCacheURL.Path, "/endpoints/cache")
cmpInts(t, "http_client.max_connections_per_host", cfg.Client.MaxConnsPerHost, 10)
cmpInts(t, "http_client.max_idle_connections", cfg.Client.MaxIdleConns, 500)
cmpInts(t, "http_client.max_idle_connections_per_host", cfg.Client.MaxIdleConnsPerHost, 20)
cmpInts(t, "http_client.idle_connection_timeout_seconds", cfg.Client.IdleConnTimeout, 30)
cmpInts(t, "http_client_cache.max_connections_per_host", cfg.CacheClient.MaxConnsPerHost, 5)
cmpInts(t, "http_client_cache.max_idle_connections", cfg.CacheClient.MaxIdleConns, 1)
cmpInts(t, "http_client_cache.max_idle_connections_per_host", cfg.CacheClient.MaxIdleConnsPerHost, 2)
cmpInts(t, "http_client_cache.idle_connection_timeout_seconds", cfg.CacheClient.IdleConnTimeout, 3)
cmpInts(t, "gdpr.host_vendor_id", cfg.GDPR.HostVendorID, 15)
cmpBools(t, "gdpr.usersync_if_ambiguous", cfg.GDPR.UsersyncIfAmbiguous, true)
//Assert the NonStandardPublishers was correctly unmarshalled
cmpStrings(t, "gdpr.non_standard_publishers", cfg.GDPR.NonStandardPublishers[0], "siteID")
cmpStrings(t, "gdpr.non_standard_publishers", cfg.GDPR.NonStandardPublishers[1], "fake-site-id")
cmpStrings(t, "gdpr.non_standard_publishers", cfg.GDPR.NonStandardPublishers[2], "appID")
cmpStrings(t, "gdpr.non_standard_publishers", cfg.GDPR.NonStandardPublishers[3], "agltb3B1Yi1pbmNyDAsSA0FwcBiJkfIUDA")
//Assert the NonStandardPublisherMap hash table was built correctly
var found bool
for i := 0; i < len(cfg.GDPR.NonStandardPublishers); i++ {
_, found = cfg.GDPR.NonStandardPublisherMap[cfg.GDPR.NonStandardPublishers[i]]
cmpBools(t, "cfg.GDPR.NonStandardPublisherMap", found, true)
}
// A publisher not in the list must not be present in the map.
_, found = cfg.GDPR.NonStandardPublisherMap["appnexus"]
cmpBools(t, "cfg.GDPR.NonStandardPublisherMap", found, false)
cmpBools(t, "ccpa.enforce", cfg.CCPA.Enforce, true)
cmpBools(t, "lmt.enforce", cfg.LMT.Enforce, true)
//Assert the NonStandardPublishers was correctly unmarshalled
cmpStrings(t, "blacklisted_apps", cfg.BlacklistedApps[0], "spamAppID")
cmpStrings(t, "blacklisted_apps", cfg.BlacklistedApps[1], "sketchy-app-id")
//Assert the BlacklistedAppMap hash table was built correctly
for i := 0; i < len(cfg.BlacklistedApps); i++ {
cmpBools(t, "cfg.BlacklistedAppMap", cfg.BlacklistedAppMap[cfg.BlacklistedApps[i]], true)
}
cmpStrings(t, "currency_converter.fetch_url", cfg.CurrencyConverter.FetchURL, "https://currency.prebid.org")
cmpInts(t, "currency_converter.fetch_interval_seconds", cfg.CurrencyConverter.FetchIntervalSeconds, 1800)
cmpStrings(t, "recaptcha_secret", cfg.RecaptchaSecret, "asdfasdfasdfasdf")
cmpStrings(t, "metrics.influxdb.host", cfg.Metrics.Influxdb.Host, "upstream:8232")
cmpStrings(t, "metrics.influxdb.database", cfg.Metrics.Influxdb.Database, "metricsdb")
cmpStrings(t, "metrics.influxdb.username", cfg.Metrics.Influxdb.Username, "admin")
cmpStrings(t, "metrics.influxdb.password", cfg.Metrics.Influxdb.Password, "<PASSWORD>")
cmpInts(t, "metrics.influxdb.metric_send_interval", cfg.Metrics.Influxdb.MetricSendInterval, 30)
cmpStrings(t, "datacache.type", cfg.DataCache.Type, "postgres")
cmpStrings(t, "datacache.filename", cfg.DataCache.Filename, "/usr/db/db.db")
cmpInts(t, "datacache.cache_size", cfg.DataCache.CacheSize, 10000000)
cmpInts(t, "datacache.ttl_seconds", cfg.DataCache.TTLSeconds, 3600)
cmpStrings(t, "", cfg.CacheURL.GetBaseURL(), "http://prebidcache.net")
cmpStrings(t, "", cfg.GetCachedAssetURL("a0eebc99-9c0b-4ef8-bb00-6bb9bd380a11"), "http://prebidcache.net/cache?uuid=a0eebc99-9c0b-4ef8-bb00-6bb9bd380a11")
cmpStrings(t, "adapters.appnexus.endpoint", cfg.Adapters[string(openrtb_ext.BidderAppnexus)].Endpoint, "http://ib.adnxs.com/some/endpoint")
cmpStrings(t, "adapters.appnexus.extra_info", cfg.Adapters[string(openrtb_ext.BidderAppnexus)].ExtraAdapterInfo, "{\"native\":\"http://www.native.org/endpoint\",\"video\":\"http://www.video.org/endpoint\"}")
cmpStrings(t, "adapters.audiencenetwork.endpoint", cfg.Adapters[strings.ToLower(string(openrtb_ext.BidderAudienceNetwork))].Endpoint, "http://facebook.com/pbs")
cmpStrings(t, "adapters.audiencenetwork.usersync_url", cfg.Adapters[strings.ToLower(string(openrtb_ext.BidderAudienceNetwork))].UserSyncURL, "http://facebook.com/ortb/prebid-s2s")
cmpStrings(t, "adapters.audiencenetwork.platform_id", cfg.Adapters[strings.ToLower(string(openrtb_ext.BidderAudienceNetwork))].PlatformID, "abcdefgh1234")
cmpStrings(t, "adapters.audiencenetwork.app_secret", cfg.Adapters[strings.ToLower(string(openrtb_ext.BidderAudienceNetwork))].AppSecret, "987abc")
cmpStrings(t, "adapters.beachfront.endpoint", cfg.Adapters[string(openrtb_ext.BidderBeachfront)].Endpoint, "https://display.bfmio.com/prebid_display")
cmpStrings(t, "adapters.beachfront.extra_info", cfg.Adapters[string(openrtb_ext.BidderBeachfront)].ExtraAdapterInfo, "{\"video_endpoint\":\"https://reachms.bfmio.com/bid.json?exchange_id\"}")
cmpStrings(t, "adapters.ix.endpoint", cfg.Adapters[strings.ToLower(string(openrtb_ext.BidderIx))].Endpoint, "http://ixtest.com/api")
cmpStrings(t, "adapters.rubicon.endpoint", cfg.Adapters[string(openrtb_ext.BidderRubicon)].Endpoint, "http://rubitest.com/api")
cmpStrings(t, "adapters.rubicon.usersync_url", cfg.Adapters[string(openrtb_ext.BidderRubicon)].UserSyncURL, "http://pixel.rubiconproject.com/sync.php?p=prebid")
cmpStrings(t, "adapters.rubicon.xapi.username", cfg.Adapters[string(openrtb_ext.BidderRubicon)].XAPI.Username, "rubiuser")
cmpStrings(t, "adapters.rubicon.xapi.password", cfg.Adapters[string(openrtb_ext.BidderRubicon)].XAPI.Password, "<PASSWORD>")
cmpStrings(t, "adapters.brightroll.endpoint", cfg.Adapters[string(openrtb_ext.BidderBrightroll)].Endpoint, "http://test-bid.ybp.yahoo.com/bid/appnexuspbs")
cmpStrings(t, "adapters.brightroll.usersync_url", cfg.Adapters[string(openrtb_ext.BidderBrightroll)].UserSyncURL, "http://test-bh.ybp.yahoo.com/sync/appnexuspbs?gdpr={{.GDPR}}&euconsent={{.GDPRConsent}}&us_privacy={{.USPrivacy}}&url=%s")
cmpStrings(t, "adapters.adkerneladn.usersync_url", cfg.Adapters[strings.ToLower(string(openrtb_ext.BidderAdkernelAdn))].UserSyncURL, "https://tag.adkernel.com/syncr?gdpr={{.GDPR}}&gdpr_consent={{.GDPRConsent}}&r=")
cmpStrings(t, "adapters.rhythmone.endpoint", cfg.Adapters[string(openrtb_ext.BidderRhythmone)].Endpoint, "http://tag.1rx.io/rmp")
cmpStrings(t, "adapters.rhythmone.usersync_url", cfg.Adapters[string(openrtb_ext.BidderRhythmone)].UserSyncURL, "https://sync.1rx.io/usersync2/rmphb?gdpr={{.GDPR}}&gdpr_consent={{.GDPRConsent}}&us_privacy={{.USPrivacy}}&redir=http%3A%2F%2Fprebid-server.prebid.org%2F%2Fsetuid%3Fbidder%3Drhythmone%26gdpr%3D{{.GDPR}}%26gdpr_consent%3D{{.GDPRConsent}}%26uid%3D%5BRX_UUID%5D")
cmpBools(t, "account_required", cfg.AccountRequired, true)
cmpBools(t, "auto_gen_source_tid", cfg.AutoGenSourceTID, false)
cmpBools(t, "account_adapter_details", cfg.Metrics.Disabled.AccountAdapterDetails, true)
cmpBools(t, "adapter_connections_metrics", cfg.Metrics.Disabled.AdapterConnectionMetrics, true)
cmpStrings(t, "certificates_file", cfg.PemCertsFile, "/etc/ssl/cert.pem")
cmpStrings(t, "request_validation.ipv4_private_networks", cfg.RequestValidation.IPv4PrivateNetworks[0], "1.1.1.0/24")
cmpStrings(t, "request_validation.ipv6_private_networks", cfg.RequestValidation.IPv6PrivateNetworks[0], "1111::/16")
cmpStrings(t, "request_validation.ipv6_private_networks", cfg.RequestValidation.IPv6PrivateNetworks[1], "2222::/16")
}
// TestUnmarshalAdapterExtraInfo asserts that the free-form adapter extra_info
// YAML field survives config parsing as a valid JSON string and that its
// endpoint values can be unmarshalled into a typed struct.
func TestUnmarshalAdapterExtraInfo(t *testing.T) {
	v := viper.New()
	SetupViper(v, "")
	v.SetConfigType("yaml")
	v.ReadConfig(bytes.NewBuffer(adapterExtraInfoConfig))
	cfg, err := New(v)

	// Assert the config was parsed without error. (The previous failure
	// message was copy-pasted from an invalid-endpoint test and read
	// backwards; it has been corrected.)
	assert.NoError(t, err, "Setting up config should work but it doesn't")

	// NOTE: a previous revision called strings.Replace here and discarded the
	// return value. Go strings are immutable, so that call was a no-op
	// (flagged by go vet) and has been removed; JSONEqf below already proves
	// the stored string is well-formed JSON.

	// Assert the raw ExtraAdapterInfo is the expected JSON-formatted string.
	assert.JSONEqf(t, `{"native":"http://www.native.org/endpoint","video":"http://www.video.org/endpoint"}`, cfg.Adapters[string(openrtb_ext.BidderAppnexus)].ExtraAdapterInfo, "Unexpected value of the ExtraAdapterInfo String \n")

	// Data type where we'll unmarshal endpoint values and adapter custom extra information
	type AppNexusAdapterEndpoints struct {
		NativeEndpoint string `json:"native,omitempty"`
		VideoEndpoint  string `json:"video,omitempty"`
	}
	var AppNexusAdapterExtraInfo AppNexusAdapterEndpoints
	err = json.Unmarshal([]byte(cfg.Adapters[string(openrtb_ext.BidderAppnexus)].ExtraAdapterInfo), &AppNexusAdapterExtraInfo)

	// Assert correctly unmarshaled
	assert.NoErrorf(t, err, "Error. Could not unmarshal cfg.Adapters[string(openrtb_ext.BidderAppnexus)].ExtraAdapterInfo. Value: %s. Error: %v \n", cfg.Adapters[string(openrtb_ext.BidderAppnexus)].ExtraAdapterInfo, err)

	// Assert endpoint values
	assert.Equal(t, "http://www.native.org/endpoint", AppNexusAdapterExtraInfo.NativeEndpoint)
	assert.Equal(t, "http://www.video.org/endpoint", AppNexusAdapterExtraInfo.VideoEndpoint)
}
// TestValidConfig builds a minimal filesystem-backed stored-requests
// configuration and asserts it passes validation with no errors.
func TestValidConfig(t *testing.T) {
cfg := Configuration{
StoredRequests: StoredRequests{
Files: FileFetcherConfig{Enabled: true},
InMemoryCache: InMemoryCache{
Type: "none",
},
},
StoredVideo: StoredRequests{
Files: FileFetcherConfig{Enabled: true},
InMemoryCache: InMemoryCache{
Type: "none",
},
},
CategoryMapping: StoredRequests{
Files: FileFetcherConfig{Enabled: true},
},
Accounts: StoredRequests{
Files: FileFetcherConfig{Enabled: true},
InMemoryCache: InMemoryCache{Type: "none"},
},
}
resolvedStoredRequestsConfig(&cfg)
err := cfg.validate()
assert.Nil(t, err, "OpenRTB filesystem config should work. %v", err)
}
// TestMigrateConfig verifies that the deprecated flat stored_requests layout
// is migrated into the nested filesystem configuration by migrateConfig.
func TestMigrateConfig(t *testing.T) {
	vp := viper.New()
	SetupViper(vp, "")
	vp.SetConfigType("yaml")
	vp.ReadConfig(bytes.NewBuffer(oldStoredRequestsConfig))
	migrateConfig(vp)
	cfg, err := New(vp)
	assert.NoError(t, err, "Setting up config should work but it doesn't")
	cmpBools(t, "stored_requests.filesystem.enabled", true, cfg.StoredRequests.Files.Enabled)
	cmpStrings(t, "stored_requests.filesystem.path", "/somepath", cfg.StoredRequests.Files.Path)
}
// TestMigrateConfigFromEnv checks that the legacy PBS_STORED_REQUESTS_FILESYSTEM
// environment variable is migrated into stored_requests.filesystem.enabled.
func TestMigrateConfigFromEnv(t *testing.T) {
// Save and restore (or unset) any pre-existing value so this test does not
// leak state into other tests.
if oldval, ok := os.LookupEnv("PBS_STORED_REQUESTS_FILESYSTEM"); ok {
defer os.Setenv("PBS_STORED_REQUESTS_FILESYSTEM", oldval)
} else {
defer os.Unsetenv("PBS_STORED_REQUESTS_FILESYSTEM")
}
os.Setenv("PBS_STORED_REQUESTS_FILESYSTEM", "true")
v := viper.New()
SetupViper(v, "")
cfg, err := New(v)
assert.NoError(t, err, "Setting up config should work but it doesn't")
cmpBools(t, "stored_requests.filesystem.enabled", true, cfg.StoredRequests.Files.Enabled)
}
// TestInvalidAdapterEndpointConfig expects New() to fail when an adapter
// endpoint is missing its URL scheme (see invalidAdapterEndpointConfig).
func TestInvalidAdapterEndpointConfig(t *testing.T) {
	vp := viper.New()
	SetupViper(vp, "")
	vp.SetConfigType("yaml")
	vp.ReadConfig(bytes.NewBuffer(invalidAdapterEndpointConfig))
	_, err := New(vp)
	assert.Error(t, err, "invalid endpoint in config should return an error")
}
// TestInvalidAdapterUserSyncURLConfig expects New() to fail when an adapter
// usersync URL is malformed (see invalidUserSyncURLConfig).
func TestInvalidAdapterUserSyncURLConfig(t *testing.T) {
	vp := viper.New()
	SetupViper(vp, "")
	vp.SetConfigType("yaml")
	vp.ReadConfig(bytes.NewBuffer(invalidUserSyncURLConfig))
	_, err := New(vp)
	assert.Error(t, err, "invalid user_sync URL in config should return an error")
}
// TestNegativeRequestSize ensures validate() rejects a negative max_request_size.
func TestNegativeRequestSize(t *testing.T) {
	defaults := newDefaultConfig(t)
	defaults.MaxRequestSize = -1
	errs := defaults.validate()
	assertOneError(t, errs, "cfg.max_request_size must be >= 0. Got -1")
}
// TestNegativePrometheusTimeout ensures a non-positive Prometheus timeout is
// rejected whenever a Prometheus port has been configured.
func TestNegativePrometheusTimeout(t *testing.T) {
	defaults := newDefaultConfig(t)
	defaults.Metrics.Prometheus.Port = 8001
	defaults.Metrics.Prometheus.TimeoutMillisRaw = 0
	errs := defaults.validate()
	assertOneError(t, errs, "metrics.prometheus.timeout_ms must be positive if metrics.prometheus.port is defined. Got timeout=0 and port=8001")
}
// TestInvalidHostVendorID checks that gdpr.host_vendor_id values outside the
// range [0, 65535] produce exactly one validation error with the expected text.
func TestInvalidHostVendorID(t *testing.T) {
	cases := []struct {
		description  string
		vendorID     int
		wantErrorMsg string
	}{
		{
			description:  "Negative GDPR.HostVendorID",
			vendorID:     -1,
			wantErrorMsg: "gdpr.host_vendor_id must be in the range [0, 65535]. Got -1",
		},
		{
			description:  "Overflowed GDPR.HostVendorID",
			vendorID:     (0xffff) + 1,
			wantErrorMsg: "gdpr.host_vendor_id must be in the range [0, 65535]. Got 65536",
		},
	}
	for _, tc := range cases {
		cfg := newDefaultConfig(t)
		cfg.GDPR.HostVendorID = tc.vendorID
		errs := cfg.validate()
		assert.Equal(t, 1, len(errs), tc.description)
		assert.EqualError(t, errs[0], tc.wantErrorMsg, tc.description)
	}
}
// TestInvalidFetchGVL ensures the discontinued gdpr.tcf1.fetch_gvl flag is
// rejected by validation.
func TestInvalidFetchGVL(t *testing.T) {
	defaults := newDefaultConfig(t)
	defaults.GDPR.TCF1.FetchGVL = true
	errs := defaults.validate()
	assertOneError(t, errs, "gdpr.tcf1.fetch_gvl has been discontinued and must be removed from your config. TCF1 will always use the fallback GVL going forward")
}
// TestInvalidAMPException ensures the discontinued gdpr.amp_exception flag is
// rejected by validation with a message pointing at the replacement settings.
func TestInvalidAMPException(t *testing.T) {
	defaults := newDefaultConfig(t)
	defaults.GDPR.AMPException = true
	errs := defaults.validate()
	assertOneError(t, errs, "gdpr.amp_exception has been discontinued and must be removed from your config. If you need to disable GDPR for AMP, you may do so per-account (gdpr.integration_enabled.amp) or at the host level for the default account (account_defaults.gdpr.integration_enabled.amp)")
}
// TestNegativeCurrencyConverterFetchInterval ensures a negative currency
// fetch interval fails validation.
func TestNegativeCurrencyConverterFetchInterval(t *testing.T) {
	cfg := Configuration{
		CurrencyConverter: CurrencyConverter{FetchIntervalSeconds: -1},
	}
	errs := cfg.validate()
	assert.NotNil(t, errs, "cfg.currency_converter.fetch_interval_seconds should prevent negative values, but it doesn't")
}
// TestOverflowedCurrencyConverterFetchInterval ensures a currency fetch
// interval above 0xffff fails validation.
func TestOverflowedCurrencyConverterFetchInterval(t *testing.T) {
	cfg := Configuration{
		CurrencyConverter: CurrencyConverter{FetchIntervalSeconds: (0xffff) + 1},
	}
	errs := cfg.validate()
	assert.NotNil(t, errs, "cfg.currency_converter.fetch_interval_seconds prevent values over %d, but it doesn't", 0xffff)
}
// TestLimitTimeout spot-checks AuctionTimeouts.LimitAuctionTimeout. Argument
// order per doTimeoutTest: expected, requested, max, default (milliseconds).
func TestLimitTimeout(t *testing.T) {
doTimeoutTest(t, 10, 15, 10, 0)
doTimeoutTest(t, 10, 0, 10, 0)
doTimeoutTest(t, 5, 5, 10, 0)
doTimeoutTest(t, 15, 15, 0, 0)
doTimeoutTest(t, 15, 0, 20, 15)
}
// TestCookieSizeError drives isValidCookieSize through its boundaries: zero
// and values at or above MIN_COOKIE_SIZE_BYTES pass, anything else errors.
func TestCookieSizeError(t *testing.T) {
	cases := []struct {
		description string
		cookieSize  int
		expectError bool
	}{
		{"MIN_COOKIE_SIZE_BYTES + 1", MIN_COOKIE_SIZE_BYTES + 1, false},
		{"MIN_COOKIE_SIZE_BYTES", MIN_COOKIE_SIZE_BYTES, false},
		{"MIN_COOKIE_SIZE_BYTES - 1", MIN_COOKIE_SIZE_BYTES - 1, true},
		{"Zero", 0, false},
		{"Negative", -100, true},
	}
	for _, tc := range cases {
		err := isValidCookieSize(tc.cookieSize)
		if tc.expectError {
			assert.Error(t, err, tc.description)
		} else {
			assert.NoError(t, err, tc.description)
		}
	}
}
// TestNewCallsRequestValidation confirms that New() parses the
// request_validation.ipv4_private_networks list: valid CIDR entries are
// parsed into net.IPNet values, invalid entries cause New() to error.
func TestNewCallsRequestValidation(t *testing.T) {
testCases := []struct {
description string
privateIPNetworks string
expectedError string
expectedIPs []net.IPNet
}{
{
description: "Valid",
privateIPNetworks: `["1.1.1.0/24"]`,
expectedIPs: []net.IPNet{{IP: net.IP{1, 1, 1, 0}, Mask: net.CIDRMask(24, 32)}},
},
{
description: "Invalid",
privateIPNetworks: `["1"]`,
expectedError: "Invalid private IPv4 networks: '1'",
},
}
for _, test := range testCases {
v := viper.New()
SetupViper(v, "")
v.SetConfigType("yaml")
// Splice the per-case network list into a minimal YAML document.
v.ReadConfig(bytes.NewBuffer([]byte(
`request_validation:
ipv4_private_networks: ` + test.privateIPNetworks)))
result, resultErr := New(v)
if test.expectedError == "" {
assert.NoError(t, resultErr, test.description+":err")
assert.ElementsMatch(t, test.expectedIPs, result.RequestValidation.IPv4PrivateNetworksParsed, test.description+":parsed")
} else {
assert.Error(t, resultErr, test.description+":err")
}
}
}
// TestValidateDebug ensures a timeout-notification sampling rate above 1.0
// fails validation.
func TestValidateDebug(t *testing.T) {
	defaults := newDefaultConfig(t)
	defaults.Debug.TimeoutNotification.SamplingRate = 1.1
	errs := defaults.validate()
	assert.NotNil(t, errs, "cfg.debug.timeout_notification.sampling_rate should not be allowed to be greater than 1.0, but it was allowed")
}
// TestValidateAccountsConfigRestrictions checks that configuring postgres as
// an accounts source yields exactly one validation error directing users to
// the files-based source instead.
func TestValidateAccountsConfigRestrictions(t *testing.T) {
cfg := newDefaultConfig(t)
cfg.Accounts.Files.Enabled = true
cfg.Accounts.HTTP.Endpoint = "http://localhost"
cfg.Accounts.Postgres.ConnectionInfo.Database = "accounts"
errs := cfg.validate()
assert.Len(t, errs, 1)
assert.Contains(t, errs, errors.New("accounts.postgres: retrieving accounts via postgres not available, use accounts.files"))
}
// newDefaultConfig returns a Configuration built entirely from the compiled-in
// defaults (no YAML source), failing the test if construction errors.
func newDefaultConfig(t *testing.T) *Configuration {
	vp := viper.New()
	SetupViper(vp, "")
	vp.SetConfigType("yaml")
	cfg, err := New(vp)
	assert.NoError(t, err)
	return cfg
}
// assertOneError fails unless errs contains exactly one error whose message
// equals the expected message.
func assertOneError(t *testing.T, errs []error, message string) {
	// Fix: mark as a helper so failures are reported at the caller's line,
	// consistent with the cmpStrings/cmpInts/cmpBools helpers above.
	t.Helper()
	if !assert.Len(t, errs, 1) {
		return
	}
	assert.EqualError(t, errs[0], message)
}
// doTimeoutTest asserts that AuctionTimeouts.LimitAuctionTimeout clamps a
// requested timeout (in ms) to the expected value (in ms), given the
// configured default and maximum.
func doTimeoutTest(t *testing.T, expected int, requested int, max uint64, def uint64) {
	t.Helper()
	cfg := AuctionTimeouts{
		Default: def,
		Max:     max,
	}
	expectedDuration := time.Duration(expected) * time.Millisecond
	limited := cfg.LimitAuctionTimeout(time.Duration(requested) * time.Millisecond)
	// assert.Equal takes (t, expected, actual); the original call had the two
	// swapped, and formatted the expected value in nanoseconds (%d on a raw
	// Duration) while claiming "ms" in the message.
	assert.Equal(t, expectedDuration, limited, "Expected %dms timeout, got %dms", expectedDuration/time.Millisecond, limited/time.Millisecond)
}
|
#!/usr/bin/env bash
# SubServers Library Patcher
#
# Can be used to combine the following into one jar file:
# -> BungeeCord and SubServers.Bungee
# -> BungeeCord and SubServers.Sync
# -> GalaxiEngine and SubServers.Host
#
# Usage: "bash SubServers.Patcher.sh <Platform.jar> <SubServers.jar>"
#
# NOTE: the shebang above was previously buried under this header comment,
# where the kernel ignores it; it must be the very first line of the file.

# --- Argument validation -----------------------------------------------------
if [ -z "$1" ]
then
    echo "SubServers Library Patcher"
    echo ""
    echo "Can be used to combine the following into one jar file:"
    echo " -> BungeeCord and SubServers.Bungee"
    echo " -> BungeeCord and SubServers.Sync"
    echo " -> GalaxiEngine and SubServers.Host"
    echo ""
    echo "Usage: bash $0 <Platform.jar> <SubServers.jar>"
    exit 1
fi
if [ ! -f "$1" ]
then
    echo ERROR: Cannot find $1
    exit 2
fi
if [ -z "$2" ]
then
    echo ERROR: No SubServers File Supplied
    exit 1
fi
if [ ! -f "$2" ]
then
    echo ERROR: Cannot find $2
    exit 2
fi

# --- Workspace setup ---------------------------------------------------------
if [ -d "SubServers.Patcher" ]; then
    rm -Rf SubServers.Patcher
fi
echo ">> Extracting $1..."
mkdir SubServers.Patcher
mkdir SubServers.Patcher/Patched.jar
cd SubServers.Patcher/Patched.jar
jar xvf "../../$1"; __RETURN=$?;
if [ $__RETURN -eq 0 ]
then
    # Strip platform metadata that would conflict with the SubServers jar.
    if [ -f "LICENSE.txt" ]; then
        rm -Rf LICENSE.txt
    fi
    if [ -f "LICENSE" ]; then
        rm -Rf LICENSE
    fi
    if [ -f "META-INF/MANIFEST.MF" ]; then
        cat META-INF/MANIFEST.MF | sed 's/\r$//' | sed ':a;N;$!ba;s/\n //g' | sed -e "/^\s*$/d" -e "/^Main-Class:.*$/d" -e "/^Implementation-Title:.*$/d" -e "/^Specification-Title:.*$/d" -e "/^Build-Jdk:.*$/d" -e "/^Created-By:.*$/d" -e "/^Built-By:.*$/d" > ../MANIFEST.MF
    else
        printf "Manifest-Version: 1.0\n" > ../MANIFEST.MF
    fi
    if [ -f "MODIFICATIONS" ]; then
        mv -f MODIFICATIONS ../MODIFICATIONS
    fi
    echo ">> Extracting $2..."
    mkdir ../Original.jar
    cd ../Original.jar
    jar xvf "../../$2"; __RETURN=$?;
    if [ $__RETURN -eq 0 ]
    then
        echo ">> Writing Changes..."
        if [ -f "META-INF/MANIFEST.MF" ]
        then # (Read File) (Convert to LF) (Rejoin Split Lines) (Omit Empty, Duplicate, and Unnecessary Properties)
            cat META-INF/MANIFEST.MF | sed 's/\r$//' | sed ':a;N;$!ba;s/\n //g' | sed -e "/^\s*$/d" -e "/^Manifest-Version:.*$/d" -e "/^Class-Path:.*$/d" -e "/^Build-Jdk:.*$/d" -e "/^Created-By:.*$/d" -e "/^Built-By:.*$/d" >> ../MANIFEST.MF
        else
            if [ ! -d "META-INF" ]; then
                mkdir META-INF
            fi
        fi
        if [ -f "MODIFICATIONS" ]; then
            cat MODIFICATIONS >> ../MODIFICATIONS
        fi
        # Overlay the SubServers classes on top of the platform classes.
        yes | cp -rf . ../Patched.jar
        cd ../
        printf "Built-By: SubServers.Patcher\n" >> MANIFEST.MF
        cp -f MANIFEST.MF Patched.jar/META-INF
        if [ -f "Patched.jar/bungee.yml" ]; then
            rm -Rf Patched.jar/bungee.yml
        fi
        if [ ! -f "MODIFICATIONS" ]; then
            printf "# SubServers.Patcher generated difference list (may be empty if git is not installed)\n#\n" > MODIFICATIONS
        fi
        printf "@ `date`\n> git --no-pager diff --no-index --name-status SubServers.Patcher/Original.jar SubServers.Patcher/Patched.jar\n" >> MODIFICATIONS
        git --no-pager diff --no-index --name-status Original.jar Patched.jar | sed -e "s/\tOriginal.jar\//\t\//" -e "s/\tPatched.jar\//\t\//" >> MODIFICATIONS
        cp -f MODIFICATIONS Patched.jar
        cd Patched.jar
        echo ">> Recompiling..."
        if [ -f "../../SubServers.Patched.jar" ]; then
            rm -Rf ../../SubServers.Patched.jar
        fi
        jar cvfm ../../SubServers.Patched.jar META-INF/MANIFEST.MF .; __RETURN=$?;
        if [ $__RETURN -eq 0 ]
        then
            echo ">> Cleaning Up..."
            cd ../../
            rm -Rf SubServers.Patcher
            exit 0;
        else
            # typo fix: was "Recomiling"
            echo ">> Error Recompiling Files"
            exit 4
        fi
    else
        echo ">> Error Decompiling $2"
        exit 3
    fi
else
    echo ">> Error Decompiling $1"
    exit 3
fi
|
/*
* Creating a KVM-based VM basically needs 7 steps:
*
* 1. Open the KVM device, `kvmfd=open("/dev/kvm", O_RDWR|O_CLOEXEC)`
* 2. Do create a VM, `vmfd=ioctl(kvmfd, KVM_CREATE_VM, 0)`
* 3. Set up memory for VM guest, `ioctl(vmfd, KVM_SET_USER_MEMORY_REGION, ®ion)`
* 4. Create a virtual CPU for the VM, `vcpufd=ioctl(vmfd, KVM_CREATE_VCPU, 0)`
* 5. Set up memory for the vCPU
* - `vcpu_size=ioctl(kvmfd, KVM_GET_VCPU_MMAP_SIZE, NULL)`
* - `run=(struct kvm_run*)mmap(NULL, mmap_size, PROT_READ|PROT_WRITE, MAP_SHARED, vcpufd, 0)`
* 6. Put assembled code on user memory region, set up vCPU's registers such as rip
* 7. Run and handle exit reason. `while(1) { ioctl(vcpufd, KVM_RUN, 0); ... }`
*
* https://david942j.blogspot.com/2018/10/note-learning-kvm-implement-your-own.html
*/
#include <stdio.h>
#include <linux/kvm.h>
#include <sys/ioctl.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/mman.h>
/* ANSI color escape sequences used by DBG_MSG. */
#define NONE "\033[m"
#define LIGHT_BLUE "\033[1;34m"
/* Debug print macro: prefixes each message with the function and line.
 * The while(1){...break;} wrapper makes the multi-statement body behave as a
 * single statement (same purpose as the common do{...}while(0) idiom). */
#define DBG_MSG(fmt, ...) \
    while(1) { \
        printf("[D] %s:%d, ", __func__, __LINE__); \
        printf(LIGHT_BLUE fmt NONE, ##__VA_ARGS__); \
        break; \
    }
/* Bundles every handle and mapping needed to drive one KVM virtual machine. */
struct vm {
    int dev_fd; // open "/dev/kvm"
    int vm_fd; // KVM_CREATE_VM
    int vcpu_fd; // KVM_CREATE_VCPU
    __u64 ram_start; // KVM_SET_USER_MEMORY_REGION
    __u64 ram_size; // KVM_SET_USER_MEMORY_REGION
    struct kvm_run *run; // mmap KVM_GET_VCPU_MMAP_SIZE
};
/* Load the guest binary at `path` into the VM's RAM, 4 KiB at a time.
 * The buffer at vm->ram_start is mapped to guest-physical address 0, so the
 * guest begins executing this image when rip is set to 0. */
void load_binary(struct vm *vm, char *path){
    int fd = open(path, O_RDONLY);
    int ret = 0;
    char *p = (char *)vm->ram_start;
    if(fd < 0){
        /* fix: the original silently looped on read(-1) and ran whatever
         * garbage was in guest RAM when the file could not be opened */
        perror("open guest binary");
        exit(EXIT_FAILURE);
    }
    while(1){
        ret = read(fd, p, 4096);
        if(ret <= 0)
            break;
        // DBG_MSG("read size: %d\n", ret);
        p += ret;
    }
    close(fd); /* fix: descriptor was previously leaked */
}
/* Drive the vCPU: repeatedly issue KVM_RUN and report each VM exit.
 * Returns when the guest halts, shuts down, or hits an unhandled exit reason;
 * KVM_EXIT_UNKNOWN/IO/MMIO are logged and execution resumes. */
void run_vm(struct vm *vm){
    DBG_MSG("%s\n", "KVM start run");
    while(1){
        /* KVM_RUN blocks until the guest exits to userspace */
        ioctl(vm->vcpu_fd, KVM_RUN, 0);
        switch(vm->run->exit_reason){
        case KVM_EXIT_UNKNOWN:
            DBG_MSG("KVM_EXIT_UNKNOWN\n");
            break;
        case KVM_EXIT_IO:
            /* Guest ran a port-I/O instruction; the datum lives inside the
             * shared kvm_run mapping at io.data_offset. */
            DBG_MSG("KVM_EXIT_IO, out port: 0x%x, data: %d\n",
                vm->run->io.port,
                *(int *)((void *)(vm->run) + vm->run->io.data_offset)
            );
            break;
        case KVM_EXIT_MMIO:
            DBG_MSG("KVM_EXIT_MMIO\n");
            break;
        case KVM_EXIT_HLT:
            DBG_MSG("KVM_EXIT_HLT\n");
            goto exit_kvm;
        case KVM_EXIT_SHUTDOWN:
            DBG_MSG("KVM_EXIT_SHUTDOWN\n");
            goto exit_kvm;
        default:
            DBG_MSG("KVM EXIT: %d\n", vm->run->exit_reason);
            goto exit_kvm;
        }
    }
exit_kvm:
    return;
}
/* Build the VM (steps 2-6 of the file header), load the guest binary
 * (argv[1], default "a.bin"), then run it until it halts. */
int main(int argc, char *argv[]){
    int ret;
    struct vm *vm = malloc(sizeof(struct vm));
    /* KVM_CREATE_VM */
    vm->dev_fd = open("/dev/kvm", O_RDWR);
    DBG_MSG("vm->dev_fd: %d\n", vm->dev_fd);
    vm->vm_fd = ioctl(vm->dev_fd, KVM_CREATE_VM, 0);
    DBG_MSG("vm->vm_fd: %d\n", vm->vm_fd);
    vm->ram_size = 0x200000;
    vm->ram_start= (__u64)mmap(NULL, vm->ram_size, PROT_READ | PROT_WRITE,
        MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
    DBG_MSG("vm->ram_start: %p\n", (void *)vm->ram_start);
    /* KVM_SET_USER_MEMORY_REGION: expose our anonymous mapping to the guest
     * as physical memory starting at guest-physical address 0 */
    struct kvm_userspace_memory_region mem;
    mem.slot = 0;
    mem.guest_phys_addr = 0;
    mem.memory_size = vm->ram_size;
    mem.userspace_addr = vm->ram_start;
    ret = ioctl(vm->vm_fd, KVM_SET_USER_MEMORY_REGION, &mem);
    DBG_MSG("KVM_SET_USER_MEMORY_REGION ret: %d\n", ret);
    /* KVM_CREATE_VCPU */
    int run_mmap_size;
    vm->vcpu_fd = ioctl(vm->vm_fd, KVM_CREATE_VCPU, 0);
    DBG_MSG("vm->vcpu_fd: %d\n", vm->vcpu_fd);
    /* KVM_GET_VCPU_MMAP_SIZE: map the shared kvm_run control block */
    run_mmap_size = ioctl(vm->dev_fd, KVM_GET_VCPU_MMAP_SIZE, 0);
    vm->run = mmap(NULL, run_mmap_size, PROT_READ | PROT_WRITE,
        MAP_SHARED, vm->vcpu_fd, 0);
    DBG_MSG("vm->run: %p\n", vm->run);
    /* KVM_SET_SREGS (comment typo "KMV" fixed): code segment based at 0 */
    struct kvm_sregs sregs;
    ioctl(vm->vcpu_fd, KVM_GET_SREGS, &sregs);
    sregs.cs.base = sregs.cs.selector = 0;
    ioctl(vm->vcpu_fd, KVM_SET_SREGS, &sregs);
    /* KVM_SET_REGS */
    struct kvm_regs regs;
    /* BUGFIX: the original issued KVM_SET_REGS here, pushing an entirely
     * uninitialized kvm_regs into the vCPU; fetch the current registers
     * first so only the fields below are modified. */
    ioctl(vm->vcpu_fd, KVM_GET_REGS, &(regs));
    regs.rflags = 0x2ULL; /* bit 1 of RFLAGS is reserved and must be set */
    regs.rip = 0;
    regs.rsp = 0x1000;
    regs.rbp= 0;
    ret = ioctl(vm->vcpu_fd, KVM_SET_REGS, &(regs));
    DBG_MSG("KVM_SET_REGS ret: %d\n", ret);
    /* Load binary */
    if(argc < 2)
        load_binary(vm, "a.bin");
    else
        load_binary(vm, argv[1]);
    run_vm(vm);
    return 0;
}
|
<filename>src/app/admin/root/components/admin-nav-item/admin-nav-item.component.ts
import { Component, Input, Output, EventEmitter, OnInit } from '@angular/core';
@Component({
  selector: 'admin-nav-item',
  templateUrl: './admin-nav-item.component.html',
  styleUrls: ['./admin-nav-item.component.scss']
})
export class AdminNavItemComponent implements OnInit {
  /** Destination forwarded to the router link in the template. */
  @Input() routerLink: string | any[];
  /** When true, the item never receives the "active" styling class. */
  @Input() disableActive: boolean;
  /** Emitted by the template when the item is activated. */
  @Output() navigate = new EventEmitter();

  /** CSS class applied to the active route ('' when disabled). */
  activeClass: string;

  ngOnInit() {
    if (this.disableActive) {
      this.activeClass = '';
    } else {
      this.activeClass = 'active';
    }
  }
}
|
<filename>isomorfeus-i18n/lib/lucid_translation/mixin.rb
module LucidTranslation
  # Gettext-style translation helpers (_, n_, np_, ns_, p_, s_ and their
  # d*/D* multi-domain variants).
  #
  # On Opal (client side) each helper first consults the Redux :i18n_state
  # store; on a miss it asynchronously requests the translation over the
  # transport, dispatches the response into the store, and meanwhile returns
  # the given block's value or a sensible fallback derived from the key.
  # On the server the calls are delegated directly to FastGettext.
  module Mixin
    CONTEXT_SEPARATOR = "\004"          # gettext context separator (EOT char)
    NAMESPACE_SEPARATOR = '|'
    NIL_BLOCK = -> { nil }              # used to detect "no translation yet"
    TRANSLATION_METHODS = [:_, :n_, :np_, :ns_, :p_, :s_]

    if RUBY_ENGINE != 'opal'
      # Server-side proxy exposing FastGettext's translation API as class methods.
      class InternalTranslationProxy
        extend FastGettext::Translation
        extend FastGettext::TranslationMultidomain
      end
    end

    if RUBY_ENGINE == 'opal'
      # Basic translation lookup; falls back to the first key (msgid).
      def _(*keys, &block)
        domain = Isomorfeus.i18n_domain
        locale = Isomorfeus.locale
        Isomorfeus.raise_error(message: "I18n _(): no key given!") if keys.empty?
        result = Redux.fetch_by_path(:i18n_state, domain, locale, '_', keys)
        return result if result
        if Isomorfeus::I18n::Init.initialized?
          Isomorfeus::Transport.promise_send_path('Isomorfeus::I18n::Handler::LocaleHandler', domain, locale, '_', keys).then do |agent|
            if agent.processed
              agent.result
            else
              agent.processed = true
              if agent.response.key?(:error)
                Isomorfeus.raise_error(message: agent.response[:error])
              end
              Isomorfeus.store.collect_and_defer_dispatch(type: 'I18N_LOAD', data: { domain => agent.response[domain] })
            end
          end
        end
        # Fallback while the async fetch is still in flight.
        block_given? ? block.call : keys.first
      end

      # Plural translation; falls back to the last key (plural msgid).
      def n_(*keys, count, &block)
        domain = Isomorfeus.i18n_domain
        locale = Isomorfeus.locale
        Isomorfeus.raise_error(message: "I18n n_(): no key given!") if keys.empty?
        result = Redux.fetch_by_path(:i18n_state, domain, locale, 'n_', keys + [count])
        return result if result
        if Isomorfeus::I18n::Init.initialized?
          Isomorfeus::Transport.promise_send_path('Isomorfeus::I18n::Handler::LocaleHandler', domain, locale, 'n_', keys + [count]).then do |agent|
            if agent.processed
              agent.result
            else
              agent.processed = true
              if agent.response.key?(:error)
                Isomorfeus.raise_error(message: agent.response[:error])
              end
              Isomorfeus.store.collect_and_defer_dispatch(type: 'I18N_LOAD', data: { domain => agent.response[domain] })
            end
          end
        end
        block_given? ? block.call : keys.last
      end

      # Plural translation with context: tries "context<SEP>plural_one" first,
      # then retries without the context prefix.
      def np_(context, plural_one, *args, separator: nil, &block)
        nargs = ["#{context}#{separator || CONTEXT_SEPARATOR}#{plural_one}"] + args
        translation = n_(*nargs, &NIL_BLOCK)
        return translation if translation
        block_given? ? block.call : n_(plural_one, *args)
      end

      # Namespaced plural translation; the fallback strips the namespace.
      def ns_(*args, &block)
        domain = Isomorfeus.i18n_domain
        locale = Isomorfeus.locale
        Isomorfeus.raise_error(message: "I18n ns_(): no args given!") if args.empty?
        result = Redux.fetch_by_path(:i18n_state, domain, locale, 'ns_', args)
        return result if result
        if Isomorfeus::I18n::Init.initialized?
          Isomorfeus::Transport.promise_send_path('Isomorfeus::I18n::Handler::LocaleHandler', domain, locale, 'ns_', args).then do |agent|
            if agent.processed
              agent.result
            else
              agent.processed = true
              if agent.response.key?(:error)
                Isomorfeus.raise_error(message: agent.response[:error])
              end
              Isomorfeus.store.collect_and_defer_dispatch(type: 'I18N_LOAD', data: { domain => agent.response[domain] })
            end
          end
        end
        block_given? ? block.call : n_(*args).split(NAMESPACE_SEPARATOR).last
      end

      # Context (particular) translation; falls back to the bare key.
      def p_(namespace, key, separator = nil, &block)
        domain = Isomorfeus.i18n_domain
        locale = Isomorfeus.locale
        args = separator ? [namespace, key, separator] : [namespace, key]
        result = Redux.fetch_by_path(:i18n_state, domain, locale, 'p_', args)
        return result if result
        if Isomorfeus::I18n::Init.initialized?
          Isomorfeus::Transport.promise_send_path('Isomorfeus::I18n::Handler::LocaleHandler', domain, locale, 'p_', args).then do |agent|
            if agent.processed
              agent.result
            else
              agent.processed = true
              if agent.response.key?(:error)
                Isomorfeus.raise_error(message: agent.response[:error])
              end
              Isomorfeus.store.collect_and_defer_dispatch(type: 'I18N_LOAD', data: { domain => agent.response[domain] })
            end
          end
        end
        block_given? ? block.call : key
      end

      # Namespaced translation; the fallback keeps only the last segment.
      def s_(key, separator = nil, &block)
        domain = Isomorfeus.i18n_domain
        locale = Isomorfeus.locale
        args = separator ? [key, separator] : [key]
        result = Redux.fetch_by_path(:i18n_state, domain, locale, 's_', args)
        return result if result
        if Isomorfeus::I18n::Init.initialized?
          Isomorfeus::Transport.promise_send_path('Isomorfeus::I18n::Handler::LocaleHandler', domain, locale, 's_', args).then do |agent|
            if agent.processed
              agent.result
            else
              agent.processed = true
              if agent.response.key?(:error)
                Isomorfeus.raise_error(message: agent.response[:error])
              end
              Isomorfeus.store.collect_and_defer_dispatch(type: 'I18N_LOAD', data: { domain => agent.response[domain] })
            end
          end
        end
        block_given? ? block.call : key.split(separator || NAMESPACE_SEPARATOR).last
      end

      # No-op markers: flag strings for extraction without translating them.
      def N_(translate)
        translate
      end

      def Nn_(*keys)
        keys
      end

      TRANSLATION_METHODS.each do |method|
        # d<method>: translate within an explicitly given domain, restoring
        # the previous domain afterwards.
        define_method("d#{method}") do |domain, *args, &block|
          old_domain = Isomorfeus.i18n_domain
          begin
            Isomorfeus.i18n_domain = domain
            send(method, *args, &block)
          ensure
            Isomorfeus.i18n_domain = old_domain
          end
        end
        # D<method>: translate searching all domains (server delegates to
        # FastGettext::TranslationMultidomain); same store/transport flow.
        define_method("D#{method}") do |*args, &block|
          domain = Isomorfeus.i18n_domain
          locale = Isomorfeus.locale
          Isomorfeus.raise_error(message: "I18n D#{method}(): no args given!") if args.empty?
          result = Redux.fetch_by_path(:i18n_state, domain, locale, "D#{method}", args)
          return result if result
          if Isomorfeus::I18n::Init.initialized?
            Isomorfeus::Transport.promise_send_path('Isomorfeus::I18n::Handler::LocaleHandler', domain, locale, "D#{method}", args).then do |agent|
              if agent.processed
                agent.result
              else
                agent.processed = true
                if agent.response.key?(:error)
                  Isomorfeus.raise_error(message: agent.response[:error])
                end
                Isomorfeus.store.collect_and_defer_dispatch(type: 'I18N_LOAD', data: { domain => agent.response[domain] })
              end
            end
          end
          block_given? ? block.call : send(method, *args, &block)
        end
      end
    else
      # Server side: lazily initialize i18n per thread, then delegate to
      # FastGettext through the proxy.
      # NOTE(review): the base-method wrappers name their first parameter
      # `domain`, but it is forwarded unchanged as the first positional
      # argument (the msgid for FastGettext::Translation methods) — confirm
      # callers pass the key here, not a domain.
      TRANSLATION_METHODS.each do |method|
        define_method(method) do |domain, *args, &block|
          Isomorfeus::I18n::Init.init unless Thread.current[:isomorfeus_i18n_initialized] == true
          InternalTranslationProxy.send(method, domain, *args, &block)
        end
        define_method("d#{method}") do |domain, *args, &block|
          Isomorfeus::I18n::Init.init unless Thread.current[:isomorfeus_i18n_initialized] == true
          InternalTranslationProxy.send("d#{method}", domain, *args, &block)
        end
        define_method("D#{method}") do |*args, &block|
          Isomorfeus::I18n::Init.init unless Thread.current[:isomorfeus_i18n_initialized] == true
          InternalTranslationProxy.send("D#{method}", *args, &block)
        end
      end
    end
  end
end
|
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.analysis.collector;
import java.io.NotSerializableException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.moeaframework.algorithm.PeriodicAction;
import org.moeaframework.core.Algorithm;
/**
 * Decorates an algorithm to periodically collect information about its runtime
 * behavior. The {@code NFE} field is automatically recorded by this class.
 */
public class InstrumentedAlgorithm extends PeriodicAction {

	/**
	 * The accumulator to which all recorded information is stored.
	 */
	private final Accumulator accumulator;

	/**
	 * The collectors responsible for recording the necessary information.
	 */
	private final List<Collector> collectors;

	/**
	 * Decorates the specified algorithm to periodically collect information
	 * about its runtime behavior.
	 *
	 * @param algorithm the algorithm to decorate
	 * @param frequency the frequency, in evaluations, that data is collected
	 */
	public InstrumentedAlgorithm(Algorithm algorithm, int frequency) {
		super(algorithm, frequency, FrequencyType.EVALUATIONS);
		accumulator = new Accumulator();
		collectors = new ArrayList<Collector>();
	}

	/**
	 * Adds a collector to this instrumented algorithm. The collector should
	 * have already been attached to the algorithm.
	 *
	 * @param collector the collector
	 */
	public void addCollector(Collector collector) {
		collectors.add(collector);
	}

	/**
	 * Returns the accumulator to which all recorded information is stored.
	 *
	 * @return the accumulator to which all recorded information is stored
	 */
	public Accumulator getAccumulator() {
		return accumulator;
	}

	@Override
	public void doAction() {
		// NFE is always recorded first, before any attached collector runs.
		accumulator.add("NFE", algorithm.getNumberOfEvaluations());

		for (Collector collector : collectors) {
			collector.collect(accumulator);
		}
	}

	/**
	 * Proxy for serializing and deserializing the state of an
	 * {@code InstrumentedAlgorithm} instance. This proxy supports saving
	 * the underlying algorithm state and the {@code accumulator}.
	 */
	private static class InstrumentedAlgorithmState implements
	Serializable {

		private static final long serialVersionUID = -313598408729472790L;

		/**
		 * The state of the underlying algorithm.
		 */
		private final Serializable algorithmState;

		/**
		 * The {@code accumulator} from the {@code InstrumentedAlgorithm}
		 * instance.
		 */
		private final Accumulator accumulator;

		/**
		 * Constructs a proxy for storing the state of an
		 * {@code InstrumentedAlgorithm} instance.
		 *
		 * @param algorithmState the state of the underlying algorithm
		 * @param accumulator the {@code accumulator} from the
		 *        {@code InstrumentedAlgorithm} instance
		 */
		public InstrumentedAlgorithmState(Serializable algorithmState,
				Accumulator accumulator) {
			super();
			this.algorithmState = algorithmState;
			this.accumulator = accumulator;
		}

		/**
		 * Returns the underlying algorithm state.
		 *
		 * @return the underlying algorithm state
		 */
		public Serializable getAlgorithmState() {
			return algorithmState;
		}

		/**
		 * Returns the {@code accumulator} from the
		 * {@code InstrumentedAlgorithm} instance.
		 *
		 * @return the {@code accumulator} from the
		 *         {@code InstrumentedAlgorithm} instance
		 */
		public Accumulator getAccumulator() {
			return accumulator;
		}

	}

	@Override
	public Serializable getState() throws NotSerializableException {
		return new InstrumentedAlgorithmState(super.getState(),
				accumulator);
	}

	@Override
	public void setState(Object objState) throws NotSerializableException {
		InstrumentedAlgorithmState state =
				(InstrumentedAlgorithmState)objState;

		super.setState(state.getAlgorithmState());

		//copy the stored accumulator contents to this accumulator
		// (accumulator is final, so entries are replayed rather than replaced)
		Accumulator storedAccumulator = state.getAccumulator();

		for (String key : storedAccumulator.keySet()) {
			for (int i=0; i<storedAccumulator.size(key); i++) {
				accumulator.add(key, storedAccumulator.get(key, i));
			}
		}
	}

}
|
import junit.framework.*;
// JUnit 3 style test case for the EvenOdd class.
public class testEvenOdd extends TestCase
{
    // Allows the suite to be run directly from the command line.
    public static void main(String[] args)
    {
        junit.textui.TestRunner.run(testEvenOdd.class);
    }

    public void testgetdata()
    {
        EvenOdd evenOdd = new EvenOdd();
        evenOdd.getdata();
        assertEquals(2, evenOdd.y);
    }

    public void testeven()
    {
        EvenOdd evenOdd = new EvenOdd();
        assertTrue(evenOdd.prime(2));
    }

    public void testodd()
    {
        EvenOdd evenOdd = new EvenOdd();
        assertFalse(evenOdd.prime(1));
    }
}
|
// Define the RemoteIdentityType enum
/// <summary>Kind of principal a remote identity represents.</summary>
public enum RemoteIdentityType
{
    User,
    Service
}

// Implement the RemoteServiceInfo class
/// <summary>Immutable descriptor of a remote service identity.</summary>
public class RemoteServiceInfo
{
    /// <summary>Whether the remote identity is a user or a service.</summary>
    public RemoteIdentityType IdentityType { get; }

    /// <summary>Subscription identifier.
    /// NOTE(review): typed as <see cref="Uri"/> — confirm callers really pass a
    /// URI here rather than a GUID-style subscription id.</summary>
    public Uri SubscriptionId { get; }

    // Constructor to initialize the properties
    public RemoteServiceInfo(RemoteIdentityType identityType, Uri subscriptionId)
    {
        IdentityType = identityType;
        SubscriptionId = subscriptionId;
    }
}
|
class Translator:
    """Stub translator used until a real translation backend is wired in."""

    def detect(self, text):
        """Return the detected language code for *text* (stub: always English)."""
        # Placeholder implementation of language detection
        # Replace this with actual language detection logic
        return "en"  # Assuming English as the detected language for demonstration purposes

    def translate(self, text, dest):
        """Return *text* "translated" into *dest* (stub: tags the original text)."""
        # Placeholder implementation of text translation
        # Replace this with actual translation logic
        return f"Translated to {dest}: {text}"  # Assuming a simple translation for demonstration purposes
def translate_text(text, target_language):
    """Translate ``text`` into ``target_language``, degrading gracefully.

    Returns the translated string, or "Translation failed" if construction of
    the translator or the translation itself raises.
    """
    # Initialize to None so a failing Translator() no longer leaves the name
    # undefined (the original relied on a NameError being swallowed below).
    translator = None
    try:
        translator = Translator()
        detected_language = translator.detect(text)
    except Exception:
        detected_language = "unknown"  # detection is best-effort only
    try:
        # If translator is None this raises AttributeError, which is caught
        # and reported as a failed translation — same outcome as before,
        # but deliberate rather than via NameError.
        translated_text = translator.translate(text, dest=target_language)
    except Exception:
        translated_text = "Translation failed"
    return translated_text
|
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed for reproducibility.
# -d 202: test number, -g 206: generator number, -S: seed
# (numeric ids per `dieharder -l` / `dieharder -g -1` on the target machine — confirm locally).
dieharder -d 202 -g 206 -S 2284807359
|
#!/usr/bin/env bash
# Deploy the generated site: mirror _site/ to the muumu.us S3 bucket using the
# "muumuus" AWS profile. --delete removes remote files no longer present locally.
set -e
aws --profile muumuus s3 sync --delete _site/ s3://muumu.us
|
#!/bin/bash
# Xcode build phase: assemble the per-OS MoltenVK package (static lib, dynamic
# lib + ICD json, framework, headers) under Package/<config>/MoltenVK/<os>.
# Expects PROJECT_DIR, CONFIGURATION, MVK_OS and MVK_BUILT_PROD_PATH from the
# Xcode environment.
set -e

export MVK_PROD_NAME="MoltenVK"
export MVK_PROD_PROJ_PATH="${PROJECT_DIR}/${MVK_PROD_NAME}"
export MVK_PKG_PROD_PATH="${PROJECT_DIR}/Package/${CONFIGURATION}/${MVK_PROD_NAME}"
export MVK_PKG_PROD_PATH_OS="${MVK_PKG_PROD_PATH}/${MVK_OS}"

# Start from a clean per-OS package directory.
rm -rf "${MVK_PKG_PROD_PATH_OS}"

mkdir -p "${MVK_PKG_PROD_PATH_OS}/static"
cp -a "${MVK_BUILT_PROD_PATH}/lib${MVK_PROD_NAME}.a" "${MVK_PKG_PROD_PATH_OS}/static"

mkdir -p "${MVK_PKG_PROD_PATH_OS}/dynamic"
cp -a "${MVK_BUILT_PROD_PATH}/dynamic/lib${MVK_PROD_NAME}.dylib" "${MVK_PKG_PROD_PATH_OS}/dynamic"
# Debug builds also ship the dSYM debug symbols.
if test "$CONFIGURATION" = Debug; then
    cp -a "${MVK_BUILT_PROD_PATH}/dynamic/lib${MVK_PROD_NAME}.dylib.dSYM" "${MVK_PKG_PROD_PATH_OS}/dynamic"
fi
cp -a "${MVK_PROD_PROJ_PATH}/icd/${MVK_PROD_NAME}_icd.json" "${MVK_PKG_PROD_PATH_OS}/dynamic"

mkdir -p "${MVK_PKG_PROD_PATH_OS}/framework"
cp -a "${MVK_BUILT_PROD_PATH}/framework/${MVK_PROD_NAME}.framework" "${MVK_PKG_PROD_PATH_OS}/framework"

# Remove the code signature
rm -rf "${MVK_PKG_PROD_PATH_OS}/framework/${MVK_PROD_NAME}.framework/_CodeSignature"

# Remove and replace header include folder
rm -rf "${MVK_PKG_PROD_PATH}/include"
cp -pRL "${MVK_PROD_PROJ_PATH}/include" "${MVK_PKG_PROD_PATH}"
|
def find_max(arr):
    """Return a list of every occurrence of the maximum value in ``arr``.

    Unlike the original implementation, an empty input returns ``[]``
    instead of raising ``ValueError`` from ``max()``.
    """
    if not arr:
        return []
    max_val = max(arr)
    # Comprehension replaces the manual append loop; same one scan over arr.
    return [element for element in arr if element == max_val]
|
#!/bin/bash
# Re-run the DAVIS 2017 semi-supervised evaluation for the OSVOS results,
# removing any stale per-run validation CSVs first so the tool regenerates them.
cd davis2017-evaluation/
rm results/semi-supervised/osvos/*results-val.csv
python evaluation_method.py --davis_path ../data_storage/DAVIS --results_path results/semi-supervised/osvos --task semi-supervised
|
// Compute and print the arithmetic mean of a fixed list of numbers.
const nums = [1, 2, 3, 4, 5];

// Sum with reduce instead of a mutating forEach loop.
const sum = nums.reduce((total, num) => total + num, 0);
const avg = sum / nums.length;

console.log(avg);
|
/*
Package middleware provides shared middleware that is used in HTTP services.
*/
package middleware
import (
"fmt"
"net/http"
)
// CORS handling middleware: wraps an http.Handler and stamps CORS response
// headers before delegating.
type CorsHandler struct {
	corsHostAndPort string       // allowed origin as "host:port"; "http://" is prepended when the header is written
	delegate        http.Handler // wrapped handler that produces the actual response
}

// Construct a new CORS handler.
//
// corsHostAndPort is a string representation of the allowed origin.
func NewCorsHandler(corsHostAndPort string, handler http.Handler) *CorsHandler {
	return &CorsHandler{
		corsHostAndPort: corsHostAndPort,
		delegate:        handler,
	}
}
// ServeHTTP writes the CORS headers required for eventbus to function
// properly, then delegates to the wrapped handler. Two headers are added:
//
//	Access-Control-Allow-Origin: http://host:port
//	Access-Control-Allow-Headers: Content-Type
func (handler *CorsHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	headers := w.Header()
	allowedOrigin := fmt.Sprintf("http://%s", handler.corsHostAndPort)
	headers.Set("Access-Control-Allow-Origin", allowedOrigin)
	headers.Set("Access-Control-Allow-Headers", "Content-Type")
	handler.delegate.ServeHTTP(w, r)
}
|
<gh_stars>10-100
window.$ = window.jQuery = require("jquery");
const io = require("socket.io-client");
const client = require("./src/game-client");
// Connect to the game server and switch the UI into play mode.
// flag=true selects the spectator ("god") renderer, otherwise the player one.
function run(flag) {
	client.renderer = flag ? require("./src/mode/god") : require("./src/mode/player");
	client.connectGame("//" + location.host, $("#name").val(), (success, msg) => {
		if (success) {
			$("#main-ui").fadeIn(1000);
			$("#begin, #wasted").fadeOut(1000);
		}
		else {
			// Surface the server-provided failure reason on the start screen.
			$("#error").text(msg);
		}
	}, flag);
}

// On DOM ready: verify WebSocket support, then ping the server once over a
// throwaway socket before enabling the start/spectate controls.
$(() => {
	const err = $("#error");
	if (!window.WebSocket) {
		err.text("Your browser does not support WebSockets!");
		return;
	}
	err.text("Loading... Please wait"); //TODO: show loading screen
	(() => {
		const socket = io(`//${location.host}`, {
			forceNew: true,
			upgrade: false,
			transports: ["websocket"]
		});
		socket.on("connect", () => {
			socket.emit("pings");
		});
		socket.on("pongs", () => {
			// Server answered: connectivity confirmed, release the probe socket.
			socket.disconnect();
			err.text("All done, have fun!");
			$("#name").on("keypress", evt => {
				if (evt.key === "Enter") run();
			});
			$(".start").removeAttr("disabled").on("click", evt => {
				run();
			});
			$(".spectate").removeAttr("disabled").click(evt => {
				run(true);
			});
		});
		socket.on("connect_error", () => {
			err.text("Cannot connect with server. This probably is due to misconfigured proxy server. (Try using a different browser)");
		});
	})();
});
//Event listeners
// Keyboard steering: WASD / arrow keys map to the four headings.
$(document).on("keydown", e => {
	let newHeading = -1;
	// Lower-case single-character keys so controls keep working with
	// Caps Lock or Shift held (previously only lowercase letters matched).
	const key = e.key.length === 1 ? e.key.toLowerCase() : e.key;
	switch (key) {
		case "w": case "ArrowUp":
			newHeading = 0; break; //UP (W)
		case "d": case "ArrowRight":
			newHeading = 1; break; //RIGHT (D)
		case "s": case "ArrowDown":
			newHeading = 2; break; //DOWN (S)
		case "a": case "ArrowLeft":
			newHeading = 3; break; //LEFT (A)
		default: return; //Exit handler for other keys
	}
	client.changeHeading(newHeading);
	//e.preventDefault();
});
// Prevent touch scrolling so swipes are interpreted as steering gestures only.
$(document).on("touchmove", e => {
	e.preventDefault();
});
// Swipe steering: compare touch start/end points and pick the dominant axis.
$(document).on("touchstart", e1 => {
	const x1 = e1.targetTouches[0].pageX;
	const y1 = e1.targetTouches[0].pageY;
	$(document).one("touchend", e2 => {
		const x2 = e2.changedTouches[0].pageX;
		const y2 = e2.changedTouches[0].pageY;
		const deltaX = x2 - x1;
		const deltaY = y2 - y1;
		let newHeading = -1;
		// Up / right / down / left, chosen by the larger displacement axis.
		if (deltaY < 0 && Math.abs(deltaY) > Math.abs(deltaX)) newHeading = 0;
		else if (deltaX > 0 && Math.abs(deltaY) < deltaX) newHeading = 1;
		else if (deltaY > 0 && Math.abs(deltaX) < deltaY) newHeading = 2;
		else if (deltaX < 0 && Math.abs(deltaX) > Math.abs(deltaY)) newHeading = 3;
		client.changeHeading(newHeading);
	});
});
// Return to the start screen, dropping the current game connection.
$(".menu").on("click", () => {
	client.disconnect();
	$("#main-ui, #wasted").fadeOut(1000);
	$("#begin").fadeIn(1000);
});
// Show/hide the settings panel.
$(".toggle").on("click", () => {
	$("#settings").slideToggle();
});
|
<gh_stars>0
import React, { PropTypes} from 'react';
function Row({className, children}) {
const cssName = className ? 'rows ' + className : 'rows';
return (
<div
className={cssName}>
{children}
</div>
);
}
Row.propTypes = {
className: PropTypes.string,
children: PropTypes.node,
};
Row.defaultProps = {
className: '',
children: null,
};
export default Row;
|
<reponame>koushikan/engine
// PlayCanvas script: snapshots every dynamic rigid body at startup and, on
// the R key, teleports them all back to that initial state.
var ResetPhysics = pc.createScript('resetPhysics');

// initialize code called once per entity
ResetPhysics.prototype.postInitialize = function () {
    // Record the start state of all dynamic rigid bodies
    this.bodies = [];
    this.app.root.findComponents('rigidbody').forEach(function (bodyComponent) {
        if (bodyComponent.type === 'dynamic') {
            // Clone position/rotation: the getters return live references.
            this.bodies.push({
                entity: bodyComponent.entity,
                initialPos: bodyComponent.entity.getPosition().clone(),
                initialRot: bodyComponent.entity.getRotation().clone()
            });
        }
    }, this);
};

// update code called every frame
ResetPhysics.prototype.update = function (dt) {
    if (this.app.keyboard.wasPressed(pc.KEY_R)) {
        this.bodies.forEach(function (body) {
            // Reset all dynamic bodies to their initial state
            body.entity.rigidbody.teleport(body.initialPos, body.initialRot);
            // Also zero velocities so bodies don't keep drifting post-reset.
            body.entity.rigidbody.linearVelocity = pc.Vec3.ZERO;
            body.entity.rigidbody.angularVelocity = pc.Vec3.ZERO;
        });
    }
};
|
<gh_stars>0
import styled from 'styled-components';
import breakpoints, { pageGutter } from 'components/core/breakpoints';
// Mimics a navbar element on the top left corner: an 80px-tall strip that
// vertically centers its children, floats above page content (z-index 3),
// and widens its horizontal padding at the tablet breakpoint.
const StyledBackLink = styled.div`
  height: 80px;
  display: flex;
  align-items: center;
  position: absolute;
  left: 0;
  top: 0;
  z-index: 3;
  padding: 0 ${pageGutter.mobile}px;
  @media only screen and ${breakpoints.tablet} {
    padding: 0 ${pageGutter.tablet}px;
  }
`;

export default StyledBackLink;
|
""" Module for defining user related models. """
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, Float
from sqlalchemy.orm import relationship
from utils import get_logger
from models import Base
LOGGER = get_logger(__name__)
class User(Base):
    """Table for tracking the users of the game and their login info."""

    __tablename__ = "User"

    id = Column(Integer, primary_key=True)
    # Unique login identifier; indexed for lookup at authentication time.
    email = Column(String, unique=True, index=True, nullable=False)
    name = Column(String, nullable=False)
    # NOTE(review): `salt` alongside `password` suggests the password is
    # stored as a salted hash — confirm against the auth code.
    salt = Column(String, nullable=False)
    password = Column(String, nullable=False)
    # `datetime.today` is passed as a callable so it is evaluated per insert.
    last_seen = Column(DateTime, default=datetime.today, nullable=False)
    # Starting balance of 100 for new users.
    money = Column(Float, default=100, nullable=False)
    # One-to-one: deleting a user also deletes their ship.
    ship = relationship("Ship", cascade="all, delete-orphan", uselist=False)

    def ping(self):
        """ Update last seen time. """
        self.last_seen = datetime.today()
        LOGGER.debug(f"{self!r} ping at {self.last_seen.strftime('%H:%M:%S')}")

    def __str__(self) -> str:
        return f"{self.name}"

    def __repr__(self) -> str:
        return f"User(id={self.id}, name={self.name}, email={self.email})"
|
#!/bin/bash
# ========== Experiment Seq. Idx. 174 / 43.3 / N. 35/2 - _S=43.3 D1_N=35 a=1 b=1 c=1 d=-1 e=-1 f=-1 D3_N=2 g=-1 h=1 i=-1 ==========
# Generated experiment runner: trains an SVM on precomputed deep features.
# Exit codes 160-165 are status signals for the external scheduler.
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 174 / 43.3 / N. 35/2 - _S=43.3 D1_N=35 a=1 b=1 c=1 d=-1 e=-1 f=-1 D3_N=2 g=-1 h=1 i=-1 ==========\n\n'
# Generator-level sanity check (constant-folded at generation time).
if [[ "Yes" == "No" ]]; then
    echo 'FATAL: This treatment did not include an SVM layer.'>&2
    echo ' Something very wrong happened!'>&2
    exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.35.layer.2.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TRAIN_FEATURES_PREFIX="$FEATURES_DIR/deep.35.layer.2.train"
TRAIN_FEATURES_PATH="$TRAIN_FEATURES_PREFIX.feats.pkl"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$TRAIN_FEATURES_PREFIX.finish.txt"
START_PATH="$SVM_PREFIX.start.txt"
FINISH_PATH="$SVM_PREFIX.finish.txt"
LOCK_PATH="$SVM_PREFIX.running.lock"
LAST_OUTPUT="$SVM_PATH"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$SVM_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
    echo 'FATAL: there are uncommitted changes in your git sources file' >&2
    echo ' for reproducibility, experiments only run on committed changes' >&2
    echo >&2
    echo ' Git status returned:'>&2
    echo "$GIT_STATUS" >&2
    exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
    if [[ -e "$FINISH_PATH" ]]; then
        echo 'INFO: this experiment has already finished' >&2
        exit 163
    fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
    # Split on ':' without leaking a modified IFS into the rest of the script.
    # The previous form `IFS=':' tokens=( $LIST_OF_INPUTS )` is an
    # assignment-only command, so the IFS change persisted globally;
    # prefixing `read` scopes it to this one command.
    IFS=':' read -r -a tokens_of_input <<< "$LIST_OF_INPUTS"
    input_missing=No
    for input_to_check in "${tokens_of_input[@]}"; do
        if [[ ! -e "$input_to_check" ]]; then
            echo "ERROR: input $input_to_check missing for this experiment" >&2
            input_missing=Yes
        fi
    done
    if [[ "$input_missing" != No ]]; then
        exit 164
    fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# Exit trap: releases the lock (if held), maps internal status to the
# scheduler's exit codes, and clears the finish marker on failure.
function finish_trap {
    if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
        rmdir "$LOCK_PATH" &> /dev/null
    fi
    if [[ "$FINISH_STATUS" == "165" ]]; then
        echo 'WARNING: experiment discontinued because other process holds its lock' >&2
    else
        if [[ "$FINISH_STATUS" == "160" ]]; then
            echo 'INFO: experiment finished successfully' >&2
        else
            [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
            echo 'ERROR: an error occurred while executing the experiment' >&2
        fi
    fi
    exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a mutex).
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
    LOCK_SUCCESS=Yes
else
    echo 'WARNING: this experiment is already being executed elsewhere' >&2
    FINISH_STATUS="165"
    exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
    if [[ -e "$START_PATH" ]]; then
        echo 'WARNING: this experiment is being restarted' >&2
        STARTED_BEFORE=Yes
    fi
    #...marks start
    date -u >> "$START_PATH"
    echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
# If the experiment was started before, do any cleanup necessary
# (no-op for this treatment).
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
    echo -n
fi
#...trains SVM layer
echo Training SVM layer from "$TRAIN_FEATURES_PATH"
python \
    "$SOURCES_GIT_DIR/train_svm_layer.py" \
    --jobs 1 \
    --svm_method LINEAR_PRIMAL \
    --output_model "$SVM_PATH" \
    --input_training "$TRAIN_FEATURES_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
# Success only counts if the expected model artifact actually exists.
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
    if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
        if [[ "$FINISH_PATH" != "-" ]]; then
            date -u >> "$FINISH_PATH"
            echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
        fi
        FINISH_STATUS="160"
    fi
fi
|
<filename>yoga-business/yoga-weixinapp/src/main/java/com/yoga/weixinapp/service/WxmpUserService.java
package com.yoga.weixinapp.service;
import com.yoga.core.base.BaseService;
import com.yoga.core.exception.BusinessException;
import com.yoga.core.mybatis.MapperQuery;
import com.yoga.core.utils.StringUtil;
import com.yoga.weixinapp.ao.SettingConfig;
import com.yoga.weixinapp.mapper.WxmpUserMapper;
import com.yoga.weixinapp.model.WxmpBindUser;
import com.yoga.weixinapp.wxapi.WxApiFactory;
import com.yoga.weixinapp.wxapi.WxSessionResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
@Service
public class WxmpUserService extends BaseService {

    @Autowired
    private WxmpUserMapper userMapper;
    @Lazy
    @Autowired
    private WxmpService wxmpService;
    @Autowired
    private WxApiFactory wxApiFactory;

    /** Looks up the binding record for a tenant + openid pair; null when unbound. */
    public WxmpBindUser getUser(long tenantId, String openid) {
        return MapperQuery.create(WxmpBindUser.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("openid", openid)
                .queryFirst(userMapper);
    }

    /** Looks up the binding record for a tenant + userId pair; null when unbound. */
    public WxmpBindUser getUser(long tenantId, long userId) {
        return MapperQuery.create(WxmpBindUser.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("userId", userId)
                .queryFirst(userMapper);
    }

    /**
     * Binds a WeChat openid to an account. Idempotent when the same pair is already
     * bound; rejects rebinding either side to a different counterpart.
     */
    public void bindUser(long tenantId, String openid, long userId) {
        WxmpBindUser byOpenid = MapperQuery.create(WxmpBindUser.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("openid", openid)
                .queryFirst(userMapper);
        if (byOpenid != null) {
            if (byOpenid.getUserId() == userId) return; // already bound exactly as requested
            throw new BusinessException("该微信号已经被绑定!");
        }
        WxmpBindUser byUserId = MapperQuery.create(WxmpBindUser.class)
                .andEqualTo("userId", userId)
                .queryFirst(userMapper);
        if (byUserId != null) throw new BusinessException("该账号已经被绑定!");
        userMapper.insert(new WxmpBindUser(openid, tenantId, userId));
    }

    /** Removes the (tenant, openid, userId) binding if it exists; otherwise a no-op. */
    public void unbindUser(long tenantId, String openid, long userId) {
        WxmpBindUser binding = MapperQuery.create(WxmpBindUser.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("openid", openid)
                .andEqualTo("userId", userId)
                .queryFirst(userMapper);
        if (binding != null) {
            userMapper.delete(binding);
        }
    }

    /**
     * Resolves a WeChat login code to an openid using the tenant's configured
     * mini-program credentials. All failures surface as BusinessException.
     */
    public String getOpenidByCode(long tenantId, String code) {
        try {
            SettingConfig setting = wxmpService.getSetting(tenantId);
            if (setting == null || StringUtil.isBlank(setting.getAppId())) throw new BusinessException("尚未配置小程序开发ID!");
            WxSessionResult session = wxApiFactory.getWxApi()
                    .getSession(setting.getAppId(), setting.getAppSecret(), code, "authorization_code")
                    .execute().body();
            if (session == null) throw new BusinessException("请求微信回话失败!");
            if (session.getErrcode() != 0) throw new BusinessException(session.getErrMsg());
            return session.getOpenid();
        } catch (Exception ex) {
            throw new BusinessException(ex.getLocalizedMessage());
        }
    }

    /** Resolves a WeChat login code to an openid using explicitly supplied credentials. */
    public String getOpenidByCode(String appId, String appSecret, String code) {
        try {
            WxSessionResult session = wxApiFactory.getWxApi()
                    .getSession(appId, appSecret, code, "authorization_code")
                    .execute().body();
            if (session == null) throw new BusinessException("请求微信回话失败!");
            if (session.getErrcode() != 0) throw new BusinessException(session.getErrMsg());
            return session.getOpenid();
        } catch (Exception ex) {
            throw new BusinessException(ex.getLocalizedMessage());
        }
    }
}
|
<gh_stars>0
// Doxygen-generated navigation index for armnn::profiling::TestProfilingConnectionArmnnError.
// Each entry is [ member name, page#anchor, child entries ]. Do not edit by hand.
var classarmnn_1_1profiling_1_1_test_profiling_connection_armnn_error =
[
    [ "TestProfilingConnectionArmnnError", "classarmnn_1_1profiling_1_1_test_profiling_connection_armnn_error.xhtml#ac78cdd7a5d1ea251e57140b6935aec01", null ],
    [ "ReadCalledCount", "classarmnn_1_1profiling_1_1_test_profiling_connection_armnn_error.xhtml#afb1d388dbb6bed7623ef1d3fb52fb52b", null ],
    [ "ReadPacket", "classarmnn_1_1profiling_1_1_test_profiling_connection_armnn_error.xhtml#a1adbda8257d3a74a0566e0d8407ff458", null ]
];
|
<gh_stars>1-10
/**
* @module React
*/
import React from 'react'
import { Link } from 'react-router-dom'
import Icon from 'components/icon'
import AccountDropdown from 'components/account/AccountDropdown'
import TextCtaButton from 'components/buttons/TextCtaButton'
import FadeIn from 'components/animation/FadeIn'
/**
 * Header shown to authenticated users.
 *
 * Renders two variants of the nav links (text links for >= small screens,
 * icon-only links below that) plus an account dropdown that fades in when
 * `showAccount` is true.
 *
 * @param {Object}   props
 * @param {boolean}  props.showAccount    whether the account dropdown is open
 * @param {Function} props.onLogout       forwarded to AccountDropdown
 * @param {Function} props.onAccountClick toggles/closes the account dropdown
 */
const HeaderPrivate = (props) => {
  const {
    showAccount,
    onLogout,
    onAccountClick
  } = props
  return (
    <div className='header__content'>
      {/* Text-link variant: visible on small screens and up */}
      <div className='visible-sm-up'>
        <div className='header__links'>
          <div className='header__search'>
            <Link to='/browse-horses'>
              <Icon
                modifier='magnifying-glass' />
            </Link>
          </div>
          <div className='header-private__link'>
            <Link to='/dashboard'>
              <TextCtaButton
                modifier='gray'
                className='uppercase semi-bold header-private__link'
                text={'my horses'} />
            </Link>
          </div>
          <div className='header-private__link'>
            <TextCtaButton
              onClick={onAccountClick}
              modifier='gray'
              className='uppercase semi-bold header-private__link'
              text={'account'} />
          </div>
        </div>
      </div>
      {/* Icon-only variant: visible below the small breakpoint */}
      <div className='hidden-sm-up'>
        <div className='header__links'>
          <div className='header__search'>
            <Link to='/browse-horses'>
              <Icon
                modifier='magnifying-glass' />
            </Link>
          </div>
          <div className='header-private__link'>
            <Link to='/dashboard'>
              <Icon
                modifier='horse' />
            </Link>
          </div>
          <div className='header-private__link'>
            <Icon
              onClick={onAccountClick}
              modifier='account' />
          </div>
        </div>
      </div>
      {/* Account dropdown, mounted only while open so FadeIn animates it */}
      <div>
        <FadeIn>
          {
            showAccount && (
              <AccountDropdown
                className='section-shadow'
                closeAccount={onAccountClick}
                onLogout={onLogout} />
            )
          }
        </FadeIn>
      </div>
    </div>
  )
}
export default HeaderPrivate
|
<reponame>seawindnick/javaFamily
package leetCode;//Trie(发音类似 "try")或者说 前缀树 是一种树形数据结构,
// 用于高效地存储和检索字符串数据集中的键。这一数据结构有相当多的应用情景,例如自动补完和拼
//写检查。
//
// 请你实现 Trie 类:
//
//
// Trie() 初始化前缀树对象。
// void insert(String word) 向前缀树中插入字符串 word 。
// boolean search(String word) 如果字符串 word 在前缀树中,返回 true(即,在检索之前已经插入);否则,返回 false
// 。
// boolean startsWith(String prefix) 如果之前已经插入的字符串 word 的前缀之一为 prefix ,返回 true ;否
//则,返回 false 。
//
//
//
//
// 示例:
//
//
//输入
//["Trie", "insert", "search", "search", "startsWith", "insert", "search"]
//[[], ["apple"], ["apple"], ["app"], ["app"], ["app"], ["app"]]
//输出
//[null, null, true, false, true, null, true]
//
//解释
//Trie trie = new Trie();
//trie.insert("apple");
//trie.search("apple"); // 返回 True
//trie.search("app"); // 返回 False
//trie.startsWith("app"); // 返回 True
//trie.insert("app");
//trie.search("app"); // 返回 True
//
//
//
//
// 提示:
//
//
// 1 <= word.length, prefix.length <= 2000
// word 和 prefix 仅由小写英文字母组成
// insert、search 和 startsWith 调用次数 总计 不超过 3 * 104 次
//
// Related Topics 设计 字典树 哈希表 字符串
// 👍 840 👎 0
//leetcode submit region begin(Prohibit modification and deletion)
import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;
/**
 * LeetCode 208: Implement Trie (prefix tree) supporting insert, search and
 * startsWith over lowercase ASCII words.
 */
public class L10208_StartsWith {

    /** Root of the trie; children are indexed by letter offset ('a'..'z'). */
    private TrieNode root;

    public L10208_StartsWith() {
        root = new TrieNode();
    }

    /**
     * Inserts {@code word} into the trie. Null input is ignored.
     * Fixes: the original re-ran {@code word.toCharArray()} on every loop
     * iteration just to read its length, allocating a fresh array each time.
     */
    public void insert(String word) {
        if (word == null || word.isEmpty()) {
            return;
        }
        TrieNode node = root;
        for (int i = 0; i < word.length(); i++) {
            int childIndex = word.charAt(i) - 'a';
            TrieNode child = node.trieNodes[childIndex];
            if (child == null) {
                child = new TrieNode();
                node.trieNodes[childIndex] = child;
            }
            node = child;
        }
        node.endFlag = true; // the final node marks a complete word
    }

    /** Returns true iff {@code word} was previously inserted as a whole word. */
    public boolean search(String word) {
        TrieNode node = findNode(word);
        return node != null && node.endFlag;
    }

    /** Returns true iff some inserted word starts with {@code prefix}. */
    public boolean startsWith(String prefix) {
        return findNode(prefix) != null;
    }

    /**
     * Walks the trie along {@code s}; returns the node reached, or null when
     * the path does not exist (or {@code s} is null). Shared by search and
     * startsWith, which previously duplicated this traversal.
     */
    private TrieNode findNode(String s) {
        if (s == null) {
            return null;
        }
        TrieNode node = root;
        for (int i = 0; i < s.length(); i++) {
            node = node.trieNodes[s.charAt(i) - 'a'];
            if (node == null) {
                return null;
            }
        }
        return node;
    }

    public static class TrieNode {
        // 26 slots, one per lowercase letter (per the problem constraints).
        private TrieNode[] trieNodes = new TrieNode[26];
        // Primitive boolean instead of boxed Boolean: no allocation, no NPE risk.
        private boolean endFlag = false;
    }
}
/**
* Your Trie object will be instantiated and called as such:
* Trie obj = new Trie();
* obj.insert(word);
* boolean param_2 = obj.search(word);
* boolean param_3 = obj.startsWith(prefix);
*/
|
<reponame>CamilYed/readable-tests-by-example
package tech.allegro.blog.vinyl.shop.delivery.domain;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import tech.allegro.blog.vinyl.shop.client.domain.ClientReputation;
import tech.allegro.blog.vinyl.shop.common.money.Money;
import tech.allegro.blog.vinyl.shop.common.result.Result;
import tech.allegro.blog.vinyl.shop.sales.domain.SpecialPriceProvider;
public interface DeliveryCostPolicy {

    Delivery calculate(Money orderValue, ClientReputation clientReputation);

    static DeliveryCostPolicy defaultPolicy(DeliveryCostProvider deliveryCostProvider, SpecialPriceProvider specialPriceProvider) {
        return new DefaultDeliveryCostPolicy(deliveryCostProvider, specialPriceProvider);
    }

    @RequiredArgsConstructor(access = AccessLevel.PACKAGE)
    class DefaultDeliveryCostPolicy implements DeliveryCostPolicy {

        private final DeliveryCostProvider deliveryCostProvider;
        private final SpecialPriceProvider specialPriceProvider;

        @Override
        public Delivery calculate(Money orderValue, ClientReputation clientReputation) {
            // VIP clients always ship for free.
            if (clientReputation.isVip()) {
                return Delivery.freeDelivery();
            }
            // Orders reaching the minimum-order-value threshold also ship for free.
            final var minimumOrderValue = specialPriceProvider.getMinimumOrderValueForFreeDelivery();
            if (orderValue.greaterOrEqualTo(minimumOrderValue)) {
                return Delivery.freeDelivery();
            }
            // Otherwise charge the provider's current cost, falling back to the
            // default-priced standard delivery if the provider call fails.
            return Result
                .of(() -> Delivery.standardDelivery(deliveryCostProvider.currentCost()))
                .getSuccessOrDefault(Delivery.standardDeliveryWithDefaultPrice());
        }
    }
}
|
# NOTE(review): this is the interior of a larger dispatcher function (uses `local`);
# the enclosing function header lives outside this chunk.
local _namespaces=( core )
# Extensions contribute extra namespaces unless we were asked for core files only.
if ! $_core_files_only && [[ "${#_orb_extensions[@]}" != 0 ]]; then
    _collect_namespace_extensions
fi
local _current_namespace;
# _get_current_namespace consumes leading args; shift only on success.
_current_namespace=$(_get_current_namespace "$@") && shift
local _function_name=$1; shift
local _function_descriptor=$(_get_function_descriptor)
local _namespace_files=()          # namespace files collector
local _namespace_files_dir_tracker # index with directory
local _namespace_help_requested=false
declare -A _namespace_options=(
    ['--help']='show help'
)
# The first token may be a flag instead of a function name; only --help is valid here.
if _is_flag "$_function_name"; then
    if [[ $_function_name == '--help' ]]; then
        _namespace_help_requested=true
    else
        _raise_error "invalid option\n"
    fi
fi
if ! $_global_help_requested && ! $_namespace_help_requested; then
    if [[ -z $_function_name ]]; then
        _raise_error +t "is a namespace, no command or function provided\n\n  Add --help for list of functions"
    fi
    # nameref to the function's declared-args array; absence means unknown function.
    if ! declare -n _args_declaration=${_function_name}_args 2> /dev/null; then
        _raise_error "not a valid option or function name"
    fi
    declare -A _args             # args collector
    local _args_nrs=()           # 1, 2, 3...
    local _args_wildcard=()      # *
    local _args_dash_wildcard=() # -- *
    # declare block arrays
    local _blocks=($(_declared_blocks))
    local _block; for _block in "${_blocks[@]}"; do
        declare -a "$(_block_to_arr_name "$_block")"
    done
fi
|
import { isEqual, debounce } from './utils';
// Props common to every element; `key` drives child reconciliation.
interface Props {
  key?: string,
}

// A virtual-DOM element: a host tag name or a function component, plus
// its reconciliation key, props and child tree.
export interface MyElement<P = {}> {
  type: string | FC,
  key: string | null,
  props: Props & P,
  children: MyChildren[],
}

// A single renderable node; boolean/null/undefined are ignored when rendering.
type MyNode = string | number | boolean | null | undefined | MyElement;
// Children may be nodes or arbitrarily nested arrays of nodes (flattened later).
export type MyChildren = MyNode | Array<MyNode | MyChildren>;
// Function-component signature.
export type FC<P = {}> = (props: P & Props & {children?: any}, context?: any) => MyChildren;
// Type guard: narrows T|null down to T by rejecting null.
const notNull = <T,>(value: T | null): value is T => value !== null;
// Host-element props this renderer knows how to apply (currently only onClick).
interface HTMLProps {
  onClick?: (e: MouseEvent) => any
}

// Applies supported DOM props onto an existing element and returns it.
// NOTE(review): props other than onClick are silently ignored here.
const updateHtmlElement = (element: HTMLElement, { onClick }: Props & HTMLProps): HTMLElement => {
  if (onClick) {
    element.onclick = onClick;
  }
  return element;
}

// Creates a DOM element for `tag` and applies the given props to it.
const createHtmlElement = (tag: string, props: Props & HTMLProps) => {
  const element = document.createElement(tag);
  return updateHtmlElement(element, props)
}
/**
 * Replaces `parent`'s current children with `children` and returns `parent`.
 * New nodes are appended first, then every pre-existing node in front of them
 * is removed. An empty `children` array leaves the parent untouched.
 */
export const replaceChildren = (parent: Node, children: Node[]) => {
  if (children.length === 0) {
    return parent;
  }
  for (const node of children) {
    parent.appendChild(node);
  }
  // Drop everything that precedes the freshly appended run of nodes.
  while (parent.firstChild !== null && parent.firstChild !== children[0]) {
    parent.removeChild(parent.firstChild);
  }
  return parent;
}
// Narrows an arbitrary value to a DOM Text node.
const isText = (value: any): value is Text =>
  value instanceof Node && value.nodeType === Node.TEXT_NODE;
/**
 * Reconciliation unit for one virtual element: owns either a host DOM element
 * (string-typed elements) or a keyed set of child Containers/Text nodes
 * (function components). Also stores hook state for the component it renders.
 */
class Container {
  // The container currently rendering; hooks read their state through it.
  static instance: Container | null = null;
  static getInstance() {
    return this.instance as Container;
  }
  // Host DOM element when `prev.type` is a tag name; null for function components.
  element: HTMLElement | null = null;
  // Hook state slots, consumed in call order on every render.
  hookStates: any[] = [];
  hookStateIndex = 0;
  // Rendered children keyed by element key (or generated `__learn_<i>` keys).
  children: {[key: string]: Container|Text} = {};
  constructor(
    // NOTE(review): `prev` is never reassigned after construction, so shouldRender
    // always diffs against the element this container was created with — confirm
    // that is intended and not a missed `this.prev = current` in render().
    private prev: MyElement
  ) {}
  // Deferred re-render used by setState so bursts of updates coalesce.
  debounceRender = debounce((...args) => {
    return this.render(...args);
  })
  /**
   * Returns true when props changed (render needed). As a side effect, a type
   * change tears the old subtree down so render() can rebuild it.
   */
  shouldRender(current: MyElement) {
    const { prev } = this;
    if (prev.type !== current.type) {
      this.destroy();
    }
    return !isEqual(prev.props, current.props);
  }
  /** Removes this container's DOM (or recursively its children's) and resets it. */
  destroy() {
    const { element } = this;
    if (element !== null) {
      // Host container: removing the element removes the whole subtree at once.
      element.remove();
      this.element = null;
    } else {
      // Component container: no element of its own, destroy children individually.
      Object.keys(this.children).forEach((key) => {
        this.destroyChild(key);
      });
    }
    this.children = {};
  }
  /** Destroys and unregisters the child stored under `key`, if any. */
  destroyChild(key: string) {
    if (key in this.children) {
      const child = this.children[key];
      if (isText(child)) {
        child.remove();
      } else {
        child.destroy();
      }
      delete this.children[key];
    }
  }
  /** Collects the concrete DOM nodes this container contributes to its parent. */
  getDirectNodes(): Node[] {
    if (this.element !== null) {
      return [this.element];
    }
    return Object.values(this.children).map((child) => {
      return isText(child) ? child : child.getDirectNodes();
    }).flat();
  }
  /**
   * Reconciles `children` against the previous keyed children map and returns
   * the resulting DOM nodes. Containers matched by key are reused (rendered
   * with the new element); everything left unmatched afterwards is destroyed.
   */
  renderChildren(children: MyChildren[]): Node[] {
    const newChildren: {[key: string]: Container|Text} = {};
    // Moves a child into the new map; `destory` (sic) additionally tears down
    // whatever previously occupied that key.
    const addChild = (key: string, child: Container|Text, destory: boolean) => {
      newChildren[key] = child;
      if (key in this.children) {
        delete this.children[key];
      }
      if (destory) {
        this.destroyChild(key);
      }
      return child;
    }
    const results = children.flat(Infinity)
      .map((child: MyNode, i) => {
        // Positional fallback key for elements without an explicit one.
        const key = `__learn_${i}`;
        // ignore boolean, null, undefined
        if (typeof child === 'string' || typeof child === 'number') {
          const node = document.createTextNode('' + child);
          addChild(key, node, true);
          return node;
        } else if (child && typeof child === 'object') {
          if (child.key === null) {
            child.key = key;
          }
          let container = this.children[child.key];
          // `current` stays non-null only when an existing container is reused;
          // a brand-new container renders from its constructor-supplied element.
          let current: MyElement | null = child;
          if (!container || isText(container)) {
            container = new Container(child);
            current = null;
          }
          addChild(child.key, container, !current);
          return container.render(current);
        }
        return null;
      })
      .flat()
      .filter(notNull)
    ;
    // Anything still in the old map was not re-rendered this pass: remove it.
    Object.keys(this.children).forEach((key) => {
      this.destroyChild(key);
    });
    this.children = newChildren;
    return results;
  }
  /**
   * Renders `current` (or re-renders the stored element when called without
   * arguments, e.g. from debounceRender). Returns the produced DOM nodes;
   * skips work entirely when shouldRender says nothing changed.
   */
  render(current?: MyElement | null) {
    if (!current) {
      current = this.prev;
    } else if (!this.shouldRender(current)) {
      return this.getDirectNodes();
    }
    const { type, props, children } = current;
    if (typeof type === 'string') {
      // Host element: create on first render, patch props afterwards.
      if (this.element === null) {
        this.element = createHtmlElement(type, props);
      } else {
        this.element = updateHtmlElement(this.element, props);
      }
      return [replaceChildren(this.element, this.renderChildren(children))];
    } else {
      // Function component: point hooks at this container and call it.
      Container.instance = this;
      this.hookStateIndex = 0;
      const newChildren = [type({...props, children})];
      return this.renderChildren(newChildren);
    }
  }
  /**
   * Returns the next hook slot (creating it with `initialState` on first use)
   * plus a flag telling the caller whether the slot is freshly created.
   * Relies on hooks being called in the same order every render.
   */
  nextHookState<T>(initialState: T): [T, boolean] {
    const isNew = this.hookStateIndex === this.hookStates.length;
    if (isNew) {
      this.hookStates.push(initialState);
    }
    const state = this.hookStates[this.hookStateIndex];
    this.hookStateIndex += 1;
    return [state, isNew];
  }
  // Stable mutable ref cell, same object across renders.
  useRef<T>(initial: T): {current: T} {
    return this.nextHookState({current: initial})[0];
  }
  /**
   * useState: value plus a stable setter; the setter stores the new value and
   * schedules a debounced re-render of this container.
   */
  useState<T>(initial: T): [T, (value: T) => T] {
    const [state] = this.nextHookState<{
      value: T, setValue: null | ((value: T) => T)
    }>({ value: initial, setValue: null });
    if (state.setValue === null) {
      state.setValue = (value: T) => {
        state.value = value;
        this.debounceRender();
        return value;
      };
    }
    return [state.value, state.setValue];
  }
}
export default Container;
|
<filename>resources/assets/js/common/requests/UserRequest.js
import BaseModelRequest from '../lib/BaseModelRequest';
/**
 * HTTP request helper for user/auth endpoints, built on BaseModelRequest
 * (which supplies get/post and URL construction from the model name).
 */
export default class UserRequest extends BaseModelRequest {
    // Resource segment used by BaseModelRequest to build '/users/...' URLs.
    getModelName() {
        return 'users'
    }

    /**
     * Password-grant OAuth login (Laravel Passport style): merges the client
     * credentials from the build env into `params` and posts to /oauth/token.
     */
    login(params) {
        let passport = {
            grant_type: 'password',
            client_id: process.env.MIX_CLIENT_ID,
            client_secret: process.env.MIX_CLIENT_SECRET,
            scope: '*'
        };
        params = { ...params, ...passport};
        const url = '/oauth/token';
        return this.post(url, params);
    }

    /**
     * Fetches the authenticated user. With `useCache` true, a previously
     * fetched user is returned without hitting the network.
     * NOTE(review): the cache lives on this request instance (`this.user`),
     * so it only helps when the same UserRequest object is reused.
     */
    getCurrentUser(useCache = false, params) {
        if(this.user && useCache) {
            return new Promise((resolve) => {
                resolve(this.user);
            });
        }
        return new Promise((resolve, reject) => {
            const url = '/user';
            const self = this;
            this.get(url, params).then((user) => {
                self.user = user;  // memoize for subsequent useCache calls
                resolve(user);
            })
            .catch((error) => {
                reject(error);
            })
        })
    }

    // Creates a new account.
    register(params) {
        const url = '/register';
        return this.post(url, params);
    }

    // Confirms an e-mail verification token.
    emailVerify(params) {
        const url ='/email-verify';
        return this.post(url, params);
    }

    // Verifies a one-time-password (2FA) code.
    otpVerify(params) {
        const url = '/otp-verify';
        return this.post(url, params);
    }

    // Turns off OTP-based 2FA for the account.
    disableOtp(params) {
        const url = '/disable-otp';
        return this.post(url, params);
    }

    // Reads the account's security level information.
    getInformartion(params) {
        const url ='/user-security-level';
        return this.get(url, params);
    }

    // Requests the QR code used to set up an authenticator app.
    generalQrCode(params) {
        const url ='/general-qr-code';
        return this.get(url, params);
    }
}
|
<reponame>acouvreur/skeleton-generator
package org.sklsft.generator.repository.build;
/**
 * Command-pattern abstraction: a single parameterless unit of work.
 * Package name suggests implementations are generator build steps — the
 * concrete semantics are defined entirely by each implementation.
 */
public interface Command {
    void execute();
}
|
<gh_stars>1-10
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

/**
 * Animal schema for crud functions on DB.
 * Field messages are the Spanish strings shown on validation failure.
 */
let Animal = new Schema({
    nombre: { type: String, required: 'El nombre es requerido' },
    sexo: { type: String, required: 'El sexo es requerido'},
    promedioEdad: { type: Number },
    // BUG FIX: was `default: Date.now()`, which is evaluated once when the schema
    // is defined, stamping every document with the process start-up time.
    // Passing the function itself lets Mongoose invoke it per document.
    createDate: { type: Date, default: Date.now }
});

module.exports = mongoose.model('Animales', Animal);
|
<filename>SciHive Source Code (Client)/www/games/candylandia/table.js
/* This script will construct a table and linked to index.html */
//DEFAULT_BOARD_SIZE = 8;
// Populated by the (currently commented-out) image preloader further below.
var img_array = [];

/**
 * Maps a 1-based column number to its letter label ("a".."h").
 * Out-of-range input yields undefined, like the original array lookup.
 */
function lookupLetter(num) {
    var labels = ["a", "b", "c", "d", "e", "f", "g", "h"];
    return labels[num - 1];
}
/**
 * Builds the game board as an HTML table string, one cell per board square.
 * Cell backgrounds come from the global `board` (board.getCandyAt) and the
 * board dimensions from the global DEFAULT_BOARD_SIZE; labels run "a1".."h8".
 */
function colorTable() {
    var html = "<div class='p-2 align-self-center' style='margin:-10px;'><table class='tableBorder'>";
    for (var r = 0; r < DEFAULT_BOARD_SIZE; r++) {
        html += "<tr class='tableBorder'>";
        for (var c = 0; c < DEFAULT_BOARD_SIZE; c++) {
            var label = lookupLetter(c + 1) + (r + 1);
            var background = board.getCandyAt(r, c);
            // Yellow cells get dark text for contrast; everything else uses white.
            var foreground = (background == "yellow") ? "#505050" : "white";
            html += "<td class='tableBorder padCell text-center' style='background-color:" + background + ";color:"
                + foreground + "'>" + label + "</td>";
        }
        html += "</tr>";
    }
    html += "</table></div>";
    return html;
}
// function load_img(imgToLoad) {
// var loaded = false;
// var counter = 0;
// for( var i=0; i<imgToLoad.length; i++){
// var img = new Image();
// console.log(imgToLoad.length);
// img.onload = function() {
// counter++;
// //console.log(imgToLoad[i]);
// console.log(counter);
// if(counter == imgToLoad.length){
// loaded = true;
// }
// }
// img.src = imgToLoad[i];
// console.log(img.src);
// img_array = img;
// }
// console.log(loaded);
// }
// function drawBoard() {
// load_img(['./graphics/blue-candy.png', './graphics/blue-special.png',
// './graphics/green-candy.png', './graphics/green-special.png',
// './graphics/orange-candy.png', './graphics/orange-special.png',
// './graphics/purple-candy.png', './graphics/purple-special.png',
// './graphics/red-candy.png', './graphics/red-special.png',
// './graphics/yellow-candy.png', './graphics/color-bomb.png']);
// var canvas = "<div class'p-2 align-self-center' style='margin:-10px;'> <canvas class='CanvasDraw'>";
// ctx = canvas.getContext('2d');
// for(var row = 0; row < DEFAULT_BOARD_SIZE; row++){
// for(var col = 0; col < DEFAULT_BOARD_SIZE; col++) {
// var colorBack = board.getCandyAt(row, col);
// console.log(colorBack);
// ctx.d
// }
// // console.log(tableInfo);
// }
// }
|
/*!
 * Fundament framework v0.4.0
 *
 * https://getfundament.com
 *
 * @license MIT
 * @author <NAME> and The Fundament Authors
 */
// NOTE(review): minified build artifact (see sourceMappingURL below) — exposes a
// requestAnimationFrame polyfill and the `Fm` utility object (createID, debounce,
// prefixProp, transitionEnd). Edit the unminified source, not this file.
window.requestAnimationFrame=window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.msRequestAnimationFrame||window.oRequestAnimationFrame||function(n){setTimeout(n,0)};var Fm=function(n){function i(){return(Math.random().toString(16)+"000000000").substr(2,8)}function t(n,i,t){var e;return function(){var o=this,r=arguments,a=t&&!e,u=function(){e=null,t||n.apply(o,r)};clearTimeout(e),e=setTimeout(u,i),a&&n.apply(o,r)}}function e(n){if(void 0===a[n])for(var i=0;i<r.length;i++){var t=r[i]+n;void 0!==a[t]&&(n=t)}return n}function o(){var n={transition:"transitionend",OTransition:"otransitionend",MozTransition:"transitionend",WebkitTransition:"webkitTransitionEnd"};for(var i in n)if(void 0!==a[i])return n[i];return null}var r=["-webkit-","-moz-","-ms-","-o-"],a=n.createElement("div").style;return{createID:i,debounce:t,prefixProp:e,transitionEnd:o}}(document);
//# sourceMappingURL=core.js.map
|
# == Route Map (Updated 2014-03-19 09:47)
#
# Prefix Verb URI Pattern Controller#Action
# notifications GET /notifications(.:format) notifications#index
# notification GET /notifications/:id(.:format) notifications#show
# root GET / home#show
# GET|POST /auth/:provider/callback(.:format) sessions#create
# auth_failure GET /auth/failure(.:format) sessions#new {:error=>true}
# signout GET /signout(.:format) sessions#destroy
# session POST /session(.:format) sessions#create
# new_session GET /session/new(.:format) sessions#new
# DELETE /session(.:format) sessions#destroy
# edit_profile GET /profile/edit(.:format) profiles#edit
# profile PATCH /profile(.:format) profiles#update
# PUT /profile(.:format) profiles#update
# public_comments POST /public_comments(.:format) comments#create {:type=>"PublicComment"}
# internal_comments POST /internal_comments(.:format) comments#create {:type=>"InternalComment"}
# speaker DELETE /speakers/:id(.:format) speakers#destroy
# proposals GET /proposals(.:format) proposals#index
# events GET /events(.:format) events#index
# event GET /events/:slug(.:format) events#show
# event_proposals POST /events/:slug/proposals(.:format) proposals#create
# parse_edit_field_proposal GET /events/:slug/parse_edit_field(.:format) proposals#parse_edit_field
# confirm_proposal GET /events/:slug/proposals/:uuid/confirm(.:format) proposals#confirm
# set_confirmed_proposal POST /events/:slug/proposals/:uuid/set_confirmed(.:format) proposals#set_confirmed
# withdraw_proposal POST /events/:slug/proposals/:uuid/withdraw(.:format) proposals#withdraw
# proposal DELETE /events/:slug/proposals/:uuid(.:format) proposals#destroy
# GET /events/:slug/proposals(.:format) proposals#index
# POST /events/:slug/proposals(.:format) proposals#create
# new_proposal GET /events/:slug/proposals/new(.:format) proposals#new
# edit_proposal GET /events/:slug/proposals/:uuid/edit(.:format) proposals#edit
# GET /events/:slug/proposals/:uuid(.:format) proposals#show
# PATCH /events/:slug/proposals/:uuid(.:format) proposals#update
# PUT /events/:slug/proposals/:uuid(.:format) proposals#update
# DELETE /events/:slug/proposals/:uuid(.:format) proposals#destroy
# accept_invitation POST /invitations/:invitation_slug/accept(.:format) invitations#update
# refuse_invitation POST /invitations/:invitation_slug/refuse(.:format) invitations#update {:refuse=>true}
# resend_invitation POST /invitations/:invitation_slug/resend(.:format) invitations#resend
# invitations POST /invitations(.:format) invitations#create
# invitation GET /invitations/:invitation_slug(.:format) invitations#show
# DELETE /invitations/:invitation_slug(.:format) invitations#destroy
# admin_events POST /admin/events(.:format) admin/events#create
# new_admin_event GET /admin/events/new(.:format) admin/events#new
# admin_event DELETE /admin/events/:id(.:format) admin/events#destroy
# admin_people GET /admin/people(.:format) admin/people#index
# POST /admin/people(.:format) admin/people#create
# new_admin_person GET /admin/people/new(.:format) admin/people#new
# edit_admin_person GET /admin/people/:id/edit(.:format) admin/people#edit
# admin_person GET /admin/people/:id(.:format) admin/people#show
# PATCH /admin/people/:id(.:format) admin/people#update
# PUT /admin/people/:id(.:format) admin/people#update
# DELETE /admin/people/:id(.:format) admin/people#destroy
# organizer_event_program GET /organizer/events/:event_id/program(.:format) organizer/program#show
# organizer_event_participants POST /organizer/events/:event_id/participants(.:format) organizer/participants#create
# organizer_event_participant PATCH /organizer/events/:event_id/participants/:id(.:format) organizer/participants#update
# PUT /organizer/events/:event_id/participants/:id(.:format) organizer/participants#update
# DELETE /organizer/events/:event_id/participants/:id(.:format) organizer/participants#destroy
# organizer_event_proposal_finalize POST /organizer/events/:event_id/proposals/:proposal_uuid/finalize(.:format) organizer/proposals#finalize
# organizer_event_proposal_accept POST /organizer/events/:event_id/proposals/:proposal_uuid/accept(.:format) organizer/proposals#accept
# organizer_event_proposal_reject POST /organizer/events/:event_id/proposals/:proposal_uuid/reject(.:format) organizer/proposals#reject
# organizer_event_proposal_waitlist POST /organizer/events/:event_id/proposals/:proposal_uuid/waitlist(.:format) organizer/proposals#waitlist
# organizer_event_proposal_update_state POST /organizer/events/:event_id/proposals/:proposal_uuid/update_state(.:format) organizer/proposals#update_state
# organizer_event_proposals GET /organizer/events/:event_id/proposals(.:format) organizer/proposals#index
# edit_organizer_event_proposal GET /organizer/events/:event_id/proposals/:uuid/edit(.:format) organizer/proposals#edit
# organizer_event_proposal GET /organizer/events/:event_id/proposals/:uuid(.:format) organizer/proposals#show
# PATCH /organizer/events/:event_id/proposals/:uuid(.:format) organizer/proposals#update
# PUT /organizer/events/:event_id/proposals/:uuid(.:format) organizer/proposals#update
# DELETE /organizer/events/:event_id/proposals/:uuid(.:format) organizer/proposals#destroy
# organizer_event_speaker_emails GET /organizer/events/:event_id/speaker_emails(.:format) organizer/speakers#emails
# organizer_event_speakers GET /organizer/events/:event_id/speakers(.:format) organizer/speakers#index
# organizer_event_speaker GET /organizer/events/:event_id/speakers/:id(.:format) organizer/speakers#show
# edit_organizer_event GET /organizer/events/:id/edit(.:format) organizer/events#edit
# organizer_event GET /organizer/events/:id(.:format) organizer/events#show
# PATCH /organizer/events/:id(.:format) organizer/events#update
# PUT /organizer/events/:id(.:format) organizer/events#update
# organizer_autocomplete_email GET /organizer/autocomplete_email(.:format) organizer/participants#emails {:format=>:json}
# reviewer_event_proposal_ratings POST /reviewer/events/:event_id/proposals/:proposal_uuid/ratings(.:format) reviewer/ratings#create {:format=>:js}
# reviewer_event_proposal_rating PATCH /reviewer/events/:event_id/proposals/:proposal_uuid/ratings/:id(.:format) reviewer/ratings#update {:format=>:js}
# PUT /reviewer/events/:event_id/proposals/:proposal_uuid/ratings/:id(.:format) reviewer/ratings#update {:format=>:js}
# reviewer_event_proposals GET /reviewer/events/:event_id/proposals(.:format) reviewer/proposals#index
# reviewer_event_proposal GET /reviewer/events/:event_id/proposals/:uuid(.:format) reviewer/proposals#show
# PATCH /reviewer/events/:event_id/proposals/:uuid(.:format) reviewer/proposals#update
# PUT /reviewer/events/:event_id/proposals/:uuid(.:format) reviewer/proposals#update
# GET /404(.:format) errors#not_found
# GET /422(.:format) errors#unacceptable
# GET /500(.:format) errors#internal_error
#
CFPApp::Application.routes.draw do
  resources :notifications, only: [ :index, :show ]

  root 'home#show'

  # OmniAuth session endpoints; providers may GET or POST the callback.
  match '/auth/:provider/callback' => 'sessions#create', via: [:get, :post]
  get '/auth/failure' => 'sessions#new', error: true
  get '/signout' => 'sessions#destroy', as: :signout
  resource :session, only: [:new, :create, :destroy]
  get '/mentorship' => 'home#mentorship', as: :mentorship
  resource :profile, only: [:edit, :update]

  # Both comment routes share CommentsController; the :type default selects the subclass.
  resource :public_comments, only: [:create], controller: :comments, type: 'PublicComment'
  resource :internal_comments, only: [:create], controller: :comments, type: 'InternalComment'
  resources :speakers, only: [:destroy]
  resources :proposals, only: [:index]
  resources :events, only: [:index]

  # Event-scoped routes; events are addressed by :slug and proposals by :uuid.
  scope '/events/:slug' do
    get '/' => 'events#show', as: :event
    post '/proposals' => 'proposals#create', as: :event_proposals
    get 'parse_edit_field' => 'proposals#parse_edit_field',
      as: :parse_edit_field_proposal
    resources :proposals, param: :uuid do
      member { get :confirm }
      member { post :set_confirmed }
      member { post :withdraw }
      member { delete :destroy}
    end
  end

  # Invitation links carry a per-invite token for accept/refuse.
  resources :participant_invitations, only: :show, param: :slug do
    member do
      get ":token/accept", action: :accept, as: :accept
      get ":token/refuse", action: :refuse, as: :refuse
    end
  end
  resources :invitations, only: [:show, :create, :destroy], param: :invitation_slug do
    member do
      post :accept, action: :update
      post :refuse, action: :update, refuse: true
      post :resend, action: :resend
    end
  end

  namespace 'admin' do
    resources :events, except: [:show, :index, :edit, :update]
    resources :people
  end

  # Organizer area: event management, program, participants, proposal triage.
  namespace 'organizer' do
    resources :events, only: [:edit, :show, :update] do
      resources :participant_invitations, except: [ :new, :edit, :update, :show ]
      controller :program do
        get 'program' => 'program#show'
      end
      resources :participants, only: [:create, :destroy, :update] do
        collection { get :emails, defaults: { format: :json } }
      end
      resources :rooms, only: [:create, :update, :destroy]
      resources :tracks, only: [:create, :destroy]
      resources :sessions, except: :show
      resources :proposals, param: :uuid do
        post :finalize
        post :update_state
      end
      controller :speakers do
        get :speaker_emails, action: :emails
      end
      resources :speakers, only: [:index, :show]
    end
  end

  # Reviewer area: read/rate proposals; ratings respond with JS by default.
  namespace 'reviewer' do
    resources :events, only: [] do
      resources :proposals, only: [:index, :show, :update], param: :uuid do
        resources :ratings, only: [:create, :update], defaults: { format: :js }
      end
    end
  end

  # Static error pages.
  get "/404", :to => "errors#not_found"
  get "/422", :to => "errors#unacceptable"
  get "/500", :to => "errors#internal_error"
end
|
#!/bin/bash -eE
# (C) Sergey Tyurin 2020-03-15 13:00:00
# Disclaimer
##################################################################################################################
# You running this script/function means you will not blame the author(s)
# if this breaks your stuff. This script/function is provided AS IS without warranty of any kind.
# Author(s) disclaim all implied warranties including, without limitation,
# any implied warranties of merchantability or of fitness for a particular purpose.
# The entire risk arising out of the use or performance of the sample scripts and documentation remains with you.
# In no event shall author(s) be held liable for any damages whatsoever
# (including, without limitation, damages for loss of business profits, business interruption,
# loss of business information, or other pecuniary loss) arising out of the use of or inability
# to use the script or documentation. Neither this script/function,
# nor any part of it other than those parts that are explicitly copied from others,
# may be republished without author(s) express written permission.
# Author(s) retain the right to alter this disclaimer at any time.
##################################################################################################################
# Installs the TON validator node as a system service:
#   - Linux:   writes a systemd unit to /etc/systemd/system/tonnode.service
#   - FreeBSD: generates an rc.d script at /usr/local/etc/rc.d/tonnode from
#              the FB_service.tmplt template
# Both the RUST node and the C++ node are supported; $NODE_TYPE (from env.sh)
# selects which service definition is produced.
SCRIPT_DIR=`cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P`
# env.sh / functions.shinc provide NODE_TYPE, NETWORK_TYPE, CALL_RN, CALL_VE,
# TON_* / log paths, CONFIGS_DIR and the terminal color variables used below.
source "${SCRIPT_DIR}/env.sh"
source "${SCRIPT_DIR}/functions.shinc"
echo
echo "################################# service setup script ###################################"
echo "+++INFO: $(basename "$0") BEGIN $(date +%s) / $(date)"
# set log verbosity for Cnode.
# To increase log verbosity run this script as `./setup_as_service.sh <verbosity level>` and
# restart the service
verb="${1:-1}"
OS_SYSTEM=`uname -s`
# On Linux capture numeric uid/gid; elsewhere (FreeBSD) the user/group names.
if [[ "$OS_SYSTEM" == "Linux" ]];then
SETUP_USER="$(id -u)"
SETUP_GROUP="$(id -g)"
else
SETUP_USER="$(id -un)"
SETUP_GROUP="$(id -gn)"
fi
Net_Name="${NETWORK_TYPE%%.*}"
if [[ "${OS_SYSTEM}" == "Linux" ]];then
V_CPU=`nproc`
########################################################################
############## Node Services for Linux (Ubuntu & CentOS) ###############
# Give the node all but two of the available vCPUs.
USE_THREADS=$((V_CPU - 2))
SERVICE_FILE="/etc/systemd/system/tonnode.service"
if [[ "$NODE_TYPE" == "RUST" ]]; then
#=====================================================
# Rust node on Linux ##
SVC_FILE_CONTENTS=$(cat <<-_ENDCNT_
[Unit]
Description=TON Validator RUST Node
After=network.target
StartLimitIntervalSec=0
[Service]
Type=simple
Restart=always
RestartSec=1
User=$USER
LimitNOFILE=2048000
ExecStart=$CALL_RN
[Install]
WantedBy=multi-user.target
_ENDCNT_
)
else # node type select
#=====================================================
# C++ node on Linux
SVC_FILE_CONTENTS=$(cat <<-_ENDCNT_
[Unit]
Description=TON Validator C++ Node
After=network.target
StartLimitIntervalSec=0
[Service]
Type=simple
Restart=always
RestartSec=1
User=$USER
LimitNOFILE=2048000
ExecStart=/bin/bash -c "exec $CALL_VE -v $verb -t $USE_THREADS ${ENGINE_ADDITIONAL_PARAMS} -C ${TON_WORK_DIR}/etc/ton-global.config.json --db ${TON_WORK_DIR}/db >> ${TON_LOG_DIR}/${CNODE_LOG_FILE} 2>&1"
[Install]
WantedBy=multi-user.target
_ENDCNT_
)
fi
# Install the unit file: write it as the current user, then move it into
# place with sudo and set root ownership / 644 permissions.
echo "${SVC_FILE_CONTENTS}" > ${SCRIPT_DIR}/tmp.txt
sudo mv -f ${SCRIPT_DIR}/tmp.txt ${SERVICE_FILE}
sudo chown root:root ${SERVICE_FILE}
sudo chmod 644 ${SERVICE_FILE}
# On CentOS restore the SELinux context expected for files under /etc.
[[ "$(hostnamectl |grep 'Operating System'|awk '{print $3}')" == "CentOS" ]] && sudo chcon system_u:object_r:etc_t:s0 ${SERVICE_FILE}
sudo systemctl daemon-reload
sudo systemctl enable tonnode
echo
echo -e "To start node service run ${BoldText}${GreeBack}sudo service tonnode start${NormText}"
echo "To restart updated node or service - run all follow commands:"
echo
echo "sudo systemctl disable tonnode"
echo "sudo systemctl daemon-reload"
echo "sudo systemctl enable tonnode"
echo "sudo service tonnode restart"
# ************************************************************
# ************** Setup watchdog service **********************
# SERVICE_FILE="/etc/systemd/system/nodewd.service"
# SVC_FILE_CONTENTS=$(cat <<-_ENDCNT_
# [Unit]
# Description=TON Validator watchdog for node
# After=network.target
# StartLimitIntervalSec=0
# [Service]
# Type=simple
# PIDFile=${TON_LOG_DIR}/nodewd.pid
# Restart=always
# RestartSec=3
# User=$USER
# Group=$SETUP_GROUP
# LimitNOFILE=2048000
# Environment="HOME=$HOME"
# WorkingDirectory=${SCRIPT_DIR}
# ExecStart=/bin/bash -c "exec script --return --quiet --append --command \"${SCRIPT_DIR}/watchdog.sh 2>&1 >> ${TON_LOG_DIR}/time_diff.log\""
# [Install]
# WantedBy=multi-user.target
# _ENDCNT_
# )
# echo "${SVC_FILE_CONTENTS}" > ${SCRIPT_DIR}/tmp.txt
# sudo mv -f ${SCRIPT_DIR}/tmp.txt ${SERVICE_FILE}
# sudo chown root:root ${SERVICE_FILE}
# sudo chmod 644 ${SERVICE_FILE}
# [[ "$(hostnamectl |grep 'Operating System'|awk '{print $3}')" == "CentOS" ]] && sudo chcon system_u:object_r:etc_t:s0 ${SERVICE_FILE}
# sudo systemctl daemon-reload
# sudo systemctl enable nodewd
# echo
# echo -e "To start WATCHDOG service run ${BoldText}${GreeBack}sudo service nodewd start${NormText}"
# echo "To restart updated node or service - run all follow commands:"
# echo
# echo "sudo systemctl disable nodewd"
# echo "sudo systemctl daemon-reload"
# echo "sudo systemctl enable nodewd"
# echo "sudo service nodewd restart"
else # -------------------- OS select
# Next for FreeBSD
########################################################################
############## FreeBSD rc daemon ########################################
# The rc.d script is produced from the FB_service.tmplt template by
# replacing the N_* placeholders with sed.
echo "---INFO: Setup rc daemon..."
V_CPU=`sysctl -n hw.ncpu`
# Give the node all but two of the available vCPUs.
USE_THREADS=$((V_CPU - 2))
SERVICE_FILE="/usr/local/etc/rc.d/tonnode"
cp -f ${CONFIGS_DIR}/FB_service.tmplt ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_LOG_DIR%${NODE_LOGS_ARCH}%" ${SCRIPT_DIR}/tmp.txt
if [[ "$NODE_TYPE" == "RUST" ]]; then
# =====================================================
# Rust node
# NOTE(review): ${name} is not set at this point, so the next two
# assignments expand to "$NODE_LOGS_ARCH/.pid" / ".log"; these lines look
# like leftovers copied from the rc template itself -- verify intent.
pidfile="$NODE_LOGS_ARCH/daemon.pid"
pidfile_child="$NODE_LOGS_ARCH/${name}.pid"
logfile="$NODE_LOGS_ARCH/${name}.log"
echo "Setup FreeBSD daemon for RNODE"
sed -i.bak "s%N_SERVICE_DESCRIPTION%Free TON RUST Node Daemon%" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_USER%${USER}%g" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_NODE_LOGS_ARCH%${NODE_LOGS_ARCH}%g" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_NODE_LOG_FILE%${R_LOG_DIR}/${RNODE_LOG_FILE}%g" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_COMMAND%$CALL_RN%" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_ARGUMENTS% %" ${SCRIPT_DIR}/tmp.txt
else # -------------------- node type select
# =====================================================
# C++ node
echo "Setup FreeBSD daemon for CNODE"
sed -i.bak "s%N_SERVICE_DESCRIPTION%Free TON C++ Node Daemon%" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_USER%${USER}%" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_NODE_LOGS_ARCH%${NODE_LOGS_ARCH}%g" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_NODE_LOG_FILE%${TON_LOG_DIR}/${CNODE_LOG_FILE}%g" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_COMMAND%$CALL_VE%" ${SCRIPT_DIR}/tmp.txt
sed -i.bak "s%N_ARGUMENTS%-v $verb -t $USE_THREADS ${ENGINE_ADDITIONAL_PARAMS} -C ${TON_WORK_DIR}/etc/ton-global.config.json --db ${TON_WORK_DIR}/db >> ${TON_LOG_DIR}/${CNODE_LOG_FILE}%" ${SCRIPT_DIR}/tmp.txt
fi # -------------------- node type select
########################################################################
# Install the generated rc.d script (root:wheel, executable).
sudo mv -f ${SCRIPT_DIR}/tmp.txt ${SERVICE_FILE}
sudo chown root:wheel ${SERVICE_FILE}
sudo chmod 755 ${SERVICE_FILE}
# Enable the service at boot if not already present in rc.conf.
# NOTE(review): the nested double quotes cancel out, so the appended line is
# tonnode_enable=YES (unquoted) -- rc.conf accepts this, but confirm it was
# not meant to be quoted.
[[ -z "$(cat /etc/rc.conf | grep 'tonnode_enable')" ]] && sudo sh -c "echo ' ' >> /etc/rc.conf; echo 'tonnode_enable="YES"' >> /etc/rc.conf"
ls -al ${SERVICE_FILE}
echo -e "To start node service run ${BoldText}${GreeBack}'service tonnode start'${NormText}"
echo "To restart updated node or service run 'service tonnode restart'"
echo
# ************************************************************
# ************** Setup watchdog service **********************
# echo "Setup FreeBSD daemon for CNODE"
# SERVICE_FILE="/usr/local/etc/rc.d/nodewd"
# cp -f ${CONFIGS_DIR}/FB_service.tmplt ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%tonnode%nodewd%g" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_LOG_DIR%${NODE_LOGS_ARCH}%" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_SERVICE_DESCRIPTION%Free TON Node WatchDog Daemon%" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_USER%${USER}%" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_NODE_LOGS_ARCH%${NODE_LOGS_ARCH}%g" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_NODE_LOG_FILE%${TON_LOG_DIR}/${CNODE_LOG_FILE}%g" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_COMMAND%cd%" ${SCRIPT_DIR}/tmp.txt
# sed -i.bak "s%N_ARGUMENTS%${SCRIPT_DIR} && ${SCRIPT_DIR}/watchdog.sh%" ${SCRIPT_DIR}/tmp.txt
# sudo mv -f ${SCRIPT_DIR}/tmp.txt ${SERVICE_FILE}
# sudo chown root:wheel ${SERVICE_FILE}
# sudo chmod 755 ${SERVICE_FILE}
# [[ -z "$(cat /etc/rc.conf | grep 'nodewd_enable')" ]] && sudo sh -c "echo ' ' >> /etc/rc.conf; echo 'nodewd_enable="YES"' >> /etc/rc.conf"
# ls -al ${SERVICE_FILE}
# echo -e "To start node watchdog service run ${BoldText}${GreeBack}'service nodewd start'${NormText}"
# echo "To restart updated node or service run 'service nodewd restart'"
# echo
echo "---INFO: rc daemon setup DONE!"
fi # ############################## OS select
echo
echo "+++INFO: $(basename "$0") FINISHED $(date +%s) / $(date)"
echo "================================================================================================"
exit 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.