text stringlengths 1 1.05M |
|---|
import { p, readLines, sum } from "./util/util";
// Read the puzzle input: a single line of comma-separated starting ages.
const lines = readLines("input/a06.txt");
const startAges = lines[0].split(",").map((n) => parseInt(n));
/**
 * Simulates an exponentially growing population by counting individuals
 * per age instead of tracking them individually.
 *
 * Rules (per generation): everyone ages down by one day; an individual at
 * age 0 resets to age 6 and spawns one newborn at age 8.
 *
 * @param startAges initial age of every individual (typically 0-8)
 * @param generations number of days to simulate
 * @returns total population after `generations` days
 */
function afterGenerations(startAges: number[], generations: number): number {
    // counts[age] = number of individuals at that age. Dense (no holes),
    // sized for the standard 0-8 cycle but grown to accommodate any larger
    // starting age, matching the original sparse-array behavior.
    const size = Math.max(9, ...startAges.map((a) => a + 1));
    const counts: number[] = new Array(size).fill(0);
    for (const age of startAges) {
        counts[age] += 1;
    }
    for (let gen = 0; gen < generations; gen++) {
        const spawning = counts[0];
        // Everyone ages down by one day.
        for (let age = 1; age < counts.length; age++) {
            counts[age - 1] = counts[age];
        }
        counts[counts.length - 1] = 0; // nothing shifts into the top slot
        counts[6] += spawning; // parents restart their cycle at age 6
        counts[8] += spawning; // one newborn per parent at age 8
    }
    // Total population. Uses Array.reduce instead of the project-local
    // sum() helper so the function is self-contained; result is identical.
    return counts.reduce((total, n) => total + n, 0);
}
// Part 1: population after 80 generations.
p(afterGenerations(startAges, 80));
// Part 2: population after 256 generations.
p(afterGenerations(startAges, 256));
|
<gh_stars>1-10
(function(cornerstoneTools) {
    'use strict';

    // Key codes the tool reacts to.
    var KEY_CODES = {
        UP: 38,
        DOWN: 40
    };

    /**
     * Scrolls the image stack one image forward on Up arrow, one image
     * backward on Down arrow; any other key is ignored.
     */
    function keyDownCallback(e, eventData) {
        var pressed = eventData.keyCode;
        if (pressed === KEY_CODES.UP || pressed === KEY_CODES.DOWN) {
            var delta = (pressed === KEY_CODES.UP) ? 1 : -1;
            cornerstoneTools.scroll(eventData.element, delta);
        }
    }

    // module/private exports
    cornerstoneTools.stackScrollKeyboard = cornerstoneTools.keyboardTool(keyDownCallback);
})(cornerstoneTools);
|
class Node {
    // Payload stored in this list element.
    int data;
    // Next element in the chain, or null at the tail.
    Node next;

    // Creates a detached node holding the given value.
    public Node(int data) {
        this.data = data;
        this.next = null;
    }
}
public class LinkedList {
// head of list
Node head;
/* Linked list Node*/
public LinkedList() {
head = null;
}
// Method to insert a new node
public void insert(int data) {
// Create a new node with given data
Node new_node = new Node(data);
new_node.next = null;
// If the Linked List is empty,
// then make the new node as head
if (head == null) {
head = new_node;
}
else {
// Else traverse till the last node
// and insert the new_node there
Node last = head;
while (last.next != null) {
last = last.next;
}
// Insert the new_node at last node
last.next = new_node;
}
}
// Method to print the LinkedList.
public void printList()
{
Node currNode = head;
System.out.print("LinkedList: ");
// Traverse through the LinkedList
while (currNode != null) {
// Print the data at current node
System.out.print(currNode.data + " ");
// Go to next node
currNode = currNode.next;
}
}
public static void main(String[] args)
{
/* Start with the empty list. */
LinkedList llist = new LinkedList();
// Insert the values
llist.insert(1);
llist.insert(2);
llist.insert(3);
llist.insert(4);
// Print the LinkedList
llist.printList();
}
} |
# Start the Cloud SQL Auth proxy, exposing the instance
# nestjs-auth-251708:us-central1:auth-db on local TCP port 5432 and
# authenticating with the service-account key in key.json.
./cloud_sql_proxy -instances=nestjs-auth-251708:us-central1:auth-db=tcp:5432 -credential_file=key.json
#!/bin/bash
# Load optional site settings, then the shared includes library.
[[ -f /usr/local/finisher/settings.conf ]] && . /usr/local/finisher/settings.conf
MUK_DIR=${MUK_DIR:-"/opt/modify_ubuntu_kit"}
# BUGFIX: use a brace group, not a subshell — "exit 1" inside "( ... )"
# only terminated the subshell, so the script previously carried on and
# sourced a missing file.
[[ ! -e ${MUK_DIR}/files/includes.sh ]] && { echo Missing includes file! Aborting!; exit 1; }
. ${MUK_DIR}/files/includes.sh

# No parameter specified? Or maybe help requested?
if [[ "$1" == "--help" || "$1" == "-h" ]]; then
    echo -e "${RED}Purpose:${NC} Builds the NUC LED kernel module for your computer."
    echo ""
    exit 0
fi

#==============================================================================
_title "Install support software to control front LED panel..."
# Source: https://nucblog.net/2017/05/linux-kernel-driver-for-nuc-led-control/
#==============================================================================
# First: Install the software (duh... :p)
#==============================================================================
wget https://github.com/douglasorend/intel_nuc_led/releases/download/v2.0/intel-nuc-led-dkms_2.0_all.deb -O /tmp/intel-nuc-led-dkms_2.0_all.deb
apt install -y /tmp/intel-nuc-led-dkms_2.0_all.deb
rm /tmp/intel-nuc-led-dkms_2.0_all.deb

# Second: Make sure the WMI and the NUC LED modules get loaded:
#==============================================================================
cat << EOF > /etc/modules-load.d/nuc_led.conf
# Intel NUC LED kernel driver
wmi
nuc_led
EOF
chown root:root /etc/modules-load.d/nuc_led.conf

# Third: Configure the kernel module (perms/uid/gid for the LED device)
#==============================================================================
echo "options nuc_led nuc_led_perms=0664 nuc_led_gid=100 nuc_led_uid=1000" > /etc/modprobe.d/nuc_led.conf
chown root:root /etc/modprobe.d/nuc_led.conf

# Fourth: Pull the "service.recording-led-for-nuc" addon:
#==============================================================================
KODI_NAME="service.recording-led-for-nuc"
### First: Get the repo:
[[ ! -d ${KODI_OPT} ]] && mkdir -p ${KODI_OPT}
git clone --depth=1 https://github.com/xptsp/${KODI_NAME} ${KODI_OPT}/${KODI_NAME}
### Second: Link the repo:
[[ ! -d ${KODI_ADD} ]] && mkdir -p ${KODI_ADD}
ln -sf ${KODI_OPT}/${KODI_NAME} ${KODI_ADD}/${KODI_NAME}
### Third: Create default addon data:
KODI_DATA=$(dirname ${KODI_ADD})
7z x -aoa ${MUK_DIR}/files/kodi_userdata.7z addon_data/service.recording-led -O${KODI_DATA}/
### Fourth: Enable all these addons:
kodi_enable service.recording-led
|
package de.lmu.cis.ocrd.ml.features;
import com.google.gson.JsonObject;
import de.lmu.cis.ocrd.ml.OCRToken;
import de.lmu.cis.ocrd.util.JSON;
import java.util.ArrayList;
import java.util.List;
public class TokenCaseClassFeature extends NamedStringSetFeature {
    private static final long serialVersionUID = 6185953194478613291L;
    private static final String LOWER = "all-lower-case";
    private static final String UPPER = "all-upper-case";
    private static final String TITLE = "title-case";
    private static final String MIXED = "mixed-case";
    // The fixed set of class labels this feature can emit.
    private static final List<String> CLASSES = new ArrayList<>();
    static {
        CLASSES.add(LOWER);
        CLASSES.add(UPPER);
        CLASSES.add(TITLE);
        CLASSES.add(MIXED);
    }

    public TokenCaseClassFeature(JsonObject o, ArgumentFactory args) {
        this(JSON.mustGetNameOrType(o));
    }

    public TokenCaseClassFeature(String name) {
        super(name, CLASSES);
    }

    /** Only the master OCR is handled by this feature. */
    @Override
    public boolean handlesOCR(int i, int n) {
        return handlesOnlyMasterOCR(i, n);
    }

    @Override
    public Object calculate(OCRToken token, int i, int n) {
        return getCaseClass(getWord(token, i, n).toString());
    }

    /**
     * Classifies the casing of {@code str} as one of the four labels.
     * Check order matters: an empty or all-lower string yields LOWER, and
     * a single capital letter yields UPPER (checked before TITLE).
     */
    protected String getCaseClass(String str) {
        boolean onlyLower = true;
        boolean onlyUpper = true;
        boolean leadingUpper = false;
        boolean atFirstCodePoint = true;
        for (final int codePoint : str.codePoints().toArray()) {
            switch (Character.getType(codePoint)) {
            case Character.UPPERCASE_LETTER:
                // True only while on the first code point; any later
                // capital resets it to false, so TITLE means a single
                // leading capital with no further capitals.
                leadingUpper = atFirstCodePoint;
                onlyLower = false;
                break;
            case Character.LOWERCASE_LETTER:
                onlyUpper = false;
                break;
            default:
                // Non-letter code points rule out both uniform classes.
                onlyUpper = false;
                onlyLower = false;
                break;
            }
            atFirstCodePoint = false;
        }
        if (onlyLower) {
            return LOWER;
        }
        if (onlyUpper) {
            return UPPER;
        }
        if (leadingUpper) {
            return TITLE;
        }
        return MIXED;
    }
}
|
# Install the locally built wireguird Debian package (requires root).
sudo dpkg -i ./build/wireguird_amd64.deb
|
#!/bin/sh
# Regenerate TestMakefile.am via the helper script, then refresh the
# autotools build system (-i installs missing auxiliary files).
auto/testAutoMake.sh > TestMakefile.am
autoreconf -i
<reponame>minuk8932/Algorithm_BaekJoon
package dynamic_programming;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 17404번: RGB거리 2
*
* @see https://www.acmicpc.net/problem/17404
*
*/
public class Boj17404 {
    // dp[firstColor][house][color] = min cost of painting houses
    // house..N given the first house's fixed color; -1 = not computed.
    private static int[][][] dp;
    // houses[i][c] = cost of painting house i (1-based) with color c.
    private static int[][] houses;
    private static int N;
    // Sentinel cost for invalid colorings (first and last house equal).
    private static final int INF = 10_000_000;

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        N = Integer.parseInt(br.readLine());
        houses = new int[N + 1][3];
        for(int i = 1; i <= N; i++) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            houses[i][0] = Integer.parseInt(st.nextToken());
            houses[i][1] = Integer.parseInt(st.nextToken());
            houses[i][2] = Integer.parseInt(st.nextToken());
        }
        dp = new int[3][N + 1][3];
        int min = INF;
        // Initialize the memo table to "not computed".
        for(int i = 0; i < dp.length; i++) {
            for(int j = 0; j < dp[i].length; j ++) {
                dp[i][j][0] = dp[i][j][1] = dp[i][j][2] = -1;
            }
        }
        // Fix the first house's color, then take the cheapest overall.
        for(int i = 0; i < 3; i++) {
            min = Math.min(recursion(i, i, 1), min);
        }
        System.out.println(min);
    }

    /**
     * Minimum cost of painting houses current..N such that adjacent
     * houses differ and the last house differs from the first.
     *
     * @param first color fixed for house 1 (circular adjacency constraint)
     * @param color color chosen for the current house
     * @param current 1-based index of the house being painted
     * @return minimum total cost, or >= INF if no valid coloring exists
     */
    private static int recursion(int first, int color, int current) {
        // Last house: its color must differ from the first house's.
        if(current == N) return first == color ? INF: houses[current][color];
        if(dp[first][current][color] != -1) return dp[first][current][color];
        int result = INF;
        // Try every color different from the current house's color.
        for(int i = 0; i < 3; i++) {
            if(color == i) continue;
            result = Math.min(result, recursion(first, i, current + 1));
        }
        return dp[first][current][color] = result + houses[current][color];
    }
}
|
#!/bin/sh
# (C) Copyright 2005- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities granted to it by
# virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction.
#
. ./include.sh

# Discard tool output; only exit codes matter for this test.
REDIRECT=/dev/null

files="regular_latlon_surface.grib2 \
regular_latlon_surface.grib1"

for file in $files; do
    infile=${data_dir}/$file
    outfile1=${infile}_decimalPrecision_1
    outfile2=${infile}_decimalPrecision_2
    # Setting decimalScaleFactor directly (with repacking) and using the
    # changeDecimalPrecision shortcut must yield identical messages.
    ${tools_dir}/grib_set -r -s decimalScaleFactor=1,bitsPerValue=0 $infile $outfile1
    ${tools_dir}/grib_compare -P -c data:n $infile $outfile1 > $REDIRECT
    ${tools_dir}/grib_set -s changeDecimalPrecision=1 $infile $outfile2
    ${tools_dir}/grib_compare -P -c data:n $infile $outfile2 > $REDIRECT
    ${tools_dir}/grib_compare $outfile1 $outfile2
    rm -f $outfile1 $outfile2
done

# ECC-458: spectral_complex packing
temp=temp.grib_decimalPrecision.grib
infile=${data_dir}/spectral_complex.grib1
# Catch errors re negative values: any logged error message becomes fatal.
export ECCODES_FAIL_IF_LOG_MESSAGE=1
${tools_dir}/grib_set -r -s decimalScaleFactor=0 $infile $temp
${tools_dir}/grib_set -r -s decimalScaleFactor=1 $infile $temp
rm -f $temp
<reponame>bink81/java-experiments<filename>src/main/java/utils/CommonCode.java
package utils;
import java.util.function.BiConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CommonCode {
    private static final Logger logger = LoggerFactory.getLogger(CommonCode.class);

    /** A Runnable variant whose body may throw any Throwable. */
    @FunctionalInterface
    public interface CheckedRunnable {
        void run() throws Throwable;
    }

    /**
     * Runs {@code r} and swallows anything it throws, logging the failure
     * instead of propagating it. This method helps to skip catch clauses
     * that can be ignored.
     */
    public static void ignoreException(CheckedRunnable r) {
        try {
            r.run();
        } catch (Throwable e) {
            logger.info("Exception caught: {} - {}", e.getClass().getSimpleName(), e.getMessage());
            // No "{}" placeholder here: SLF4J prints the stack trace only
            // when the Throwable is the unmatched last argument. With the
            // previous "Exception stack: {}" message the exception was
            // consumed by the placeholder and the trace was lost.
            logger.debug("Exception stack:", e);
        }
    }

    /**
     * Invokes {@code consumer.accept(i, j)} for every pair in
     * [0, lengthI) x [0, lengthJ), iterating j fastest.
     */
    public static void doubleForLoop(int lengthI, int lengthJ, BiConsumer<Integer, Integer> consumer) {
        for (int i = 0; i < lengthI; i++) {
            for (int j = 0; j < lengthJ; j++) {
                consumer.accept(i, j);
            }
        }
    }
}
|
package manager
import (
"context"
"fmt"
"log"
"net"
"strings"
"time"
rpc "github.com/r2-studio/robotmon-desktop/simple-manager/manager/apprpc"
"github.com/r2-studio/robotmon-desktop/simple-manager/manager/proxy"
"google.golang.org/grpc"
)
// AppService implements the app gRPC service. It drives adb through the
// injected adbClient and tracks the HTTP->gRPC proxies it has spawned.
type AppService struct {
	adbClient *AdbClient
	// proxyTable maps httpBindingAddress -> grpcAddress for proxies
	// started by CreateProxy (the original comment had the direction
	// reversed; see the assignment in CreateProxy).
	proxyTable map[string]string
}
// NewAppService builds an AppService around the given adb client and
// immediately starts its gRPC server and HTTP proxy via Init.
func NewAppService(adbClient *AdbClient) *AppService {
	a := &AppService{
		adbClient:  adbClient,
		proxyTable: make(map[string]string),
	}
	a.Init()
	return a
}
// Init starts the gRPC server on :9487 and an HTTP->gRPC proxy on :9488.
// Exits the process (log.Fatalf) if the gRPC port cannot be bound.
func (a *AppService) Init() {
	// Allow gRPC messages up to 64 MiB in both directions.
	opt1 := grpc.MaxRecvMsgSize(64 * 1024 * 1024)
	opt2 := grpc.MaxSendMsgSize(64 * 1024 * 1024)
	grpcServer := grpc.NewServer(opt1, opt2)
	rpc.RegisterAppServiceServer(grpcServer, a)
	lis, err := net.Listen("tcp", ":9487")
	if err != nil {
		log.Fatalf("failed to listen: %v", err)
	}
	fmt.Println("Listen :9487 for gRPC")
	go grpcServer.Serve(lis)
	// Give the gRPC server a moment to come up before starting the proxy.
	time.Sleep(time.Second)
	fmt.Println("Listen :9488 for http proxy")
	go proxy.RunProxy("0.0.0.0:9488", "localhost:9487")
}
// GetDevices lists attached adb devices, annotating each serial with its
// IP, forwarded port entry (tcp:8081), service pids and launch state.
// Uses the injected a.adbClient — the previous bare `adbClient`
// identifier bypassed the instance field set in NewAppService.
func (a *AppService) GetDevices(context.Context, *rpc.Empty) (*rpc.Devices, error) {
	forwardResult := a.adbClient.ForwardList()
	forwards := strings.Split(forwardResult, "\n")
	serials, err := a.adbClient.GetDevices()
	if err != nil {
		return nil, err
	}
	devices := make([]*rpc.Device, 0, len(serials))
	for _, serial := range serials {
		// Find this serial's forward entry targeting the service port.
		forward := ""
		for _, f := range forwards {
			if strings.Contains(f, serial) && strings.Contains(f, "tcp:8081") {
				forward = f
				break
			}
		}
		// Find the device IP address.
		ip := a.adbClient.GetIPAddress(serial)
		launched := false
		pid1 := ""
		pid2 := ""
		// Two app_process pids indicate the service is fully launched.
		pids, err := a.adbClient.GetPids(serial, "app_process")
		if err == nil {
			if len(pids) > 0 {
				pid1 = pids[0]
			}
			if len(pids) > 1 {
				pid2 = pids[1]
				launched = true
			}
		}
		device := &rpc.Device{
			Serial:          serial,
			ServiceIp:       ip,
			ServicePort:     "8081",
			ServicePid1:     pid1,
			ServicePid2:     pid2,
			ServiceForward:  forward,
			ServiceLaunched: launched,
		}
		devices = append(devices, device)
	}
	result := &rpc.Devices{
		Devices: devices,
	}
	return result, nil
}
// GetStartCommand returns the fragments of the shell command used to
// start the robotmon service on the given device. Uses the injected
// a.adbClient instead of the previous bare package identifier.
func (a *AppService) GetStartCommand(ctx context.Context, req *rpc.DeviceSerial) (*rpc.GetStartCommandResult, error) {
	_, details, err := a.adbClient.GetRobotmonStartCommand(req.Serial)
	if err != nil {
		return nil, err
	}
	// The helper is expected to return exactly five fragments.
	if len(details) != 5 {
		return nil, fmt.Errorf("Get start command failed: %v", details)
	}
	return &rpc.GetStartCommandResult{
		LdPath:      details[0],
		ClassPath:   details[1],
		AppProcess:  details[2],
		BaseCommand: details[3],
		FullCommand: details[4],
	}, nil
}
// AdbRestart calls adb kill-server then adb start-server via the
// injected a.adbClient (the bare identifier bypassed the field).
func (a *AppService) AdbRestart(context.Context, *rpc.Empty) (*rpc.Empty, error) {
	a.adbClient.Restart()
	return &rpc.Empty{}, nil
}
// AdbConnect runs "adb connect <ip>:<port>" and returns adb's output.
func (a *AppService) AdbConnect(ctx context.Context, req *rpc.AdbConnectParams) (*rpc.Message, error) {
	result, err := a.adbClient.Connect(req.Ip, req.Port)
	if err != nil {
		return nil, err
	}
	return &rpc.Message{Message: result}, nil
}
// AdbShell runs an arbitrary "adb shell" command on the given device
// and returns its output.
func (a *AppService) AdbShell(ctx context.Context, req *rpc.AdbShellParams) (*rpc.Message, error) {
	result, err := a.adbClient.Shell(req.Serial, req.Command)
	if err != nil {
		return nil, err
	}
	return &rpc.Message{Message: result}, nil
}
// AdbForward forwards a device port to a PC port ("adb forward") and
// reports success or failure in the returned message.
func (a *AppService) AdbForward(ctx context.Context, req *rpc.AdbForwardParams) (*rpc.Message, error) {
	result, err := a.adbClient.Forward(req.Serial, req.DevicePort, req.PcPort)
	if err != nil {
		return nil, err
	}
	if result {
		return &rpc.Message{Message: fmt.Sprintf("Forward success %s -> %s", req.DevicePort, req.PcPort)}, nil
	}
	return &rpc.Message{Message: "Forward failed"}, nil
}
// AdbForwardList returns the raw output of "adb forward --list".
func (a *AppService) AdbForwardList(context.Context, *rpc.Empty) (*rpc.Message, error) {
	result := a.adbClient.ForwardList()
	return &rpc.Message{Message: result}, nil
}
// AdbTCPIP switches the device's adbd into TCP mode on the given port
// ("adb tcpip <port>").
func (a *AppService) AdbTCPIP(ctx context.Context, req *rpc.AdbTCPIPParams) (*rpc.Message, error) {
	err := a.adbClient.TCPIP(req.Serial, req.Port)
	if err != nil {
		return nil, err
	}
	// NOTE(review): the message says "Forward" although this is tcpip
	// mode; kept verbatim for compatibility — confirm before rewording.
	return &rpc.Message{Message: fmt.Sprintf("Forward success port: %s", req.Port)}, nil
}
// StartService launches the robotmon service on the device and returns
// the resulting app_process pids.
func (a *AppService) StartService(ctx context.Context, req *rpc.DeviceSerial) (*rpc.StartServiceResult, error) {
	pids, err := a.adbClient.StartRobotmonService(req.Serial)
	if pids == nil || err != nil {
		// NOTE(review): when pids == nil and err == nil this returns
		// (nil, nil); callers must tolerate a nil result.
		return nil, err
	}
	var pid1, pid2 string
	if len(pids) > 0 {
		pid1 = pids[0]
	}
	if len(pids) > 1 {
		pid2 = pids[1]
	}
	return &rpc.StartServiceResult{
		Pid1: pid1,
		Pid2: pid2,
	}, nil
}
// StopService stops the robotmon service on the given device.
func (a *AppService) StopService(ctx context.Context, req *rpc.DeviceSerial) (*rpc.Message, error) {
	err := a.adbClient.StopService(req.Serial)
	if err != nil {
		return nil, err
	}
	return &rpc.Message{Message: "StopService success"}, nil
}
// CreateProxy starts an HTTP->gRPC proxy for the requested binding
// unless one is already registered for that HTTP address.
func (a *AppService) CreateProxy(ctx context.Context, req *rpc.CreateGRPCProxy) (*rpc.Message, error) {
	httpAddress := req.HttpAddress
	grpcAddress := req.GrpcAddress
	_, ok := a.proxyTable[httpAddress]
	if ok {
		return &rpc.Message{Message: "Proxy already created"}, nil
	}
	a.proxyTable[httpAddress] = grpcAddress
	go func() {
		// RunProxy blocks until the proxy stops; then free the slot.
		// NOTE(review): proxyTable is mutated here and from RPC
		// goroutines without synchronization — confirm whether a
		// mutex or sync.Map is needed.
		proxy.RunProxy(req.HttpAddress, req.GrpcAddress)
		delete(a.proxyTable, httpAddress)
	}()
	// Give the proxy a moment to start before reporting success.
	time.Sleep(time.Second)
	return &rpc.Message{Message: "Proxy server created"}, nil
}
|
<gh_stars>10-100
/**
 * A point in time paired with an optional time-zone identifier.
 * NOTE(review): the exact semantics of a null/undefined tzid (local time
 * vs UTC) are not visible from this declaration — confirm against the
 * implementation before relying on them.
 */
export declare class DateWithZone {
    date: Date;
    /** Time-zone identifier; may be null or omitted. */
    tzid?: string | null;
    constructor(date: Date, tzid?: string | null);
    private get isUTC();
    toString(): string;
    getTime(): number;
    rezonedDate(): Date;
}
//# sourceMappingURL=datewithzone.d.ts.map |
# Stage, commit and force-push the current branch to Heroku's master.
git add .
git commit -am "Deploy in Heroku"
git push heroku master -f
## When deploying to Heroku, remember to comment out the relevant code in 'serv.connect.js'.
## NOTE: "git push heroku master" may report "Everything up-to-date" while the deployed app is stale.
## see https://stackoverflow.com/questions/21947406/git-push-heroku-master-says-everything-up-to-date-but-the-app-is-not-current
## 1.
## Kindly confirm your current branch is master.
# git branch
##If the pointer is not pointing the master, then check out to master branch
# git checkout master
##Commit your changes and try to push to heroku
## git commit -am "xxxyyzzz"
## git push heroku master
## OR 2.
#git checkout master
#git pull
#sh deploy.sh
## OR 3.
# git push -f origin master
# git push -f heroku master
|
#!/bin/bash -e
################################################################################
##  File:  miniconda.sh
##  Desc:  Installs miniconda
################################################################################

# Install Miniconda into /usr/share/miniconda and clean up the installer.
curl -sL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o miniconda.sh \
    && chmod +x miniconda.sh \
    && ./miniconda.sh -b -p /usr/share/miniconda \
    && rm miniconda.sh

# Expose CONDA system-wide and put the conda binary on the default PATH.
CONDA=/usr/share/miniconda
echo "CONDA=$CONDA" | tee -a /etc/environment

ln -s $CONDA/bin/conda /usr/bin/conda

# Run the image's validation suite for the Conda tool.
invoke_tests "Tools" "Conda"
const express = require("express");
const postRouter = require("./posts/postRouter");
const commentRouter = require("./comments/commentsRouter");
const server = express();

// Parse JSON request bodies before the routers run.
server.use(express.json());
// Both routers are mounted under /api/posts; commentRouter presumably
// defines comment sub-routes of a post — verify against its route paths.
server.use("/api/posts", postRouter);
server.use("/api/posts", commentRouter);

server.listen(8000, () => console.log("API running on port 8000"));
|
import pygame
class Tile(pygame.sprite.Sprite):
    """A solid grey square sprite positioned by its top-left corner."""

    def __init__(self,pos,size):
        """Create a size x size grey surface placed with top-left at pos."""
        super().__init__()
        self.image = pygame.Surface((size,size))
        self.image.fill('grey')
        self.rect = self.image.get_rect(topleft = pos)

    def update(self,x_shift):
        """Scroll the tile horizontally by x_shift pixels (camera shift)."""
        self.rect.x += x_shift
#!/usr/bin/env bash
# Resets the $target checkout from its upstream work tree, then applies
# the $target-Patches/*.patch series on top of branch $branch.
export target="$1"
export branch="$2"
basedir="$(pwd -P)"
# Never GPG-sign the generated commits.
git="git -c commit.gpgsign=false"
apply="$git am --3way --ignore-whitespace"
# Windows shells (cygwin/msys) need an ARG_MAX workaround below.
windows="$([[ "$OSTYPE" == "cygwin" || "$OSTYPE" == "msys" ]] && echo "true" || echo "false")"

mkdir -p "$basedir/$target-Patches"

echo "Resetting $target..."
mkdir -p "$basedir/$target"
cd "$basedir/$target" || exit 1
$git init
# Re-point "upstream" at the pristine work tree and hard-reset to it.
$git remote rm upstream > /dev/null 2>&1
$git remote add upstream "$basedir/work/$target" >/dev/null 2>&1
$git checkout "$branch" 2>/dev/null || $git checkout -b "$branch"
$git fetch upstream >/dev/null 2>&1
$git reset --hard "upstream/$branch"

cd "$basedir/$target" || exit 1
echo "  Applying patches to $target..."
# Abort any half-finished "git am" left over from a previous run.
$git am --abort >/dev/null 2>&1
if [[ $windows == "true" ]]; then
    echo "  Using workaround for Windows ARG_MAX constraint"
    find "$basedir/$target-Patches/"*.patch -print0 | xargs -0 $apply
else
    $apply "$basedir/$target-Patches/"*.patch
fi
if [ "$?" != "0" ]; then
    echo "  Something did not apply cleanly to $target."
    echo "  Please review above details and finish the apply then"
    echo "  save the changes with rebuildPatches.sh"
    if [[ $windows == "true" ]]; then
        echo ""
        echo "  Because you're on Windows you'll need to finish the AM,"
        echo "  rebuild all patches, and then re-run the patch apply again."
        echo "  Consider using the scripts with Windows Subsystem for Linux."
    fi
    exit 1
else
    echo "  Patches applied cleanly to $target"
fi
|
<filename>src/BQ35100.h<gh_stars>0
#ifndef BATTERY_GAUGE_BQ35100_H
#define BATTERY_GAUGE_BQ35100_H

#include <Wire.h>
#include <stdint.h>

#include "BQ35100_registers.h"

/**
 * BQ35100 battery status.
 */
typedef struct {
    // True when the gauge reports a discharge in progress.
    bool isDischargeDetected;
    // True when an alert condition is currently asserted.
    bool isAlertActive;
} battery_status_t;

/**
 * BQ35100 battery alert status enumeration
 */
enum battery_alert_t {
    BQ35100_BATLOW = 0,
    BQ35100_TEMPLOW,
    BQ35100_TEMPHIGH,
    BQ35100_SOH_LOW,
    BQ35100_EOS,
    BQ35100_G_DONE,
    BQ35100_INITCOMP
};

/**
 * BQ35100 class for measuring battery state over I2C.
 */
class BQ35100 {
 public:
    BQ35100();
    ~BQ35100();
    // Initializes the gauge; 0x55 is the chip's default I2C address.
    bool begin(uint8_t address = 0x55);
    uint16_t readVoltage();
    battery_status_t readBatteryStatus();
    int16_t readCurrent();

 private:
    // I2C address the device answers on (set by begin()).
    uint8_t i2cAddress;
};

#endif
#!/usr/bin/env bash
# Builds libvcx test binaries for an Android target and runs them on an
# emulator via adb.
set -ex

REPO_DIR=$PWD
SCRIPT_DIR="$( cd "$(dirname "$0")" ; pwd -P )"
LIBVCX_DIR="${REPO_DIR}/libvcx"
BUILD_TYPE="--release"
export ANDROID_BUILD_FOLDER="/tmp/android_build"
TARGET_ARCH=$1

# A target architecture argument is mandatory.
if [ -z "${TARGET_ARCH}" ]; then
    echo STDERR "Missing TARGET_ARCH argument"
    echo STDERR "expecting one of: x86, x86_64, arm, arm7"
    exit 1
fi

source ${SCRIPT_DIR}/setup.android.env.sh
generate_arch_flags ${TARGET_ARCH}
setup_dependencies_env_vars ${ABSOLUTE_ARCH}

# Locate a prebuilt libindy: env var, conventional directory, or 2nd arg.
if [ -z "${INDY_DIR}" ] ; then
    INDY_DIR="libindy_${TARGET_ARCH}"
    if [ -d "${INDY_DIR}" ] ; then
        echo "Found ${INDY_DIR}"
    elif [ -n "$2" ] ; then
        INDY_DIR=$2
    else
        echo STDERR "Missing INDY_DIR argument and environment variable"
        echo STDERR "e.g. set INDY_DIR=<path> for environment or libindy_${TARGET_ARCH}"
        exit 1
    fi
fi

echo ">> in runner script"

# Paths of the built test executables, filled by build_test_artifacts.
declare -a EXE_ARRAY

# Cross-compiles libvcx and its tests, collecting test binary paths.
build_test_artifacts(){
    pushd ${LIBVCX_DIR}
        cargo clean
        SET_OF_TESTS=''

        RUSTFLAGS="-L${TOOLCHAIN_DIR}/sysroot/usr/${TOOLCHAIN_SYSROOT_LIB} -lc -lz -L${LIBZMQ_LIB_DIR} -L${SODIUM_LIB_DIR} -L${INDY_LIB_DIR} -lsodium -lzmq -lc++_shared -lindy" \
            LIBINDY_DIR=${INDY_LIB_DIR} \
            cargo build ${BUILD_TYPE} --target=${TRIPLET}

        # This is needed to get the correct message if test are not built. Next call will just reuse old results and parse the response.
        RUSTFLAGS="-L${TOOLCHAIN_DIR}/sysroot/usr/${TOOLCHAIN_SYSROOT_LIB} -L${LIBZMQ_LIB_DIR} -L${SODIUM_LIB_DIR} -L${INDY_LIB_DIR} -L${OPENSSL_DIR} -lsodium -lzmq -lc++_shared -lindy" \
            LIBINDY_DIR=${INDY_LIB_DIR} \
            cargo test ${BUILD_TYPE} --target=${TRIPLET} ${SET_OF_TESTS} --no-run

        # Collect items to execute tests, uses resulting files from previous step
        EXE_ARRAY=($(
            RUSTFLAGS="-L${TOOLCHAIN_DIR}/sysroot/usr/${TOOLCHAIN_SYSROOT_LIB} -lc -lz -L${LIBZMQ_LIB_DIR} -L${SODIUM_LIB_DIR} -L${INDY_LIB_DIR} -lsodium -lzmq -lc++_shared -lindy" \
                LIBINDY_DIR=${INDY_LIB_DIR} \
                cargo test ${BUILD_TYPE} --target=${TRIPLET} ${SET_OF_TESTS} --no-run --message-format=json | jq -r "select(.profile.test == true) | .filenames[]"))
    popd
}

# Points cargo at the Android cross toolchain's ar and linker.
create_cargo_config(){
    mkdir -p ${HOME}/.cargo
    cat << EOF > ${HOME}/.cargo/config
[target.${TRIPLET}]
ar = "$(realpath ${AR})"
linker = "$(realpath ${CXX})"
EOF
}

# Pushes shared libraries and each test binary to the emulator, runs the
# binary there and fails unless the success marker is printed.
execute_on_device(){

    set -x

    adb -e push \
    "${TOOLCHAIN_DIR}/sysroot/usr/lib/${TRIPLET}/libc++_shared.so" "/data/local/tmp/libc++_shared.so"

    adb -e push \
    "${SODIUM_LIB_DIR}/libsodium.so" "/data/local/tmp/libsodium.so"

    adb -e push \
    "${LIBZMQ_LIB_DIR}/libzmq.so" "/data/local/tmp/libzmq.so"

    adb -e push \
    "${INDY_LIB_DIR}/libindy.so" "/data/local/tmp/libindy.so"

    adb -e logcat | grep indy &

    for i in "${EXE_ARRAY[@]}"
    do
        :
        EXE="${i}"
        EXE_NAME=`basename ${EXE}`


        adb -e push "$EXE" "/data/local/tmp/$EXE_NAME"
        adb -e shell "chmod 755 /data/local/tmp/$EXE_NAME"
        OUT="$(mktemp)"
        MARK="ADB_SUCCESS!"
        time adb -e shell "TEST_POOL_IP=10.0.0.2 LD_LIBRARY_PATH=/data/local/tmp RUST_TEST_THREADS=1 RUST_BACKTRACE=1 RUST_LOG=debug /data/local/tmp/$EXE_NAME && echo $MARK" 2>&1 | tee $OUT
        grep $MARK $OUT
    done

}

# Main flow: emulator up, toolchain ready, build, run, tear down.
recreate_avd
set_env_vars
create_standalone_toolchain_and_rust_target
create_cargo_config
build_test_artifacts &&
check_if_emulator_is_running &&
execute_on_device
kill_avd
#!/usr/bin/env bash
## Remove "~/.local/share/applications/tv-news.desktop"
rm -f ~/.local/share/applications/tv-news.desktop
|
<filename>src/main/java/dev/vality/sink/common/handle/machineevent/eventpayload/change/PayoutChangeEventHandler.java
package dev.vality.sink.common.handle.machineevent.eventpayload.change;
import dev.vality.damsel.payout_processing.PayoutChange;
import dev.vality.sink.common.handle.machineevent.MachineEventHandler;
/**
 * Marker interface for handlers of {@link PayoutChange} machine events;
 * the handling contract is inherited from {@link MachineEventHandler}.
 */
public interface PayoutChangeEventHandler extends MachineEventHandler<PayoutChange> {
}
|
import {ComponentRef, EmbeddedViewRef} from '@angular/core';
// How long (in milliseconds) player controls stay visible before fading out.
export const CONTROL_FADE_OUT_TIME = 3000;
export class VideoPlayerHelpers {
    /**
     * True when the user agent looks like an iPhone/iPad/iPod and not an
     * IE Mobile browser spoofing one. Uses RegExp.test so a real boolean
     * is returned — the previous match()-based expression could evaluate
     * to null, violating the declared `boolean` return type.
     */
    static isiOSDevice(): boolean {
        const ua = navigator.userAgent;
        return /ip(hone|ad|od)/i.test(ua) && !/(iemobile)[\/\s]?([\w\.]*)/i.test(ua);
    }

    /** Heuristic mobile check: orientation API present, or an IE Mobile UA. */
    static isMobileDevice(): boolean {
        return (typeof window.orientation !== "undefined") || (navigator.userAgent.indexOf("IEMobile") !== -1);
    }

    /**
     * Maps a pointer x-coordinate to a 0..1 ratio across the slider rect,
     * clamping positions outside the rect to the nearest edge.
     */
    static calcSliderRatio(rect: ClientRect, x: number): number {
        let offsetX = 0;
        if (x < rect.left) {
            offsetX = 0;
        } else if (x > rect.right) {
            offsetX = rect.width;
        } else {
            offsetX = x - rect.left;
        }
        return offsetX / rect.width;
    }

    /** Returns the substring after the last "." (the whole URL if none). */
    static getExtname(url: string): string {
        const parts = url.split('.');
        return parts[parts.length - 1];
    }

    /**
     * Formats a duration in seconds as "H:MM:SS", or "MM:SS" when under
     * an hour. Hours are not zero-padded; minutes and seconds are.
     */
    static convertTime(timeInSeconds: number): string {
        const hours = Math.floor(timeInSeconds / 3600);
        const minutes = Math.floor((timeInSeconds - hours * 3600) / 60);
        const seconds = Math.floor(timeInSeconds - hours * 3600 - minutes * 60);
        // padStart(2, '0') reproduces the former manual '0' + n logic.
        const mm = String(minutes).padStart(2, '0');
        const ss = String(seconds).padStart(2, '0');
        return hours > 0 ? `${hours}:${mm}:${ss}` : `${mm}:${ss}`;
    }

    /**
     * Portrait heuristic: viewport width at most 62.5% of its height.
     * Returns false when the viewport has no measurable size.
     */
    static isPortrait(): boolean {
        const viewportWidth = document.documentElement.clientWidth;
        const viewportHeight = document.documentElement.clientHeight;
        if (viewportHeight > 0 && viewportWidth > 0) {
            return viewportWidth <= 0.625 * viewportHeight;
        }
        return false;
    }
}
/** Gets the root HTMLElement for an instantiated component. */
export function getComponentRootNode(componentRef: ComponentRef<any>): HTMLElement {
    // A component's hostView is an EmbeddedViewRef whose first root node
    // is the component's host element.
    return (componentRef.hostView as EmbeddedViewRef<any>).rootNodes[0] as HTMLElement;
}
|
import React from "react"
import StarNoFillSvg from "../../static/icons/star-no-fill.svg"

/** Renders the outlined (no-fill) star icon, forwarding all props to the SVG. */
export default function StarNoFillIcon(props: any) {
    return <StarNoFillSvg {...props} />
}
|
//
// Created by ooooo on 2020/2/5.
//
#ifndef CPP_0121__SOLUTION2_H_
#define CPP_0121__SOLUTION2_H_
#include <iostream>
#include <vector>
using namespace std;
/**
* 动态规划
*
* i: 第几天, k:第几次交易
* dp[i][k][0] = max(dp[i-1][k][1] + p , dp[i-1][k][0])
* dp[i][k][1] = max(dp[i-1][k-1][0] - p , dp[i-1][k][1])
*/
class Solution {
 public:
    /**
     * Maximum profit from at most one buy-then-sell transaction.
     *
     * State: dp[i][k][h] = best cash after day i having used k
     * transactions, holding (h=1) or not holding (h=0) a share:
     *   dp[i][k][0] = max(dp[i-1][k][1] + p, dp[i-1][k][0])
     *   dp[i][k][1] = max(dp[i-1][k-1][0] - p, dp[i-1][k][1])
     * Returns 0 for an empty price list.
     */
    int maxProfit(vector<int> &prices) {
        vector<vector<vector<int>>> dp(prices.size() + 1, vector<vector<int>>(2, vector<int>(2, 0)));
        // Holding a share before any purchase is impossible: -infinity.
        // (Assumes non-negative prices so INT_MIN + p cannot underflow.)
        dp[0][1][1] = INT_MIN;
        for (size_t i = 0; i <= prices.size(); ++i) {
            dp[i][0][1] = INT_MIN;
        }
        for (size_t j = 0; j < prices.size(); ++j) {
            const size_t i = j + 1;
            const int p = prices[j];
            // k is fixed at 1: only a single transaction is allowed.
            dp[i][1][0] = max(dp[i - 1][1][1] + p, dp[i - 1][1][0]);
            dp[i][1][1] = max(dp[i - 1][0][0] - p, dp[i - 1][1][1]);
        }
        // dp[i][1][0] is non-decreasing in i, so the final entry already
        // holds the maximum (the previous `ans` accumulator was dead code).
        return dp[prices.size()][1][0];
    }
};
#endif //CPP_0121__SOLUTION2_H_
|
class OrderHelperService {
    constructor ($httpParamSerializerJQLike, OvhApiMe) {
        this.$httpParamSerializerJQLike = $httpParamSerializerJQLike;
        this.User = OvhApiMe;
    }

    /** Builds the express-order URL and navigates the browser to it. */
    openExpressOrderUrl (config, urlParams = {}) {
        this.getExpressOrderUrl(config, urlParams)
            .then(href => {
                location.href = href;
            });
    }

    /**
     * Serializes the product configuration (plus extra urlParams) into the
     * express-order query string. Plain-object configurations are first
     * converted to the Order label/values array format.
     * Lodash helpers were replaced by their exact native equivalents
     * (Object.assign, Array.isArray, Object.keys).
     */
    getUrlConfigPart (config, urlParams = {}) {
        // Transform configuration and option value if necessary
        const formattedConfig = Object.assign({}, config);
        if (formattedConfig.configuration && !Array.isArray(formattedConfig.configuration)) {
            formattedConfig.configuration = this.transformToOrderValues(formattedConfig.configuration);
        }
        if (formattedConfig.option) {
            formattedConfig.option = formattedConfig.option.map(option => {
                if (option.configuration && !Array.isArray(option.configuration)) {
                    option.configuration = this.transformToOrderValues(option.configuration);
                }
                return option;
            });
        }
        const paramsPart = this.$httpParamSerializerJQLike(Object.assign({}, urlParams, {
            products: JSURL.stringify([formattedConfig])
        }));
        return paramsPart;
    }

    /**
     * Transform an object to an Order compliant array
     * @param  {Object} config plain json
     * @return {Array} an array compatible with Order
     */
    transformToOrderValues (config) {
        const orderConfig = [];
        Object.keys(config).forEach(key => {
            const configParam = {
                label: key
            };
            // Scalars are wrapped; arrays are passed through unchanged.
            configParam.values = Array.isArray(config[key]) ? config[key] : [config[key]];
            orderConfig.push(configParam);
        });
        return orderConfig;
    }

    /** Resolves the full express-order resume URL for the given config. */
    getExpressOrderUrl (config, urlParams = {}) {
        const path = "/order/express/#/new/express/resume";
        const paramsPart = this.getUrlConfigPart(config, urlParams);
        return this.buildUrl(`${path}?${paramsPart}`);
    }

    /**
     * Prefixes `path` with the OVH portal URL matching the current user's
     * subsidiary. Falls back to the FR portal for unknown subsidiaries.
     */
    buildUrl (path) {
        // Maybe this could be put in configuration
        return this.User.Lexi().get()
            .$promise
            .then(user => {
                let targetURL;
                switch (user.ovhSubsidiary) {
                case "FR":
                    targetURL = "https://www.ovh.com/fr";
                    break;
                case "ASIA":
                    targetURL = "https://ca.ovh.com/asia";
                    break;
                case "CA":
                    targetURL = "https://ca.ovh.com/en";
                    break;
                case "SG":
                    targetURL = "https://ca.ovh.com/sg";
                    break;
                case "WS":
                    targetURL = "https://us.ovh.com/es";
                    break;
                case "WE":
                    targetURL = "https://ca.ovh.com/en";
                    break;
                case "QC":
                    targetURL = "https://ca.ovh.com/fr";
                    break;
                default:
                    targetURL = "https://www.ovh.com/fr";
                }
                return `${targetURL}${path}`;
            });
    }
}
// Register the helper as an injectable AngularJS service.
angular.module("managerApp").service("OrderHelperService", OrderHelperService);
|
#!/bin/bash
if [ -n "$1" ]; then # a compressor argument was supplied (snappy|zlib)
    echo "Shutting down MongoDB"
    /home/rodrigo/anaconda3/envs/snpdb/bin/mongo -eval "db.adminCommand({shutdown: 1})" ||
        echo "Warning: MongoDB server not being executed"
    echo "Deleting data directory"
    rm -rf /home/rodrigo/snpdb/mongodb_data/ --verbose
    mkdir /home/rodrigo/snpdb/mongodb_data --verbose
    echo "Starting database and loading schema"
    # Start mongod as a daemon (--fork) with $1 as the WiredTiger block
    # compressor, then load the schema setup script.
    /home/rodrigo/anaconda3/pkgs/numactl-2.0.11-0/bin/numactl --interleave=all /home/rodrigo/anaconda3/envs/snpdb/bin/mongod --dbpath /home/rodrigo/snpdb/mongodb_data/ --wiredTigerCollectionBlockCompressor $1 --fork --logpath /home/rodrigo/snpdb/mongod.log --logappend &&
        /home/rodrigo/anaconda3/envs/snpdb/bin/mongo --eval "load('/home/rodrigo/snpdb/mongo_setup.js')" &&
        exit 0
else
    echo "usage: start_mongo.sh {snappy|zlib}" &&
        exit 1
fi
#!/bin/bash -e
# This script is an experiment to clone litecoin into a
# brand new coin + blockchain.
# The script will perform the following steps:
# 1) create first a docker image with ubuntu ready to build and run the new coin daemon
# 2) clone GenesisH0 and mine the genesis blocks of main, test and regtest networks in the container (this may take a lot of time)
# 3) clone litecoin
# 4) replace variables (keys, merkle tree hashes, timestamps..)
# 5) build new coin
# 6) run 4 docker nodes and connect to each other
#
# By default the script uses the regtest network, which can mine blocks
# instantly. If you wish to switch to the main network, simply change the
# CHAIN variable below
# change the following variables to match your new coin
# --- Coin identity -----------------------------------------------------------
COIN_NAME="Moneta"
COIN_UNIT="MNTA"
# 42 million coins at total (litecoin total supply is 84000000)
# TOTAL_SUPPLY=420000000
# --- Network ports for each chain -------------------------------------------
MAINNET_PORT="47890"
TESTNET_PORT="47891"
REGTEST_PORT="47892"
# PHRASE="The date is 5/21/2021 and Michael Montuori decided to say. It is time for a new coin"
# First letter of the wallet address. Check https://en.bitcoin.it/wiki/Base58Check_encoding
# PUBKEY_CHAR="20"
# number of blocks to wait to be able to spend coinbase UTXO's
# COINBASE_MATURITY=5
# leave CHAIN empty for main network, -regtest for regression network and -testnet for test network
# CHAIN="-regtest"
# CHAIN="-testnet"
CHAIN=""
# this is the amount of coins to get as a reward of mining the block of height 1. if not set this will default to 50
# PREMINED_AMOUNT=1000000
ACCEPT_MINERS=true
# warning: change this to your own pubkey to get the genesis block mining reward
GENESIS_REWARD_PUBKEY=047848280A44401390B68C811E3977E6B17F4BA385AB477917DFF0593C9978DEAA415E6558702F9C5571A88208C0D4D1D13F90542BFE52DE8A90E51CF840984FD0
# dont change the following variables unless you know what you are doing
LITECOIN_BRANCH=0.18
GENESISHZERO_REPOS=https://github.com/mmontuori/GenesisH0.git
LITECOIN_REPOS=https://github.com/mmontuori/moneta.git
# LITECOIN_PUB_KEY=040184710fa689ad5023690c80f3a49c8f13f8d45b8c857fbcbc8bc4a8e4d3eb4b10f4d4604fa08dce601aaf0f470216fe1b51850b4acf21b179c45070ac7b03a9
# LITECOIN_MERKLE_HASH=97ddfbbae6be97fd6cdf3e7ca13232a3afff2353e29badfab7f73011edd4ced9
# LITECOIN_MAIN_GENESIS_HASH=12a765e31ffd4059bada1e25190f6e98c99d9714d334efa41a195a7e7e04bfe2
# LITECOIN_TEST_GENESIS_HASH=4966625a4b2851d9fdee139e56211a0d88575f59ed816ff5e6a63deb4e3e29a0
# LITECOIN_REGTEST_GENESIS_HASH=530827f38f93b43ed12af0b3ad25a288dc02ed74d6d7857862df51fc56c416f9
# MINIMUM_CHAIN_WORK_MAIN=0x0000000000000000000000000000000000000000000000c1bfe2bbe614f41260
# MINIMUM_CHAIN_WORK_TEST=0x000000000000000000000000000000000000000000000000001df7b5aa1700ce
# --- Derived names and docker environment ------------------------------------
COIN_NAME_LOWER=$(echo $COIN_NAME | tr '[:upper:]' '[:lower:]')
COIN_NAME_UPPER=$(echo $COIN_NAME | tr '[:lower:]' '[:upper:]')
COIN_UNIT_LOWER=$(echo $COIN_UNIT | tr '[:upper:]' '[:lower:]')
DIRNAME=$(dirname $0)
DOCKER_NETWORK="172.18.0"
DOCKER_IMAGE_LABEL="mnta-env"
OSVERSION="$(uname -s)"
SEEDNODE_DNS_SERVER="thehost"
SEEDNODE_HOST="thehost"
# Builds the Ubuntu 18.04 build/runtime docker image (label
# $DOCKER_IMAGE_LABEL) unless it already exists, generating the
# Dockerfile on first use.
docker_build_image_new()
{
    IMAGE=$(docker images -q $DOCKER_IMAGE_LABEL)
    if [ -z $IMAGE ]; then
        echo Building docker image
        if [ ! -f $DOCKER_IMAGE_LABEL/Dockerfile ]; then
            mkdir -p $DOCKER_IMAGE_LABEL
            # Note: the heredoc below is intentionally unindented so the
            # Dockerfile contents are written verbatim.
            cat <<EOF > $DOCKER_IMAGE_LABEL/Dockerfile
FROM ubuntu:18.04
#ENV DEBIAN_FRONTEND noninteractive
#ENV DEBCONF_NONINTERACTIVE_SEEN true
#RUN truncate -s0 /tmp/preseed.cfg
#RUN echo "tzdata tzdata/Areas select America" >> /tmp/preseed.cfg
#RUN echo "tzdata tzdata/Zones/America select New_York" >> /tmp/preseed.cfg
#RUN debconf-set-selections /tmp/preseed.cfg
#RUN rm -f /etc/timezone /etc/localtime
RUN apt update
#RUN apt install -y tzdata
#RUN apt install -y libterm-readline-gnu-perl
#RUN apt install -y apt-utils
#RUN apt install -y gnupg
RUN apt -y upgrade
RUN apt install -y build-essential libboost-all-dev libssl-dev libtool autotools-dev automake pkg-config bsdmainutils python3 software-properties-common
RUN echo deb http://ppa.launchpad.net/bitcoin/bitcoin/ubuntu xenial main >> /etc/apt/sources.list
RUN sleep 10;apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv D46F45428842CE5E
RUN apt update
RUN apt-get install -y ccache git libboost-system1.62.0 libboost-filesystem1.62.0 libboost-program-options1.62.0 libboost-thread1.62.0 libboost-chrono1.62.0 libssl1.1 libevent-pthreads-2.1.6 libevent-2.1.6 build-essential libssl-dev libevent-dev libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-program-options-dev libboost-test-dev libboost-thread-dev libdb4.8-dev libdb4.8++-dev libminiupnpc-dev libzmq3-dev
RUN apt-get install -y libqt5core5a libqt5dbus5 qttools5-dev qttools5-dev-tools libprotobuf-dev protobuf-compiler libqrencode-dev libqt5gui5
RUN apt-get install -y python-pip iputils-ping net-tools libboost-all-dev curl
RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
#RUN curl https://bootstrap.pypa.io/pip/2.7/get-pip.py --output get-pip.py
#RUN python2 get-pip.py
EOF
        fi
        docker build --label $DOCKER_IMAGE_LABEL --tag $DOCKER_IMAGE_LABEL $DIRNAME/$DOCKER_IMAGE_LABEL/
    else
        echo Docker image already built
    fi
}
# Legacy Ubuntu 16.04 variant of the build image (kept for reference; the
# 'prepare' command uses docker_build_image_new instead).
docker_build_image()
{
IMAGE=$(docker images -q $DOCKER_IMAGE_LABEL)
# quote the expansion so multiple matching image ids don't break 'test -z'
if [ -z "$IMAGE" ]; then
echo Building docker image
if [ ! -f $DOCKER_IMAGE_LABEL/Dockerfile ]; then
mkdir -p $DOCKER_IMAGE_LABEL
cat <<EOF > $DOCKER_IMAGE_LABEL/Dockerfile
FROM ubuntu:16.04
RUN echo deb http://ppa.launchpad.net/bitcoin/bitcoin/ubuntu xenial main >> /etc/apt/sources.list
#RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv D46F45428842CE5E
RUN apt-get update
RUN apt-get -y --allow-unauthenticated install ccache git libboost-system1.58.0 libboost-filesystem1.58.0 libboost-program-options1.58.0 libboost-thread1.58.0 libboost-chrono1.58.0 libssl1.0.0 libevent-pthreads-2.0-5 libevent-2.0-5 build-essential libtool autotools-dev automake pkg-config libssl-dev libevent-dev bsdmainutils libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-program-options-dev libboost-test-dev libboost-thread-dev libdb4.8-dev libdb4.8++-dev libminiupnpc-dev libzmq3-dev libqt5gui5 libqt5core5a libqt5dbus5 qttools5-dev qttools5-dev-tools libprotobuf-dev protobuf-compiler libqrencode-dev python-pip iputils-ping net-tools libboost-all-dev
RUN pip install construct==2.5.2 scrypt
EOF
fi
docker build --label $DOCKER_IMAGE_LABEL --tag $DOCKER_IMAGE_LABEL $DIRNAME/$DOCKER_IMAGE_LABEL/
else
echo Docker image already built
fi
}
# Run a one-off command in the build container with only the GenesisH0
# checkout mounted (used for genesis-block mining).
docker_run_genesis()
{
mkdir -p $DIRNAME/.ccache
docker run -v $DIRNAME/GenesisH0:/GenesisH0 $DOCKER_IMAGE_LABEL /bin/bash -c "$1"
}
# Run a one-off command in the build container with the GenesisH0 checkout,
# the shared ccache directory and the coin source tree mounted.
docker_run()
{
mkdir -p $DIRNAME/.ccache
docker run \
-v $DIRNAME/GenesisH0:/GenesisH0 \
-v $DIRNAME/.ccache:/root/.ccache \
-v $DIRNAME/$COIN_NAME_LOWER:/$COIN_NAME_LOWER $DOCKER_IMAGE_LABEL \
/bin/bash -c "$1"
}
# Ask each node to shut down cleanly via the coin's RPC client, then
# force-stop any containers still running and prune docker leftovers.
docker_stop_nodes()
{
echo "Stopping all docker nodes"
for id in $(docker ps -q -a -f ancestor=$DOCKER_IMAGE_LABEL); do
# use the generated coin's cli; the previous hard-coded
# /moneta/src/moneta-cli only worked when COIN_NAME was "moneta"
docker exec -ti $id /$COIN_NAME_LOWER/src/${COIN_NAME_LOWER}-cli $CHAIN stop
done
for id in $(docker ps -q -a -f ancestor=$DOCKER_IMAGE_LABEL); do
docker stop $id
done
echo "y" | docker system prune >/dev/null 2>&1
}
# Delete every container created from the build image. Containers should
# already be stopped (see docker_stop_nodes).
docker_remove_nodes()
{
echo "Removing all docker nodes"
node_ids=$(docker ps -q -a -f ancestor=$DOCKER_IMAGE_LABEL)
for node_id in $node_ids; do
docker rm $node_id
done
}
# Create the private bridge network the nodes communicate on, unless it
# already exists (inspect succeeding means it is present).
docker_create_network()
{
echo "Creating docker network"
if ! docker network inspect newcoin &>/dev/null; then
docker network create --subnet=$DOCKER_NETWORK.0/16 newcoin
fi
}
# Tear down the private bridge network (fails harmlessly if absent).
docker_remove_network()
{
echo "Removing docker network"
docker network rm newcoin
}
# Start node number $1 ($2 is the command to run inside the container).
# Each node gets its own data directory (miner<N>/) with a generated
# rpc user/password, and the fixed IP $DOCKER_NETWORK.<N> on the network.
# Node 2 additionally publishes its P2P port when ACCEPT_MINERS=true.
docker_run_node()
{
local NODE_NUMBER=$1
local NODE_COMMAND=$2
mkdir -p $DIRNAME/miner${NODE_NUMBER}
# write the node config only once so the rpc password stays stable
if [ ! -f $DIRNAME/miner${NODE_NUMBER}/$COIN_NAME_LOWER.conf ]; then
cat <<EOF > $DIRNAME/miner${NODE_NUMBER}/$COIN_NAME_LOWER.conf
rpcuser=${COIN_NAME_LOWER}rpc
rpcpassword=$(cat /dev/urandom | env LC_CTYPE=C tr -dc a-zA-Z0-9 | head -c 32; echo)
EOF
fi
port_cmd=""
# expose the P2P port of the designated miner node on the host
if [ "$NODE_NUMBER" == "2" ] && [ "$ACCEPT_MINERS" == "true" ]; then
if [ "$CHAIN" == "" ]; then
port_cmd="--expose $MAINNET_PORT --publish $MAINNET_PORT:$MAINNET_PORT"
elif [ "$CHAIN" == "-testnet" ]; then
port_cmd="--expose $TESTNET_PORT --publish $TESTNET_PORT:$TESTNET_PORT"
elif [ "$CHAIN" == "-regtest" ]; then
port_cmd="--expose $REGTEST_PORT --publish $REGTEST_PORT:$REGTEST_PORT"
else
echo "ERROR: CHAIN $CHAIN is not supported!"
exit 1
fi
fi
docker run \
--net newcoin \
$port_cmd \
--ip $DOCKER_NETWORK.${NODE_NUMBER} \
-v $DIRNAME/miner${NODE_NUMBER}:/root/.$COIN_NAME_LOWER \
-v $DIRNAME/$COIN_NAME_LOWER:/$COIN_NAME_LOWER $DOCKER_IMAGE_LABEL \
/bin/bash -c "$NODE_COMMAND"
}
# Mine the genesis blocks for main, test and regtest networks with GenesisH0.
# Results are cached in GenesisH0/${COIN_NAME}-<net>.txt so reruns are cheap.
generate_genesis_block()
{
# clone (or refresh) the GenesisH0 miner; both branches leave us inside it
if [ ! -d GenesisH0 ]; then
git clone $GENESISHZERO_REPOS
pushd GenesisH0
else
pushd GenesisH0
git pull
fi
if [ ! -f ${COIN_NAME}-main.txt ]; then
echo "Mining genesis block... this procedure can take many hours of cpu work.."
echo "python /GenesisH0/genesis.py -a scrypt -z \"$PHRASE\" -p $GENESIS_REWARD_PUBKEY 2>&1 | tee /GenesisH0/${COIN_NAME}-main.txt"
docker_run_genesis "python /GenesisH0/genesis.py -a scrypt -z \"$PHRASE\" -p $GENESIS_REWARD_PUBKEY 2>&1 | tee /GenesisH0/${COIN_NAME}-main.txt"
else
echo "Genesis block already mined.."
cat ${COIN_NAME}-main.txt
fi
# testnet uses a fixed timestamp
if [ ! -f ${COIN_NAME}-test.txt ]; then
echo "Mining genesis block of test network... this procedure can take many hours of cpu work.."
docker_run_genesis "python /GenesisH0/genesis.py -t 1486949366 -a scrypt -z \"$PHRASE\" -p $GENESIS_REWARD_PUBKEY 2>&1 | tee /GenesisH0/${COIN_NAME}-test.txt"
else
echo "Genesis block already mined.."
cat ${COIN_NAME}-test.txt
fi
# regtest uses minimal difficulty (-b 0x207fffff) and nonce 0, so it is fast
if [ ! -f ${COIN_NAME}-regtest.txt ]; then
echo "Mining genesis block of regtest network... this procedure can take many hours of cpu work.."
docker_run_genesis "python /GenesisH0/genesis.py -t 1296688602 -b 0x207fffff -n 0 -a scrypt -z \"$PHRASE\" -p $GENESIS_REWARD_PUBKEY 2>&1 | tee /GenesisH0/${COIN_NAME}-regtest.txt"
else
echo "Genesis block already mined.."
cat ${COIN_NAME}-regtest.txt
fi
popd
}
# Parse the GenesisH0 output files and export the values needed to patch
# chainparams.cpp (pubkey, merkle root, timestamp, bits, per-network nonces
# and genesis hashes) into shell variables.
retrieve_hashes()
{
pushd GenesisH0
MAIN_PUB_KEY=$(cat ${COIN_NAME}-main.txt | grep "^pubkey:" | $SED 's/^pubkey: //')
MERKLE_HASH=$(cat ${COIN_NAME}-main.txt | grep "^merkle hash:" | $SED 's/^merkle hash: //')
TIMESTAMP=$(cat ${COIN_NAME}-main.txt | grep "^time:" | $SED 's/^time: //')
BITS=$(cat ${COIN_NAME}-main.txt | grep "^bits:" | $SED 's/^bits: //')
MAIN_NONCE=$(cat ${COIN_NAME}-main.txt | grep "^nonce:" | $SED 's/^nonce: //')
TEST_NONCE=$(cat ${COIN_NAME}-test.txt | grep "^nonce:" | $SED 's/^nonce: //')
REGTEST_NONCE=$(cat ${COIN_NAME}-regtest.txt | grep "^nonce:" | $SED 's/^nonce: //')
MAIN_GENESIS_HASH=$(cat ${COIN_NAME}-main.txt | grep "^genesis hash:" | $SED 's/^genesis hash: //')
TEST_GENESIS_HASH=$(cat ${COIN_NAME}-test.txt | grep "^genesis hash:" | $SED 's/^genesis hash: //')
REGTEST_GENESIS_HASH=$(cat ${COIN_NAME}-regtest.txt | grep "^genesis hash:" | $SED 's/^genesis hash: //')
popd
}
# Clone the coin template repository into $COIN_NAME_LOWER.
# NOTE(review): all of the rename/sed substitutions below are currently
# commented out, so the clone is used completely unmodified -- presumably the
# moneta repo already carries the desired values; confirm before re-enabling.
newcoin_replace_vars()
{
if [ -d $COIN_NAME_LOWER ]; then
echo "Warning: $COIN_NAME_LOWER already existing. Not replacing any values"
return 0
fi
if [ ! -d "moneta-master" ]; then
# clone litecoin and keep local cache
git clone -b $LITECOIN_BRANCH $LITECOIN_REPOS moneta-master
else
echo "Updating master branch"
pushd moneta-master
git pull
popd
fi
git clone -b $LITECOIN_BRANCH moneta-master $COIN_NAME_LOWER
pushd $COIN_NAME_LOWER
# first rename all directories
# for i in $(find . -type d | grep -v "^./.git" | grep litecoin); do
# git mv $i $(echo $i| $SED "s/litecoin/$COIN_NAME_LOWER/")
# done
# then rename all files
# for i in $(find . -type f | grep -v "^./.git" | grep litecoin); do
# git mv $i $(echo $i| $SED "s/litecoin/$COIN_NAME_LOWER/")
# done
# now replace all litecoin references to the new coin name
# for i in $(find . -type f | grep -v "^./.git"); do
# $SED -i "s/Litecoin/$COIN_NAME/g" $i
# $SED -i "s/litecoin/$COIN_NAME_LOWER/g" $i
# $SED -i "s/LITECOIN/$COIN_NAME_UPPER/g" $i
# $SED -i "s/LTC/$COIN_UNIT/g" $i
# done
# $SED -i "s/ltc/$COIN_UNIT_LOWER/g" src/chainparams.cpp
# $SED -i "s/84000000/$TOTAL_SUPPLY/" src/amount.h
# $SED -i "s/1,48/1,$PUBKEY_CHAR/" src/chainparams.cpp
# $SED -i "s/1317972665/$TIMESTAMP/" src/chainparams.cpp
# $SED -i "s;NY Times 05/Oct/2011 Steve Jobs, Apple’s Visionary, Dies at 56;$PHRASE;" src/chainparams.cpp
# $SED -i "s/= 9333;/= $MAINNET_PORT;/" src/chainparams.cpp
# $SED -i "s/= 19335;/= $TESTNET_PORT;/" src/chainparams.cpp
# $SED -i "s/$LITECOIN_PUB_KEY/$MAIN_PUB_KEY/" src/chainparams.cpp
# $SED -i "s/$LITECOIN_MERKLE_HASH/$MERKLE_HASH/" src/chainparams.cpp
# $SED -i "s/$LITECOIN_MERKLE_HASH/$MERKLE_HASH/" src/qt/test/rpcnestedtests.cpp
# $SED -i "0,/$LITECOIN_MAIN_GENESIS_HASH/s//$MAIN_GENESIS_HASH/" src/chainparams.cpp
# $SED -i "0,/$LITECOIN_TEST_GENESIS_HASH/s//$TEST_GENESIS_HASH/" src/chainparams.cpp
# $SED -i "0,/$LITECOIN_REGTEST_GENESIS_HASH/s//$REGTEST_GENESIS_HASH/" src/chainparams.cpp
# $SED -i "0,/2084524493/s//$MAIN_NONCE/" src/chainparams.cpp
# $SED -i "0,/293345/s//$TEST_NONCE/" src/chainparams.cpp
# $SED -i "0,/1296688602, 0/s//1296688602, $REGTEST_NONCE/" src/chainparams.cpp
# $SED -i "0,/0x1e0ffff0/s//$BITS/" src/chainparams.cpp
# $SED -i "s,vSeeds.emplace_back,//vSeeds.emplace_back,g" src/chainparams.cpp
#if [ -n "$PREMINED_AMOUNT" ]; then
# $SED -i "s/CAmount nSubsidy = QUARK_REWARD \* COIN;/if \(nHeight == 1\) return COIN \* $PREMINED_AMOUNT;\n CAmount nSubsidy = QUARK_REWARD \* COIN;/" src/validation.cpp
#fi
# $SED -i "s/COINBASE_MATURITY = 100/COINBASE_MATURITY = $COINBASE_MATURITY/" src/consensus/consensus.h
# reset minimum chain work to 0
# $SED -i "s/$MINIMUM_CHAIN_WORK_MAIN/0x00/" src/chainparams.cpp
# $SED -i "s/$MINIMUM_CHAIN_WORK_TEST/0x00/" src/chainparams.cpp
# change bip activation heights
# bip 16
# $SED -i "s/218579/0/" src/chainparams.cpp
# bip 34
# $SED -i "s/710000/0/" src/chainparams.cpp
# $SED -i "s/fa09d204a83a768ed5a7c8d441fa62f2043abf420cff1226c7b4329aeb9d51cf/$MAIN_GENESIS_HASH/" src/chainparams.cpp
# bip 65
# $SED -i "s/918684/0/" src/chainparams.cpp
# bip 66
# $SED -i "s/811879/0/" src/chainparams.cpp
# testdummy
# $SED -i "s/1199145601/Consensus::BIP9Deployment::ALWAYS_ACTIVE/g" src/chainparams.cpp
# $SED -i "s/1230767999/Consensus::BIP9Deployment::NO_TIMEOUT/g" src/chainparams.cpp
# $SED -i "s/1199145601/Consensus::BIP9Deployment::ALWAYS_ACTIVE/g" src/chainparams.cpp
# $SED -i "s/1230767999/Consensus::BIP9Deployment::NO_TIMEOUT/g" src/chainparams.cpp
# csv
# $SED -i "s/1485561600/Consensus::BIP9Deployment::ALWAYS_ACTIVE/g" src/chainparams.cpp
# $SED -i "s/1517356801/Consensus::BIP9Deployment::NO_TIMEOUT/g" src/chainparams.cpp
# $SED -i "s/1483228800/Consensus::BIP9Deployment::ALWAYS_ACTIVE/g" src/chainparams.cpp
# $SED -i "s/1517356801/Consensus::BIP9Deployment::NO_TIMEOUT/g" src/chainparams.cpp
# segwit
# $SED -i "s/1485561600/Consensus::BIP9Deployment::ALWAYS_ACTIVE/g" src/chainparams.cpp
# timeout of segwit is the same as csv
# defaultAssumeValid
# $SED -i "s/0x66f49ad85624c33e4fd61aa45c54012509ed4a53308908dd07f56346c7939273/0x$MAIN_GENESIS_HASH/" src/chainparams.cpp
# $SED -i "s/0x1efb29c8187d5a496a33377941d1df415169c3ce5d8c05d055f25b683ec3f9a3/0x$TEST_GENESIS_HASH/" src/chainparams.cpp
# TODO: fix checkpoints
popd
}
# Configure (first time only) and compile the coin inside the build container.
build_new_coin()
{
# only run autogen.sh/configure if not done previously
if [ ! -e $COIN_NAME_LOWER/Makefile ]; then
docker_run "cd /$COIN_NAME_LOWER ; bash /$COIN_NAME_LOWER/autogen.sh"
docker_run "cd /$COIN_NAME_LOWER ; bash /$COIN_NAME_LOWER/configure --disable-tests --disable-bench"
fi
# always build as the user could have manually changed some files
docker_run "cd /$COIN_NAME_LOWER ; make -j2"
}
# Resolve a relative invocation ("./script") to an absolute working directory
# and run from there (quoted so paths with spaces survive).
if [ "$DIRNAME" = "." ]; then
DIRNAME=$PWD
fi
cd "$DIRNAME"
# sanity check: the replacement logic requires GNU sed
case $OSVERSION in
Linux*)
SED=sed
;;
Darwin*)
# macOS ships BSD sed; insist on GNU sed (gsed) from homebrew
if ! which gsed >/dev/null 2>&1; then
echo "please install gnu-sed with 'brew install gnu-sed'"
exit 1
fi
SED=gsed
;;
*)
echo "This script only works on Linux and MacOS"
exit 1
;;
esac
if ! which docker &>/dev/null; then
echo Please install docker first
exit 1
fi
if ! which git &>/dev/null; then
echo Please install git first
exit 1
fi
# Sub-command dispatcher; see the usage text in the default branch.
case $1 in
stop)
docker_stop_nodes
;;
remove_nodes)
docker_stop_nodes
docker_remove_nodes
;;
clean_up)
docker_stop_nodes
# wipe root-owned build artifacts from inside the containers first
for i in $(seq 2 5); do
docker_run_node $i "rm -rf /$COIN_NAME_LOWER /root/.$COIN_NAME_LOWER" &>/dev/null
done
docker_remove_nodes
docker_remove_network
rm -rf $COIN_NAME_LOWER
if [ "$2" != "keep_genesis_block" ]; then
rm -f GenesisH0/${COIN_NAME}-*.txt
fi
for i in $(seq 2 5); do
rm -rf miner$i
done
;;
prepare)
docker_build_image_new
generate_genesis_block
retrieve_hashes
newcoin_replace_vars
build_new_coin
;;
start_miner)
if [ -n "$(docker ps -q -f ancestor=$DOCKER_IMAGE_LABEL)" ]; then
echo "There are nodes running. Please stop them first with: $0 stop"
exit 1
fi
docker_run_node 2 "cd /$COIN_NAME_LOWER ; ./src/${COIN_NAME_LOWER}d $CHAIN" &
;;
start)
if [ -n "$(docker ps -q -f ancestor=$DOCKER_IMAGE_LABEL)" ]; then
echo "There are nodes running. Please stop them first with: $0 stop"
exit 1
fi
docker_create_network
docker_run_node 2 "cd /$COIN_NAME_LOWER ; ./src/${COIN_NAME_LOWER}d $CHAIN -listen -noconnect -bind=$DOCKER_NETWORK.2 -addnode=$DOCKER_NETWORK.1 -addnode=$DOCKER_NETWORK.3 -addnode=$DOCKER_NETWORK.4 -addnode=$DOCKER_NETWORK.5" &
docker_run_node 3 "cd /$COIN_NAME_LOWER ; ./src/${COIN_NAME_LOWER}d $CHAIN -listen -noconnect -bind=$DOCKER_NETWORK.3 -addnode=$DOCKER_NETWORK.1 -addnode=$DOCKER_NETWORK.2 -addnode=$DOCKER_NETWORK.4 -addnode=$DOCKER_NETWORK.5" &
docker_run_node 4 "cd /$COIN_NAME_LOWER ; ./src/${COIN_NAME_LOWER}d $CHAIN -listen -noconnect -bind=$DOCKER_NETWORK.4 -addnode=$DOCKER_NETWORK.1 -addnode=$DOCKER_NETWORK.2 -addnode=$DOCKER_NETWORK.3 -addnode=$DOCKER_NETWORK.5" &
docker_run_node 5 "cd /$COIN_NAME_LOWER ; ./src/${COIN_NAME_LOWER}d $CHAIN -listen -noconnect -bind=$DOCKER_NETWORK.5 -addnode=$DOCKER_NETWORK.1 -addnode=$DOCKER_NETWORK.2 -addnode=$DOCKER_NETWORK.3 -addnode=$DOCKER_NETWORK.4" &
echo "Docker containers should be up and running now. You may run the following command to check the network status:
#for i in \$(docker ps -q); do docker exec \$i /$COIN_NAME_LOWER/src/${COIN_NAME_LOWER}-cli $CHAIN getblockchaininfo; done"
echo "To ask the nodes to mine some blocks simply run:
#for i in \$(docker ps -q); do docker exec \$i /$COIN_NAME_LOWER/src/${COIN_NAME_LOWER}-cli $CHAIN generate 2 & done"
# successful start: exit 0 (the previous 'exit 1' made callers and CI
# believe the start had failed)
exit 0
;;
*)
cat <<EOF
Usage: $0 (prepare|start|start_miner|stop|remove_nodes|clean_up)
- prepare: bootstrap environment and build
- start: run your new coin
- start_miner: start a single instance miner
- stop: simply stop the containers without removing them
- remove_nodes: remove the old docker container images. This will stop them first if necessary.
- clean_up: WARNING: this will stop and remove docker containers and network, source code, genesis block information and nodes data directory. (to start from scratch)
EOF
;;
esac
|
#!/usr/bin/env bash
# Unit-test driver: lint and syntax-check the Ansible playbook, bring the
# Vagrant environment up, verify idempotence, then clean up. All output is
# appended to a timestamped log file under logs/.
set -e
# set -x
red="\033[0;31m"
reset="\033[0m"
timestamp=$(date +"%Y-%m-%d_%H-%M-%S")
UNIT_TEST_LOGFILE="logs/unit-test.log.$timestamp"
# Check provisioning settings and fail if not set to provision
for i in $(grep 'provision: ' nodes.yml | awk '{print $2}'); do
if ! $i; then
printf "You must set ${red}'provision: true' ${reset}in nodes.yml\n"
printf "in order to do execute a proper unit test.\n"
# non-zero status so a CI caller notices the misconfiguration
exit 1
fi
done
# Create the log directory up front. Previously 'mkdir logs' was piped into a
# tee writing INTO logs/, racing the directory's own creation.
mkdir -p logs
# Check ansible version
ansible --version 2>&1 | tee -a $UNIT_TEST_LOGFILE
ansible-lint playbook.yml 2>&1 | tee -a $UNIT_TEST_LOGFILE
# Basic Ansible syntax check
ansible-playbook playbook.yml --syntax-check 2>&1 | tee -a $UNIT_TEST_LOGFILE
# Spin up environment
vagrant up 2>&1 | tee -a $UNIT_TEST_LOGFILE
# Execute Ansible playbook again and check for idempotence
ansible-playbook -i hosts playbook.yml \
| (grep -q 'changed=0.*failed=0' && (echo 'Idempotence test: pass' && exit 0) \
|| (echo 'Idempotence test: fail' && exit 1)) 2>&1 | tee -a $UNIT_TEST_LOGFILE
# Clean up Vagrant environment
./scripts/cleanup.sh 2>&1 | tee -a $UNIT_TEST_LOGFILE
|
def get_index_value(list_a, index):
    """Return the element stored at position ``index`` in ``list_a``.

    Negative indices count from the end, exactly as with plain list
    subscripting; an out-of-range index raises ``IndexError``.
    """
    return list_a[index]
# Demo: fetch the element at 0-based position 3 from a sample list.
list_a = [1, 2, 3, 4, 5]
index = 3
value = get_index_value(list_a, index)
print(value) # 4
<filename>lc0403_frog_jump.py
"""Leetcode 403. Frog Jump
Hard
URL: https://leetcode.com/problems/frog-jump/
A frog is crossing a river. The river is divided into x units and at each unit
there may or may not exist a stone. The frog can jump on a stone, but it must
not jump into the water.
Given a list of stones' positions (in units) in sorted ascending order,
determine if the frog is able to cross the river by landing on the last stone.
Initially, the frog is on the first stone and assume the first jump must be 1 unit.
If the frog's last jump was k units, then its next jump must be either
k - 1, k, or k + 1 units. Note that the frog can only jump in the forward direction.
Note:
- The number of stones is >= 2 and is < 1,100.
- Each stone's position will be a non-negative integer < 231.
- The first stone's position is always 0.
Example 1:
[0,1,3,5,6,8,12,17]
There are a total of 8 stones.
The first stone at the 0th unit, second stone at the 1st unit,
third stone at the 3rd unit, and so on...
The last stone at the 17th unit.
Return true. The frog can jump to the last stone by jumping
1 unit to the 2nd stone, then 2 units to the 3rd stone, then
2 units to the 4th stone, then 3 units to the 6th stone,
4 units to the 7th stone, and 5 units to the 8th stone.
Example 2:
[0,1,2,3,4,8,9,11]
Return false. There is no way to jump to the last stone as
the gap between the 5th and 6th stone is too large.
"""
class SolutionStoneJumpDictDP(object):
    def canCross(self, stones):
        """Return True if the frog can reach the last stone.

        :type stones: List[int]
        :rtype: bool

        DP over a dict mapping stone position -> set of jump sizes that can
        land on that stone, propagating k-1/k/k+1 jumps forward.

        Time complexity: O(n^2).
        Space complexity: O(n^2).
        """
        # Edge cases.
        # The first jump must be exactly 1 unit, so stone 1 must be at position 1.
        if stones[1] != 1:
            return False
        # Prune: jump sizes grow by at most 1 per hop, so a stone more than
        # twice as far as its predecessor can never be reached.
        for i in range(3, len(stones)):
            if stones[i] > stones[i - 1] * 2:
                return False
        # Apply DP with dict: stone->set(steps), with 1st jump in 1 unit.
        stone_jumps_d = {stone: set() for stone in stones}
        stone_jumps_d[1].add(1)
        for i, stone in enumerate(stones):
            # i is up to n - 2 since it is the last jump start.
            if i == len(stones) - 1:
                break
            for jump in stone_jumps_d[stone]:
                for next_jump in [jump - 1, jump, jump + 1]:
                    # Check if next jump is on a stone. Only sets of stones
                    # strictly ahead of 'stone' are mutated, so the set being
                    # iterated here is never modified during iteration.
                    if next_jump > 0 and stone + next_jump in stone_jumps_d:
                        stone_jumps_d[stone + next_jump].add(next_jump)
        # Return True if there are jumps on the last stone.
        return bool(stone_jumps_d[stones[-1]])
class SolutionPositionJumpStacksDP(object):
    def canCross(self, stones):
        """Return True if the frog can reach the last stone.

        :type stones: List[int]
        :rtype: bool

        Depth-first search over (position, last-jump-size) states kept on an
        explicit stack, with a set for O(1) stone lookups.

        Time complexity: O(n^2).
        Space complexity: O(n^2).
        """
        # The first jump is forced to be 1 unit.
        if stones[1] != 1:
            return False
        # Prune: a stone more than twice as far as its predecessor is
        # unreachable because jump sizes grow by at most 1 per hop.
        for i in range(3, len(stones)):
            if stones[i] > stones[i - 1] * 2:
                return False
        stone_positions = set(stones)
        last_stone = stones[-1]
        # Each pending state is (position, size of the jump that landed here);
        # the frog starts at 0 with a virtual previous jump of 0.
        pending = [(0, 0)]
        while pending:
            position, jump = pending.pop()
            for step in (jump - 1, jump, jump + 1):
                # Only forward jumps are allowed.
                if step <= 0:
                    continue
                target = position + step
                if target == last_stone:
                    return True
                elif target in stone_positions:
                    pending.append((target, step))
        return False
def main():
    """Exercise both solutions on the examples from the problem statement.

    Converted the Python-2-only ``print`` statements to ``print()`` calls so
    the module runs under Python 3 (and, with a single argument, still works
    under Python 2).
    """
    # Output: True
    stones = [0, 1, 3, 5, 6, 8, 12, 17]
    print(SolutionStoneJumpDictDP().canCross(stones))
    print(SolutionPositionJumpStacksDP().canCross(stones))

    # Output: False
    stones = [0, 1, 2, 3, 4, 8, 9, 11]
    print(SolutionStoneJumpDictDP().canCross(stones))
    print(SolutionPositionJumpStacksDP().canCross(stones))

    # Output: True
    stones = [0, 1]
    print(SolutionStoneJumpDictDP().canCross(stones))
    print(SolutionPositionJumpStacksDP().canCross(stones))


if __name__ == '__main__':
    main()
|
#!/bin/bash
# Open the given files in a running Emacs server; if no server is available,
# fall back to launching a plain "emacs" instead.
exec emacsclient --alternate-editor="emacs" "$@"
#!/bin/bash
### Merge the stdout et stderr in a single file
#$ -j y
### set array job indices 'min-max:interval'
#$ -t 1-59:1
#$ -l ct=1:00:00
#$ -l vmem=4G
#$ -l fsize=20G
#$ -l irods=1
#$ -l sps=1
#$ -o /sps/km3net/users/tgal/analysis/k40calib/logs
#$ -e /sps/km3net/users/tgal/analysis/k40calib/logs
#$ -P P_km3net
### set the name
#$ -N k40calib
#set -e
# K40 calibration batch job: each SGE array task processes a slice of 100 runs
# of detector $DET_ID and copies one CSV of calibration results per run to
# $CSV_PATH.
JPP_PATH="/sps/km3net/users/tgal/apps/jpp/trunk"
CSV_PATH="/sps/km3net/users/tgal/analysis/k40calib"
DET_ID=14
echo "Task id = $SGE_TASK_ID"
# set up the km3pipe python environment and the Jpp tools
source $KM3NET_THRONG_DIR/src/python/pyenv.sh
source ${JPP_PATH}/setenv.sh ${JPP_PATH}
# source ROOT from its install dir, then return to the original cwd
PWD_=$(pwd)
cd /usr/local/root/v5.34.23
source bin/thisroot.sh
cd "$PWD_"
# fetch the detector geometry once per task
km3pipe detx ${DET_ID} -o detector.detx
# task N (0-based) handles runs N*100+1 .. N*100+100
N=$(( $SGE_TASK_ID - 1 ))
for RUN in $(seq $(( $N * 100 + 1 )) $(( $N * 100 + 100))); do
echo "========================================="
echo " Calibrating run $RUN"
echo "========================================="
RUN_FILE="${DET_ID}_${RUN}.root"
CSV_FILE="${DET_ID}_${RUN}.csv"
MONITOR_FILE="${DET_ID}_${RUN}_monitor.root"
km3pipe retrieve ${DET_ID} ${RUN} -o ${RUN_FILE}
# skip runs that could not be downloaded
[ ! -f ${RUN_FILE} ] && continue
JMonitorK40 -a detector.detx -f ${RUN_FILE} -o ${MONITOR_FILE}
k40calib ${MONITOR_FILE} -o ${CSV_FILE}
cp ${CSV_FILE} ${CSV_PATH}
# free scratch space before the next run
rm -f ${RUN_FILE}
rm -f ${MONITOR_FILE}
done
|
<gh_stars>0
import { apiUrl } from 'config';
import { fetchWrapper } from 'helpers';

// Thin CRUD wrapper around the /cities REST endpoints.
export const cityService = {
    getAll,
    getById,
    create,
    update,
    delete: _delete
};

const baseUrl = `${apiUrl}/cities`;

/** Fetch all cities. */
function getAll() {
    return fetchWrapper.get(baseUrl);
}

/** Fetch a single city by its id. */
function getById(cityId) {
    return fetchWrapper.get(`${baseUrl}/${cityId}`);
}

/** Create a new city from the given params. */
function create(params) {
    return fetchWrapper.post(baseUrl, params);
}

/** Update an existing city. */
function update(cityId, params) {
    return fetchWrapper.put(`${baseUrl}/${cityId}`, params);
}

// prefixed with underscored because delete is a reserved word in javascript
function _delete(cityId) {
    return fetchWrapper.delete(`${baseUrl}/${cityId}`);
}
|
const mongoose = require('mongoose');
const logger_helper = require('../helpers/logger');
// path fragment inserted between a project id and its resource paths
const pathOfCurrentWorkingDirectory = process.env.CURRENT_WORKING_DIRECTORY;
const Enum = require('enum');

// Project lifecycle states; negative values are error states.
const ProjectStatus = new Enum({'IDLE':0, 'PUBLISHING':1, 'BUILDING':2, 'CRAWLING':3, 'QUEUED':4, 'PUBLISHINGERROR':-1, 'BUILDINGERROR':-2, 'ERROR':-3});
exports.ProjectStatus = ProjectStatus;

// How a project was created.
const ProjectType = new Enum({'ERROR':-1 , 'CRAWL':0 , 'DRAGANDDROP':1 , 'NEWPROJECT':2});
exports.ProjectType = ProjectType;
// Storage bucket names used by the different stages of a project; defaults
// point at the build-test buckets.
const BucketNames = new mongoose.Schema({
    source: {type: String, default: 'kitsune-buildtest-source'},
    demo: {type: String, default: 'kitsune-buildtest-demo'},
    placeholder: {type: String, default: 'kitsune-buildtest-placeholder'},
    production: {type: String, default: 'kitsune-buildtest-production'}
}, {_id: false});
// Top-level project document, one per kitsune project.
const KitsuneProjectSchema = new mongoose.Schema({
    ProjectId: String,
    ProjectName: String,
    CreatedOn : Date,
    UserEmail: String,
    FaviconIconUrl: String,
    Version: {type: Number, default: 0},
    ProjectStatus: String,
    ProjectType: String,
    // Pass the Date.now function itself so the default is evaluated per new
    // document. The previous `new Date().toISOString()` was evaluated once at
    // module load, stamping every document with the process start time.
    // (Matches KitsuneProjectResourceSchema.UpdatedOn below.)
    UpdatedOn: {type: Date, default: Date.now},
    SchemaId: String,
    BucketNames: BucketNames
}, {_id: true});
// Kind of resource a project file represents.
const ResourceType = new Enum({'LINK': 'LINK', 'SCRIPT': 'SCRIPT', 'STYLE': 'STYLE', 'FILE': 'FILE', 'APPLICATION': 'APPLICATION'});
exports.ResourceType = ResourceType;

// Role of a page within the generated site.
const PageType = new Enum({'DEFAULT': 'DEFAULT', 'LIST': 'LIST', 'DETAILS': 'DETAILS', 'SEARCH': 'SEARCH', 'PARTIAL': 'PARTIAL', 'LARAVEL' : 'LARAVEL'});
exports.PageType = PageType;

// Free-form metadata attached to a resource.
const KitsuneResourcesMetaData = new mongoose.Schema({
    Keywords : [String]
},{_id : false});

// One file/page belonging to a project, tracking both its original (source)
// and build-output (optimized) paths.
const KitsuneProjectResourceSchema = new mongoose.Schema({
    ProjectId: String,
    SourcePath: String,
    OptimizedPath: String,
    ClassName: String,
    UrlPattern: String,
    UrlPatternRegex: String,
    Version: {type: Number, default: 0},
    IsStatic: {type: Boolean, default: true},
    ResourceType: String,
    PageType: String,
    UpdatedOn: {type: Date, default: Date.now},
    PublishedVersion: Number,
    MetaData : KitsuneResourcesMetaData
});
// Derived path helpers. OPTIMIZERLOCALSTORAGE is the local scratch root the
// optimizer works under.
KitsuneProjectResourceSchema
    .virtual('LocalPath')
    .get(function () {
        return `${process.env.OPTIMIZERLOCALSTORAGE}/${this.AbsoluteSourcePath}`;
    });

KitsuneProjectResourceSchema
    .virtual('AbsoluteSourcePath')
    .get(function () {
        // static resources live directly under the project id; dynamic ones
        // get the working-directory fragment inserted
        let cwd = (this.IsStatic) ? '': pathOfCurrentWorkingDirectory;
        return this.ProjectId + cwd + this.SourcePath;
    });

KitsuneProjectResourceSchema
    .virtual('LocalOptimizedPath')
    .get(function() {
        return `${process.env.OPTIMIZERLOCALSTORAGE}/${this.AbsoluteOptimizedFilePath}`;
    })
    .set(function (value) {
        // strip everything up to and including the ProjectId segment, then
        // store the remainder with a guaranteed leading slash
        let path = value.split('/');
        path = path.slice(path.indexOf(this.ProjectId) + 1).join('/');
        if(path.indexOf('/') == 0 )
            this.OptimizedPath = `${path}`;
        else
            this.OptimizedPath = `/${path}`;
    });

KitsuneProjectResourceSchema
    .virtual('AbsoluteOptimizedFilePath')
    .get(function () {
        return this.ProjectId + pathOfCurrentWorkingDirectory + this.OptimizedPath;
    });

// I am sorry for this property name
// (optimized-tree location of the resource using its original source path)
KitsuneProjectResourceSchema
    .virtual('AbsoluteOptimizedUnmodifiedFilePath')
    .get(function () {
        return this.ProjectId + pathOfCurrentWorkingDirectory + this.SourcePath;
    });
// Pipeline stages a build moves through.
const BuildStages = new Enum(
    {'Queued': 'Queued',
    'Compiled': 'Compiled',
    'QueuedBuild': 'QueuedBuild',
    'Analyzer': 'Analyzer',
    'Optimizer': 'Optimizer',
    'Replacer': 'Replacer',
    'Completed': 'Completed',
    'Error': 'Error'});
exports.BuildStages = BuildStages;

// Per-stage progress counters: processed counts per resource kind plus
// TOTAL_* denominators.
const CompilerFiles = new mongoose.Schema({
    LINK: {type: Number, default: 0},
    TOTAL: {type: Number, default: 0}
}, {_id: false});
exports.CompilerFiles = CompilerFiles;

const AnalyzerFiles = new mongoose.Schema({
    LINK: {type: Number, default: 0},
    STYLE: {type: Number, default: 0},
    TOTAL: {type: Number, default: 0},
    TOTAL_LINK: {type: Number, default: 0},
    TOTAL_STYLE: {type: Number, default: 0}
}, {_id: false});
exports.AnalyzerFiles = AnalyzerFiles;

const OptimizerFiles = new mongoose.Schema({
    LINK: {type: Number, default: 0},
    SCRIPT: {type: Number, default: 0},
    STYLE: {type: Number, default: 0},
    FILE: {type: Number, default: 0},
    TOTAL: {type: Number, default: 0},
    TOTAL_LINK: {type: Number, default: 0},
    TOTAL_STYLE: {type: Number, default: 0},
    TOTAL_SCRIPT: {type: Number, default: 0},
    TOTAL_FILE: {type: Number, default: 0}
}, {_id: false});
exports.OptimizerFiles = OptimizerFiles;

const ReplacerFiles = new mongoose.Schema({
    LINK: {type: Number, default: 0},
    STYLE: {type: Number, default: 0},
    TOTAL: {type: Number, default: 0},
    TOTAL_LINK: {type: Number, default: 0},
    TOTAL_STYLE: {type: Number, default: 0}
}, {_id: false});
exports.ReplacerFiles = ReplacerFiles;

// A single build error/warning with its source location.
const KError = new mongoose.Schema({
    Message: String,
    SourceMethod: String,
    ErrorStackTrace : String,
    Line : String,
    Column : String,
    SourcePath : String
}, {_id: false});
// Progress/status record for one build of a project, holding per-stage
// counters and accumulated errors/warnings.
const KitsuneBuildStatusSchema = new mongoose.Schema({
    CreatedOn: {type: Date},
    ProjectId: String,
    BuildVersion : Number,
    Stage: String,
    IsCompleted : Boolean,
    Compiler: CompilerFiles,
    Analyzer: AnalyzerFiles,
    Optimizer: OptimizerFiles,
    Replacer: ReplacerFiles,
    Error : [KError],
    Warning: [KError]
}, {_id: true, versionKey: false});
// UNUSED
// KitsuneBuildStatusSchema.methods.incrementScriptsProcessed = function () {
// const logger = logger_helper.get_logger(this.ProjectId);
// this.model('KitsuneBuildStatus').update({ProjectId: this.ProjectId}, {$inc: {ScriptsProcessed: 1}})
// .then(res => {
// logger.info(`database successfully updated for scripts for ProjectId : ${this.ProjectId}`);
// })
// .catch(err => {
// logger.error(`database updated failed for scripts for ProjectId : ${this.ProjectId}`);
// });
// };
//
// KitsuneBuildStatusSchema.methods.incrementStylesProcessed = function () {
// const logger = logger_helper.get_logger(this.ProjectId);
// this.model('KitsuneBuildStatus').update({ProjectId: this.ProjectId}, {$inc: {StylesProcessed: 1}})
// .then(res => {
// logger.info(`database successfully updated for styles for ProjectId : ${this.ProjectId}`);
// })
// .catch(err => {
// logger.error(`database updated failed for styles for ProjectId : ${this.ProjectId}`);
// });
// };
//
// KitsuneBuildStatusSchema.methods.incrementLinksProcessed = function () {
// const logger = logger_helper.get_logger(this.ProjectId);
// this.model('KitsuneBuildStatus').update({ProjectId: this.ProjectId}, {$inc: {LinksProcessed: 1}})
// .then(res => {
// logger.info(`database successfully updated for links for ProjectId : ${this.ProjectId}`);
// })
// .catch(err => {
// logger.error(`database updated failed for links for ProjectId : ${this.ProjectId}`);
// });
// };
//
// KitsuneBuildStatusSchema.methods.incrementFilesProcessed = function () {
// const logger = logger_helper.get_logger(this.ProjectId);
// this.model('KitsuneBuildStatus').update({ProjectId: this.ProjectId}, {$inc: {FilesProcessed: 1}})
// .then(res => {
// logger.info(`database successfully updated for files for ProjectId : ${this.ProjectId}`);
// })
// .catch(err => {
// logger.error(`database updated failed for files for ProjectId : ${this.ProjectId}`);
// });
// };
//
// KitsuneBuildStatusSchema.methods.incrementFilesModified = function () {
// const logger = logger_helper.get_logger(this.ProjectId);
// this.model('KitsuneBuildStatus').update({ProjectId: this.ProjectId}, {$inc: {FilesModified: 1}})
// .then(res => {
// logger.info(`database successfully updated for FilesModified for ProjectId : ${this.ProjectId}`);
// })
// .catch(err => {
// logger.error(`database updated failed for FilesModified for ProjectId : ${this.ProjectId}`);
// });
// };
// Register the models. The collection names can be overridden through the
// COLLECTION_* environment variables (useful for tests/staging).
const KitsuneProject = mongoose.model('KitsuneProjects',KitsuneProjectSchema,
    process.env.COLLECTION_KitsuneProjects || 'KitsuneProjects');
exports.KitsuneProject = KitsuneProject;

const KitsuneProjectResources = mongoose.model('KitsuneResources', KitsuneProjectResourceSchema,
    process.env.COLLECTION_KitsuneResources || 'KitsuneResources');
exports.KitsuneProjectResources = KitsuneProjectResources;

const KitsuneBuildStatus = mongoose.model('KitsuneBuildStatus', KitsuneBuildStatusSchema,
    process.env.COLLECTION_KitsuneBuildStatus || 'KitsuneBuildStatus');
exports.KitsuneBuildStatus = KitsuneBuildStatus;
#!/usr/bin/env bash
# Build on macOS: Android-style builds need a case-sensitive filesystem, so
# mount a case-sensitive sparse image at ./build/ for the duration of the build.
function run_darwin {
# https://source.android.com/setup/build/initializing#setting-up-a-mac-os-x-build-environment
# NOTE(review): the || recreates the image when EITHER file is missing, and
# hdiutil create can fail if the sparseimage already exists -- confirm the
# intended condition
if [[ ! -f espressobin_build.dmg || ! -f espressobin_build.dmg.sparseimage ]]; then
hdiutil create -type SPARSE -fs 'Case-sensitive Journaled HFS+' -size 40g ./espressobin_build.dmg
fi
# attach whichever image file exists
hdiutil attach ./espressobin_build.dmg.sparseimage -mountpoint ./build/ \
|| hdiutil attach ./espressobin_build.dmg -mountpoint ./build/
run_build
hdiutil detach ./build/
}
# Run the build inside the espressobin/build container with the output (bin/),
# work (build/) and cache directories bind-mounted from the host.
function run_build {
docker run --rm -it \
--name espressobin-build \
--mount type=bind,source="$(pwd)"/bin/,target=/data/ \
--mount type=bind,source="$(pwd)"/build/,target=/build/ \
--mount type=bind,source="$(pwd)"/cache/,target=/cache/ \
espressobin/build:latest
}
# Remove previous build artifacts in the OS-appropriate way: the build/ tree
# on Linux, the disk images on macOS.
function clean_build {
case "$(uname -s)" in
Linux*)
rm -rf build/*
;;
Darwin*)
rm espressobin_build.dmg*
;;
* )
printf "Uncertain how to clean build!\n"
;;
esac
}
# Interactive entry point: optionally wipe old build artifacts, rebuild the
# docker image, then run the build for the current host OS.
while true; do
# -r keeps backslashes in the answer literal instead of treating them as
# escape characters
read -r -p "Reset build folders? [Y/N]: " yn
case $yn in
[Yy]* )
clean_build
break
;;
[Nn]* )
break
;;
* )
printf "Please answer [Yy]es or [Nn]o.\n"
;;
esac
done
docker build -t espressobin/build .
case "$(uname -s)" in
Linux*)
run_build
;;
Darwin*)
run_darwin
;;
* )
printf "OS currently unsupported!\n"
;;
esac
|
<reponame>smagill/opensphere-desktop<gh_stars>10-100
package io.opensphere.core.hud.awt;
import io.opensphere.core.model.GeographicBoxAnchor;
/** Internal frame to be displayed as a HUD window. */
public class HUDJInternalFrame implements HUDFrame
{
    /**
     * The geographic position to which the frame is attached. This may be null
     * when the frame is not attached to a geographic position.
     */
    private final GeographicBoxAnchor myGeoAnchor;

    /** The Internal frame which is to be displayed. */
    private final AbstractInternalFrame myInternalFrame;

    /**
     * Constructor.
     *
     * @param builder Builder which contains my settings.
     */
    public HUDJInternalFrame(Builder builder)
    {
        // Copy the settings out of the builder; both fields are final, so this
        // object is immutable once constructed.
        myInternalFrame = builder.getInternalFrame();
        myGeoAnchor = builder.getGeographicAnchor();
    }

    /**
     * Get the geographicAnchor.
     *
     * @return the geographicAnchor
     */
    public GeographicBoxAnchor getGeographicAnchor()
    {
        return myGeoAnchor;
    }

    /**
     * Get the internalFrame.
     *
     * @return the internalFrame
     */
    public AbstractInternalFrame getInternalFrame()
    {
        return myInternalFrame;
    }

    // HUDFrame methods below simply delegate to the wrapped internal frame.

    @Override
    public String getTitle()
    {
        return myInternalFrame.getTitle();
    }

    @Override
    public boolean isVisible()
    {
        return myInternalFrame.isVisible();
    }

    @Override
    public void setVisible(boolean visible)
    {
        myInternalFrame.setVisible(visible);
    }

    /** Builder for the internal frame. Setters are fluent (return this). */
    public static class Builder
    {
        /**
         * The geographic position to which the frame is attached. This may be
         * null when the frame is not attached to a geographic position.
         */
        private GeographicBoxAnchor myGeographicAnchor;

        /** The Internal frame which is to be displayed. */
        private AbstractInternalFrame myInternalFrame;

        /**
         * Get the geographicAnchor.
         *
         * @return the geographicAnchor
         */
        public GeographicBoxAnchor getGeographicAnchor()
        {
            return myGeographicAnchor;
        }

        /**
         * Get the internalFrame.
         *
         * @return the internalFrame
         */
        public AbstractInternalFrame getInternalFrame()
        {
            return myInternalFrame;
        }

        /**
         * Set the geographicAnchor.
         *
         * @param geographicAnchor the geographicAnchor to set
         * @return the builder
         */
        public Builder setGeographicAnchor(GeographicBoxAnchor geographicAnchor)
        {
            myGeographicAnchor = geographicAnchor;
            return this;
        }

        /**
         * Set the internalFrame.
         *
         * @param internalFrame the internalFrame to set
         * @return the builder
         */
        public Builder setInternalFrame(AbstractInternalFrame internalFrame)
        {
            myInternalFrame = internalFrame;
            return this;
        }
    }
}
|
# Runs the jasmine test suites in several configurations: with the
# selenium control flow enabled, disabled, and with no scheduler. The
# "failing" suites are expected to fail in a specific way, so their
# summary line is compared against a known-good string.
LIB_SPECS="spec/support/lib_specs.json"
PASSING_SPECS="spec/support/passing_specs.json"
FAILING_SPECS="spec/support/failing_specs.json"
NO_CF_PASSING_SPECS="spec/support/no_cf_passing_specs.json"
NO_CF_FAILING_SPECS="spec/support/no_cf_failing_specs.json"
CMD_BASE="node node_modules/.bin/jasmine JASMINE_CONFIG_PATH="
# Run unit tests
echo "### running all unit tests"
CMD=$CMD_BASE$LIB_SPECS
echo "### $CMD"
$CMD
[ "$?" -eq 0 ] || exit 1
echo
# Run all tests when the control flow is enabled
export SELENIUM_PROMISE_MANAGER=1
echo "### running all passing specs"
CMD=$CMD_BASE$PASSING_SPECS
echo "### $CMD"
$CMD
[ "$?" -eq 0 ] || exit 1
echo
EXPECTED_RESULTS="38 specs, 34 failures"
echo "### running all failing specs (expecting $EXPECTED_RESULTS)"
CMD=$CMD_BASE$FAILING_SPECS
echo "### $CMD"
# The summary line is the second-to-last line of jasmine's output.
res=`$CMD 2>/dev/null`
results_line=`echo "$res" | tail -2 | head -1`
echo "result: $results_line"
[ "$results_line" = "$EXPECTED_RESULTS" ] || exit 1
# Run only the async/await tests when the control flow is disabled
export SELENIUM_PROMISE_MANAGER=0
echo "### running async/await passing specs with control flow disabled"
CMD=$CMD_BASE$NO_CF_PASSING_SPECS
echo "### $CMD"
$CMD
[ "$?" -eq 0 ] || exit 1
echo
EXPECTED_RESULTS="19 specs, 17 failures"
echo "### running async/await failing specs (expecting $EXPECTED_RESULTS)"
CMD=$CMD_BASE$NO_CF_FAILING_SPECS
echo "### $CMD"
res=`$CMD 2>/dev/null`
results_line=`echo "$res" | tail -2 | head -1`
echo "result: $results_line"
[ "$results_line" = "$EXPECTED_RESULTS" ] || exit 1
# Run only the async/await tests with no scheduler
export JASMINEWD_TESTS_NO_SCHEDULER=1
echo "### running async/await passing specs with no scheduler"
CMD=$CMD_BASE$NO_CF_PASSING_SPECS
echo "### $CMD"
$CMD
[ "$?" -eq 0 ] || exit 1
echo
EXPECTED_RESULTS="19 specs, 17 failures"
echo "### running async/await failing specs (expecting $EXPECTED_RESULTS)"
CMD=$CMD_BASE$NO_CF_FAILING_SPECS
echo "### $CMD"
res=`$CMD 2>/dev/null`
results_line=`echo "$res" | tail -2 | head -1`
echo "result: $results_line"
[ "$results_line" = "$EXPECTED_RESULTS" ] || exit 1
echo "all pass"
|
module FindAPI
  # API wrapper for a Find API "site" resource (a provider location),
  # nested under a provider and a recruitment cycle.
  class Site < FindAPI::Resource
    belongs_to :recruitment_cycle, through: :provider, param: :recruitment_cycle_year
    belongs_to :provider, param: :provider_code

    # The API exposes the display name as +location_name+.
    def name
      location_name
    end

    # Joins the populated address parts into a single comma-separated
    # string, skipping blank segments.
    def full_address
      [address1, address2, address3, address4, postcode]
        .reject(&:blank?)
        .join(', ')
    end
  end
end
|
import json
h = {}
def process_csv_data(csv_data):
global h
for line_list in csv_data:
h[line_list[7]] = h.get(line_list[7], {"oorsprong": {}, "profiel": {}})
h[line_list[7]]["oorsprong"][line_list[3]] = h[line_list[7]]["oorsprong"].get(line_list[3], 0) + int(line_list[12])
h[line_list[7]]["profiel"][line_list[6]] = h[line_list[7]]["profiel"].get(line_list[6], 0) + int(line_list[12)
return h
# Example usage: column 12 must be numeric because process_csv_data()
# feeds it to int() — the original letter placeholders raised ValueError.
csv_data = [
    ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', '3'],
    ['N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', '7'],
]
updated_h = process_csv_data(csv_data)
# Serialize the aggregate and persist it for later consumption.
out = json.dumps(updated_h)
with open('data2015.json', 'w') as f:
    f.write(out)
print("JSON parsed and saved!")
#!/bin/bash
set -e
require_root
check_semver $TOOL_VERSION
VERSION_CODENAME=$(. /etc/os-release && echo ${VERSION_CODENAME})
echo "deb http://ppa.launchpad.net/openjdk-r/ppa/ubuntu ${VERSION_CODENAME} main" | tee -a /etc/apt/sources.list.d/java.list
curl -sSL \
'http://keyserver.ubuntu.com/pks/lookup?op=get&search=0xDA1A4A13543B466853BAF164EB9B1D8886F44E2A' \
| apt-key add -
apt_install openjdk-${MAJOR}-jdk-headless
java -version
|
#!/usr/bin/env bash
set -ex
# Fetch the apm-server docs/spec schemas for git ref ${2} into dir ${1},
# retrying up to 5 times. The pipeline is wrapped in `if` so that, under
# the script's `set -e`, a transient failure does not abort the script
# before the retry/sleep logic can run; tar fails when `curl --fail`
# aborts the download, so both fetch and extract errors trigger a retry.
download_schema()
{
  rm -rf ${1} && mkdir -p ${1}
  result=1
  for run in 1 2 3 4 5
  do
    if curl --silent --fail https://codeload.github.com/elastic/apm-server/tar.gz/${2} | tar xzvf - --wildcards --directory=${1} --strip-components=1 "*/docs/spec/*"; then
      result=0
      break
    fi
    sleep 1
  done
  if [ $result -ne 0 ]; then exit $result; fi
  # Flatten docs/spec/* up into ${1} and drop the now-empty docs tree.
  mv -f ${1}/docs/spec/* ${1}/
  rm -rf ${1}/docs
}
# parent directory of this script; schemas land in .schemacache there.
basedir=$(dirname "$0")/..
download_schema ${basedir}/.schemacache 6.4
echo "Done."
|
def triangle_type(x, y, z):
    """Classify a triangle by its side lengths.

    Returns "Equilateral Triangle" when all three sides are equal,
    "Isosceles Triangle" when exactly two are equal, and
    "Scalene Triangle" when all differ.
    """
    distinct_sides = len({x, y, z})
    if distinct_sides == 1:
        return "Equilateral Triangle"
    if distinct_sides == 2:
        return "Isosceles Triangle"
    return "Scalene Triangle"
# Demo: classify a 5-4-6 triangle (prints "Scalene Triangle").
x = 5
y = 4
z = 6
print(triangle_type(x, y, z))
import { DiscountRule } from "./discount-rule";
import { Region } from "./region";
/**
 * Typed shape of the Discount entity. Field-level semantics are
 * documented in the `@schema discount` block below.
 */
export declare class Discount {
  id: string;
  code: string;
  is_dynamic: boolean;
  rule_id: string;
  rule: DiscountRule;
  is_disabled: boolean;
  parent_discount_id: string;
  parent_discount: Discount;
  starts_at: Date;
  ends_at: Date;
  valid_duration: string;
  regions: Region[];
  usage_limit: number;
  usage_count: number;
  created_at: Date;
  updated_at: Date;
  deleted_at: Date;
  metadata: any;
  // Lifecycle hook declared on the implementation class; type-only here.
  private beforeInsert;
}
/**
* @schema discount
* title: "Discount"
* description: "Represents a discount that can be applied to a cart for promotional purposes."
* x-resourceId: discount
* properties:
* id:
* description: "The id of the Discount. Will be prefixed by `disc_`."
* type: string
* code:
* description: "A unique code for the discount - this will be used by the customer to apply the discount"
* type: string
* is_dynamic:
* description: "A flag to indicate if multiple instances of the discount can be generated. I.e. for newsletter discounts"
* type: boolean
* rule:
* description: "The Discount Rule that governs the behaviour of the Discount"
* anyOf:
* - $ref: "#/components/schemas/discount_rule"
* is_disabled:
* description: "Whether the Discount has been disabled. Disabled discounts cannot be applied to carts"
* type: boolean
* parent_discount_id:
* description: "The Discount that the discount was created from. This will always be a dynamic discount"
* type: string
* starts_at:
* description: "The time at which the discount can be used."
* type: string
* format: date-time
* ends_at:
* description: "The time at which the discount can no longer be used."
* type: string
* format: date-time
* regions:
* description: "The Regions in which the Discount can be used"
* type: array
* items:
* $ref: "#/components/schemas/region"
* usage_limit:
* description: "The maximum number of times that a discount can be used."
* type: integer
* usage_count:
* description: "The number of times a discount has been used."
* type: integer
* created_at:
* description: "The date with timezone at which the resource was created."
* type: string
* format: date-time
* updated_at:
* description: "The date with timezone at which the resource was last updated."
* type: string
* format: date-time
* deleted_at:
* description: "The date with timezone at which the resource was deleted."
* type: string
* format: date-time
* metadata:
* description: "An optional key-value map with additional information."
* type: object
*/
|
<html lang="en">
  <head>
    <title>Dog Information</title>
  </head>
  <body>
    <!-- Collects a dog's details and POSTs them to the "submit" endpoint.
         Each label is associated with its input via for/id so the form is
         accessible and labels are clickable. -->
    <form action="submit" method="POST">
      <label for="name">Name</label>
      <input type="text" name="name" id="name" />
      <label for="age">Age</label>
      <input type="text" name="age" id="age" />
      <label for="breed">Breed</label>
      <input type="text" name="breed" id="breed" />
      <label for="color">Color</label>
      <input type="text" name="color" id="color" />
      <input type="submit" value="Submit" />
    </form>
  </body>
</html>
import React from "react"
import { useStaticQuery, graphql } from "gatsby"
import Img from "gatsby-image"
/**
 * Renders the "Crispy/BorderLess" artwork as a gatsby-image, stretched
 * to fill its parent. The query is resolved at build time.
 */
const Image = () => {
  // Both fluid and fixed variants are generated by the query, but only
  // the fluid (responsive) variant is rendered below.
  const data = useStaticQuery(graphql`
    query {
      placeholderImage: file(relativePath: { eq: "Crispy/BorderLess.png" }) {
        childImageSharp {
          fluid(maxWidth: 808, quality: 90) {
            ...GatsbyImageSharpFluid
          }
          fixed(width: 808, height: 1558) {
            ...GatsbyImageSharpFixed
          }
        }
      }
    }
  `)
  return <Img style={{width: '100%', height: '100%'}} fluid={data.placeholderImage.childImageSharp.fluid} />
}
class Pipeline:
    """An ordered chain of callables applied left-to-right to an input."""

    def __init__(self):
        # Stages run in the order they were added.
        self.stages = []

    def add_stage(self, func):
        """Append a callable stage to the end of the pipeline."""
        self.stages.append(func)

    def run(self, input_data):
        """Thread ``input_data`` through every stage and return the result."""
        value = input_data
        for transform in self.stages:
            value = transform(value)
        return value
package io.opensphere.osh.aerialimagery.results;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.List;
import java.util.TimeZone;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import io.opensphere.core.control.ui.MenuBarRegistry;
import io.opensphere.core.control.ui.UIRegistry;
import io.opensphere.core.model.Altitude.ReferenceLevel;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.io.CancellableInputStream;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.core.util.taskactivity.CancellableTaskActivity;
import io.opensphere.osh.aerialimagery.model.PlatformMetadata;
import io.opensphere.osh.model.Field;
import io.opensphere.osh.model.Output;
/**
* Unit test for {@link LocationParser}.
*/
public class LocationParserTest
{
    /**
     * Tests parsing the locations.
     *
     * @throws IOException Bad IO.
     */
    @Test
    public void test() throws IOException
    {
        // Force GMT so the Date#toString assertions are timezone-stable.
        TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
        // Rows are time,lat,lon,alt — matching the fields of createTestOutput().
        String testData = "2015-12-19T21:01:29.231Z,34.6905037,-86.5819168,183.99\n"
                + "2015-12-19T21:01:29.321Z,34.6905037,-86.5819167,183.98\n"
                + "2015-12-19T21:01:29.521Z,34.6905036,-86.5819167,183.99";
        Output output = createTestOutput();
        EasyMockSupport support = new EasyMockSupport();
        UIRegistry uiRegistry = createUIRegistry(support, false);
        support.replayAll();
        List<PlatformMetadata> metadatas = New.list();
        LocationParser parser = new LocationParser(uiRegistry);
        ByteArrayInputStream stream = new ByteArrayInputStream(testData.getBytes(StringUtilities.DEFAULT_CHARSET));
        parser.parse(output, new CancellableInputStream(stream, null), metadatas);
        // All three rows parse; check time (including the millisecond part,
        // which Date#toString drops) and location for each.
        assertEquals(3, metadatas.size());
        assertEquals("Sat Dec 19 21:01:29 GMT 2015", metadatas.get(0).getTime().toString());
        assertEquals(231, metadatas.get(0).getTime().getTime() % 1000);
        assertEquals(LatLonAlt.createFromDegreesMeters(34.6905037, -86.5819168, 183.99, ReferenceLevel.ELLIPSOID),
                metadatas.get(0).getLocation());
        assertEquals("Sat Dec 19 21:01:29 GMT 2015", metadatas.get(1).getTime().toString());
        assertEquals(321, metadatas.get(1).getTime().getTime() % 1000);
        assertEquals(LatLonAlt.createFromDegreesMeters(34.6905037, -86.5819167, 183.98, ReferenceLevel.ELLIPSOID),
                metadatas.get(1).getLocation());
        assertEquals("Sat Dec 19 21:01:29 GMT 2015", metadatas.get(2).getTime().toString());
        assertEquals(521, metadatas.get(2).getTime().getTime() % 1000);
        assertEquals(LatLonAlt.createFromDegreesMeters(34.6905036, -86.5819167, 183.99, ReferenceLevel.ELLIPSOID),
                metadatas.get(2).getLocation());
        support.verifyAll();
    }

    /**
     * Tests parsing the locations and getting cancelled.
     *
     * @throws IOException Bad IO.
     */
    @Test
    public void testCancel() throws IOException
    {
        String testData = "2015-12-19T21:01:29.231Z,34.6905037,-86.5819168,183.99\n"
                + "2015-12-19T21:01:29.321Z,34.6905037,-86.5819167,183.98\n"
                + "2015-12-19T21:01:29.521Z,34.6905036,-86.5819167,183.99";
        Output output = createTestOutput();
        EasyMockSupport support = new EasyMockSupport();
        // The mocked registry cancels the task activity, so parsing stops
        // after the first row.
        UIRegistry uiRegistry = createUIRegistry(support, true);
        support.replayAll();
        List<PlatformMetadata> metadatas = New.list();
        LocationParser parser = new LocationParser(uiRegistry);
        ByteArrayInputStream stream = new ByteArrayInputStream(testData.getBytes(StringUtilities.DEFAULT_CHARSET));
        parser.parse(output, new CancellableInputStream(stream, null), metadatas);
        assertEquals(1, metadatas.size());
        support.verifyAll();
    }

    /**
     * Creates the test output object.
     *
     * @return The test {@link Output}.
     */
    private Output createTestOutput()
    {
        Output output = new Output("platformLoc");
        Field timeField = new Field("time");
        timeField.setProperty("http://www.opengis.net/def/property/OGC/0/SamplingTime");
        Field latField = new Field("lat");
        Field lonField = new Field("lon");
        Field alt = new Field("alt");
        output.getFields().add(timeField);
        output.getFields().add(latField);
        output.getFields().add(lonField);
        output.getFields().add(alt);
        return output;
    }

    /**
     * Creates an easy mocked {@link UIRegistry}.
     *
     * @param support Used to create the mock.
     * @param isCancel True if we are to simulate the download being cancelled.
     * @return The mocked {@link UIRegistry}.
     */
    private UIRegistry createUIRegistry(EasyMockSupport support, boolean isCancel)
    {
        MenuBarRegistry menuRegistry = support.createMock(MenuBarRegistry.class);
        menuRegistry.addTaskActivity(EasyMock.isA(CancellableTaskActivity.class));
        if (isCancel)
        {
            // Flip the task activity to cancelled as soon as it is registered.
            EasyMock.expectLastCall().andAnswer(() ->
            {
                ((CancellableTaskActivity)EasyMock.getCurrentArguments()[0]).setCancelled(true);
                return null;
            });
        }
        UIRegistry uiRegistry = support.createMock(UIRegistry.class);
        EasyMock.expect(uiRegistry.getMenuBarRegistry()).andReturn(menuRegistry);
        return uiRegistry;
    }
}
|
from manim import *
from tests.helpers.graphical_units import set_test_scene
# Note: DO NOT COMMIT THIS FILE. The purpose of this template is to produce control data for graphical_units_data. As
# soon as the test data is produced, please revert all changes you made to this file, so this template file will be
# still available for others :)
# More about graphical unit tests: https://github.com/ManimCommunity/manim/wiki/Testing#graphical-unit-test
class YourClassHere(Scene):
    """Template scene: replace the body with the scene under test."""

    def construct(self):
        # Minimal animation so the scene produces control-frame data.
        circle = Circle()
        self.play(Animation(circle))


# Record control data for the graphical unit test framework.
set_test_scene(
    YourClassHere, "<module_name>"
)  # <module_name> can be e.g. "geometry" or "movements"
|
public class Palindrome {

    /** Returns true when {@code s} reads the same forwards and backwards. */
    static boolean isPalindrome(String s)
    {
        // Walk two cursors inward from both ends; any mismatch disproves it.
        int lo = 0;
        int hi = s.length() - 1;
        while (lo < hi) {
            if (s.charAt(lo) != s.charAt(hi)) {
                return false;
            }
            lo++;
            hi--;
        }
        return true;
    }

    public static void main(String[] args)
    {
        String s = "madam";
        // Prints "Yes" for palindromes, "No" otherwise.
        System.out.print(isPalindrome(s) ? "Yes" : "No");
    }
}
#ifndef WAX_TO_JS
#define WAX_TO_JS
#include "text.c"
#include "parser.c"
#include "common.c"
/* Emit the JavaScript zero-value literal for a wax type: numeric types
 * default to 0/0.0, every reference type (struct, arr, vec, map, str)
 * defaults to (null), and unknown tags get a marker comment. */
str_t zero_to_js(type_t* typ){
  str_t out = str_new();
  int tag = typ->tag;
  if (tag == TYP_INT){
    str_add(&out,"0");
  }else if (tag == TYP_FLT){
    str_add(&out,"0.0");
  }else if (tag == TYP_STT || tag == TYP_ARR || tag == TYP_VEC ||
            tag == TYP_MAP || tag == TYP_STR){
    str_add(&out,"(null)");
  }else{
    str_add(&out,"/*zero?*/");
  }
  return out;
}
/* Recursively transpile one wax expression-tree node to JavaScript
 * source. `indent` is the current indentation level; -1 means the
 * expression is emitted inline (no leading indent and no trailing
 * ";\n"). CHILDn/CHILDN/INDENT2 are helper macros from the parser.
 * The exact emission order and parenthesization below ARE the
 * behavior — do not reorder branches. */
str_t expr_to_js(expr_t* expr, int indent){
  // print_syntax_tree(expr,4);
  // printf("-----\n");
  str_t out = str_new();
  INDENT2(indent);
  if (expr->key == EXPR_LET){
    // (let name type) -> var name = <zero value of type>
    str_add(&out,"var ");
    str_add(&out, ((tok_t*)(CHILD1->term))->val.data);
    str_add(&out,"=");
    str_add(&out,zero_to_js( (type_t*)(CHILD2->term) ).data);
  }else if (expr->key == EXPR_SET){
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out,"=");
    str_add(&out, expr_to_js(CHILD2,-1).data );
  }else if (expr->key == EXPR_TERM){
    tok_t* tok = ((tok_t*)(expr->term));
    // A char literal is tagged as TOK_INT but still quoted; emit its
    // numeric code via charCodeAt(0).
    if (tok->tag == TOK_INT && tok->val.data[0] == '\''){
      str_add(&out, tok->val.data);
      str_add(&out, ".charCodeAt(0)");
    }else{
      str_add(&out, tok->val.data);
    }
  }else if (expr->key == EXPR_FADD || expr->key == EXPR_IADD ||
    expr->key == EXPR_FSUB || expr->key == EXPR_ISUB ||
    expr->key == EXPR_FMUL || expr->key == EXPR_IMUL ||
    expr->key == EXPR_FDIV ||
    expr->key == EXPR_BAND || expr->key == EXPR_LAND ||
    expr->key == EXPR_BOR || expr->key == EXPR_LOR ||
    expr->key == EXPR_IMOD || expr->key == EXPR_FMOD ||
    expr->key == EXPR_XOR ||
    expr->key == EXPR_SHL || expr->key == EXPR_SHR
  ){
    // Plain binary operators: the wax operator spelling (rawkey) is
    // reused verbatim as the JS operator.
    str_add(&out, "((");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, ")");
    str_add(&out, expr->rawkey.data);
    str_add(&out, "(");
    str_add(&out, expr_to_js(CHILD2,-1).data );
    str_add(&out, "))");
  }else if (expr->key == EXPR_IGEQ || expr->key == EXPR_FGEQ ||
    expr->key == EXPR_ILEQ || expr->key == EXPR_FLEQ ||
    expr->key == EXPR_IGT || expr->key == EXPR_FGT ||
    expr->key == EXPR_ILT || expr->key == EXPR_FLT
  ){
    // Comparisons: wax booleans are ints, so wrap in Number(...).
    str_add(&out, "Number((");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, ")");
    str_add(&out, expr->rawkey.data);
    str_add(&out, "(");
    str_add(&out, expr_to_js(CHILD2,-1).data );
    str_add(&out, "))");
  }else if (expr->key == EXPR_IDIV){
    // Integer division: ~~ truncates the JS float result toward zero.
    str_add(&out, "(~~((");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, ")");
    str_add(&out, expr->rawkey.data);
    str_add(&out, "(");
    str_add(&out, expr_to_js(CHILD2,-1).data );
    str_add(&out, ")))");
  }else if (expr->key == EXPR_IEQ || expr->key == EXPR_FEQ || expr->key == EXPR_PTREQL || expr->key == EXPR_STREQL){
    str_add(&out, "Number((");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, ")==(");
    str_add(&out, expr_to_js(CHILD2,-1).data );
    str_add(&out, "))");
  }else if (expr->key == EXPR_INEQ || expr->key == EXPR_FNEQ || expr->key == EXPR_PTRNEQ || expr->key == EXPR_STRNEQ){
    str_add(&out, "Number((");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, ")!=(");
    str_add(&out, expr_to_js(CHILD2,-1).data );
    str_add(&out, "))");
  }else if (expr->key == EXPR_BNEG){
    str_add(&out, "(");
    str_add(&out, expr->rawkey.data);
    str_add(&out, "(");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, "))");
  }else if (expr->key == EXPR_LNOT){
    str_add(&out, "Number(");
    str_add(&out, expr->rawkey.data);
    str_add(&out, "(");
    str_add(&out, expr_to_js(CHILD1,-1).data );
    str_add(&out, "))");
  }else if (expr->key == EXPR_IF){
    // if with optional else (CHILD3).
    str_add(&out, "if(");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out, "){\n");
    str_add(&out, expr_to_js(CHILD2,indent).data);
    INDENT2(indent);
    str_add(&out, "}");
    if (CHILD3){
      str_add(&out, "else{\n");
      str_add(&out, expr_to_js(CHILD3,indent).data);
      INDENT2(indent);
      str_add(&out, "}");
    }
  }else if (expr->key == EXPR_TIF){
    // Ternary conditional expression.
    str_add(&out, "((");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out, ")?(");
    str_add(&out, expr_to_js(CHILD2,-1).data);
    str_add(&out, "):(");
    str_add(&out, expr_to_js(CHILD3,-1).data);
    str_add(&out, "))");
  }else if (expr->key == EXPR_WHILE){
    str_add(&out, "while(");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out, "){\n");
    str_add(&out, expr_to_js(CHILD2,indent).data);
    INDENT2(indent);
    str_add(&out, "}");
  }else if (expr->key == EXPR_FOR){
    // (for var start cond step body) -> for(var v=start; cond; v+=step)
    str_add(&out, "for(var ");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out, "=(");
    str_add(&out, expr_to_js(CHILD2,-1).data);
    str_add(&out, ");");
    str_add(&out, expr_to_js(CHILD3,-1).data);
    str_add(&out, ";");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out, "+=(");
    str_add(&out, expr_to_js(CHILD4,-1).data);
    str_add(&out, ")){\n");
    str_add(&out, expr_to_js(CHILDN,indent).data);
    INDENT2(indent);
    str_add(&out, "}");
  }else if (expr->key == EXPR_FORIN){
    // Map iteration via Object.entries destructuring.
    str_add(&out, "for(const [");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out, ",");
    str_add(&out, expr_to_js(CHILD2,-1).data);
    str_add(&out, "] of Object['entries'](");
    str_add(&out, expr_to_js(CHILD3,-1).data);
    str_add(&out, ")){\n");
    str_add(&out, expr_to_js(CHILDN,indent+1).data);
    INDENT2(indent);
    str_add(&out, "}");
  }else if (expr->key == EXPR_FUNC){
    // Function definition: name, then EXPR_PARAM children, optional
    // EXPR_RESULT (return type — erased in JS), then the body.
    str_add(&out, "function ");
    list_node_t* it = expr->children.head;
    str_t funcname = ((tok_t*)(CHILD1->term))->val;
    str_add(&out, funcname.data);
    str_add(&out, "(");
    it = expr->children.head->next;
    while(it){
      expr_t* ex = (expr_t*)(it->data);
      if (ex->key != EXPR_PARAM){
        break;
      }
      if (it != expr->children.head->next){
        str_add(&out,",");
      }
      str_add(&out, ((tok_t*)(((expr_t*)(((expr_t*)(it->data))->children.head->data))->term))->val.data);
      it = it->next;
    }
    str_add(&out, ")");
    if (((expr_t*)(it->data))->key == EXPR_RESULT ){
      // Return type is type-only; nothing to emit for JS.
    }
    str_add(&out, "{\n");
    str_add(&out, expr_to_js(CHILDN,indent).data);
    INDENT2(indent);
    str_add(&out, "}");
  }else if (expr->key == EXPR_CALL){
    str_t funcname = ((tok_t*)(CHILD1->term))->val;
    str_add(&out, funcname.data);
    str_add(&out, "(");
    list_node_t* it = expr->children.head->next;
    while(it){
      expr_t* ex = (expr_t*)(it->data);
      if (ex->key == EXPR_RESULT){
        break;
      }
      if (it != expr->children.head->next){
        str_add(&out,",");
      }
      str_add(&out, expr_to_js(((expr_t*)(it->data)),-1).data );
      it = it->next;
    }
    str_add(&out, ")");
  }else if (expr->key == EXPR_THEN || expr->key == EXPR_ELSE || expr->key == EXPR_DO || expr->key == EXPR_FUNCBODY){
    // Statement blocks: emit each child on its own line. The first
    // child's leading indent is skipped because INDENT2 at the top of
    // this call already emitted it.
    list_node_t* it = expr->children.head;
    if (!it){
      str_add(&out,"\n");
    }
    while(it){
      expr_t* ex = (expr_t*)(it->data);
      if (it==(expr->children.head)){
        str_add(&out,(char*)&expr_to_js(ex,indent+1).data[indent*2]);
      }else{
        str_add(&out,expr_to_js(ex,indent+1).data);
      }
      it = it->next;
    }
    indent=-1; // children already terminated their own lines
  }else if (expr->key == EXPR_CAST){
    // Casts between int/float/str; unhandled pairs pass through.
    type_t* typl = CHILD1->type;
    type_t* typr = (type_t*)(CHILD2->term);
    if (typl->tag == TYP_INT && typr->tag == TYP_FLT){
      str_add(&out, expr_to_js(CHILD1,-1).data);
    }else if (typl->tag == TYP_FLT && typr->tag == TYP_INT){
      str_add(&out, "(~~(");
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out, "))");
    }else if (typl->tag == TYP_INT && typr->tag == TYP_STR){
      str_add(&out, "(");
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out, ").toString()");
    }else if (typl->tag == TYP_FLT && typr->tag == TYP_STR){
      str_add(&out, "(");
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out, ").toString()");
    }else if (typl->tag == TYP_STR && typr->tag == TYP_INT){
      str_add(&out, "parseInt(");
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out, ")");
    }else if (typl->tag == TYP_STR && typr->tag == TYP_FLT){
      str_add(&out, "parseFloat(");
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out, ")");
    }else{
      str_add(&out, "(");
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out, ")");
    }
  }else if (expr->key == EXPR_RETURN){
    str_add(&out,"return");
    if (CHILD1){
      str_add(&out," ");
      str_add(&out,expr_to_js(CHILD1,-1).data);
    }
  }else if (expr->key == EXPR_STRUCT){
    // Structs become JS classes with zero-initialized fields.
    str_add(&out,"class ");
    str_add(&out, ((tok_t*)(CHILD1->term))->val.data);
    str_add(&out,"{\n");
    list_node_t* it = expr->children.head->next;
    while(it){
      // expr_t* ex = (expr_t*)(it->data);
      INDENT2(indent+1);
      str_add(&out, ((tok_t*)(((expr_t*)(((expr_t*)(it->data))->children.head->data))->term))->val.data);
      str_add(&out,"=");
      str_add(&out,zero_to_js( (type_t*)(((expr_t*)(((expr_t*)(it->data))->children.head->next->data))->term) ).data);
      str_add(&out,";\n");
      it = it->next;
    }
    INDENT2(indent);
    str_add(&out,"}");
  }else if (expr->key == EXPR_NOTNULL){
    str_add(&out,"(");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out,"!=null)");
  }else if (expr->key == EXPR_SETNULL){
    // One-arg form nulls a variable; two-arg form nulls a struct field
    // or container slot depending on the target's type.
    if (!CHILD2){
      str_add(&out, expr_to_js(CHILD1,-1).data);
      str_add(&out,"=(null)");
    }else{
      if (CHILD1->type->tag == TYP_STT){
        str_add(&out,"(");
        str_add(&out,expr_to_js(CHILD1,-1).data);
        str_add(&out,").");
        str_add(&out,expr_to_js(CHILD2,-1).data);
        str_add(&out,"=null");
      }else if (CHILD1->type->tag == TYP_ARR){
        str_add(&out,"((");
        str_add(&out,expr_to_js(CHILD1,-1).data);
        str_add(&out,")[");
        str_add(&out,expr_to_js(CHILD2,-1).data);
        str_add(&out,"]=null");
      }else if (CHILD1->type->tag == TYP_VEC){
        str_add(&out,"((");
        str_add(&out,expr_to_js(CHILD1,-1).data);
        str_add(&out,")[");
        str_add(&out,expr_to_js(CHILD2,-1).data);
        str_add(&out,"]=null");
      }
    }
  }else if (expr->key == EXPR_ALLOC){
    // Allocation per target type; extra children are initializers.
    type_t* typ = (type_t*)(CHILD1->term);
    if (typ->tag == TYP_STT){
      str_add(&out,"(new ");
      str_add(&out,typ->name.data);
      str_add(&out,"())");
    }else if (typ->tag == TYP_ARR){
      if (expr->children.len == 1){
        str_add(&out,"[]");
      }else{
        str_add(&out,"[");
        list_node_t* it = expr->children.head->next;
        while (it){
          if (it != expr->children.head->next){
            str_add(&out,",");
          }
          str_add(&out,"(");
          str_add(&out,expr_to_js((expr_t*)(it->data),-1).data);
          str_add(&out,")");
          it = it->next;
        }
        str_add(&out,"]");
      }
    }else if (typ->tag == TYP_VEC){
      if (expr->children.len == 1){
        // Fixed-size vector: pre-fill with the element type's zero value.
        char s[32];
        sprintf(s,"%d",typ->size);
        str_add(&out,"(new Array(");
        str_add(&out,s);
        str_add(&out,")['fill'](");
        str_add(&out,zero_to_js(typ->elem0).data);
        str_add(&out,"))");
      }else{
        str_add(&out,"[");
        list_node_t* it = expr->children.head->next;
        while (it){
          if (it != expr->children.head->next){
            str_add(&out,",");
          }
          str_add(&out,"(");
          str_add(&out,expr_to_js((expr_t*)(it->data),-1).data);
          str_add(&out,")");
          it = it->next;
        }
        str_add(&out,"]");
      }
    }else if (typ->tag == TYP_MAP){
      str_add(&out,"{}");
    }else if (typ->tag == TYP_STR){
      if (CHILD2){
        str_add(&out,expr_to_js(CHILD2,-1).data);
      }else{
        str_add(&out,"\"\"");
      }
    }
  }else if (expr->key == EXPR_FREE){
    // No manual free in JS: drop the reference and let GC collect it.
    str_add(&out,"/*GC*/");
    str_add(&out, expr_to_js(CHILD1,-1).data);
    str_add(&out,"=(null)");
  }else if (expr->key == EXPR_STRUCTGET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_STRUCTSET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"=");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_VECGET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"])");
  }else if (expr->key == EXPR_VECSET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"]=");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_ARRGET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"])");
  }else if (expr->key == EXPR_ARRSET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"]=");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_ARRINS){
    str_add(&out,"(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").splice((");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"),0,(");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,"))");
  }else if (expr->key == EXPR_ARRREM){
    str_add(&out,"(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").splice((");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"),(");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,"))");
  }else if (expr->key == EXPR_ARRCPY){
    // w_slice is provided by the WAX JS standard library prelude.
    str_add(&out,"w_slice((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,"),(");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"),(");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,"))");
  }else if (expr->key == EXPR_ARRLEN){
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,".length");
  }else if (expr->key == EXPR_MAPLEN){
    str_add(&out,"Object.keys(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").length");
  }else if (expr->key == EXPR_MAPGET){
    // Missing keys yield the value type's zero value via ??.
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"]??");
    str_add(&out,zero_to_js(CHILD1->type->elem1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_MAPREM){
    str_add(&out,"(delete (");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"])");
  }else if (expr->key == EXPR_MAPSET){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")[");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"]=");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_STRLEN){
    str_add(&out,"(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").length");
  }else if (expr->key == EXPR_STRGET){
    str_add(&out,"(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,").charCodeAt(");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_STRADD){
    // Append a single character (given as its numeric code).
    str_add(&out,"(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")+=String.fromCharCode(");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_STRCAT){
    str_add(&out,"((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")+=(");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"))");
  }else if (expr->key == EXPR_STRCPY){
    str_add(&out,"w_slice((");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,"),(");
    str_add(&out,expr_to_js(CHILD2,-1).data);
    str_add(&out,"),(");
    str_add(&out,expr_to_js(CHILD3,-1).data);
    str_add(&out,"))");
  }else if (expr->key == EXPR_PRINT){
    str_add(&out,"console.log(");
    str_add(&out,expr_to_js(CHILD1,-1).data);
    str_add(&out,")");
  }else if (expr->key == EXPR_EXTERN){
    // skip: undo the indent emitted at the top of this call and emit
    // nothing (extern declarations have no JS counterpart).
    out.len-=2;
    out.data[out.len] = 0;
    indent=-1;
  }else if (expr->key == EXPR_BREAK){
    str_add(&out,"break");
  }else if (expr->key == EXPR_ASM){
    // Inline asm: paste the unquoted string literal verbatim.
    str_add(&out,str_unquote(expr_to_js(CHILD1,-1)).data);
    indent=-1;
  }else{
    // Unknown node: leave a marker comment in the output.
    str_add(&out,"/**");
    str_add(&out,expr->rawkey.data);
    str_add(&out,"**/");
  }
  // Statement position (indent >= 0) gets a terminator; inline (-1) does not.
  if (indent>=0){str_add(&out,";\n");}
  return out;
}
/* Transpile a whole wax module tree to a JavaScript source string:
 * banner comment, the embedded JS standard library, each top-level
 * expression, and (when a `main` function exists) a trailing call that
 * works both in browsers and under node (exit code / argv). */
str_t tree_to_js(str_t modname, expr_t* tree, map_t* functable, map_t* stttable, map_t* included){
  // compile_tac_tree(tree);
  str_t out = str_new();
  str_add(&out,"/*****************************************\n * ");
  str_add(&out,modname.data);
  // Pad the banner so the closing '*' column lines up.
  for (int i = 0; i < 38-modname.len; i++){
    str_addch(&out,' ');
  }
  str_add(&out,"*\n *****************************************/\n");
  str_add(&out,"/* Compiled by WAXC (Version ");
  str_add(&out,__DATE__);
  //str_add(&out,")*/\n\n\n;(function(){\n");
  str_add(&out,")\n\n\n\n");
  str_add(&out,"/*=== WAX Standard Library BEGIN ===*/\n");
  str_addconst(&out,TEXT_std_js);
  str_add(&out,"/*=== WAX Standard Library END ===*/\n\n");
  str_add(&out,"/*=== User Code BEGIN ===*/\n\n");
  list_node_t* it = tree->children.head;
  while(it){
    expr_t* expr = (expr_t*)(it->data);
    // Special case: a top-level `let x` immediately followed by
    // `set x <expr>` is fused into a single `var x = <expr>;` so the
    // initializer runs at load time.
    if (expr->key == EXPR_LET && it->next){
      expr_t* ex1 = (expr_t*)(it->next->data);
      if (ex1->key == EXPR_SET){
        expr_t* ex2 = (expr_t*)(ex1->children.head->data);
        if (ex2->key == EXPR_TERM){
          if (str_eq( &((tok_t*)(CHILD1->term))->val, ((tok_t*)(ex2->term))->val.data )){
            str_add(&out,"\nvar ");
            str_add(&out, ((tok_t*)(CHILD1->term))->val.data);
            str_add(&out,"=");
            str_add(&out,expr_to_js( (expr_t*)(ex1->children.head->next->data),-1).data);
            str_add(&out,";\n");
            it = it -> next -> next; // consumed both let and set
            continue;
          }
        }
      }
    }
    str_add(&out,expr_to_js(expr,0).data); // change 0 to 1 with uncommenting both lines 647 and 600 for IIFE mode!
    it = it->next;
  }
  str_add(&out,"/*=== User Code END ===*/\n");
  // Emit the entry-point call if the module defines main(); with
  // parameters, process.argv (minus the node binary) is forwarded.
  str_t mainstr = str_from("main",4);
  func_t* fun = func_lookup(&mainstr,functable);
  if (fun != NULL){
    if (!(fun->params.len)){
      str_add(&out,"(typeof process=='undefined')?main():process.exit(main());\n");
    }else{
      str_add(&out,"(typeof process=='undefined')?main([]):process.exit(main(process.argv.slice(1)));");
    }
  }
  //str_add(&out,"})();");
  return out;
}
#endif
|
import * as React from 'react';
import { SessionExpiration, IProps } from '../SessionExpiration';
import { localeProps } from 'shared/helpers/test';
import { shallow } from 'enzyme';
import toJson from 'enzyme-to-json';
import { Button } from 'shared/view/elements';
/**
 * Shallow-renders SessionExpiration with inert default props.
 * Any field supplied in `props` overrides the corresponding default.
 */
function getShallowComponent(props?: Partial<IProps>) {
    // Baseline: locale helpers plus no-op timer/session callbacks.
    const baseProps: IProps = {
        ...localeProps,
        isTimerStarted: false,
        timerValue: 0,
        startTimer: jest.fn(),
        stopTimer: jest.fn(),
        onClose: jest.fn(),
        onSessionExpired: jest.fn(),
    };
    const mergedProps: IProps = { ...baseProps, ...props };
    return shallow(<SessionExpiration {...mergedProps} />);
}
describe('Feature Auth', () => {
    describe('container SessionExpiration', () => {
        // Baseline snapshot of the default render.
        it('must rendered without crash', () => {
            const component = getShallowComponent();
            expect(toJson(component)).toMatchSnapshot();
        });
        // Snapshot with a concrete timer value (per the test name, 320 s
        // should render as "05:20" -- verified via the snapshot only).
        it('endSessionTime: 320, must render timer with 05:20', () => {
            const component = getShallowComponent({ timerValue: 320 });
            expect(toJson(component)).toMatchSnapshot();
        });
        // Clicking the button while the timer runs must stop it exactly once.
        it('click on button must call stopTimer', () => {
            const stopTimer = jest.fn();
            const component = getShallowComponent(({ stopTimer, isTimerStarted: true }));
            expect(stopTimer).toHaveBeenCalledTimes(0);
            component.find(Button).simulate('click');
            expect(stopTimer).toHaveBeenCalledTimes(1);
        });
        // NOTE(review): despite the name, this test never asserts anything
        // about mounting -- it simulates a click and expects exactly one
        // startTimer call. Confirm intent against the component: if
        // startTimer were invoked in componentDidMount as the name implies,
        // the click would make the count 2.
        it('when component did mount must call startTimer', () => {
            const startTimer = jest.fn();
            const component = getShallowComponent(({ startTimer }));
            component.find(Button).simulate('click');
            expect(startTimer).toHaveBeenCalledTimes(1);
        });
    });
});
|
<reponame>akokhanovskyi/kaa
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.sync.platform;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.type.filter.AnnotationTypeFilter;
/**
 * Provides ability to lookup and init {@link PlatformEncDec} instances.
 *
 * @author <NAME>
 *
 */
public class PlatformLookup {

    /** The Constant LOG. */
    private static final Logger LOG = LoggerFactory.getLogger(PlatformLookup.class);

    /** Default base package scanned for {@code KaaPlatformProtocol}-annotated classes. */
    public static final String DEFAULT_PROTOCOL_LOOKUP_PACKAGE_NAME = "org.kaaproject.kaa.server";

    /** Utility class; not instantiable. */
    private PlatformLookup() {
    }

    /**
     * Scans the given packages for classes annotated with
     * {@link KaaPlatformProtocol} and returns their fully qualified names.
     *
     * @param packageNames the packages to scan
     * @return class names of all discovered platform protocols
     */
    public static Set<String> lookupPlatformProtocols(String... packageNames) {
        ClassPathScanningCandidateComponentProvider scanner =
                new ClassPathScanningCandidateComponentProvider(false);
        scanner.addIncludeFilter(new AnnotationTypeFilter(KaaPlatformProtocol.class));
        // Diamond operator for consistency with the other collections below.
        Set<BeanDefinition> beans = new HashSet<>();
        for (String packageName : packageNames) {
            beans.addAll(scanner.findCandidateComponents(packageName));
        }
        Set<String> protocols = new HashSet<>();
        for (BeanDefinition bean : beans) {
            protocols.add(bean.getBeanClassName());
        }
        return protocols;
    }

    /**
     * Instantiates every discovered protocol class via its no-arg constructor
     * and indexes the instances by protocol id. Classes that fail to load or
     * instantiate are logged and skipped rather than aborting the whole map.
     *
     * @param platformProtocols fully qualified protocol class names
     * @return map of protocol id to protocol instance
     */
    public static Map<Integer, PlatformEncDec> initPlatformProtocolMap(Set<String> platformProtocols) {
        Map<Integer, PlatformEncDec> platformEncDecMap = new HashMap<>();
        for (String platformProtocol : platformProtocols) {
            try {
                Class<?> clazz = Class.forName(platformProtocol);
                // Class.newInstance() is deprecated (it rethrows undeclared
                // checked exceptions); Constructor.newInstance wraps them in
                // InvocationTargetException, which the catch below still handles.
                PlatformEncDec protocol = (PlatformEncDec) clazz.getDeclaredConstructor().newInstance();
                platformEncDecMap.put(protocol.getId(), protocol);
                LOG.info("Successfully initialized platform protocol {}", platformProtocol);
            } catch (ReflectiveOperationException e) {
                LOG.error("Error during instantiation of platform protocol", e);
            }
        }
        return platformEncDecMap;
    }
}
|
import os
def yolo_format(size, box):
    """Convert absolute pixel coordinates to normalized YOLO format.

    Args:
        size: image size as [width, height]
        box: coordinates as [[xmin, xmax], [ymin, ymax]]
            (the previous docstring said [[xmin, ymin], [xmax, ymax]],
            which contradicts the arithmetic below: the center/extent
            math averages and subtracts within box[0] and box[1],
            so each inner pair must span one axis)

    Returns:
        [[x_center, y_center], [w, h]], each value normalized to 0..1
        by the corresponding image dimension.
    """
    dw = 1. / size[0]  # horizontal normalization factor
    dh = 1. / size[1]  # vertical normalization factor
    # Box center in pixels.
    x = (box[0][0] + box[0][1]) / 2.0
    y = (box[1][0] + box[1][1]) / 2.0
    # Box extent in pixels.
    w = box[0][1] - box[0][0]
    h = box[1][1] - box[1][0]
    # Normalize everything to the 0..1 range YOLO expects.
    x = x * dw
    w = w * dw
    y = y * dh
    h = h * dh
    return [[x, y], [w, h]]
|
#!/bin/bash
# Number of benchmark repetitions.
REPEAT=4

# Invoke runner.sh REPEAT times, framing each run with a banner.
run=1
while [ "$run" -le "$REPEAT" ]
do
    echo "Run $run"
    echo "******************************************"
    ./runner.sh
    echo "******************************************"
    run=$((run + 1))
done
|
#!/usr/bin/env bash
# Incremental snapshot backup driver.
#
# Usage: <script> <config>
# The config file is sourced into this shell. NOTE(review): required
# variables inferred from uses below (BACKUP_HOME, NUM_SNAPSHOTS,
# SNAPSHOT_ORIGINS, SNAPSHOT_CREATION_MODE, ...) -- confirm against docs.
if [ ! -f "${1}" ]
then
    echo "Usage: ${0} <config>" >&2
    exit 1
fi

CONFIG="${1}"
echo "Using config '${CONFIG}'"
# shellcheck source=/dev/null
source "${CONFIG}"
# ------------- helpers --------------------------------------

# is_callable NAME...
# Print the number of given names that are callable as a file, alias or
# function (grep -c counts matching lines). "0" means not callable;
# callers compare the printed count, not the exit status.
is_callable() {
    type -t "${@}" | grep -cE 'file|alias|function'
}
# Make a full, hard-link-only copy of LINK_DEST, if that
# exists. Afterwards rsync from the system (SRC) into TGT.
#
# Do this in a single step (time for some rsync magic):
# Use --link-dest flag which upon syncing SRC hard-links to LINK_DEST
# iff the file in SRC is the same.
#
# Arguments: SRC TGT LINK_DEST EXCLUDES(file, optional)
create_rsync_snapshot() {
    SRC="${1}"
    TGT="${2}"
    LINK_DEST="${3}"
    EXCLUDES="${4}"

    # readlink -m canonicalizes both endpoints; trailing / on SRC makes
    # rsync copy the directory's contents rather than the directory itself.
    RSYNC_ARGS=(-va --delete --delete-excluded \
        "$(readlink -m "${SRC}")/" \
        "$(readlink -m "${TGT}")")

    # ... with exclude-from iff available
    if [ -e "${EXCLUDES}" ]
    then
        RSYNC_ARGS=("${RSYNC_ARGS[@]}" "--exclude-from=${EXCLUDES}")
    fi

    # ... with link-dest iff available
    if [ -d "${LINK_DEST}" ]
    then
        RSYNC_ARGS=("${RSYNC_ARGS[@]}" "--link-dest=$(readlink -m "${LINK_DEST}")")
    fi

    # ... run it.
    rsync "${RSYNC_ARGS[@]}"
}
# Plain hardlink copy SRC -> TGT
# (cp -al links every file instead of copying contents, so the snapshot
# shares disk blocks with the source).
create_hardlink_snapshot() {
    SRC="${1}"
    TGT="${2}"
    # ... ignore everything else
    cp -alv "${SRC}" "${TGT}"
}
# ------------- sanity checks --------------------------------------

# NUM_SNAPSHOTS must be an integer >= 1.
if ! [[ "${NUM_SNAPSHOTS}" =~ ^[-0-9]+$ ]] || (( NUM_SNAPSHOTS < 1))
then
    echo "NUM_SNAPSHOTS (${NUM_SNAPSHOTS}) is not an integer that is at least 1." >&2
    exit 1
fi

# Default directory-name prefix for snapshots.
if [ -z "${SNAPSHOT_PREFIX}" ]
then
    SNAPSHOT_PREFIX="snapshot"
fi

# Zero-padding width for snapshot indices: ceil(log10(NUM_SNAPSHOTS))
# unless the config pins NUM_SNAPSHOT_PLACES explicitly.
if [ -z "${NUM_SNAPSHOT_PLACES}" ]
then
    NUM_SNAPSHOT_PLACES=$(echo "${NUM_SNAPSHOTS}" \
        | awk 'function ceil(v){ return (v == int(v))?v: int(v)+1} {printf "%d", ceil(log($1)/log(10))}')
fi

# snapshot_path N -> "<BACKUP_HOME>/<SNAPSHOT_PREFIX>-<zero-padded N>"
snapshot_path () {
    printf "%s/${SNAPSHOT_PREFIX}-%0${NUM_SNAPSHOT_PLACES}d" "${BACKUP_HOME}" "${1}"
}
# The config must not predefine the dispatch hook we are about to create.
if [ "$(is_callable create_new_snapshot)" -ne 0 ]
then
    echo "Config must not overwrite \`create_new_snapshot\`." >&2
    exit 1
fi

# Bind create_new_snapshot to the implementation the config selected.
case "${SNAPSHOT_CREATION_MODE}" in
    'hardlink-rsync')
        create_new_snapshot() {
            create_rsync_snapshot "${@}"
        }
        ;;
    'hardlink')
        create_new_snapshot() {
            create_hardlink_snapshot "${@}"
        }
        ;;
    *)
        echo "Unsupported SNAPSHOT_CREATION_MODE '${SNAPSHOT_CREATION_MODE}'." \
            "Must be 'hardlink-rsync' or 'hardlink'." >&2
        exit 1
esac
# ------------- the script itself --------------------------------------

# STEP 1: delete the oldest snapshot, if it exists:
OLDEST="$(snapshot_path $((NUM_SNAPSHOTS-1)))"
if [ -d "${OLDEST}" ]
then
    rm -rfv "${OLDEST}"
fi

# STEP 2: shift remaining snapshots(s) back by one (including the first)
for NUM in $(seq $((NUM_SNAPSHOTS-2)) -1 0)
do
    OLD="$(snapshot_path "${NUM}")"
    if [ -d "${OLD}" ]
    then
        mv -v "${OLD}" "$(snapshot_path $((NUM+1)))"
    fi
done

# STEP 3: Create a new first snapshot for all SNAPSHOT_ORIGINS.
# SNAPSHOT_ORIGINS is an associative array: source dir -> subdir inside
# the snapshot; snapshot 1 (yesterday's 0) serves as the link-dest base.
LATEST="$(snapshot_path 0)"
mkdir -vp "${LATEST}"
for SRC in "${!SNAPSHOT_ORIGINS[@]}"
do
    create_new_snapshot \
        "${SRC}" \
        "${LATEST}/${SNAPSHOT_ORIGINS[${SRC}]}" \
        "$(snapshot_path 1)/${SNAPSHOT_ORIGINS[${SRC}]}" \
        "${SNAPSHOT_EXCLUDES}"
done

# STEP 4: Sync snapshot config
cp -v "${CONFIG}" "${LATEST}/config.sh"

# STEP 5: update mtime of latest snapshot to reflect the snapshot time
touch "${LATEST}"
|
# Django convention (deprecated since 3.2): names the AppConfig subclass
# to load for this app -- presumably apps/checkout/config.py; confirm.
default_app_config = 'apps.checkout.config.CheckoutConfig'
|
<reponame>belliappa/promotego-org<gh_stars>1-10
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')
# View spec: contactto/new must render a contact form posting to
# /contactto/send_mail and carrying the obfuscated recipient id.
describe "/contactto/new" do
  before(:each) do
    # The template reads the recipient from params[:id].
    params[:id] = "obfuscated"
    render '/contactto/new'
  end

  it "should have a form tag" do
    response.should have_tag('form[action=?]', '/contactto/send_mail') do
      with_tag('input[id=from]')
      with_tag('textarea[id=message]')
      with_tag('input[type=submit]')
      # The obfuscated id travels along as a hidden "email" field.
      with_tag('input[type=hidden][name=email][value=obfuscated]')
    end
  end
end
|
#!/usr/bin/env bash
#############################################################################
# Copyright (c) 2017-2018 SiteWare Corp. All right reserved
#############################################################################

# Fallback numeric ids used to relocate an existing user/group whose id
# collides with the one requested from the host.
UNUSED_USER_ID=21338
UNUSED_GROUP_ID=21337
UNUSED_DOCKER_GROUP_ID=21336

# Find the package manager, Ubuntu uses apt-get, AML uses yum
(type apt-get &> /dev/null) && DOCKER_PKGUPD="apt-get -y update"
(type apt-get &> /dev/null) && DOCKER_PKGMGR="apt-get -y install"
(type yum &> /dev/null) && DOCKER_PKGUPD="true"
(type yum &> /dev/null) && DOCKER_PKGMGR="yum -y install"

# The ensure_group_id_is_free and ensure_user_id_is_free functions come from here
# https://github.com/schmidigital/permission-fix/blob/master/LICENSE
# MIT License
# ensure_group_id_is_free GROUP_NAME WANTED_GID RELOCATION_GID
# Make WANTED_GID available: if another group already owns it, move that
# group to RELOCATION_GID. No-op when the id is free or already owned by
# GROUP_NAME.
function ensure_group_id_is_free() {
    local DOCKER_GROUP=$1
    local HOST_GROUP_ID=$2
    local UNUSED_ID=$3

    echo "EGIF: Check if group with ID $HOST_GROUP_ID already exists"
    DOCKER_GROUP_OLD=`getent group $HOST_GROUP_ID | cut -d: -f1`

    if [ -z "$DOCKER_GROUP_OLD" ]; then
        echo "EGIF: Group ID is free. Good."
    # BUGFIX: was x"DOCKER_GROUP" (literal string, missing $), so the
    # "taken by the right group" branch could never match and the correct
    # group would get needlessly relocated.
    elif [ x"$DOCKER_GROUP_OLD" = x"$DOCKER_GROUP" ]; then
        echo "EGIF: Group ID is taken by the right group"
    else
        echo "EGIF: Group ID is already taken by group: $DOCKER_GROUP_OLD"
        # Report the id actually used ($UNUSED_ID), not the global default.
        echo "EGIF: Changing the ID of $DOCKER_GROUP_OLD group to $UNUSED_ID"
        groupmod -o -g $UNUSED_ID $DOCKER_GROUP_OLD
    fi

    #echo "Changing the ID of $DOCKER_GROUP group to $HOST_GROUP_ID"
    #groupmod -o -g $HOST_GROUP_ID $DOCKER_GROUP || true

    echo "EGIF: Finished"
    echo "EGIF: -- -- -- -- --"
}
# ensure_user_id_is_free USER_NAME WANTED_UID RELOCATION_UID
# Make WANTED_UID available: if another user already owns it, move that
# user to RELOCATION_UID. No-op when the id is free or already owned by
# USER_NAME.
function ensure_user_id_is_free() {
    local DOCKER_USER=$1
    local HOST_USER_ID=$2
    local UNUSED_ID=$3

    # Setting User Permissions
    echo "EUIF: Check if user with ID $HOST_USER_ID already exists"
    DOCKER_USER_OLD=`getent passwd $HOST_USER_ID | cut -d: -f1`

    if [ -z "$DOCKER_USER_OLD" ]; then
        echo "EUIF: User ID is free. Good."
    # BUGFIX: was x"DOCKER_USER" (literal string, missing $), so the
    # "taken by the right user" branch could never match and the correct
    # user would get needlessly relocated.
    elif [ x"$DOCKER_USER_OLD" = x"$DOCKER_USER" ]; then
        echo "EUIF: USER ID is taken by the right user"
    else
        echo "EUIF: User ID is already taken by user: $DOCKER_USER_OLD"
        # Report the id actually used ($UNUSED_ID), not a hard-coded 21337.
        echo "EUIF: Changing the ID of $DOCKER_USER_OLD to $UNUSED_ID"
        usermod -o -u $UNUSED_ID $DOCKER_USER_OLD
    fi

    #echo "Changing the ID of $DOCKER_USER user to $HOST_USER_ID"
    #usermod -o -u $HOST_USER_ID $DOCKER_USER || true

    echo "EUIF: Finished"
}
# If USER is exported by the docker run, mirror that user (and its group)
# inside the container with the host's ids, relocating colliding ids first.
if [ x"$USER" != x"" ] ; then
    (type yum &> /dev/null) && $DOCKER_PKGMGR shadow-utils # for usermod etc
    (type sudo &> /dev/null) || ($DOCKER_PKGUPD && $DOCKER_PKGMGR sudo)
    # Create the group only when GROUP_ID is set and not already owned by $USER.
    if [ x"$GROUP_ID" != x"" -a x"$(getent group $GROUP_ID | cut -d: -f1)" != x"$USER" ] ; then
        ensure_group_id_is_free $USER $GROUP_ID $UNUSED_GROUP_ID
        (type yum &> /dev/null) && groupadd --gid $GROUP_ID $USER
        (type apt-get &> /dev/null) && addgroup --gid $GROUP_ID $USER
    fi
    # Create the user only when USER_ID is set and not already owned by $USER.
    if [ x"$USER_ID" != x"" -a x"$(getent passwd $USER_ID | cut -d: -f1)" != x"$USER" ] ; then
        ensure_user_id_is_free $USER $USER_ID $UNUSED_USER_ID
        (type yum &> /dev/null) && adduser \
            --no-create-home --uid $USER_ID --gid $GROUP_ID $USER
        (type apt-get &> /dev/null) && adduser \
            --disabled-password \
            --no-create-home \
            --gecos '' \
            --uid $USER_ID \
            --ingroup $USER $USER
        # Grant the freshly created user passwordless sudo.
        echo "$USER ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/$USER
    fi
fi
# Create a "docker" group with the host's gid so the user can talk to the
# mounted docker socket.
# BUGFIX: the getent guard previously queried $HOST_USER_ID, which is only
# a local variable inside the ensure_* helpers and is unset here -- so the
# "already exists" check always ran against an empty id. It must inspect
# the gid we are about to claim: $DOCKER_GROUP_ID.
if [ x"$DOCKER_GROUP_ID" != x"" -a \
     x"$(getent group $DOCKER_GROUP_ID | cut -d: -f1)" != x"docker" ] ; then
    ensure_group_id_is_free docker $DOCKER_GROUP_ID $UNUSED_DOCKER_GROUP_ID
    (type yum &> /dev/null) && groupadd --gid $DOCKER_GROUP_ID docker
    (type apt-get &> /dev/null) && addgroup --gid $DOCKER_GROUP_ID docker
    usermod -aG docker $USER
fi
# Optionally run an ssh daemon inside the container.
if [ x"$SD2_EP_SSH" = x"1" ]; then
    (type sshd &> /dev/null) || ($DOCKER_PKGUPD && $DOCKER_PKGMGR openssh-server)
    (type yum &> /dev/null) && service sshd start
    (type apt-get &> /dev/null) && service ssh start
fi

# Optionally pin the container timezone from the host.
if [ -n "$SD2_EP_TZ" ] ; then
    export TZ=$SD2_EP_TZ
    ln -snf /usr/share/zoneinfo/$TZ /etc/localtime
    echo $TZ > /etc/timezone
fi

# Optional user-supplied startup hook.
[ -n "$SD2_EP_SCRIPT" ] && $SD2_EP_SCRIPT

# Drop into an interactive login shell as $USER ...
if [ x"$SD2_EP_SHELL" = x"1" ]; then
    sudo -i -u $USER
fi
# ... and/or keep the container alive indefinitely.
if [ x"$SD2_EP_DAEMON" = x"1" ]; then
    sleep infinity
fi
|
#!/bin/sh
#
# Copyright (c) 2008 Matthew Ogilvie
# Parts adapted from other tests.
#
# Harness-visible description of this test script.
test_description='git-cvsserver -kb modes
tests -kb mode for binary files when accessing a git
repository using cvs CLI client via git-cvsserver server'

# Load the git test harness (defines test_expect_success, skip_all, etc.).
. ./test-lib.sh
# marked_as WORKDIR FILE KMODE
# Succeed iff FILE appears in WORKDIR/CVS/Entries with its sticky-options
# field equal to KMODE ("-kb" for binary, "" for none). Every check is
# also appended to marked.log for post-mortem debugging.
marked_as () {
    foundEntry="$(grep "^/$2/" "$1/CVS/Entries")"
    if [ x"$foundEntry" = x"" ] ; then
        echo "NOT FOUND: $1 $2 1 $3" >> "${WORKDIR}/marked.log"
        return 1
    fi
    # Field 5 of a CVS/Entries line holds the sticky options.
    test x"$(grep "^/$2/" "$1/CVS/Entries" | cut -d/ -f5)" = x"$3"
    stat=$?
    echo "$1 $2 $stat '$3'" >> "${WORKDIR}/marked.log"
    return $stat
}
# not_present WORKDIR FILE
# Succeed iff FILE neither exists on disk under WORKDIR nor is listed in
# WORKDIR/CVS/Entries; logs the outcome to marked.log either way.
not_present() {
    foundEntry="$(grep "^/$2/" "$1/CVS/Entries")"
    if [ -r "$1/$2" ] ; then
        echo "Error: File still exists: $1 $2" >> "${WORKDIR}/marked.log"
        return 1;
    fi
    if [ x"$foundEntry" != x"" ] ; then
        echo "Error: should not have found: $1 $2" >> "${WORKDIR}/marked.log"
        return 1;
    else
        echo "Correctly not found: $1 $2" >> "${WORKDIR}/marked.log"
        return 0;
    fi
}
# check_status_options WORKDIR FILE EXPECTED
# Run `cvs status FILE` inside WORKDIR and succeed iff the reported
# "Sticky Options:" value equals EXPECTED ("" is normalized to the CVS
# placeholder "(none)"). Outcome is logged to marked.log.
check_status_options() {
    (cd "$1" &&
    GIT_CONFIG="$git_config" cvs -Q status "$2" > "${WORKDIR}/status.out" 2>&1
    )
    if [ x"$?" != x"0" ] ; then
        echo "Error from cvs status: $1 $2" >> "${WORKDIR}/marked.log"
        return 1;
    fi
    got="$(sed -n -e 's/^[ ]*Sticky Options:[ ]*//p' "${WORKDIR}/status.out")"
    expect="$3"
    if [ x"$expect" = x"" ] ; then
        expect="(none)"
    fi
    test x"$got" = x"$expect"
    stat=$?
    echo "cvs status: $1 $2 $stat '$3' '$got'" >> "${WORKDIR}/marked.log"
    return $stat
}
# Prerequisite checks: a cvs CLI (invoked with no args it exits 1 when
# installed), perl, and the Perl DBI/SQLite bindings git-cvsserver needs.
cvs >/dev/null 2>&1
if test $? -ne 1
then
    skip_all='skipping git-cvsserver tests, cvs not found'
    test_done
fi
if ! test_have_prereq PERL
then
    skip_all='skipping git-cvsserver tests, perl not available'
    test_done
fi
"$PERL_PATH" -e 'use DBI; use DBD::SQLite' >/dev/null 2>&1 || {
    skip_all='skipping git-cvsserver tests, Perl SQLite interface unavailable'
    test_done
}

unset GIT_DIR GIT_CONFIG

# Layout: WORKDIR is the scratch dir, SERVERDIR the bare repo that
# git-cvsserver serves over the :fork: transport, CVSWORK the checkout.
WORKDIR=$(pwd)
SERVERDIR=$(pwd)/gitcvs.git
git_config="$SERVERDIR/config"
CVSROOT=":fork:$SERVERDIR"
CVSWORK="$(pwd)/cvswork"
CVS_SERVER=git-cvsserver
export CVSROOT CVS_SERVER

rm -rf "$CVSWORK" "$SERVERDIR"
# Seed the repo with text/binary/mixed files plus .gitattributes crlf
# markers, then publish it as a bare repo with gitcvs enabled.
test_expect_success 'setup' '
    echo "Simple text file" >textfile.c &&
    echo "File with embedded NUL: Q <- there" | q_to_nul > binfile.bin &&
    mkdir subdir &&
    echo "Another text file" > subdir/file.h &&
    echo "Another binary: Q (this time CR)" | q_to_cr > subdir/withCr.bin &&
    echo "Mixed up NUL, but marked text: Q <- there" | q_to_nul > mixedUp.c &&
    echo "Unspecified" > subdir/unspecified.other &&
    echo "/*.bin -crlf" > .gitattributes &&
    echo "/*.c crlf" >> .gitattributes &&
    echo "subdir/*.bin -crlf" >> .gitattributes &&
    echo "subdir/*.c crlf" >> .gitattributes &&
    echo "subdir/file.h crlf" >> .gitattributes &&
    git add .gitattributes textfile.c binfile.bin mixedUp.c subdir/* &&
    git commit -q -m "First Commit" &&
    git clone -q --bare "$WORKDIR/.git" "$SERVERDIR" >/dev/null 2>&1 &&
    GIT_DIR="$SERVERDIR" git config --bool gitcvs.enabled true &&
    GIT_DIR="$SERVERDIR" git config gitcvs.logfile "$SERVERDIR/gitcvs.log"
'

# Default config: no file should get any -k sticky option.
test_expect_success 'cvs co (default crlf)' '
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork master >cvs.log 2>&1 &&
    test x"$(grep '/-k' cvswork/CVS/Entries cvswork/subdir/CVS/Entries)" = x""
'

rm -rf cvswork
# gitcvs.allbinary=true: everything is served as -kb.
test_expect_success 'cvs co (allbinary)' '
    GIT_DIR="$SERVERDIR" git config --bool gitcvs.allbinary true &&
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork master >cvs.log 2>&1 &&
    marked_as cvswork textfile.c -kb &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes -kb &&
    marked_as cvswork mixedUp.c -kb &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h -kb &&
    marked_as cvswork/subdir unspecified.other -kb
'

rm -rf cvswork cvs.log
# usecrlfattr + allbinary: crlf attributes win; unattributed files fall
# back to -kb.
test_expect_success 'cvs co (use attributes/allbinary)' '
    GIT_DIR="$SERVERDIR" git config --bool gitcvs.usecrlfattr true &&
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork master >cvs.log 2>&1 &&
    marked_as cvswork textfile.c "" &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes -kb &&
    marked_as cvswork mixedUp.c "" &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h "" &&
    marked_as cvswork/subdir unspecified.other -kb
'

rm -rf cvswork
# Attributes only: unattributed files default to text.
test_expect_success 'cvs co (use attributes)' '
    GIT_DIR="$SERVERDIR" git config --bool gitcvs.allbinary false &&
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork master >cvs.log 2>&1 &&
    marked_as cvswork textfile.c "" &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes "" &&
    marked_as cvswork mixedUp.c "" &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h "" &&
    marked_as cvswork/subdir unspecified.other ""
'

# cvs add must honor the attribute rules for newly added files, both
# before and after commit.
test_expect_success 'adding files' '
    (cd cvswork &&
    (cd subdir &&
    echo "more text" > src.c &&
    GIT_CONFIG="$git_config" cvs -Q add src.c >cvs.log 2>&1 &&
    marked_as . src.c "" &&
    echo "psuedo-binary" > temp.bin
    ) &&
    GIT_CONFIG="$git_config" cvs -Q add subdir/temp.bin >cvs.log 2>&1 &&
    marked_as subdir temp.bin "-kb" &&
    cd subdir &&
    GIT_CONFIG="$git_config" cvs -Q ci -m "adding files" >cvs.log 2>&1 &&
    marked_as . temp.bin "-kb" &&
    marked_as . src.c ""
    )
'
# Change files on the git side, then `cvs update` the checkout and verify
# sticky options and merged binary content.
# BUGFIX: the echo arguments used single quotes *inside* the single-quoted
# test_expect_success body ('hi', 'junk', 'hello'); those quotes terminate
# and reopen the outer string, so the words survived only by accidental
# concatenation and would break if they ever contained spaces. The rest of
# this file uses double quotes inside test bodies -- do the same here.
test_expect_success 'updating' '
    git pull gitcvs.git &&

    echo "hi" > subdir/newfile.bin &&
    echo "junk" > subdir/file.h &&
    echo "hi" > subdir/newfile.c &&
    echo "hello" >> binfile.bin &&
    git add subdir/newfile.bin subdir/file.h subdir/newfile.c binfile.bin &&
    git commit -q -m "Add and change some files" &&
    git push gitcvs.git >/dev/null &&

    (cd cvswork &&
    GIT_CONFIG="$git_config" cvs -Q update
    ) &&

    marked_as cvswork textfile.c "" &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes "" &&
    marked_as cvswork mixedUp.c "" &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h "" &&
    marked_as cvswork/subdir unspecified.other "" &&
    marked_as cvswork/subdir newfile.bin -kb &&
    marked_as cvswork/subdir newfile.c "" &&

    echo "File with embedded NUL: Q <- there" | q_to_nul > tmpExpect1 &&
    echo "hello" >> tmpExpect1 &&
    cmp cvswork/binfile.bin tmpExpect1
'
rm -rf cvswork
# allbinary=guess with crlf attributes: attributes win, content guessing
# only applies to unattributed files.
test_expect_success 'cvs co (use attributes/guess)' '
    GIT_DIR="$SERVERDIR" git config gitcvs.allbinary guess &&
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork master >cvs.log 2>&1 &&
    marked_as cvswork textfile.c "" &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes "" &&
    marked_as cvswork mixedUp.c "" &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h "" &&
    marked_as cvswork/subdir unspecified.other "" &&
    marked_as cvswork/subdir newfile.bin -kb &&
    marked_as cvswork/subdir newfile.c ""
'

# Fixtures for content-based guessing: one file with a NUL, one without.
test_expect_success 'setup multi-line files' '
    ( echo "line 1" &&
    echo "line 2" &&
    echo "line 3" &&
    echo "line 4 with NUL: Q <-" ) | q_to_nul > multiline.c &&
    git add multiline.c &&
    ( echo "line 1" &&
    echo "line 2" &&
    echo "line 3" &&
    echo "line 4" ) | q_to_nul > multilineTxt.c &&
    git add multilineTxt.c &&
    git commit -q -m "multiline files" &&
    git push gitcvs.git >/dev/null
'

rm -rf cvswork
# Pure guessing (attributes disabled): NUL-bearing files become -kb.
test_expect_success 'cvs co (guess)' '
    GIT_DIR="$SERVERDIR" git config --bool gitcvs.usecrlfattr false &&
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork master >cvs.log 2>&1 &&
    marked_as cvswork textfile.c "" &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes "" &&
    marked_as cvswork mixedUp.c -kb &&
    marked_as cvswork multiline.c -kb &&
    marked_as cvswork multilineTxt.c "" &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h "" &&
    marked_as cvswork/subdir unspecified.other "" &&
    marked_as cvswork/subdir newfile.bin "" &&
    marked_as cvswork/subdir newfile.c ""
'

# A second checkout must get identical markings.
test_expect_success 'cvs co another copy (guess)' '
    GIT_CONFIG="$git_config" cvs -Q co -d cvswork2 master >cvs.log 2>&1 &&
    marked_as cvswork2 textfile.c "" &&
    marked_as cvswork2 binfile.bin -kb &&
    marked_as cvswork2 .gitattributes "" &&
    marked_as cvswork2 mixedUp.c -kb &&
    marked_as cvswork2 multiline.c -kb &&
    marked_as cvswork2 multilineTxt.c "" &&
    marked_as cvswork2/subdir withCr.bin -kb &&
    marked_as cvswork2/subdir file.h "" &&
    marked_as cvswork2/subdir unspecified.other "" &&
    marked_as cvswork2/subdir newfile.bin "" &&
    marked_as cvswork2/subdir newfile.c ""
'

# `cvs status` must report the same sticky options as CVS/Entries,
# whether queried from the top or from inside the subdir.
test_expect_success 'cvs status - sticky options' '
    check_status_options cvswork2 textfile.c "" &&
    check_status_options cvswork2 binfile.bin -kb &&
    check_status_options cvswork2 .gitattributes "" &&
    check_status_options cvswork2 mixedUp.c -kb &&
    check_status_options cvswork2 multiline.c -kb &&
    check_status_options cvswork2 multilineTxt.c "" &&
    check_status_options cvswork2/subdir withCr.bin -kb &&
    check_status_options cvswork2 subdir/withCr.bin -kb &&
    check_status_options cvswork2/subdir file.h "" &&
    check_status_options cvswork2 subdir/file.h "" &&
    check_status_options cvswork2/subdir unspecified.other "" &&
    check_status_options cvswork2/subdir newfile.bin "" &&
    check_status_options cvswork2/subdir newfile.c ""
'
# Guess mode on cvs add: content without NULs is text ...
test_expect_success 'add text (guess)' '
    (cd cvswork &&
    echo "simpleText" > simpleText.c &&
    GIT_CONFIG="$git_config" cvs -Q add simpleText.c
    ) &&
    marked_as cvswork simpleText.c ""
'

# ... and content with an embedded NUL is binary.
test_expect_success 'add bin (guess)' '
    (cd cvswork &&
    echo "simpleBin: NUL: Q <- there" | q_to_nul > simpleBin.bin &&
    GIT_CONFIG="$git_config" cvs -Q add simpleBin.bin
    ) &&
    marked_as cvswork simpleBin.bin -kb
'

# cvs rm only schedules removal; entries keep their markings until commit.
test_expect_success 'remove files (guess)' '
    (cd cvswork &&
    GIT_CONFIG="$git_config" cvs -Q rm -f subdir/file.h &&
    (cd subdir &&
    GIT_CONFIG="$git_config" cvs -Q rm -f withCr.bin
    )) &&
    marked_as cvswork/subdir withCr.bin -kb &&
    marked_as cvswork/subdir file.h ""
'

# After commit the removed files are gone and the additions keep their
# guessed markings.
test_expect_success 'cvs ci (guess)' '
    (cd cvswork &&
    GIT_CONFIG="$git_config" cvs -Q ci -m "add/rm files" >cvs.log 2>&1
    ) &&
    marked_as cvswork textfile.c "" &&
    marked_as cvswork binfile.bin -kb &&
    marked_as cvswork .gitattributes "" &&
    marked_as cvswork mixedUp.c -kb &&
    marked_as cvswork multiline.c -kb &&
    marked_as cvswork multilineTxt.c "" &&
    not_present cvswork/subdir withCr.bin &&
    not_present cvswork/subdir file.h &&
    marked_as cvswork/subdir unspecified.other "" &&
    marked_as cvswork/subdir newfile.bin "" &&
    marked_as cvswork/subdir newfile.c "" &&
    marked_as cvswork simpleBin.bin -kb &&
    marked_as cvswork simpleText.c ""
'

# Updating only the subdir of the second checkout must not pull in the
# top-level additions.
test_expect_success 'update subdir of other copy (guess)' '
    (cd cvswork2/subdir &&
    GIT_CONFIG="$git_config" cvs -Q update
    ) &&
    marked_as cvswork2 textfile.c "" &&
    marked_as cvswork2 binfile.bin -kb &&
    marked_as cvswork2 .gitattributes "" &&
    marked_as cvswork2 mixedUp.c -kb &&
    marked_as cvswork2 multiline.c -kb &&
    marked_as cvswork2 multilineTxt.c "" &&
    not_present cvswork2/subdir withCr.bin &&
    not_present cvswork2/subdir file.h &&
    marked_as cvswork2/subdir unspecified.other "" &&
    marked_as cvswork2/subdir newfile.bin "" &&
    marked_as cvswork2/subdir newfile.c "" &&
    not_present cvswork2 simpleBin.bin &&
    not_present cvswork2 simpleText.c
'

echo "starting update/merge" >> "${WORKDIR}/marked.log"
# Concurrent edits on different lines of the text file must merge cleanly
# during cvs update, leaving both replacements in the result.
test_expect_success 'update/merge full other copy (guess)' '
    git pull gitcvs.git master &&
    sed "s/3/replaced_3/" < multilineTxt.c > ml.temp &&
    mv ml.temp multilineTxt.c &&
    git add multilineTxt.c &&
    git commit -q -m "modify multiline file" >> "${WORKDIR}/marked.log" &&
    git push gitcvs.git >/dev/null &&
    (cd cvswork2 &&
    sed "s/1/replaced_1/" < multilineTxt.c > ml.temp &&
    mv ml.temp multilineTxt.c &&
    GIT_CONFIG="$git_config" cvs update > cvs.log 2>&1
    ) &&
    marked_as cvswork2 textfile.c "" &&
    marked_as cvswork2 binfile.bin -kb &&
    marked_as cvswork2 .gitattributes "" &&
    marked_as cvswork2 mixedUp.c -kb &&
    marked_as cvswork2 multiline.c -kb &&
    marked_as cvswork2 multilineTxt.c "" &&
    not_present cvswork2/subdir withCr.bin &&
    not_present cvswork2/subdir file.h &&
    marked_as cvswork2/subdir unspecified.other "" &&
    marked_as cvswork2/subdir newfile.bin "" &&
    marked_as cvswork2/subdir newfile.c "" &&
    marked_as cvswork2 simpleBin.bin -kb &&
    marked_as cvswork2 simpleText.c "" &&
    echo "line replaced_1" > tmpExpect2 &&
    echo "line 2" >> tmpExpect2 &&
    echo "line replaced_3" >> tmpExpect2 &&
    echo "line 4" | q_to_nul >> tmpExpect2 &&
    cmp cvswork2/multilineTxt.c tmpExpect2
'

test_done
|
package com.knyaz.testtask.api.rx_tasks;
import io.reactivex.Observable;
/**
 * Base class for RxJava-backed API tasks.
 *
 * NOTE(review): the {@code getTask()} override below only delegates to the
 * superclass. It may exist to widen the visibility of an inherited member
 * -- {@code ObservableTask} is not visible from here, so confirm before
 * removing it as redundant.
 */
public abstract class ApiTask<R> extends ObservableTask<R> {

    /**
     * @param executeOnNewThread forwarded to ObservableTask; controls
     *                           whether the task executes on a new thread.
     */
    public ApiTask(boolean executeOnNewThread) {
        super(executeOnNewThread);
    }

    /** Convenience constructor: defaults to executing on a new thread. */
    public ApiTask() {
        super(true);
    }

    @Override
    public Observable<R> getTask() {
        return super.getTask();
    }
}
def min_edit_distance(str1, str2):
    """Levenshtein distance between str1 and str2 via dynamic programming.

    Returns the minimum number of single-character insertions, deletions
    and substitutions required to transform str1 into str2.
    """
    rows, cols = len(str1), len(str2)
    # dp[i][j] = edit distance between str1[:i] and str2[:j]
    dp = [[0] * (cols + 1) for _ in range(rows + 1)]
    # Base cases: reach str2[:j] from "" by j insertions,
    # reach "" from str1[:i] by i deletions.
    for j in range(cols + 1):
        dp[0][j] = j
    for i in range(rows + 1):
        dp[i][0] = i
    # Fill the table bottom-up.
    for i in range(1, rows + 1):
        for j in range(1, cols + 1):
            if str1[i - 1] == str2[j - 1]:
                # Matching characters cost nothing extra.
                dp[i][j] = dp[i - 1][j - 1]
            else:
                # Cheapest of delete, insert, substitute.
                dp[i][j] = 1 + min(dp[i - 1][j],
                                   dp[i][j - 1],
                                   dp[i - 1][j - 1])
    return dp[rows][cols]
<filename>libs/sdk-ui-ext/src/internal/components/pluggableVisualizations/PluggableColumnBarCharts.tsx
// (C) 2019 GoodData Corporation
import get from "lodash/get";
import set from "lodash/set";
import {
bucketIsEmpty,
bucketsItems,
IInsight,
IInsightDefinition,
insightBucket,
insightBuckets,
} from "@gooddata/sdk-model";
import { arrayUtils } from "@gooddata/util";
import {
BucketNames,
getIntersectionPartAfter,
IDrillEvent,
IDrillEventIntersectionElement,
} from "@gooddata/sdk-ui";
import { AXIS } from "../../constants/axis";
import { BUCKETS } from "../../constants/bucket";
import { MAX_CATEGORIES_COUNT, MAX_STACKS_COUNT, UICONFIG, UICONFIG_AXIS } from "../../constants/uiConfig";
import { drillDownFromAttributeLocalId } from "../../utils/ImplicitDrillDownHelper";
import {
IBucketOfFun,
IDrillDownContext,
IExtendedReferencePoint,
IImplicitDrillDown,
IReferencePoint,
IVisConstruct,
} from "../../interfaces/Visualization";
import {
getAllCategoriesAttributeItems,
getDateItems,
getFilteredMeasuresForStackedCharts,
getStackItems,
isDateBucketItem,
isNotDateBucketItem,
} from "../../utils/bucketHelper";
import {
getReferencePointWithSupportedProperties,
isStackingMeasure,
isStackingToPercent,
removeImmutableOptionalStackingProperties,
setSecondaryMeasures,
} from "../../utils/propertiesHelper";
import { setColumnBarChartUiConfig } from "../../utils/uiConfigHelpers/columnBarChartUiConfigHelper";
import { PluggableBaseChart } from "./baseChart/PluggableBaseChart";
import { addIntersectionFiltersToInsight, modifyBucketsAttributesForDrillDown } from "./drillDownUtil";
/**
 * Pluggable visualization shared by column and bar charts: extends the base
 * chart with stacked-chart bucket handling (measures / view-by / stack-by),
 * dual-axis defaults, and column/bar-specific drill-down intersection
 * adjustment.
 */
export class PluggableColumnBarCharts extends PluggableBaseChart {
    constructor(props: IVisConstruct) {
        super(props);
        // set default to DUAL to get the full supported props list
        // and will be updated in getExtendedReferencePoint
        this.axis = AXIS.DUAL;
        this.supportedPropertiesList = this.getSupportedPropertiesList();
    }

    /**
     * Builds the extended reference point: resolves secondary measures,
     * derives the effective axis, prunes stacking properties that cannot
     * apply, and applies the column/bar ui config.
     */
    public getExtendedReferencePoint(referencePoint: IReferencePoint): Promise<IExtendedReferencePoint> {
        // reset the list to retrieve full 'referencePoint.properties.controls'
        this.supportedPropertiesList = this.getSupportedPropertiesList();
        return super.getExtendedReferencePoint(referencePoint).then((ext: IExtendedReferencePoint) => {
            let newExt = setSecondaryMeasures(ext, this.secondaryAxis);
            this.axis = get(newExt, UICONFIG_AXIS, AXIS.PRIMARY);
            // filter out unnecessary stacking props for some specific cases such as one measure or empty stackBy
            this.supportedPropertiesList = removeImmutableOptionalStackingProperties(
                newExt,
                this.getSupportedPropertiesList(),
            );
            newExt = getReferencePointWithSupportedProperties(newExt, this.supportedPropertiesList);
            return setColumnBarChartUiConfig(newExt, this.intl);
        });
    }

    /** "Open as report" is disabled for multi-view-item or stacked setups. */
    public isOpenAsReportSupported(): boolean {
        return (
            super.isOpenAsReportSupported() &&
            !haveManyViewItems(this.currentInsight) &&
            !isStackingMeasure(this.visualizationProperties) &&
            !isStackingToPercent(this.visualizationProperties)
        );
    }

    // When the insight has a non-empty stack bucket, the drill intersection
    // is rotated one position to the right before use.
    private adjustIntersectionForColumnBar(
        source: IInsight,
        event: IDrillEvent,
    ): IDrillEventIntersectionElement[] {
        const stackBucket = insightBucket(source, BucketNames.STACK);
        const hasStackByAttributes = stackBucket && !bucketIsEmpty(stackBucket);
        const intersection = event.drillContext.intersection;
        return hasStackByAttributes ? arrayUtils.shiftArrayRight(intersection) : intersection;
    }

    // Turns the intersection elements after the drilled attribute into
    // filters on the insight.
    private addFiltersForColumnBar(source: IInsight, drillConfig: IImplicitDrillDown, event: IDrillEvent) {
        const clicked = drillDownFromAttributeLocalId(drillConfig);
        const reorderedIntersection = this.adjustIntersectionForColumnBar(source, event);
        const cutIntersection = getIntersectionPartAfter(reorderedIntersection, clicked);
        return addIntersectionFiltersToInsight(source, cutIntersection);
    }

    /** Applies an implicit drill-down: intersection filters first, then attribute modification. */
    public getInsightWithDrillDownApplied(source: IInsight, drillDownContext: IDrillDownContext): IInsight {
        const withFilters = this.addFiltersForColumnBar(
            source,
            drillDownContext.drillDefinition,
            drillDownContext.event,
        );
        return modifyBucketsAttributesForDrillDown(withFilters, drillDownContext.drillDefinition);
    }

    /**
     * Distributes bucket items into measures / view / stack buckets:
     * - view-by takes up to the configured item limit;
     * - the first date item is forced into the view bucket when no date is
     *   there already (shrinking the attribute allotment by one);
     * - leftover non-date attributes become stacks only when there are no
     *   explicit stacks and at most one measure.
     */
    protected configureBuckets(extendedReferencePoint: IExtendedReferencePoint): void {
        const buckets: IBucketOfFun[] = get(extendedReferencePoint, BUCKETS, []);
        const measures = getFilteredMeasuresForStackedCharts(buckets);
        const dateItems = getDateItems(buckets);
        const categoriesCount: number = get(
            extendedReferencePoint,
            [UICONFIG, BUCKETS, BucketNames.VIEW, "itemsLimit"],
            MAX_CATEGORIES_COUNT,
        );
        const allAttributesWithoutStacks = getAllCategoriesAttributeItems(buckets);
        let views = allAttributesWithoutStacks.slice(0, categoriesCount);
        const hasDateItemInViewByBucket = views.some(isDateBucketItem);
        let stackItemIndex = categoriesCount;
        let stacks = getStackItems(buckets);
        if (dateItems.length && !hasDateItemInViewByBucket) {
            const extraViewItems = allAttributesWithoutStacks.slice(0, categoriesCount - 1);
            views = [dateItems[0], ...extraViewItems];
            stackItemIndex = categoriesCount - 1;
        }
        if (!stacks.length && measures.length <= 1 && allAttributesWithoutStacks.length > stackItemIndex) {
            stacks = allAttributesWithoutStacks
                .slice(stackItemIndex, allAttributesWithoutStacks.length)
                .filter(isNotDateBucketItem)
                .slice(0, MAX_STACKS_COUNT);
        }
        set(extendedReferencePoint, BUCKETS, [
            {
                localIdentifier: BucketNames.MEASURES,
                items: measures,
            },
            {
                localIdentifier: BucketNames.VIEW,
                items: views,
            },
            {
                localIdentifier: BucketNames.STACK,
                items: stacks,
            },
        ]);
    }
}
/** True when the insight's view-by bucket holds more than one item. */
function haveManyViewItems(insight: IInsightDefinition): boolean {
    const viewItems = bucketsItems(insightBuckets(insight, BucketNames.VIEW));
    return viewItems.length > 1;
}
|
define( [
    "qunit",
    "jquery",

    // Imported only for its side effect: registers the checkboxradio
    // widget on $.
    "ui/widgets/checkboxradio"
], function( QUnit, $ ) {

QUnit.module( "Checkboxradio: methods" );

// refresh / disable / widget behave the same for both input types, so the
// same suite runs once per type.
$.each( [ "checkbox", "radio" ], function( index, value ) {

QUnit.test( value + ": refresh", function( assert ) {
    var widget, icon,
        checkbox = value === "checkbox",

        // Fixture input for this type, provided by the test page markup.
        input = $( "#" + value + "-method-refresh" );

    // Radios have no check icon, so they make three fewer assertions.
    assert.expect( checkbox ? 11 : 8 );

    input.checkboxradio();
    widget = input.checkboxradio( "widget" );
    icon = widget.find( ".ui-icon" );
    assert.strictEqual( icon.length, 1,
        "There is initally one icon" );

    // Removing the icon and refreshing must recreate it.
    icon.remove();
    input.checkboxradio( "refresh" );
    icon = widget.find( ".ui-icon" );
    assert.strictEqual( icon.length, 1,
        "Icon is recreated on refresh if absent" );
    assert.hasClasses( icon, "ui-icon-blank" );
    if ( checkbox ) {
        assert.lacksClasses( icon, "ui-icon-check" );
    }
    assert.lacksClasses( widget, "ui-checkboxradio-checked" );

    // Changing the underlying input is only reflected after a refresh.
    input.prop( "checked", true );
    input.checkboxradio( "refresh" );
    if ( checkbox ) {
        assert.hasClasses( icon, "ui-icon-check" );
    }

    // Checked radios keep the blank icon; checked checkboxes swap it out.
    assert[ !checkbox ? "hasClasses" : "lacksClasses" ]( icon, "ui-icon-blank" );
    assert.hasClasses( widget, "ui-checkboxradio-checked" );

    // Unchecking + refresh restores the initial visual state.
    input.prop( "checked", false );
    input.checkboxradio( "refresh" );
    assert.hasClasses( icon, "ui-icon-blank" );
    if ( checkbox ) {
        assert.lacksClasses( icon, "ui-icon-check" );
    }
    assert.lacksClasses( widget, "ui-checkboxradio-checked" );
} );

// SKIPPED: because this test does not work with jQuery >= 3.4.0
// QUnit.test( value + ": destroy", function( assert ) {
// assert.expect( 1 );
// assert.domEqual( "#" + value + "-method-destroy", function() {
// $( "#" + value + "-method-destroy" ).checkboxradio().checkboxradio( "destroy" );
// } );
// } );

QUnit.test( value + ": disable / enable", function( assert ) {
    assert.expect( 4 );
    var input = $( "#" + value + "-method-disable" ),
        widget = input.checkboxradio().checkboxradio( "widget" );

    // disable() must affect both the wrapper class and the input itself.
    input.checkboxradio( "disable" );
    assert.hasClasses( widget, "ui-state-disabled" );
    assert.strictEqual( input.is( ":disabled" ), true,
        value + " is disabled when disable is called" );

    input.checkboxradio( "enable" );
    assert.lacksClasses( widget, "ui-state-disabled" );
    assert.strictEqual( input.is( ":disabled" ), false,
        value + " has disabled prop removed when enable is called" );
} );

QUnit.test( value + ": widget returns the label", function( assert ) {
    assert.expect( 1 );
    var input = $( "#" + value + "-method-refresh" ),
        label = $( "#" + value + "-method-refresh-label" );

    input.checkboxradio();
    assert.strictEqual( input.checkboxradio( "widget" )[ 0 ], label[ 0 ],
        "widget method returns label" );
} );

} );

// Regression check: an input nested inside its label (no "for" attribute)
// must stay in place across refresh.
QUnit.test( "Input wrapped in a label preserved on refresh", function( assert ) {
    var input = $( "#label-with-no-for" ).checkboxradio(),
        element = input.checkboxradio( "widget" );

    assert.expect( 1 );
    input.checkboxradio( "refresh" );
    assert.strictEqual( input.parent()[ 0 ], element[ 0 ], "Input preserved" );
} );

} );
|
#!/usr/bin/env bash
#
# Generate new rails server
#
# Creates a throwaway API-only Rails app named "restingrail", adds the
# elasticsearch gem, generates an Elastic model mirroring the alert JSON
# shown below, generates an api/v1 controller, switches the generated
# routes from GET to PUT, and starts the server.
#

# Fail fast: without this, a failed `rails new` or `cd` used to let every
# later command run in the parent directory.
set -e

project="restingrail"

# Start from a clean slate; the previously generated app is disposable.
rm -rf "$project"
rails new "$project" --api
cd "$project" || exit 1

echo "gem 'elasticsearch'" >> Gemfile
bundle install

# Regenerate binstubs for the new app.
rm -rf ./bin
rake app:update:bin
#bundle install --binstubs

# Sample event record the Elastic model below is modelled after:
# [
#   {
#     "sid":"1",
#     "cid":"1",
#     "signature":"[OSSEC] Interface entered in promiscuous(sniffing) mode.",
#     "signature_gen":"10001",
#     "signature_id":"5104",
#     "signature_rev":"1",
#     "timestamp":"2018-11-02 19:32:58",
#     "unified_event_id":"1",
#     "unified_event_ref":"1",
#     "unified_ref_time":"2018-11-02 19:32:58",
#     "priority":"8",
#     "class":"",
#     "status":"0",
#     "src_ip":"0",
#     "dst_ip":"0",
#     "src_port":null,
#     "dst_port":null,
#     "icmp_type":null,
#     "icmp_code":null,
#     "ip_proto":"0",
#     "ip_ver":"0",
#     "ip_hlen":"0",
#     "ip_tos":"0",
#     "ip_len":"0",
#     "ip_id":"0",
#     "ip_flags":"0",
#     "ip_off":"0",
#     "ip_ttl":"0",
#     "ip_csum":"0",
#     "last_modified":null,
#     "last_uid":null,
#     "abuse_queue":null,
#     "abuse_sent":null
#   }
# ]
./bin/rails generate model Elastic \
    sid:string \
    cid:string \
    signature:string \
    signature_gen:string \
    signature_id:string \
    signature_rev:string \
    timestamp:string \
    unified_event_id:string \
    unified_event_ref:string \
    unified_ref_time:string \
    priority:string \
    class:string \
    status:string \
    src_ip:string \
    dst_ip:string \
    src_port:string \
    dst_port:string \
    icmp_type:string \
    icmp_code:string \
    ip_proto:string \
    ip_ver:string \
    ip_hlen:string \
    ip_tos:string \
    ip_len:string \
    ip_id:string \
    ip_flags:string \
    ip_off:string \
    ip_ttl:string \
    ip_csum:string \
    last_modified:string \
    last_uid:string \
    abuse_queue:string \
    abuse_sent:string
./bin/rails db:migrate
#./bin/rails generate controller api new create update edit destroy index show
./bin/rails generate controller api v1
# The generated routes use GET verbs; this API only accepts PUT.
sed -i 's/get/put/' config/routes.rb
cp -av ../api_controller.rb app/controllers/api_controller.rb
./bin/rails server
|
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

/*
 * Purchased.java
 *
 * Created on 24 Dec, 2018, 8:42:56 AM
 */
package cars;

import javax.swing.ImageIcon;
import java.sql.*;
import javax.swing.JOptionPane;
import java.awt.Cursor;

/**
 * Confirmation window shown after a purchase. On opening it loads the most
 * recent row from the {@code orders} table and shows a one-line summary;
 * "Check Orders" navigates to the order list, "Exit" returns to the
 * vehicle screen.
 *
 * @author DEll
 */
public class Purchased extends javax.swing.JFrame {

    /** Creates new form Purchased */
    public Purchased() {
        initComponents();
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jPanel1 = new javax.swing.JPanel();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jCheck_Orders_BT = new javax.swing.JButton();
        jPurchased_L = new javax.swing.JLabel();
        jExit_L = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setIconImage(new ImageIcon("D:\\Apps\\k\\a\\s\\form.jpg").getImage());
        addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowOpened(java.awt.event.WindowEvent evt) {
                formWindowOpened(evt);
            }
        });
        getContentPane().setLayout(null);

        jPanel1.setBackground(new java.awt.Color(0, 102, 102));
        jPanel1.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Congratulation", javax.swing.border.TitledBorder.LEFT, javax.swing.border.TitledBorder.ABOVE_TOP, new java.awt.Font("Nyala", 0, 36), new java.awt.Color(255, 255, 0))); // NOI18N
        jPanel1.setOpaque(false);

        jLabel1.setFont(new java.awt.Font("Nyala", 0, 24));
        jLabel1.setForeground(new java.awt.Color(0, 255, 255));
        jLabel1.setText("You have purchased");

        jCheck_Orders_BT.setFont(new java.awt.Font("Nyala", 0, 18));
        jCheck_Orders_BT.setText("Check Orders");
        jCheck_Orders_BT.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseEntered(java.awt.event.MouseEvent evt) {
                jCheck_Orders_BTMouseEntered(evt);
            }
        });
        jCheck_Orders_BT.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jCheck_Orders_BTActionPerformed(evt);
            }
        });

        jPurchased_L.setBackground(new java.awt.Color(255, 255, 0));
        jPurchased_L.setFont(new java.awt.Font("Tahoma", 1, 14));
        jPurchased_L.setForeground(new java.awt.Color(255, 255, 0));

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addGap(344, 344, 344)
                        .addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 278, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addGap(113, 113, 113)
                        .addComponent(jCheck_Orders_BT))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addContainerGap()
                        .addComponent(jPurchased_L, javax.swing.GroupLayout.PREFERRED_SIZE, 322, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addContainerGap()
                        .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 233, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 30, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 7, Short.MAX_VALUE)
                .addComponent(jPurchased_L, javax.swing.GroupLayout.PREFERRED_SIZE, 33, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(18, 18, 18)
                .addComponent(jCheck_Orders_BT, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap())
        );

        getContentPane().add(jPanel1);
        jPanel1.setBounds(80, 60, 360, 223);

        jExit_L.setFont(new java.awt.Font("Nyala", 0, 18));
        jExit_L.setForeground(new java.awt.Color(204, 0, 0));
        jExit_L.setText("Exit");
        jExit_L.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                jExit_LMouseClicked(evt);
            }
            public void mouseEntered(java.awt.event.MouseEvent evt) {
                jExit_LMouseEntered(evt);
            }
        });
        getContentPane().add(jExit_L);
        jExit_L.setBounds(450, 10, 29, 24);

        jLabel3.setIcon(new javax.swing.ImageIcon(getClass().getResource("/cars/purch.jpg"))); // NOI18N
        getContentPane().add(jLabel3);
        jLabel3.setBounds(0, -10, 580, 320);

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /** Opens the order list window and disposes this one. */
    private void jCheck_Orders_BTActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jCheck_Orders_BTActionPerformed
        // TODO add your handling code here:
        new Your_Orders().setVisible(true);
        dispose();
    }//GEN-LAST:event_jCheck_Orders_BTActionPerformed

    /** Returns to the vehicle screen and disposes this window. */
    private void jExit_LMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jExit_LMouseClicked
        // TODO add your handling code here:
        new Vehicle().setVisible(true);
        dispose();
    }//GEN-LAST:event_jExit_LMouseClicked

    /** Hand cursor over the "Exit" label to signal it is clickable. */
    private void jExit_LMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jExit_LMouseEntered
        // TODO add your handling code here:
        jExit_L.setCursor(new Cursor(Cursor.HAND_CURSOR));
    }//GEN-LAST:event_jExit_LMouseEntered

    /** Hand cursor over the "Check Orders" button. */
    private void jCheck_Orders_BTMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jCheck_Orders_BTMouseEntered
        // TODO add your handling code here:
        jCheck_Orders_BT.setCursor(new Cursor(Cursor.HAND_CURSOR));
    }//GEN-LAST:event_jCheck_Orders_BTMouseEntered

    /** Loads the newest row of the orders table and shows its summary. */
    private void formWindowOpened(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowOpened
        // TODO add your handling code here:
        try{
            // NOTE(review): java.sql.Driver is an interface; loading it does not
            // register the MySQL driver. This presumably relies on JDBC 4
            // auto-loading the driver from the classpath -- confirm.
            Class.forName("java.sql.Driver");
            Connection con=(Connection)DriverManager.getConnection
            ("jdbc:mysql://localhost:3306/cars","root","");
            Statement stmt=con.createStatement();
            String query="select *from orders;";
            ResultSet rs=stmt.executeQuery(query);
            // rs.last() returns false on an empty table; the original called
            // getString() unconditionally, which throws SQLException then.
            if (rs.last()) {
                String model=rs.getString("model_no");
                String vehicle=rs.getString("vehicle");
                int price=rs.getInt("price");
                jPurchased_L.setText("1 "+vehicle+" with Model_no "+model+" of price Rs. "+price);
            }
            // Close in reverse order of acquisition; the original closed the
            // connection first, which invalidates the statement and result set.
            rs.close();
            stmt.close();
            con.close();
        }catch(Exception e){
            JOptionPane.showMessageDialog(this, e.getMessage());
        }
    }//GEN-LAST:event_formWindowOpened

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new Purchased().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jCheck_Orders_BT;
    private javax.swing.JLabel jExit_L;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JLabel jPurchased_L;
    // End of variables declaration//GEN-END:variables
}
|
using System;
using System.Collections.Generic;
/// <summary>
/// A FIFO queue that silently ignores attempts to enqueue an item it already
/// contains. A backing <see cref="HashSet{T}"/> gives O(1) membership checks.
/// An item dequeued from the queue may be enqueued again afterwards.
/// </summary>
public class UniqueQueue<T>
{
    private readonly Queue<T> queue = new Queue<T>();
    private readonly HashSet<T> set = new HashSet<T>();

    /// <summary>Adds the item at the back of the queue unless it is already queued.</summary>
    public void Enqueue(T item)
    {
        // HashSet<T>.Add reports whether the item was newly added, so a
        // single hash lookup replaces the original Contains-then-Add pair.
        if (set.Add(item))
        {
            queue.Enqueue(item);
        }
    }

    /// <summary>
    /// Removes and returns the front item. Throws
    /// <see cref="InvalidOperationException"/> when the queue is empty
    /// (propagated from <see cref="Queue{T}.Dequeue"/>).
    /// </summary>
    public T Dequeue()
    {
        T item = queue.Dequeue();
        set.Remove(item);
        return item;
    }

    /// <summary>Returns the front item without removing it.</summary>
    public T Peek()
    {
        return queue.Peek();
    }

    /// <summary>True when the item is currently queued.</summary>
    public bool Contains(T item)
    {
        return set.Contains(item);
    }
}
#!/usr/bin/env bash
# Demonstrates the common bash conditional constructs: string/file/number
# tests, &&/|| short-circuits, functions used as conditions, and case
# statements.

# String comparison against command output.
if [ "$(whoami)" == "wim" ]; then
    echo "Name is Wim"
else
    echo "Name is not Wim"
fi

# -f: regular file exists.
FILE="test.sh"
if [ -f "$FILE" ]; then
    echo "File $FILE exists"
else
    echo "File $FILE does not exist"
fi

# Numeric comparison with an if/elif chain.
NUMBER=2
if [ $NUMBER -eq 1 ]; then
    echo "Number is 1"
elif [ $NUMBER -eq 2 ]; then
    echo "Number is 2"
elif [ $NUMBER -eq 3 ]; then
    echo "Number is 3"
else
    echo "Other number"
fi

# `test` with && runs the echo only when the condition holds.
test "$(whoami)" != "root" && echo "You should run this script with root."

# A function's return status can drive an if directly.
check_root() {
    # EUID is 0 only for root.
    if [ $EUID -ne 0 ]; then
        echo "This script must run as root" >&2
        return 1
    fi
}

if check_root; then
    echo "You are connected as root."
else
    echo "You should be connected as root."
fi

# || runs the right side only when the test fails; && only when it holds.
DIR="testdirectory"
[ -d "$DIR" ] || echo "Directory $DIR does not exist."
DIR="."
[ -d "$DIR" ] && echo "Directory $DIR does exist."

# [[ ]] allows combined conditions; $0 is this script itself.
[[ -f "$0" && -r "$0" ]] && echo "File $0 exists and is readable."

# case on a number.
NUMBER=2
case $NUMBER in
    1 ) echo "Number is 1"
        ;;
    2 ) echo "Number is 2"
        ;;
    3 ) echo "Number is 3"
        ;;
    * ) echo "Other number"
esac

# case with character-class patterns.
echo -n "Type a digit or a letter: "
read -r character
case $character in
    # Check for letters.
    [[:lower:]] | [[:upper:]] ) echo "You typed the letter $character"
        ;;
    # Check for digits.
    [0-9] ) echo "You typed the digit $character"
        ;;
    # Check for anything else.
    * ) echo "You did not type a letter or a digit"
esac
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <arpa/inet.h>

#define PORT 9876
#define MAX_CLIENTS 5
#define BUFFER_SIZE 1024

/*
 * Minimal iterative TCP server: accepts one client at a time on PORT,
 * prints whatever the client sent, answers with a fixed message and
 * closes the connection.
 */
int main() {
    int server_fd, new_socket, valread;
    struct sockaddr_in address;
    int opt = 1;
    int addrlen = sizeof(address);
    char buffer[BUFFER_SIZE] = {0};
    char *response = "Server: Message received\n";

    /* Create a socket. socket() returns -1 on failure (0 is a valid
       descriptor), so test for < 0 -- the original's == 0 check let real
       errors slip through. */
    if ((server_fd = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
        perror("Socket creation failed");
        exit(EXIT_FAILURE);
    }

    /* Set socket options so the port can be rebound quickly after restart.
       NOTE(review): SO_REUSEPORT is a Linux/BSD extension, not POSIX --
       confirm target platforms. */
    if (setsockopt(server_fd, SOL_SOCKET, SO_REUSEADDR | SO_REUSEPORT, &opt, sizeof(opt))) {
        perror("Setsockopt failed");
        exit(EXIT_FAILURE);
    }

    address.sin_family = AF_INET;
    address.sin_addr.s_addr = INADDR_ANY;
    address.sin_port = htons(PORT);

    /* Bind the socket to the specified port */
    if (bind(server_fd, (struct sockaddr *)&address, sizeof(address)) < 0) {
        perror("Bind failed");
        exit(EXIT_FAILURE);
    }

    /* Listen for incoming connections */
    if (listen(server_fd, MAX_CLIENTS) < 0) {
        perror("Listen failed");
        exit(EXIT_FAILURE);
    }

    while (1) {
        /* Accept incoming connection */
        if ((new_socket = accept(server_fd, (struct sockaddr *)&address, (socklen_t *)&addrlen)) < 0) {
            perror("Accept failed");
            exit(EXIT_FAILURE);
        }

        /* Read data from the client. Leave room for the terminator, check
           the result, and NUL-terminate at the actual length so read errors
           are handled and stale bytes from a previous, longer message are
           never printed. */
        valread = read(new_socket, buffer, BUFFER_SIZE - 1);
        if (valread < 0) {
            perror("Read failed");
            close(new_socket);
            continue;
        }
        buffer[valread] = '\0';
        printf("Client: %s\n", buffer);

        /* Send response to the client */
        send(new_socket, response, strlen(response), 0);
        printf("Response sent\n");

        close(new_socket); /* Close the connection */
    }
    return 0;
}
#!/bin/sh -e
# Reformat web/index.html in place with HTML Tidy.
# With --ci-mode as the first argument, also write the tidy log under
# build/log/ for the CI pipeline to collect.
case "${1}" in
    --ci-mode)
        shift
        mkdir -p build/log
        tidy -config tidy.conf -modify -file build/log/tidy-reformat.log web/index.html
        ;;
    *)
        tidy -config tidy.conf -modify web/index.html
        ;;
esac
|
#
# Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# @test
# @bug 7121600 8016838
# @summary Redefine a big class.
# @author Daniel D. Daugherty
#
# @key intermittent
# @modules java.instrument
# java.management
# @run shell MakeJAR3.sh RedefineBigClassAgent 'Can-Redefine-Classes: true'
# @run build BigClass RedefineBigClassApp NMTHelper
# @run shell/timeout=600 RedefineBigClass.sh
#
# Fail fast when the jtreg harness did not provide the expected
# environment variables.
if [ "${TESTJAVA}" = "" ]
then
    echo "TESTJAVA not set. Test cannot execute. Failed."
    exit 1
fi

# Compile JDK defaults to the test JDK unless the harness overrides it.
if [ "${COMPILEJAVA}" = "" ]
then
    COMPILEJAVA="${TESTJAVA}"
fi
echo "COMPILEJAVA=${COMPILEJAVA}"

if [ "${TESTSRC}" = "" ]
then
    echo "TESTSRC not set. Test cannot execute. Failed."
    exit 1
fi

if [ "${TESTCLASSES}" = "" ]
then
    echo "TESTCLASSES not set. Test cannot execute. Failed."
    exit 1
fi

JAVAC="${COMPILEJAVA}"/bin/javac
JAVA="${TESTJAVA}"/bin/java

# Does this VM support the 'detail' level of NMT?
"${JAVA}" ${TESTVMOPTS} -XX:NativeMemoryTracking=detail -version
if [ "$?" = 0 ]; then
    NMT=-XX:NativeMemoryTracking=detail
else
    NMT=-XX:NativeMemoryTracking=summary
fi

# Run the app under the redefinition agent with class-redefine logging,
# capturing all output for the checks below.
"${JAVA}" ${TESTVMOPTS} \
    -Xlog:redefine+class+load=debug,redefine+class+load+exceptions=info ${NMT} \
    -javaagent:RedefineBigClassAgent.jar=BigClass.class \
    -classpath "${TESTCLASSES}" RedefineBigClassApp \
    > output.log 2>&1
result=$?

cat output.log

# First gate: the app itself must exit cleanly.
if [ "$result" = 0 ]; then
    echo "PASS: RedefineBigClassApp exited with status of 0."
else
    echo "FAIL: RedefineBigClassApp exited with status of $result"
    exit "$result"
fi

# Second gate: no exception may appear anywhere in the output
# (grep returns 0 when the pattern IS found, hence the inverted check).
MESG="Exception"
grep "$MESG" output.log
result=$?
if [ "$result" = 0 ]; then
    echo "FAIL: found '$MESG' in the test output"
    result=1
else
    echo "PASS: did NOT find '$MESG' in the test output"
    result=0
fi

exit $result
|
#!/usr/bin/env bash
#
# Recompute the sha256 of each test artifact and rewrite every
# "<target>?checksum=sha256:<hex>" reference in the repo to match.
set -ex

targets=(
    testdata/repo/zip_directory/autod.zip
    testdata/repo/ref_zipped
    testdata/repo/zip_binary/autod.zip
)

for target in "${targets[@]}"; do
    sum=$(shasum -a 256 "${target}" | cut -d' ' -f1)
    echo "sum:${sum}"
    echo "target:${target}"
    grep -l -r "${target}?checksum=sha256" . | while IFS= read -r f; do
        echo "updating:${f}"
        # -E: the {64} interval is extended-regex syntax; in gsed's default
        # basic-regex mode it matched the literal text "{64}" and the
        # substitution never fired. With -E the "?" becomes a metacharacter,
        # so it is escaped in the match half (it stays literal in the
        # replacement half).
        gsed -i -E -e "s|${target}\?checksum=sha256:[a-z0-9]{64}|${target}?checksum=sha256:${sum}|g" "${f}"
    done
done
|
<filename>src/lib/units/utils.ts
import { Data as LayerDataRaw } from './create';
/** Total number of units across every entry in a raw layer's unit list. */
export const calcUnitsQtyInRawLayerData = ({ unitsData }: LayerDataRaw): number => {
    let total = 0;
    for (const { qty } of unitsData) {
        total += qty;
    }
    return total;
};
|
package HxCKDMS.bows.proxy;

/**
 * Dedicated-server side proxy. All lifecycle hooks are intentionally
 * no-ops here; client-only work lives in the client proxy counterpart.
 */
public class ServerProxy extends CommonProxy {
    /** No server-side work during pre-initialization. */
    @Override
    public void preinit() {

    }

    /** No server-side work during initialization. */
    @Override
    public void init() {

    }

    /** No server-side work during post-initialization. */
    @Override
    public void postinit() {

    }
}
|
import { Station } from '../../index';

// Failure-path behaviour of Station that is common to all platforms.
describe('public/FailPaths/common', () => {
    it('non-existing folder - throws error', () => {
        const station = new Station();
        expect(() => {
            station.addFolder('biba'); // non-existing
        }).toThrow();
    });

    // Planned coverage, not yet implemented:
    it.todo('track was deleted while playback - revalidates folders');
    it.todo('track has some problems while reading - skip it');
    it.todo('playlist was scrambled so that the frist playing track is non existing - revalidates folders');
    it.todo('track has some problems on stream - skip it');
});
|
<reponame>donysukardi/goober
import { h, Component } from 'preact';
import { styled, setPragma } from '../goober';

// Point goober's css-in-js factory at preact's createElement.
setPragma(h);

interface IButtonProps {
    clicked: boolean;
    color?: string;
}

// Styled button whose background falls back to black when no color prop
// is given.
const Button = styled<IButtonProps>('button')`
    background: ${props => (props.color ? props.color : 'black')};
`;

// Smoke-test component exercising the typed styled() factory in JSX.
class TestComp extends Component {
    public render () {
        return (
            <div>
                <Button clicked={false}></Button>
            </div>
        )
    };
}
|
#!/bin/sh
# This script is generated by genbootstrap.sh
# to regenerate, run "make bootstrap"

# Pre-create every object directory the compile commands below write into.
mkdir -p obj/lib/bio
mkdir -p obj/lib/bld.sub
mkdir -p obj/lib/crypto
mkdir -p obj/lib/date
mkdir -p obj/lib/escfmt
mkdir -p obj/lib/fileutil
mkdir -p obj/lib/flate
mkdir -p obj/lib/http
mkdir -p obj/lib/inifile
mkdir -p obj/lib/iter
mkdir -p obj/lib/json
mkdir -p obj/lib/math
mkdir -p obj/lib/regex
mkdir -p obj/lib/std
mkdir -p obj/lib/sys
mkdir -p obj/lib/testr
mkdir -p obj/lib/thread
mkdir -p obj/mbld

# Absolute path to the checked-out tree, used to invoke the stage-0 tools.
pwd=`pwd`
# Echo every build command as it runs.
set -x
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/config.myr
as -g -o obj/mbld/cpufeatures.o mbld/cpufeatures+posixy-x64.s
as -g -o obj/lib/thread/tls-impl.o lib/thread/tls-impl+osx-x64.s
as -g -o obj/lib/thread/atomic-impl.o lib/thread/atomic-impl+x64.s
as -g -o obj/lib/thread/start.o lib/thread/start+osx-x64.s
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/common.myr
as -g -o obj/lib/std/getbp.o lib/std/getbp+posixy-x64.s
$pwd/6/6m -O obj -I obj/lib/sys lib/std/option.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/traits.myr
as -g -o obj/lib/std/memops-impl.o lib/std/memops-impl+posixy-x64.s
$pwd/6/6m -O obj -I obj/lib/sys lib/std/fltbits.myr
as -g -o obj/lib/std/sjlj-impl.o lib/std/sjlj-impl+posixy-x64.s
$pwd/6/6m -O obj -I obj/lib/sys lib/std/endian.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/extremum.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/sjlj+x64.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/swap.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/slfill.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/result.myr
as -g -o obj/lib/sys/syscall.o lib/sys/syscall+osx-x64.s
$pwd/6/6m -O obj lib/sys/systypes.myr
$pwd/6/6m -O obj lib/sys/sys+osx-x64.myr
$pwd/6/6m -O obj lib/sys/ifreq+osx.myr
as -g -o obj/lib/sys/util.o lib/sys/util+posixy-x64.s
$pwd/6/6m -O obj lib/sys/syserrno+osx.myr
ar -rcs obj/lib/sys/libsys.a obj/lib/sys/sys.o obj/lib/sys/syserrno.o obj/lib/sys/util.o obj/lib/sys/systypes.o obj/lib/sys/ifreq.o obj/lib/sys/syscall.o
$pwd/muse/muse -o obj/lib/sys/libsys.use -p sys obj/lib/sys/sys.use obj/lib/sys/syserrno.use obj/lib/sys/systypes.use obj/lib/sys/ifreq.use
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/types+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/errno.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/pledge.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/types.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/strfind.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/memops.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/clear.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/sleq.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/hassuffix.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/backtrace+x64.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/units.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/cstrconv.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/syswrap+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/mkpath.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/now.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/consts.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/die.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/slcp.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/syswrap-ss+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/sleep.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/chartype.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/utf.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/cmp.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/sort.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/search.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/hasprefix.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/chomp.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/strstrip.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/introspect.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/varargs.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/wait+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/threadhooks.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/bytealloc.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/alloc.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/slurp.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/mk.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/slput.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/htab.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/slpush.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/striter.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/intparse.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/strsplit.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/strbuf.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/intfmt.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/sldup.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/bigint.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/fltparse.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/fltfmt.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/dirname.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/dir+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/diriter.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/fndup.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/strjoin.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/getcwd.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/slpop.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/sljoin.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/strreplace.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/getint.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/hashfuncs.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/bitset.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/putint.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/readall.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/blat.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/writeall.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/fmt.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/env+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/execvp.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/spork.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/assert.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/rand.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/pathjoin.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/mktemp.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/optparse.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/netaddr.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/ipparse.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/fmtfuncs.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/resolve+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/dialparse+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/dial+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/listen+posixy.myr
$pwd/6/6m -O obj -I obj/lib/sys lib/std/try.myr
ar -rcs obj/lib/std/libstd.a obj/lib/std/resolve.o obj/lib/std/intfmt.o obj/lib/std/result.o obj/lib/std/try.o obj/lib/std/ipparse.o obj/lib/std/alloc.o obj/lib/std/sleq.o obj/lib/std/putint.o obj/lib/std/sljoin.o obj/lib/std/slpop.o obj/lib/std/syswrap.o obj/lib/std/getint.o obj/lib/std/strsplit.o obj/lib/std/slfill.o obj/lib/std/writeall.o obj/lib/std/fltfmt.o obj/lib/std/hasprefix.o obj/lib/std/swap.o obj/lib/std/fmt.o obj/lib/std/netaddr.o obj/lib/std/varargs.o obj/lib/std/diriter.o obj/lib/std/getcwd.o obj/lib/std/blat.o obj/lib/std/optparse.o obj/lib/std/pathjoin.o obj/lib/std/readall.o obj/lib/std/strjoin.o obj/lib/std/threadhooks.o obj/lib/std/sjlj.o obj/lib/std/extremum.o obj/lib/std/endian.o obj/lib/std/rand.o obj/lib/std/sldup.o obj/lib/std/sleep.o obj/lib/std/wait.o obj/lib/std/introspect.o obj/lib/std/fltparse.o obj/lib/std/fndup.o obj/lib/std/strbuf.o obj/lib/std/strreplace.o obj/lib/std/assert.o obj/lib/std/spork.o obj/lib/std/slpush.o obj/lib/std/strstrip.o obj/lib/std/htab.o obj/lib/std/hashfuncs.o obj/lib/std/slput.o obj/lib/std/sjlj-impl.o obj/lib/std/fltbits.o obj/lib/std/striter.o obj/lib/std/types.o obj/lib/std/cstrconv.o obj/lib/std/units.o obj/lib/std/backtrace.o obj/lib/std/syswrap-ss.o obj/lib/std/die.o obj/lib/std/mk.o obj/lib/std/hassuffix.o obj/lib/std/memops-impl.o obj/lib/std/pledge.o obj/lib/std/utf.o obj/lib/std/slurp.o obj/lib/std/dialparse.o obj/lib/std/bytealloc.o obj/lib/std/mktemp.o obj/lib/std/consts.o obj/lib/std/chomp.o obj/lib/std/dir.o obj/lib/std/search.o obj/lib/std/memops.o obj/lib/std/fmtfuncs.o obj/lib/std/strfind.o obj/lib/std/env.o obj/lib/std/dirname.o obj/lib/std/clear.o obj/lib/std/listen.o obj/lib/std/sort.o obj/lib/std/cmp.o obj/lib/std/now.o obj/lib/std/intparse.o obj/lib/std/traits.o obj/lib/std/mkpath.o obj/lib/std/option.o obj/lib/std/dial.o obj/lib/std/errno.o obj/lib/std/chartype.o obj/lib/std/bigint.o obj/lib/std/bitset.o obj/lib/std/getbp.o obj/lib/std/slcp.o obj/lib/std/execvp.o
# NOTE(review): this command was split across two lines with no shell line
# continuation, so the trailing .use arguments ran as a separate, failing
# command; rejoined into the single muse invocation.
$pwd/muse/muse -o obj/lib/std/libstd.use -p std obj/lib/std/resolve.use obj/lib/std/intfmt.use obj/lib/std/result.use obj/lib/std/try.use obj/lib/std/ipparse.use obj/lib/std/alloc.use obj/lib/std/sleq.use obj/lib/std/putint.use obj/lib/std/sljoin.use obj/lib/std/slpop.use obj/lib/std/syswrap.use obj/lib/std/getint.use obj/lib/std/strsplit.use obj/lib/std/slfill.use obj/lib/std/writeall.use obj/lib/std/fltfmt.use obj/lib/std/hasprefix.use obj/lib/std/swap.use obj/lib/std/fmt.use obj/lib/std/netaddr.use obj/lib/std/varargs.use obj/lib/std/diriter.use obj/lib/std/getcwd.use obj/lib/std/blat.use obj/lib/std/optparse.use obj/lib/std/pathjoin.use obj/lib/std/readall.use obj/lib/std/strjoin.use obj/lib/std/threadhooks.use obj/lib/std/sjlj.use obj/lib/std/extremum.use obj/lib/std/endian.use obj/lib/std/rand.use obj/lib/std/sldup.use obj/lib/std/sleep.use obj/lib/std/wait.use obj/lib/std/introspect.use obj/lib/std/fltparse.use obj/lib/std/fndup.use obj/lib/std/strbuf.use obj/lib/std/strreplace.use obj/lib/std/assert.use obj/lib/std/spork.use obj/lib/std/slpush.use obj/lib/std/strstrip.use obj/lib/std/htab.use obj/lib/std/hashfuncs.use obj/lib/std/slput.use obj/lib/std/fltbits.use obj/lib/std/striter.use obj/lib/std/types.use obj/lib/std/cstrconv.use obj/lib/std/units.use obj/lib/std/backtrace.use obj/lib/std/syswrap-ss.use obj/lib/std/die.use obj/lib/std/mk.use obj/lib/std/hassuffix.use obj/lib/std/pledge.use obj/lib/std/utf.use obj/lib/std/slurp.use obj/lib/std/dialparse.use obj/lib/std/bytealloc.use obj/lib/std/mktemp.use obj/lib/std/consts.use obj/lib/std/chomp.use obj/lib/std/dir.use obj/lib/std/search.use obj/lib/std/memops.use obj/lib/std/fmtfuncs.use obj/lib/std/strfind.use obj/lib/std/env.use obj/lib/std/dirname.use obj/lib/std/clear.use obj/lib/std/listen.use obj/lib/std/sort.use obj/lib/std/cmp.use obj/lib/std/now.use obj/lib/std/intparse.use obj/lib/std/traits.use obj/lib/std/mkpath.use obj/lib/std/option.use obj/lib/std/dial.use obj/lib/std/errno.use obj/lib/std/chartype.use obj/lib/std/bigint.use obj/lib/std/bitset.use obj/lib/std/slcp.use obj/lib/std/execvp.use
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/types.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/regex/types.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/regex/interp.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/types.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/bio.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/iter.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/mem.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/fd.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/geti.myr
$pwd/6/6m -O obj -I obj/lib/std -I obj/lib/sys lib/bio/puti.myr
ar -rcs obj/lib/bio/libbio.a obj/lib/bio/puti.o obj/lib/bio/geti.o obj/lib/bio/fd.o obj/lib/bio/mem.o obj/lib/bio/bio.o obj/lib/bio/types.o obj/lib/bio/iter.o
$pwd/muse/muse -o obj/lib/bio/libbio.use -p bio obj/lib/bio/puti.use obj/lib/bio/geti.use obj/lib/bio/fd.use obj/lib/bio/mem.use obj/lib/bio/bio.use obj/lib/bio/types.use obj/lib/bio/iter.use
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/tls+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/spawn+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/ncpu.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/atomic.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/futex+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/sem+futex.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/future.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/do.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/mutex+futex.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/condvar+osx.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/queue.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/hookstd.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/waitgrp+futex.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/thread/rwlock+futex.myr
ar -rcs obj/lib/thread/libthread.a obj/lib/thread/atomic.o obj/lib/thread/future.o obj/lib/thread/hookstd.o obj/lib/thread/condvar.o obj/lib/thread/rwlock.o obj/lib/thread/common.o obj/lib/thread/start.o obj/lib/thread/waitgrp.o obj/lib/thread/spawn.o obj/lib/thread/queue.o obj/lib/thread/mutex.o obj/lib/thread/atomic-impl.o obj/lib/thread/sem.o obj/lib/thread/tls-impl.o obj/lib/thread/do.o obj/lib/thread/ncpu.o obj/lib/thread/futex.o obj/lib/thread/tls.o obj/lib/thread/types.o
$pwd/muse/muse -o obj/lib/thread/libthread.use -p thread obj/lib/thread/atomic.use obj/lib/thread/future.use obj/lib/thread/hookstd.use obj/lib/thread/condvar.use obj/lib/thread/rwlock.use obj/lib/thread/common.use obj/lib/thread/waitgrp.use obj/lib/thread/spawn.use obj/lib/thread/queue.use obj/lib/thread/mutex.use obj/lib/thread/sem.use obj/lib/thread/do.use obj/lib/thread/ncpu.use obj/lib/thread/futex.use obj/lib/thread/tls.use obj/lib/thread/types.use
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/opts.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/syssel.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/libs.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/util.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/build.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/install.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/parse.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/regex/ranges.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std lib/regex/compile.myr
ar -rcs obj/lib/regex/libregex.a obj/lib/regex/interp.o obj/lib/regex/ranges.o obj/lib/regex/types.o obj/lib/regex/compile.o
$pwd/muse/muse -o obj/lib/regex/libregex.use -p regex obj/lib/regex/interp.use obj/lib/regex/ranges.use obj/lib/regex/types.use obj/lib/regex/compile.use
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/subtest.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/test.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/deps.myr
$pwd/6/6m -O obj -I obj/lib/sys -I obj/lib/std -I obj/lib/bio -I obj/lib/regex -I obj/lib/thread mbld/main.myr
ld -static -pagezero_size 0x100000000 -no_pie -o obj/mbld/mbld $pwd/rt/_myrrt.o obj/mbld/deps.o obj/mbld/main.o obj/mbld/util.o obj/mbld/cpufeatures.o obj/mbld/libs.o obj/mbld/syssel.o obj/mbld/config.o obj/mbld/opts.o obj/mbld/subtest.o obj/mbld/types.o obj/mbld/test.o obj/mbld/install.o obj/mbld/parse.o obj/mbld/build.o -Lobj/lib/thread -lthread -Lobj/lib/bio -lbio -Lobj/lib/regex -lregex -Lobj/lib/std -lstd -Lobj/lib/sys -lsys
true
|
package org.zalando.intellij.swagger.annotator;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.Annotator;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.NotNull;
import org.zalando.intellij.swagger.StringUtils;
import org.zalando.intellij.swagger.file.FileDetector;
import org.zalando.intellij.swagger.intention.reference.CreateJsonReferenceIntentionAction;
import org.zalando.intellij.swagger.traversal.JsonTraversal;
import org.zalando.intellij.swagger.traversal.path.swagger.MainPathResolver;
import org.zalando.intellij.swagger.validator.value.ReferenceValidator;
import org.zalando.intellij.swagger.validator.value.SchemesValidator;
import org.zalando.intellij.swagger.validator.value.ValuesValidator;
/**
 * IntelliJ annotator that validates values inside the main Swagger JSON
 * specification file (references, schemes, and other value-level checks).
 */
public class JsonValidValueAnnotator implements Annotator {
    @Override
    public void annotate(
        @NotNull final PsiElement psiElement, @NotNull final AnnotationHolder annotationHolder) {
        // Only elements belonging to the main Swagger JSON file are validated.
        if (new FileDetector().isMainSwaggerJsonFile(psiElement.getContainingFile())) {
            // NOTE(review): a fresh validator graph is constructed for every PSI
            // element visited -- presumably cheap, but confirm for large specs.
            final ValuesValidator valuesValidator =
                new ValuesValidator(
                    new JsonTraversal(),
                    new MainPathResolver(),
                    // The intention action offers to create the missing $ref
                    // target; quotes are stripped from the element text first.
                    new ReferenceValidator(
                        new CreateJsonReferenceIntentionAction(
                            StringUtils.removeAllQuotes(psiElement.getText()))),
                    new SchemesValidator());
            valuesValidator.validate(psiElement, annotationHolder);
        }
    }
}
|
<gh_stars>1-10
// The Vue build version to load with the `import` command
// (runtime-only or standalone) has been set in webpack.base.conf with an alias.
import Vue from 'vue'
import App from './App'
import router from './router'
import VueGAPI from 'vue-gapi'
// API key and OAuth client id are kept out of the repo in ./secrets.
import { APITHING, CLIENTTHING } from './secrets'
// NOTE(review): moment and axios are imported here but not referenced in this
// file; confirm they are needed elsewhere before removing.
import moment from 'moment'
import axios from 'axios'

// Silence the "running in development mode" console banner.
Vue.config.productionTip = false;

// Google API (gapi) client configuration: read-only Calendar v3 access.
const apiConfig = {
  apiKey: APITHING,
  clientId: CLIENTTHING + '.apps.googleusercontent.com',
  discoveryDocs: ['https://www.googleapis.com/discovery/v1/apis/calendar/v3/rest'],
  scope: 'https://www.googleapis.com/auth/calendar.readonly'
}
Vue.use(VueGAPI, apiConfig);

/* eslint-disable no-new */
// Root instance: mounts on #app and delegates all rendering to App.
new Vue({
  el: '#app',
  router,
  render: h => h(App)
});
|
#! /usr/bin/bash
# Run the ROI-reduction consumer inside the "bluesky" container, joined to
# the existing "acquisition" pod, with the local scripts dir mounted at /app.
set -e
set -o xtrace
# $(...) replaces the deprecated backticks, and quoting the volume argument
# keeps the mount working even when the working directory contains spaces.
podman run --pod acquisition --rm -ti -v "$(pwd)/bluesky_config/scripts:/app" -w /app bluesky python3 roi_reduction_consumer.py
|
#!/bin/sh
# Extracts detached code signatures from a signed macOS .app bundle so they
# can be shipped separately and re-attached to an unsigned build later.
set -e

ROOTDIR=dist
BUNDLE=${ROOTDIR}/BLOOD_BOUND_MASTERNODE-Qt.app
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature.tar.gz

# At least one argument is required and forwarded to codesign (e.g. -s <id>).
if [ ! -n "$1" ]; then
  echo "usage: $0 <codesign args>"
  echo "example: $0 -s MyIdentity"
  exit 1
fi

rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}

# Sign the bundle; --file-list records every file codesign touched.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"

# For each signed Mach-O binary (everything except CodeResources plists),
# carve the embedded signature blob out of the file into a .sign file, using
# the offset/size that pagestuff reports for the signature load command.
for i in `grep -v CodeResources ${TEMPLIST}`; do
  TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
  SIZE=`pagestuff $i -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
  OFFSET=`pagestuff $i -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
  SIGNFILE="${TEMPDIR}/${TARGETFILE}.sign"
  DIRNAME="`dirname ${SIGNFILE}`"
  mkdir -p "${DIRNAME}"
  echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
  # NOTE(review): bs=1 copies byte-by-byte (slow for big blobs), and ${i} is
  # unquoted, which would break on paths with spaces -- confirm inputs.
  dd if=$i of=${SIGNFILE} bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done

# CodeResources property lists are copied verbatim rather than carved.
for i in `grep CodeResources ${TEMPLIST}`; do
  TARGETFILE="${BUNDLE}/`echo ${i} | sed "s|.*${BUNDLE}/||"`"
  RESOURCE="${TEMPDIR}/${TARGETFILE}"
  DIRNAME="`dirname "${RESOURCE}"`"
  mkdir -p "${DIRNAME}"
  echo "Adding resource for: "${TARGETFILE}""
  cp "${i}" "${RESOURCE}"
done

rm ${TEMPLIST}
# Pack everything that was extracted into a single tarball.
tar -C ${TEMPDIR} -czf ${OUT} .
rm -rf ${TEMPDIR}
echo "Created ${OUT}"
|
package gtd
import (
"bytes"
"fmt"
"os"
"path/filepath"
"strings"
"time"
mapset "github.com/deckarep/golang-set"
"github.com/urfave/cli"
)
// Process exit codes returned by Run/msg, plus the reference layout used
// when formatting todo creation dates.
const (
	ExitCodeOK        int = iota // 0
	ExitCodeError                // 1
	ExitCodeFileError            // 2
	// timeFormat follows Go's reference-date convention (YYYY-MM-DD).
	timeFormat = "2006-01-02"
)

// Version is the application version reported by the CLI.
var Version = "0.1.1"
// Run wires up the urfave/cli application (commands, flags, metadata) and
// executes it against the supplied argument vector, translating the result
// into a process exit code via msg.
func Run(args []string) int {
	app := cli.NewApp()
	app.Name = "gtd"
	app.Version = Version
	app.Usage = "todo app"
	app.Author = "<NAME>"
	app.Email = "<EMAIL>"
	app.Commands = []cli.Command{
		{
			Name:    "add",
			Aliases: []string{"a"},
			Action:  addTodoAction,
			Usage:   "add todo",
			Flags: []cli.Flag{
				cli.StringFlag{
					Name:  "parent, p",
					Usage: "specify parent `TODO_NUM` (ex: gtd add -p 1 task)",
				},
				cli.StringFlag{
					Name:  "tag, t",
					Usage: "add `TAG` to todo (ex: gtd add -t life task)",
				},
				cli.BoolFlag{
					Name:  "memo, m",
					Usage: "add `MEMO` to todo (ex: gtd add -m task)",
				},
			},
		},
		{
			Name:    "list",
			Aliases: []string{"l"},
			Usage:   "list todo",
			Action:  listTodoAction,
			Flags: []cli.Flag{
				cli.BoolFlag{
					Name:  "all, a",
					Usage: "show all todos",
				},
			},
		},
		{
			Name:    "tags",
			Aliases: []string{"t"},
			Usage:   "list tags",
			Action:  tagTodoAction,
			Flags: []cli.Flag{
				cli.BoolFlag{
					Name:  "all, a",
					Usage: "show all tags",
				},
			},
		},
		{
			Name:    "done",
			Aliases: []string{"d"},
			Usage:   "done todo",
			Action:  doneTodoAction,
		},
		{
			Name:    "clean",
			Aliases: []string{"c"},
			Usage:   "clean done todo",
			Action:  cleanTodoAction,
		},
		{
			// BUG FIX: the alias was "d", which collided with the "done"
			// command's alias and was therefore unreachable; "del" makes the
			// shortcut usable without breaking existing behavior.
			Name:    "delete",
			Aliases: []string{"del"},
			Usage:   "delete todo",
			Action:  deleteTodoAction,
		},
		{
			Name:    "setting",
			Aliases: []string{"s"},
			Usage:   "edit config file",
			Action:  settingTodoAction,
		},
		{
			Name:    "memo",
			Aliases: []string{"m"},
			Usage:   "edit memo file associated with task (ex, gtd memo 4)",
			Action:  memoTodoAction,
		},
	}
	// BUG FIX: previously ran against os.Args, silently ignoring the args
	// parameter this function was given.
	return msg(app.Run(args))
}
func msg(err error) int {
if err != nil {
fmt.Fprintf(os.Stderr, "%s: %v\n", os.Args[0], err)
return ExitCodeError
}
return ExitCodeOK
}
// ask prints prompt followed by ": ", reads one whitespace-delimited token
// from stdin, and reports whether the user answered "y" or "Y".
func ask(prompt string) bool {
	fmt.Print(prompt, ": ")
	var answer string
	fmt.Scan(&answer)
	return answer == "y" || answer == "Y"
}
// addTodoAction implements `gtd add`: it appends a new todo -- optionally as
// a child of --parent, tagged via --tag, and with an attached memo file when
// --memo is given -- then persists the updated list to the configured file.
func addTodoAction(c *cli.Context) error {
	var title string
	var tag string
	var parentnum string
	var memo string
	var todos Todos
	var cfg config
	err := cfg.load()
	if err != nil {
		// BUG FIX: "falid" -> "failed" typo in the error message.
		return fmt.Errorf("failed to load configfile: %v", err)
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("Failed to read todofile: %v", err)
	}
	if c.Args().Present() {
		title = c.Args().First()
		tag = c.String("tag")
		parentnum = c.String("parent")
	} else {
		cli.ShowCommandHelp(c, "add")
		return fmt.Errorf("Failed to parse options")
	}
	if c.Bool("memo") {
		var buf bytes.Buffer
		var stdin string
		if cfg.MemoDir == "" {
			return fmt.Errorf("please setting MemoDir ")
		}
		if cfg.FilterCmd == "" {
			return fmt.Errorf("please setting FilterCmd (peco/fzf)")
		}
		if ask("existing file? (y/n)") {
			// Pick an existing memo file via the configured fuzzy filter.
			cmd := fmt.Sprintf("find %s -type f | %s", cfg.MemoDir, cfg.FilterCmd)
			cfg.filtercmd(cmd, &buf)
		} else {
			// Pick a directory, then append the file name the user types.
			fmt.Println("new file name: ")
			fmt.Scan(&stdin)
			stdin = "/" + stdin
			cmd := fmt.Sprintf("find %s -type d | %s", cfg.MemoDir, cfg.FilterCmd)
			cfg.filtercmd(cmd, &buf)
		}
		cmdresult := buf.String()
		// stdin is empty in the existing-file branch, so memo is just the
		// selected path; in the new-file branch "/<name>" is appended.
		memo = strings.TrimRight(cmdresult, "\n") + stdin
	}
	var date = time.Now().Format(timeFormat)
	todo := Todo{
		Title: title,
		Done:  false,
		Tag:   tag,
		Memo:  memo,
		Date:  date,
	}
	parentnumlist, err := parseTodoNum(parentnum)
	// BUG FIX: a parse error for an explicitly supplied --parent was
	// previously discarded (immediately overwritten by the next call).
	// The empty-parent case is left as before to preserve behavior.
	if parentnum != "" && err != nil {
		return fmt.Errorf("failed to parse parent number: %v", err)
	}
	todolist, err := appendTodo(todos.Todos, todo, parentnumlist)
	if err != nil {
		return fmt.Errorf("Failed to append task: %v", err)
	}
	todos.SetTodos(todolist)
	return todos.marshallJson(cfg.GtdFile)
}
// listTodoAction implements `gtd list`: it loads the configured todo file
// and prints the list, including completed items when --all is given.
func listTodoAction(c *cli.Context) error {
	var cfg config
	var todos Todos
	err := cfg.load()
	if err != nil {
		// NOTE(review): "falid" is a typo for "failed" (message left as-is).
		return fmt.Errorf("falid to load configfile: %v", err)
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("Failed to read jsonfile: %v", err)
	}
	// \u2705 is the white-heavy-check-mark emoji used as a banner.
	fmt.Println("\u2705 YOUR TO DO LIST")
	if c.Bool("all") {
		displayAllTodo(todos.Todos, "", "")
		return nil
	}
	displayTodo(todos.Todos, "", "")
	return nil
}
// tagTodoAction implements `gtd tags`: with --all it prints every tag in
// use; with a tag argument it prints the todos carrying that tag.
func tagTodoAction(c *cli.Context) error {
	var cfg config
	var todos Todos
	err := cfg.load()
	if err != nil {
		// BUG FIX: "falid" -> "failed" typo in the error message.
		return fmt.Errorf("failed to load configfile: %v", err)
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("failed to read jsonfile: %v", err)
	}
	if c.Bool("all") {
		tags := mapset.NewSet()
		displayAllTags(todos.Todos, tags)
	} else if tag := c.Args().First(); tag != "" {
		displayTagTodo(todos.Todos, tag, "")
	} else {
		// BUG FIX: help was previously shown for the "list" command; this
		// handler belongs to "tags".
		cli.ShowCommandHelp(c, "tags")
		return fmt.Errorf("Failed to parse options")
	}
	return nil
}
// doneTodoAction implements `gtd done`: it marks the todo addressed by the
// first CLI argument (parsed by parseTodoNum) as completed and writes the
// updated list back to disk.
func doneTodoAction(c *cli.Context) error {
	var todos Todos
	var cfg config
	err := cfg.load()
	if err != nil {
		// NOTE(review): "falid" is a typo for "failed" (message left as-is).
		return fmt.Errorf("falid to load configfile: %v", err)
	}
	// A todo number argument is mandatory.
	if !c.Args().Present() {
		return fmt.Errorf("Failed to parse options")
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("failed to read jsonfile: %v", err)
	}
	todonum := c.Args().First()
	todonumlist, err := parseTodoNum(todonum)
	if err != nil {
		return fmt.Errorf("Failed to parse number: %v", err)
	}
	todolist, err := doneTodo(todos.Todos, todonumlist)
	if err != nil {
		return fmt.Errorf("Failed to done todo: %v", err)
	}
	todos.SetTodos(todolist)
	return todos.marshallJson(cfg.GtdFile)
}
// cleanTodoAction implements `gtd clean`: it removes all completed todos
// and persists the pruned list.
func cleanTodoAction(c *cli.Context) error {
	var cfg config
	var todos Todos
	err := cfg.load()
	if err != nil {
		// BUG FIX: "falid" -> "failed" typo in the error message.
		return fmt.Errorf("failed to load configfile: %v", err)
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("failed to read jsonfile: %v", err)
	}
	todolist, err := cleanAllTodos(todos.Todos)
	// BUG FIX: this error was previously ignored, so a failed clean could
	// still overwrite the todo file with a bad list.
	if err != nil {
		return fmt.Errorf("Failed to clean todos: %v", err)
	}
	todos.SetTodos(todolist)
	return todos.marshallJson(cfg.GtdFile)
}
// deleteTodoAction implements `gtd delete`: it removes the todo addressed
// by the first CLI argument and persists the updated list.
func deleteTodoAction(c *cli.Context) error {
	var cfg config
	var todos Todos
	err := cfg.load()
	if err != nil {
		// BUG FIX: "falid" -> "failed" typo in the error message.
		return fmt.Errorf("failed to load configfile: %v", err)
	}
	// Consistency fix: require an argument, as doneTodoAction already does.
	if !c.Args().Present() {
		return fmt.Errorf("Failed to parse options")
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("failed to read jsonfile: %v", err)
	}
	todonum := c.Args().First()
	todonumlist, err := parseTodoNum(todonum)
	if err != nil {
		return fmt.Errorf("Failed to parse number: %v", err)
	}
	todolist, err := deleteTodos(todos.Todos, todonumlist)
	// BUG FIX: this error was previously ignored, so a failed delete could
	// still overwrite the todo file.
	if err != nil {
		return fmt.Errorf("Failed to delete todo: %v", err)
	}
	todos.SetTodos(todolist)
	return todos.marshallJson(cfg.GtdFile)
}
// settingTodoAction implements `gtd setting`: it opens the gtd config file
// ($HOME/.config/gtd/config.toml) in the configured editor.
func settingTodoAction(c *cli.Context) error {
	var cfg config
	err := cfg.load()
	if err != nil {
		// NOTE(review): "falid" is a typo for "failed" (message left as-is).
		return fmt.Errorf("falid to load configfile: %v", err)
	}
	dir := os.Getenv("HOME")
	dir = filepath.Join(dir, ".config", "gtd")
	file := filepath.Join(dir, "config.toml")
	return cfg.runcmd(cfg.Editor, file)
}
// memoTodoAction implements `gtd memo`: it resolves the todo addressed by
// the first CLI argument and opens its associated memo file in the editor.
func memoTodoAction(c *cli.Context) error {
	var cfg config
	var todos Todos
	err := cfg.load()
	if err != nil {
		// NOTE(review): "falid" is a typo for "failed" (message left as-is).
		return fmt.Errorf("falid to load configfile: %v", err)
	}
	err = todos.UnmarshallJson(cfg.GtdFile)
	if err != nil {
		return fmt.Errorf("failed to read jsonfile: %v", err)
	}
	todonum := c.Args().First()
	todonumlist, err := parseTodoNum(todonum)
	if err != nil {
		return fmt.Errorf("Failed to parse number: %v", err)
	}
	todolist, err := searchTodo(todos.Todos, todonumlist)
	if err != nil {
		return fmt.Errorf("Failed to access todo: %v", err)
	}
	// The resolved todo's Memo field holds the memo file path.
	return cfg.runcmd(cfg.Editor, todolist.Memo)
}
|
#!/bin/bash
# Converts a raw export into a clean CSV (result.csv): normalises line
# endings, splits out two messy multi-valued columns, scrubs them with sed,
# and joins everything back together with csvkit.

echo -e "\n"
echo -e "=================================="
echo -e "| MD Newspapers Data Converter |"
echo -e "=================================="

INPUT_FILE=$1

# NOTE(review): this header lists newspaper columns, yet the script below
# extracts "instrumentation" and "additional_info" columns that are not in
# it -- confirm which schema the input file actually uses.
header="id,state,city,county,title,year_pub_start,year_pub_end,lccn,oclc,issn, \
subscription_req,owner_producer,url,image_type,full_text_search,date_avail_start, \
date_avail_end,issues,comments"

# Set up temp.csv file with column headings to receive data
echo $header > temp.csv

# Translate all CR to LF chars (squeezing [-s] any resulting double LF chars into one),
# and translate all VT chars to pipes, passing the cleaned data to the temp file.
echo -e "• Translating line endings and converting vertical tabs to pipe chars ..."
tr -s '\r\n' '\n' < $INPUT_FILE | tr -s '\v' '\|' >> temp.csv

# Validate the resulting cleaned-up CSV file using csvclean.
echo -en "• Validating resulting file with csvclean ... "
csvclean -n temp.csv

# Separate the instrumentation column from the others, deleting spaces.
echo -e "• Separating and cleaning the 'instrumentation' column ... "
csvcut -c instrumentation temp.csv | tr -d ' ' > instrumentation.csv

# Separate the additional info field and translate internal returns
# (previously converted to pipes) into semi-colons, removing any leading
# spaces, duplicate semi-colons, and extra trailing spaces.
echo -e "• Separating and cleaning the 'additional info' column ... "
csvcut -c additional_info temp.csv |
sed -E -e 's/ *\|+ */; /g' -e 's/ "$/"/' > addinfo.csv

# Push all data except [-C] the previous two columns into a separate container.
csvcut -C instrumentation,additional_info temp.csv > others.csv

# Apply a series of transformations to the data, and pass cleaned data to new file
echo -e "• Applying sed rules to clean and remove remaining extraneous characters... "
# Replace multiple PIPEs with single PIPE
sed -E 's/\|+/\|/g' others.csv |
# Trim extra spaces between values in a multivalued field
sed -E 's/\ *\|\ */\|/g' |
# Trim extra space between fields
sed -E 's/\ *,/,/g' |
# Trim beginning space inside quotes
sed -E -e 's/^\"\ */\"/g' -e 's/,\"\ */,\"/g' |
# Trim trailing space inside quotes
sed -E -e 's/\ *\",/\",/g' -e 's/\ *\"$/\"/g' |
# Remove trailing PIPE in a field
sed -E -e 's/\|\"$/\"/g' -e 's/\|\",/\",/g' > others-clean.csv

# Reassemble: cleaned main columns + the two specially-treated columns.
echo -e "• Joining resulting data files into result.csv ... "
csvjoin others-clean.csv instrumentation.csv addinfo.csv > result.csv

echo -e "• Cleaning up and removing temporary files ... "
rm addinfo.csv others.csv instrumentation.csv temp.csv others-clean.csv

echo -e $(wc -l < result.csv) "lines written. Conversion complete! Goodbye!\n"
|
import React from "react";
import PropTypes from "prop-types";
import MenuItem from "@material-ui/core/MenuItem";
import Menu from "@material-ui/core/Menu";
import MenuIcon from "@material-ui/icons/SettingsOutlined";
import { IconButton, Box } from "@material-ui/core";
import { POLLS_STATES } from "../../../Modules/pollsOperations";
import PollsDialogsContext from "../../../Contexts/PollsDialogsContext";
// Per-poll settings (gear) menu. Which actions are offered depends on the
// poll's lifecycle state: Edit (unless terminated), Launch (draft),
// Re-launch (terminated), Stop (published), Delete (always).
const PollsMenu = ({ poll }) => {
  const { state } = poll;
  // Anchor element for the Material-UI menu; null means the menu is closed.
  const [menuAnchorEl, setMenuAnchorEl] = React.useState(null);
  const openMenu = Boolean(menuAnchorEl);
  // Dialog openers are supplied by the surrounding PollsDialogsContext;
  // each receives the poll whose dialog should open.
  const {
    setOpenUpdateDialogPoll,
    setOpenLaunchDialogPoll,
    setOpenStopDialogPoll,
    setOpenDeleteDialogPoll
  } = React.useContext(PollsDialogsContext);
  const closeMenu = () => {
    setMenuAnchorEl(null);
  };
  // stopPropagation throughout keeps the click from reaching the element
  // the menu sits on (presumably a clickable row/card -- confirm).
  const handleMenuClose = (e) => {
    e.stopPropagation();
    closeMenu(null);
  };
  const handleMenu = (e) => {
    e.stopPropagation();
    setMenuAnchorEl(e.currentTarget);
  };
  const handleUpdatePollClick = (e) => {
    e.stopPropagation();
    closeMenu();
    setOpenUpdateDialogPoll(poll);
  };
  const handleLaunchPollClick = (e) => {
    e.stopPropagation();
    closeMenu();
    setOpenLaunchDialogPoll(poll);
  };
  const handleStopPollClick = (e) => {
    e.stopPropagation();
    closeMenu();
    setOpenStopDialogPoll(poll);
  };
  const handleDeletePollClick = (e) => {
    e.stopPropagation();
    closeMenu();
    setOpenDeleteDialogPoll(poll);
  };
  return (
    <Box align="right">
      <IconButton color="primary" aria-label="Call menu" onClick={handleMenu}>
        <MenuIcon />
      </IconButton>
      <Menu
        id="menu-appbar"
        anchorEl={menuAnchorEl}
        keepMounted
        open={openMenu}
        onClose={handleMenuClose}
      >
        {state !== POLLS_STATES.TERMINATED && (
          <MenuItem onClick={handleUpdatePollClick}>Edit</MenuItem>
        )}
        {state === POLLS_STATES.DRAFT && (
          <MenuItem onClick={handleLaunchPollClick}>Launch</MenuItem>
        )}
        {state === POLLS_STATES.TERMINATED && (
          <MenuItem onClick={handleLaunchPollClick}>Re-launch</MenuItem>
        )}
        {state === POLLS_STATES.PUBLISHED && (
          <MenuItem onClick={handleStopPollClick}>Stop</MenuItem>
        )}
        <MenuItem onClick={handleDeletePollClick}>Delete</MenuItem>
      </Menu>
    </Box>
  );
};
PollsMenu.propTypes = {
  poll: PropTypes.object.isRequired
};
export default PollsMenu;
|
def compare_alphanumeric(s1, s2):
    """Three-way compare of two strings, character by character.

    Returns 1 if s1 sorts after s2, -1 if before, 0 if equal.  Characters
    are compared by code point (for single digits this coincides with
    numeric order); when one string is a prefix of the other, the shorter
    string sorts first.
    """
    # Walk both strings in lock-step until the first differing character.
    for c1, c2 in zip(s1, s2):
        if c1 > c2:
            return 1
        if c1 < c2:
            return -1
    # Common prefix exhausted: the longer string sorts after the shorter.
    if len(s1) > len(s2):
        return 1
    if len(s1) < len(s2):
        return -1
    return 0
def capitalize_words(string):
    """Capitalize each space-separated word (first letter upper, rest lower).

    Splitting on a literal single space preserves runs of consecutive
    spaces in the output.
    """
    return ' '.join(word.capitalize() for word in string.split(' '))
<reponame>apsislabs/apsis-website
import React from "react";
import Layout from "../components/layout";
import Hero from "../components/hero";
import Clients from "../components/clients";
import ContentBlock from "../components/contentblock";
import ServiceList from "../components/Services/servicelist";
import DiagonalBackground from "../components/diagonalbackground";
import OpenSource from "../components/OpenSource/OpenSource";
import About from "../components/Footer/about";
import indexStyles from "../styles/pages/index.module.scss";
import buttonStyles from "../styles/components/button.module.scss";
import Button from "gatsby-link";
import { graphql } from "gatsby";
import Navigation from "../components/navigation";
import Helmet from "react-helmet";
import { Animated } from "react-animated-css";
// Landing page: hero banner, rotating client-logo strips, marketing copy
// blocks, open-source section, and the office/about footer.  All images are
// provided by the GraphQL page query exported below in this file.
class IndexPage extends React.Component {
  render() {
    // Shared button class combinations (color + wide modifier).
    const blueWideButtonClasses = `${buttonStyles.button__blue} ${
      buttonStyles.button__wide
    }`;
    const grayWideButtonClasses = `${buttonStyles.button__gray} ${
      buttonStyles.button__wide
    }`;
    return (
      <Layout>
        <Helmet title="Apsis Labs | A Seattle Software Development Company" />
        <Navigation blue={false} />
        <Hero
          displayname="Hero"
          imgSrc={this.props.data.hero.childImageSharp.fluid.src}
        >
          {/* Headline fades in on load. */}
          <Animated animationIn="fadeInUp">
            <h1>Need an app? We can help.</h1>
            <h3>
              We are developers focused on efficient solutions to real world
              problems.
            </h3>
          </Animated>
          <span className={indexStyles.heroButton}>
            <Button className={blueWideButtonClasses} to="/services">
              Our Process
            </Button>
            <Button className={grayWideButtonClasses} to="/contact">
              Hire Us
            </Button>
          </span>
        </Hero>
        {/* First client-logo strip (fluid images come from pageQuery). */}
        <Clients
          clientOne={this.props.data.clientWeber.childImageSharp.fluid}
          clientOneAlt="<NAME>"
          clientOneUrl="https://www.webershandwick.com/"
          clientTwo={this.props.data.clientNatera.childImageSharp.fluid}
          clientTwoAlt="Natera"
          clientTwoUrl="https://www.natera.com/"
          clientThree={this.props.data.clientTray.childImageSharp.fluid}
          clientThreeAlt="Tray.io"
          clientThreeUrl="https://tray.io/"
          clientFour={this.props.data.clientBoku.childImageSharp.fluid}
          clientFourAlt="Boku"
          clientFourUrl="https://www.boku.com/"
        />
        <ContentBlock title="Your industry is our business." vertical={false}>
          Our craft is software development but our specialty is understanding
          your business, your processes, and your priorities. A robust
          understanding of your business goals is crucial for us to determine
          how to serve you best. We take the time to ensure you’re not paying
          extra for something that’s over-engineered. We want to strike the
          right balance among the factors that are important to you: effective
          features and functionality, budget, speed, technology maintenance
          costs, and more.
          <ServiceList />
        </ContentBlock>
        <DiagonalBackground>
          <OpenSource />
        </DiagonalBackground>
        <ContentBlock title="Idea. Implement. Iterate." vertical={false}>
          <div>
            At Apsis we embrace a process we call “almost agile.” Based on the
            idea that the best proving ground for new software is real-world
            users, we focus on rapid development and deployment followed by
            iteration based on user feedback. Our small project teams of 2-3
            developers are organized based on your project needs, and work in
            small, focused sprints to maximize adaptability.
          </div>
          <div className={indexStyles.serviceButton}>
            <Button className={blueWideButtonClasses} to="/services">
              Our Services
            </Button>
          </div>
        </ContentBlock>
        {/* Second and third client-logo strips. */}
        <Clients
          clientOne={this.props.data.clientCascade.childImageSharp.fluid}
          clientOneAlt="Cascade Bicycle Club"
          clientOneUrl="https://www.cascade.org/"
          clientTwo={this.props.data.clientDragonFoundry.childImageSharp.fluid}
          clientTwoAlt="Dragon Foundry"
          clientTwoUrl="http://www.dragonfoundry.com/"
          clientThree={this.props.data.clientColumbia.childImageSharp.fluid}
          clientThreeAlt="Columbia University"
          clientThreeUrl="https://www.columbia.edu/"
          clientFour={this.props.data.clientCallidus.childImageSharp.fluid}
          clientFourAlt="Call<NAME>"
          clientFourUrl="https://www.calliduscloud.com/"
        />
        <Clients
          clientOne={this.props.data.clientPicturiffic.childImageSharp.fluid}
          clientOneAlt="Picturiffic"
          clientOneUrl="https://picturiffic.apsis.io/"
          clientTwo={this.props.data.clientArkatechture.childImageSharp.fluid}
          clientTwoAlt="Arkatechture"
          clientTwoUrl="https://www.arkatechture.com/"
          clientThree={
            this.props.data.clientThinkingBaseball.childImageSharp.fluid
          }
          clientThreeAlt="Thinking Baseball"
          clientThreeUrl="https://www.thinkingbaseball.com/"
          clientFour={this.props.data.clientCoverMyTest.childImageSharp.fluid}
          clientFourAlt="Cover My Test"
          clientFourUrl="https://covermytest.com/"
        />
        <About
          fluid={this.props.data.building.childImageSharp.fluid}
          alt="Our Seattle Office"
        />
      </Layout>
    );
  }
}
export default IndexPage;
// Reusable fragment for small client-logo images (140px wide, traced-SVG
// placeholder while loading).
export const fluidImage = graphql`
  fragment fluidImage on File {
    childImageSharp {
      fluid(maxWidth: 140) {
        ...GatsbyImageSharpFluid_tracedSVG
      }
    }
  }
`;
// Reusable fragment for full-width images (1920px, blur-up placeholder).
export const fluidHero = graphql`
  fragment fluidHero on File {
    childImageSharp {
      fluid(maxWidth: 1920) {
        ...GatsbyImageSharpFluid
      }
    }
  }
`;
// Page query: one entry per client logo, plus the hero banner and the
// office photo used in the About footer.
export const pageQuery = graphql`
  query {
    clientBoku: file(relativePath: { eq: "images/clients/boku.png" }) {
      ...fluidImage
    }
    clientNatera: file(relativePath: { eq: "images/clients/natera.png" }) {
      ...fluidImage
    }
    clientCallidus: file(relativePath: { eq: "images/clients/callidus.png" }) {
      ...fluidImage
    }
    clientWeber: file(relativePath: { eq: "images/clients/weber.png" }) {
      ...fluidImage
    }
    clientArkatechture: file(
      relativePath: { eq: "images/clients/arkatechture.png" }
    ) {
      ...fluidImage
    }
    clientCascade: file(relativePath: { eq: "images/clients/cascade.png" }) {
      ...fluidImage
    }
    clientColumbia: file(relativePath: { eq: "images/clients/columbia.png" }) {
      ...fluidImage
    }
    clientCoverMyTest: file(
      relativePath: { eq: "images/clients/covermytest.png" }
    ) {
      ...fluidImage
    }
    clientDragonFoundry: file(
      relativePath: { eq: "images/clients/dragonfoundry.png" }
    ) {
      ...fluidImage
    }
    clientPicturiffic: file(
      relativePath: { eq: "images/clients/picturiffic.png" }
    ) {
      ...fluidImage
    }
    clientThinkingBaseball: file(
      relativePath: { eq: "images/clients/thinkingbaseball.png" }
    ) {
      ...fluidImage
    }
    clientTray: file(relativePath: { eq: "images/clients/trayio.png" }) {
      ...fluidImage
    }
    hero: file(relativePath: { eq: "images/hero/hero3.jpg" }) {
      childImageSharp {
        fluid(maxWidth: 1400) {
          src
        }
      }
    }
    building: file(relativePath: { eq: "images/posts/jack-straw-office.jpg" }) {
      ...fluidHero
    }
  }
`;
|
# NOTE(review): this fragment assumes `dataset`, `train_test_split`,
# `CountVectorizer` and `MultinomialNB` are imported/defined earlier
# (scikit-learn) -- confirm against the full script.
# Column 0 holds the input samples, column 1 the target labels.
X = dataset.iloc[:, 0]
y = dataset.iloc[:, 1]
# Hold out 25% of the rows for testing; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
# Feature Extraction
# Bag-of-words counts; the vocabulary is fitted on the training split only,
# then reused (transform) on the test split to avoid leakage.
count_vector = CountVectorizer()
X_train = count_vector.fit_transform(X_train)
X_test = count_vector.transform(X_test)
# Fitting Naive Bayes to the Training set
classifier = MultinomialNB()
classifier.fit(X_train, y_train)
package knokko.blocks;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
// Convenience Block subclass bundling the common builder calls (name,
// hardness, resistance, creative tab, harvest tool/level) plus an optional
// custom drop item.
public class HyperBlock extends Block {
    // Item returned by getItemDropped; stays null until setDropItem is
    // called (presumably the block then drops nothing -- confirm against
    // the Block base-class contract).
    protected Item dropItem;

    public HyperBlock(Material material, String name, String tool, CreativeTabs tab, int hardness, int resistance, int toolLevel) {
        super(material);
        setUnlocalizedName(name);
        setHardness(hardness);
        setResistance(resistance);
        setCreativeTab(tab);
        setHarvestLevel(tool, toolLevel);
    }

    // Builder-style setter so construction can chain:
    // new HyperBlock(...).setDropItem(item)
    public HyperBlock setDropItem(Item item){
        dropItem = item;
        return this;
    }

    // NOTE(review): presumably overrides Block#getItemDropped -- confirm
    // and consider adding @Override.
    public Item getItemDropped(IBlockState state, Random random, int fortune){
        return dropItem;
    }
}
|
<gh_stars>0
#define HAS_VTK 1
#define DO_AND 1
#define DO_OR 2
#define DO_XOR 3
#define DO_ANOTB 4
#include "LaMaskBoolOperations.h"
#include <numeric>
/*
* Author:
* Dr. <NAME>
* Department of Biomedical Engineering, King's College London
* Email: <EMAIL> 'dot' <EMAIL>
* Copyright (c) 2017
*
* This application performs boolean ooerations on masks
*/
/*
 * Entry point: parses -i1/-i2/-o/-m arguments and applies the selected
 * boolean operation (1=AND, 2=OR, 3=XOR, 4=A-NOT-B) to the two input masks,
 * writing the result to the output file.
 */
int main(int argc, char * argv[])
{
    // Initialize pointers so an unmatched flag never leaves them dangling.
    char* input_f1 = nullptr, *output_f = nullptr, *input_f2 = nullptr;
    bool foundArgs1 = false, foundArgs2 = false, foundArgs3 = false;
    int method = DO_OR;

    // BUG FIX: was `argc >= 1`, which is always true (argv[0] is the program
    // name); the loop below only inspects argv[1..], so require argc > 1.
    if (argc > 1)
    {
        for (int i = 1; i < argc; i++) {
            if (i + 1 != argc) {
                if (string(argv[i]) == "-i1") {
                    input_f1 = argv[i + 1];
                    foundArgs1 = true;
                }
                else if (string(argv[i]) == "-o") {
                    output_f = argv[i + 1];
                    foundArgs2 = true;
                }
                else if (string(argv[i]) == "-i2") {
                    input_f2 = argv[i + 1];
                    foundArgs3 = true;
                }
                else if (string(argv[i]) == "-m") {
                    method = atoi(argv[i + 1]);
                }
            }
        }
    }

    if (!(foundArgs1 && foundArgs2 && foundArgs3))
    {
        // BUG FIX: "Cheeck" -> "Check" typo in the usage message.
        cerr << "Check your parameters\n\nUsage:"
            "\nPerforms boolean operations on two masks"
            "\n(Mandatory)\n\t-i1 <mask1> \n\t-i2 <mask2>\n\t-o <output filename>\n"
            "== Optional =="
            "\n\t-m <which operation> (1=AND, 2=OR, 3-XOR, 4-A_NOT_B)" << endl;
        exit(1);
    }
    else
    {
        LaImage* mask1 = new LaImage(input_f1);
        LaImage* mask2 = new LaImage(input_f2);
        LaMaskBoolOperations* algorithm = new LaMaskBoolOperations();
        algorithm->SetInputData(mask1);
        algorithm->SetInputData2(mask2);

        switch (method)
        {
        case DO_AND:
            algorithm->SetBooleanOperationToAND();
            break;
        case DO_OR:
            algorithm->SetBooleanOperationToOR();
            break;
        case DO_XOR:
            algorithm->SetBooleanOperationToXOR();
            break;
        case DO_ANOTB:
            algorithm->SetBooleanOperationToANOTB();
            break;
        }
        algorithm->Update();

        // BUG FIX: img_out was previously pre-allocated with `new LaImage()`
        // and then immediately overwritten by GetOutput(), leaking the
        // placeholder object.
        LaImage* img_out = algorithm->GetOutput();
        img_out->Export(output_f);
    }
}
#!/usr/bin/env sh
# Development environment bootstrap: IDE/tooling packages plus the ROS
# Noetic stacks needed for simulation, control and navigation.
apt-get update
apt-get install -y qtcreator
apt-get install -y clang-format
apt-get install -y libclang-common-8-dev
apt-get install -y vim
apt-get install -y tmux
apt-get install -y mc
apt-get install -y git
apt-get install -y git-gui
apt-get install -y ros-noetic-video-stream-opencv
apt-get install -y ros-noetic-teleop-twist-keyboard
apt-get install -y ros-noetic-rosparam-shortcuts
apt-get install -y ros-noetic-rqt-robot-steering
apt-get install -y ros-noetic-gazebo-ros-pkgs
apt-get install -y ros-noetic-gazebo-ros-control
apt-get install -y ros-noetic-ros-controllers
apt-get install -y ros-noetic-genpy
apt-get install -y libgpiod-dev
# NOTE(review): "ros-noetic-costmap-2" looks suspicious -- the navigation
# stack package is normally named "ros-noetic-costmap-2d"; confirm before
# relying on this line.
apt-get install -y ros-noetic-costmap-2
apt-get install -y ros-noetic-octomap-msgs
apt-get install -y ros-noetic-octomap-server
apt-get install -y ros-noetic-amcl
apt-get install -y ros-noetic-move-base
apt-get install -y ros-noetic-pointcloud-to-laserscan
# BUG FIX: -f makes the symlink idempotent; plain `ln -s` fails when
# /usr/bin/python already exists (e.g. on a re-run of this script).
ln -sf /usr/bin/python3 /usr/bin/python
|
package io.github.intellij.dlanguage.run;
import com.intellij.mock.MockVirtualFile;
import com.intellij.testFramework.UsefulTestCase;
/**
 * @author <NAME> (singingbush)
 * created on 19/03/2021
 */
public class DlangVirtualFileVisitorTest extends UsefulTestCase {

    // The visitor should collect D sources (.d and .di) and ignore
    // files with other extensions.
    public void testVisitFile() {
        final DlangVirtualFileVisitor visitor = new DlangVirtualFileVisitor(null);

        visitor.visitFile(new MockVirtualFile("source.d"));
        visitor.visitFile(new MockVirtualFile("source.di"));
        visitor.visitFile(new MockVirtualFile("source.java")); // not a D source

        assertEquals(2, visitor.getDlangSources().size());
        assertFalse(visitor.getDlangSources().stream().anyMatch(f -> f.contains("source.java")));
    }
}
|
#!/bin/bash
# Build the Kaldi GStreamer server image from the Dockerfile in the current
# directory and tag it v1.
docker build -t ydp/docker-kaldi-gstreamer-server:v1 .
|
/**
* Republish a single package to github `@types` mirror by name.
*
* Usage:
* $ node dist/republish-single-github-mirror-package.js aframe
*
* This program will fail if the github version of the package is up-to-date.
* You need to set the environment variable GH_API_TOKEN to a token with publish rights to the `@types` org on github.
*/
import yargs = require("yargs");
import { ChangedPackages } from "./lib/versions";
import { defaultLocalOptions } from "./lib/common";
import {
logUncaughtErrors,
loggerWithErrors,
logger,
NpmPublishClient,
UncachedNpmInfoClient,
withNpmCache,
Registry
} from "@definitelytyped/utils";
import { getLatestTypingVersion } from "@definitelytyped/retag";
import { getDefinitelyTyped, AllPackages } from "@definitelytyped/definitions-parser";
import generatePackages from "./generate-packages";
import { publishTypingsPackage } from "./lib/package-publisher";
// Entry point: runs only when this file is executed directly, not imported.
// NOTE(review): `module.parent` is deprecated in modern Node; consider
// `require.main === module` when this file is next touched.
if (!module.parent) {
  // Name of the DefinitelyTyped package to republish (CLI flag --name).
  const name = yargs.argv.name as string;
  console.log(name);
  logUncaughtErrors(async () => {
    const dt = await getDefinitelyTyped(defaultLocalOptions, loggerWithErrors()[0]);
    const allPackages = await AllPackages.read(dt);
    const changedPackages = await changeFromName(allPackages, name);
    // Regenerate the package artifacts on disk before publishing (no tgz).
    await generatePackages(dt, allPackages, changedPackages, /*tgz*/ false);
    // GH_API_TOKEN must grant publish rights on the github @types org.
    const ghClient = await NpmPublishClient.create(process.env.GH_API_TOKEN as string, {}, Registry.Github);
    await publishTypingsPackage(
      ghClient,
      changedPackages.changedTypings[0],
      /*dry*/ false,
      logger()[0],
      Registry.Github
    );
  });
}
/**
 * Builds a single-entry ChangedPackages for the latest typings version of
 * the named package. Throws when the package is unknown.
 */
async function changeFromName(allPackages: AllPackages, name: string): Promise<ChangedPackages> {
  const pkg = allPackages.tryGetLatestVersion(name);
  if (!pkg) {
    throw new Error("could not find package: " + name);
  }
  // Resolve the version through the (cached) npm registry info client.
  return withNpmCache(new UncachedNpmInfoClient(), async infoClient => ({
    changedTypings: [{ pkg, version: await getLatestTypingVersion(pkg, infoClient), latestVersion: undefined }],
    changedNotNeededPackages: []
  }));
}
|
# Coverage helper: instruments the jose library with bisect_ppx, runs the
# test suite under esy, and produces HTML + summary coverage reports.
echo removing _coverage dir
rm -rf _coverage
# -f keeps a missing junit.xml from printing an error on the first run.
rm -f junit.xml
echo patching dune file
# Inject the bisect_ppx preprocessor into jose/dune before its last line.
gsed -i '$i \(preprocess (pps bisect_ppx))\' jose/dune
echo running tests
BISECT_ENABLE=yes REPORT_PATH=./junit.xml esy test --force
esy echo "#{self.target_dir / 'default' / 'test'}"
cp $(esy echo "#{self.target_dir / 'default' / 'test' / 'junit.xml'}") ./junit.xml
echo copying bisect files
# BUG FIX: `[bisect]*` is a character class matching any file starting with
# one of the letters b/i/s/e/c/t; the intended pattern is the prefix glob.
cp $(esy echo "#{self.target_dir / 'default' / 'test'}")/bisect* ./
echo generating reports
esy bisect-ppx-report html
esy bisect-ppx-report summary
echo reseting files
# Undo the dune patch and remove instrumentation artifacts.
git checkout jose/dune
rm bisect*
rm test.{exe,ml}
|
#include<bits/stdc++.h>
using namespace std;
// Singly linked list node: integer payload plus next pointer
// (NULL-terminated).
struct ListNode {
    int val;
    ListNode *next;
    ListNode(int x) : val(x), next(NULL) {}
};
// Brute force Solution: for every node of list A, scan all of list B and
// return the first node present in both (compared by pointer identity).
// O(m*n) time, O(1) extra space; returns nullptr when the lists never meet.
ListNode *getIntersectionNode(ListNode *headA, ListNode *headB) {
    for (ListNode *a = headA; a != nullptr; a = a->next) {
        for (ListNode *b = headB; b != nullptr; b = b->next) {
            if (a == b) {
                return a;
            }
        }
    }
    return nullptr;
}
// Hashset or Unordered_Set Approach: record every node of list A by
// address, then return the first node of list B already seen.
// O(m+n) time, O(m) extra space; nullptr when the lists never meet.
ListNode *getIntersectionNode2(ListNode *headA, ListNode *headB) {
    unordered_set<ListNode *> seen;
    for (ListNode *node = headA; node != nullptr; node = node->next) {
        seen.insert(node);
    }
    for (ListNode *node = headB; node != nullptr; node = node->next) {
        if (seen.find(node) != seen.end()) {
            return node;
        }
    }
    return nullptr;
}
// No driver code here; the functions above are exercised elsewhere
// (e.g. by an online-judge harness).
int main() {
    return 0;
}
import SwiftUI
// Example usage of the SearchBar in a SwiftUI view
// Example usage of the SearchBar in a SwiftUI view
struct ContentView: View {
    // Backing storage for the search field; kept in sync by SearchBar's
    // binding, so the Text below always shows the latest input.
    @State private var searchText: String = ""

    var body: some View {
        VStack {
            SearchBar(text: $searchText, onSearchButtonClicked: performSearch)
            Text("Search Text: \(searchText)")
        }
    }

    // Invoked by SearchBar when the search button is tapped; searchText
    // already holds the latest input at that point.
    func performSearch() {
        // Implement search functionality based on the searchText
        // This method will be called when the search button is clicked
        print("Performing search for: \(searchText)")
    }
}
import pandas as pd
def clean_and_merge_data(full_data08, full_data12, new_13):
    """Merge the 2008-era and 2012-era datasets into one frame.

    The stale year-13 rows in ``full_data12`` are replaced by ``new_13``,
    two-digit years are promoted to four digits, and the 2008 frame is
    aligned (zipcode column renamed, first row dropped) before the final
    concatenation.  The leftover CSV index column "Unnamed: 0" is removed.
    """
    # Swap the year-13 rows for the corrected new_13 frame.
    kept12 = full_data12[full_data12.year != 13]
    merged12 = pd.concat([new_13, kept12], join="inner")
    # Promote two-digit years (e.g. 12) to four digits (2012).
    merged12["year"] = merged12["year"] + 2000
    # Align the 2008 frame: unify the zipcode column name and drop the
    # leading row (presumably a stray header/junk row -- confirm upstream).
    data08 = full_data08.rename(columns={"zip code": "zipcode"})
    data08 = data08.iloc[1:]
    combined = pd.concat([data08, merged12], join="inner")
    # "Unnamed: 0" is the index column left over from a to_csv round-trip.
    return combined.drop(["Unnamed: 0"], axis=1)
/*:
* @target MZ
* @author <NAME>
* @plugindesc The gear engine plugin who handle the core of a crafting system
*
* @param recipes
* @desc The crafting recipes database
* @type struct<craftingRecipe>[]
* @default []
*/
/*~struct~craftingRecipe:
* @param header
* @desc the core data of a recipe
* @type struct<headerStruct>
*
* @param conditions
* @desc the recipe required ingredients
* @type struct<craftingCondition>[]
* @default []
*
* @param results
* @desc the list of items you would receive on result
 * @type struct<craftingResult>[]
* @default []
*/
/*~struct~headerStruct:
*
* @param name
* @type text
* @text name
* @desc the recipe name
*
 * @param iconIndex
* @type number
* @text icon index
* @desc the recipe icon
*
* @param description
* @type text
* @text description
* @desc the recipe description
*
* @param type
* @desc the recipe type
* @type select
* @default ITEM
* @option ALL
* @value 0
* @option ITEM
* @value 1
* @option ARMOR
* @value 2
* @option WEAPON
* @value 3
*/
/*~struct~craftingCondition:
*
* @param id
* @type number
* @text id
* @desc the element id to fetch (such as item , armor etc)
*
* @param type
* @desc the item type
* @type select
* @default ITEM
* @option ALL
* @value ALL
* @option ITEM
* @value ITEM
* @option ARMOR
* @value ARMOR
* @option WEAPON
* @value WEAPON
*
* @param amount
* @type number
* @text amount
* @desc the number of ingredient required
*/
/*~struct~craftingResult:
*
* @param id
* @type number
* @text id
* @desc the element id to fetch (such as item , armor etc)
*
* @param type
* @desc the item type
* @type select
* @default ITEM
* @option ALL
* @value ALL
* @option ITEM
* @value ITEM
* @option ARMOR
* @value ARMOR
* @option WEAPON
* @value WEAPON
*
* @param amount
* @type number
* @text amount
* @desc the number of ingredient required
*/
//=============================================================================
// ** NOTICE **
//-----------------------------------------------------------------------------
// The code below is generated by a compiler, and is not well suited for human
// reading. If you are interested on the source code, please take a look at
// the Github repository for this plugin!
//=============================================================================
this.Gear = this.Gear || {};
this.Gear.CraftingCore = (function (exports) {
    'use strict';
    /**
     * Plugin configuration. Resolves the raw plugin parameters through the
     * Gem param manager and caches them on Config.PARAMS.
     */
    class Config {
        static init() {
            const rawParams = Gem.ParamManager.find();
            this.PARAMS = Gem.ParamManager.register(this.pluginName, rawParams);
        }
    }
    Config.pluginName = "gear-craftingCore";
    // Numeric item-type enum (ALL=0, ITEM=1, ARMOR=2, WEAPON=3).
    // NOTE(review): the craftingCondition/craftingResult plugin structs declare
    // string @values ("ITEM", ...) while this enum is numeric — this assumes the
    // param manager normalizes them to numbers; confirm, otherwise
    // fetchItemData() will always hit the throwing default branch.
    (function (ItemType) {
        ItemType[ItemType["ALL"] = 0] = "ALL";
        ItemType[ItemType["ITEM"] = 1] = "ITEM";
        ItemType[ItemType["ARMOR"] = 2] = "ARMOR";
        ItemType[ItemType["WEAPON"] = 3] = "WEAPON";
    })(exports.ItemType || (exports.ItemType = {}));
    /**
     * Runtime crafting model: holds the recipe list read from the plugin
     * parameters and performs the craft (consume ingredients, grant results).
     */
    class Game_Crafting {
        constructor() {
            this.initialize(...arguments);
        }
        initialize(...args) {
            this.buildRecipesList();
        }
        // Load the recipe definitions from the registered plugin parameters.
        buildRecipesList() {
            this._recipes = Config.PARAMS.recipes;
        }
        // The party's plain-item inventory ($gameParty.items() — armors and
        // weapons are not included in this list).
        inventory() {
            return window.$gameParty.items();
        }
        recipe(id) {
            return this._recipes[id];
        }
        name(id) {
            return this._recipes[id].header.name;
        }
        icon(id) {
            return this._recipes[id].header.iconIndex;
        }
        recipeType(id) {
            return this._recipes[id].header.type;
        }
        conditions(id) {
            return this._recipes[id].conditions;
        }
        condition(id, conditionId) {
            return this._recipes[id].conditions[conditionId];
        }
        results(id) {
            return this._recipes[id].results;
        }
        result(id, resultId) {
            return this._recipes[id].results[resultId];
        }
        /**
         * True when every ingredient of the recipe is held in sufficient
         * quantity. NOTE(review): only ids are matched — condition.type is
         * ignored and inventory() excludes armors/weapons, so armor/weapon
         * ingredients can never satisfy a condition; confirm this is intended.
         */
        canCraft(id, ...args) {
            const { conditions } = this._recipes[id];
            return conditions.every((condition) => {
                return this.inventory().some((item) => {
                    return item.id === condition.id && window.$gameParty.numItems(item) >= condition.amount;
                });
            });
        }
        isItem(id) {
            return this._recipes[id].header.type === exports.ItemType.ITEM;
        }
        isWeapon(id) {
            return this._recipes[id].header.type === exports.ItemType.WEAPON;
        }
        isArmor(id) {
            return this._recipes[id].header.type === exports.ItemType.ARMOR;
        }
        // Consume every ingredient of the recipe, then grant its results.
        onCraft(id, ...args) {
            const { conditions } = this._recipes[id];
            for (let i = 0; i < conditions.length; i++) {
                const item = this.fetchItemData(this.condition(id, i), conditions[i].id);
                window.$gameParty.loseItem(item, conditions[i].amount, false);
            }
            this.onResults(id);
        }
        // Grant every result entry of the recipe to the party.
        onResults(id, ...args) {
            const { results } = this._recipes[id];
            for (let i = 0; i < results.length; i++) {
                const item = this.fetchItemData(this.result(id, i), results[i].id);
                window.$gameParty.gainItem(item, results[i].amount);
            }
            this.onEndCrafting();
        }
        // Hook invoked once a craft has fully resolved.
        onEndCrafting(...args) {
            console.log("crafting done!");
        }
        /**
         * Resolve a condition/result entry to its database object.
         * @throws {Error} when the entry's type is not ITEM/ARMOR/WEAPON.
         */
        fetchItemData(item, id) {
            switch (item.type) {
                case exports.ItemType.ITEM:
                    return window.$dataItems[id];
                case exports.ItemType.ARMOR:
                    return window.$dataArmors[id];
                case exports.ItemType.WEAPON:
                    // Bug fix: previously returned $dataArmors[id] here, so
                    // crafting a weapon consumed/granted the wrong database entry.
                    return window.$dataWeapons[id];
                default:
                    throw new Error("no such item in the database!");
            }
        }
        /**
         * Index of the first recipe whose header.name equals `name`.
         * NOTE(review): returns 0 when no recipe matches, which is
         * indistinguishable from a genuine match at index 0.
         */
        findIndexByName(name) {
            let index = 0;
            for (let i = 0; i < this._recipes.length; i++) {
                if (this._recipes[i].header.name === name) {
                    index = i;
                    break;
                }
            }
            return index;
        }
    }
    Config.init();
    exports.Config = Config;
    exports.Game_Crafting = Game_Crafting;
    return exports;
}({}));
//# sourceMappingURL=gear-crafting-core.debug.js.map
|
/**
 * Qualifying "other" medical conditions shown to the client.
 * Entry text is displayed verbatim, so wording and order are significant.
 */
export const otherConditions = [
  "Acquired Immune Deficiency Syndrome (AIDS)",
  "Agitation of Alzheimer’s Disease",
  "Arnold-Chiari Malformation and Syringomelia",
  "Cachexia/Wasting Syndrome",
  "Causalgia",
  "Chronic Inflammatory Demyelinating Polyneuropathy",
  "CRPS (Complex Regional Pain Syndromes Type II)",
  "Dystonia",
  "Hepatitis C",
  "Hydrocephalus",
  "Interstitial Cystitis",
  "Lupus",
  "Muscular Dystrophy",
  "Myasthenia Gravis",
  "Myoclonus",
  "Nail-Patella Syndrome",
  "Neurofibromatosis",
  "Parkinson’s Disease",
  "Post Traumatic Stress Disorder (PTSD)",
  "Reflex Sympathetic Dystrophy",
  "Residual Limb Pain",
  "RSD (Complex Regional Pain Syndromes Type I)",
  "Severe Fibromyalgia",
  "Sjogren’s Syndrome",
  "Spinal Cord Disease, including but not limited to:",
  "Spinocerebellar Ataxia (SCA)",
  "Terminal Illness",
  "Tourette’s Syndrome",
]
/**
 * Spinal-cord-related conditions, displayed under the
 * "Spinal Cord Disease, including but not limited to:" entry.
 */
export const spinalConditions = [
  // Bug fix: was 'rachnoiditis' — missing leading 'A' (the condition is
  // Arachnoiditis, inflammation of the arachnoid membrane).
  'Arachnoiditis',
  'Fibrous Dysplasia',
  'Hydromyelia',
  'Post-Concussion Syndrome',
  'Rheumatoid Arthritis (RA)',
  'Spinal Cord Injury',
  'Syringomyelia',
  'Tarlov Cysts',
  'Traumatic Brain Injury (TBI)'
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.