text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
usage() {
    # Print a short usage summary and stop with a non-zero status.
    echo "Usage: $0 [-u]"
    exit 1
}
make_sdist() {
    # Build a source distribution into ./dist using setuptools.
    python setup.py sdist
}
sign_sdist() {
    # Create an ASCII-armored detached GPG signature next to the tarball.
    #   $1 = package name, $2 = version
    local tarball="dist/${1}-${2}.tar.gz"
    gpg --detach-sign -a "$tarball"
}
upload_sdist() {
    # Upload the sdist tarball (and its .asc signature, via the glob) with twine.
    #   $1 = package name, $2 = version, $3 = optional alternate repository URL
    if [ -n "${3}" ]; then
        repo="--repository-url ${3}"
    fi
    # ${repo} stays unquoted on purpose: it must word-split into two arguments.
    twine upload ${repo} dist/${1}-${2}.tar.gz*
}
# --- Main ---
# Builds, signs, and optionally uploads (-u) an sdist for $pkg.
# A positional argument after the options may give an alternate repository URL.
pkg=pydrac
version=$(cat VERSION)
upload=false
while getopts :uh opt; do
    case "$opt" in
        h) usage ;;
        u) upload=true ;;
        \?) echo "Unknown option $OPTARG"; exit 2 ;;
        :) echo "Option -$OPTARG requires an argument."; exit 2 ;;
    esac
done
# Drop the parsed options so any repository URL is now "$1".
shift $((OPTIND - 1))
read -r -p "Build sdist for $pkg v$version ? (^C to abort)"
make_sdist
sign_sdist "$pkg" "$version"
$upload && {
    read -r -p "Upload sdist ${pkg}-${version} ? (^C to abort)"
    # Fixed: previously called `upload_sdist ${repo} "$pkg" "$version" "${2}"`.
    # ${repo} is unset here (it belongs to upload_sdist), and without shifting
    # past the options "${2}" did not reliably hold the repository URL.
    upload_sdist "$pkg" "$version" "${1}"
}
echo "All done."
|
/*
* This file is part of inertia, licensed under the MIT License (MIT).
*
* Copyright (c) vectrix.space <https://vectrix.space/>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package space.vectrix.inertia.util;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import space.vectrix.inertia.util.functional.ThrowableConsumer;
import space.vectrix.inertia.util.functional.ThrowableFunction;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * Default {@code CustomIterator} implementation: lazily maps elements of a
 * backing iterator through {@code mapper}, drops {@code null} originals and
 * {@code null} mapped values, and applies an accumulating filter built up via
 * {@link #with(Predicate)} / {@link #without(Predicate)}. One element of
 * look-ahead ({@code next}) is kept so {@code hasNext()} is accurate.
 */
/* package */ final class CustomIteratorImpl<T, E> implements CustomIterator<E> {
  private final Iterator<T> backingIterator;
  private final Function<T, E> mapper;
  private final Consumer<E> remove;
  private Predicate<E> filter = ignored -> true;
  private E next;    // pre-fetched element, null once exhausted
  private E current; // last element returned by next(); null after remove()

  /* package */ CustomIteratorImpl(final @NonNull Iterator<@NonNull T> backingIterator,
                                   final @NonNull ThrowableFunction<@NonNull T, @Nullable E, ? extends Throwable> mapper,
                                   final @NonNull ThrowableConsumer<@NonNull E, ? extends Throwable> remove) {
    this.backingIterator = backingIterator;
    this.mapper = mapper;
    this.remove = remove;
    this.next = this.nextValue();
  }

  @Override
  public @NonNull CustomIterator<E> with(final @NonNull Predicate<? super E> predicate) {
    this.filter = this.filter.and(predicate);
    // Re-check the pre-fetched element against the tightened filter.
    this.next = this.retryValue();
    return this;
  }

  @Override
  public @NonNull CustomIterator<E> without(final @NonNull Predicate<? super E> predicate) {
    this.filter = this.filter.and(predicate.negate());
    this.next = this.retryValue();
    return this;
  }

  @Override
  public boolean hasNext() {
    return this.next != null;
  }

  @Override
  public @NonNull E next() {
    if((this.current = this.next) == null) throw new NoSuchElementException();
    this.next = this.nextValue();
    return this.current;
  }

  @Override
  public void remove() {
    if(this.current == null) throw new IllegalStateException();
    this.remove.accept(this.current);
    this.current = null;
  }

  /** Advances the backing iterator to the next acceptable mapped value, or null. */
  private @Nullable E nextValue() {
    T next;
    E value;
    while(this.backingIterator.hasNext()) {
      if((next = this.backingIterator.next()) != null
          && (value = this.mapper.apply(next)) != null
          && this.filter.test(value)) {
        return value;
      }
    }
    return null;
  }

  /** Re-validates the pre-fetched element after the filter changed. */
  private @Nullable E retryValue() {
    // Fixed: when iteration was already exhausted (next == null) the old code
    // passed null straight into the user-supplied predicate, which could
    // throw a NullPointerException. Guard before testing.
    final E candidate = this.next;
    if(candidate != null && this.filter.test(candidate)) return candidate;
    return this.nextValue();
  }
}
|
// $URL: svn+ssh://mrwhat@ssh.boim.com/home/mrwhat/svn/Walker/trunk/com/boim/walker/WalkerMetric.java $
// $Id: WalkerMetric.java 402 2014-02-21 17:20:14Z mrwhat $
// Implement a QualityMetric for optimization of a Walker linkage.
// This should be abstract, but for now it may have a little
// code specific to Klann or Jansen linkages
package com.boim.walker;
import java.util.Arrays;
import com.boim.optimize.*;
/**
 * Quality metric for optimizing a Walker leg linkage (Klann or Jansen style).
 * Simulates one full crank revolution, extracts stride statistics from the
 * foot ("G") path, and combines them into a single scalar quality value
 * (larger is better). Intended to be abstract eventually; currently carries
 * some Klann/Jansen-specific assumptions.
 */
public class WalkerMetric implements QualityMetric {
public WalkerLinkage _linkage;
public int _nSteps; // crank-angle samples per revolution
public double _contactThresh; // how close to ground to be "on ground"
public double _crankLen; // let crank length be fixed.
public String desc; // human-readable summary of the last metric() evaluation
public double _reachWeight, _dutyCycleWeight, _clearanceDistanceWeight,
_stepHeightWeight, _lenWeight, _trackWeight, _orbitLenWeight;
// _footSpeedWeight, _flatnessWeight
public TankTrackMetric _stepMetric;
public NelderMeadSimplex _stepOptimizer;
/**
 * Builds the metric around a linkage and fixes the weighting constants and
 * the inner track-fit optimizer. Units are centimeters per the comments below.
 */
public WalkerMetric(WalkerLinkage wl)
{
_linkage = wl;
_nSteps = 128;
_contactThresh = 4; // cm from lowest to be considered on ground
_crankLen = 17.5; // cm
// _linkage.scale(_crankLen/_linkage._AC);
_reachWeight = 2; // how much to weight each stat in metric
_dutyCycleWeight = 8;
//_footSpeedWeight = 2;
//_flatnessWeight = 3;
_stepHeightWeight = 3;
_lenWeight = 1;
_trackWeight = 3;
_orbitLenWeight = 1;
_clearanceDistanceWeight = 1;
_stepMetric = new TankTrackMetric(_contactThresh);
_stepOptimizer = new NelderMeadSimplex();
_stepOptimizer.searchForMinima();
_stepOptimizer._verbose=0; // turn off ALL diagnostic output
_stepOptimizer._startStep = 50; // very small initial search step, relative to _verySmall. Initial guess should be good
_stepOptimizer._verySmall = new double [] {0.01, 0.01, 0.01, 0.001, 0.01}; // xoffset(cm), yoffset(cm), stride-length(cm), stride-rate(cm/step), initial-step-offset(cm)
}
/**
 * Per-cycle stride statistics computed from the sampled foot path G.
 * Weighted stats emphasize samples near the bottom of the step.
 */
private class StrideStats {
public double xHi, xLo, dxMean, dxVar, dutyCycle, yVar, stepMedian, orbitLen;
public int startIdx, stopIdx; // index (modulo) where ground contact starts and stops
public double _stepPath[][]; // foot positions during the ground-contact phase
// G: sampled foot positions [i][0]=x, [i][1]=y; yMin: lowest y seen;
// contactTol: height band above yMin still counted as ground contact.
StrideStats(double[][] G, double yMin, double contactTol) {
orbitLen = orbitLength(G);
double yThresh = yMin+contactTol; // yMin is negative... move up for ground contact threshhold
double[] dx = findDutyCycle(G, yThresh);
int nActiveSteps = stopIdx-startIdx;
if (nActiveSteps < 0) nActiveSteps += G.length; // contact phase wraps around the cycle
nActiveSteps++;
//System.err.format("Step range %d..%d (%d)%n", startIdx, stopIdx, nActiveSteps);
dutyCycle = ((double)nActiveSteps) / G.length;
_stepPath = new double[nActiveSteps][];
// Degenerate defaults; overwritten below when the cycle is usable.
xHi = xLo = dxMean = 0;
stepMedian = 1;
dxVar = yVar = 100;
int n=0;
dxVar = yVar = 0;
xHi = -999;
xLo = 999;
if (startIdx < 0) return; // degenerate
double sw=0; // sum of weights, for WEIGHTED stats
double yBar = 0;
int i = startIdx-1;
// do Y stats over duty cycle only
while (i != stopIdx) {
i++;
if (i >= G.length) i=0;
// weight relative to how close we are to bottom of step
double w = (yThresh-G[i][1]+0.4*contactTol)/(contactTol*1.4);
double y = G[i][1];
if (n < nActiveSteps)
_stepPath[n++] = G[i].clone();
else
System.err.println("How did n get out of range?");
if (G[i][0] < xLo) xLo = G[i][0];
if (G[i][0] > xHi) xHi = G[i][0];
yBar += w*y;
yVar += w*y*y;
sw += w;
}
if (dutyCycle < 0.4) {
yVar = 100;
return; // degenerate
}
yBar /= sw;
yVar = (yVar/sw) - (yBar*yBar); // weighted variance: E[y^2] - E[y]^2
// do dx stats for any position under the threshhold
sw=0;
for (n=i=0; i < G.length; i++) {
if (G[i][1] < yThresh) {
// weight relative to how close we are to bottom of step
double w = (yThresh-G[i][1]+0.4*contactTol)/(contactTol*1.4);
//double y = G[i][1];
//if (G[i][0] < xLo) xLo = G[i][0];
//if (G[i][0] > xHi) xHi = G[i][0];
double dxi = dx[i];
dxMean += w*dxi;
dxVar += w*dxi*dxi;
sw += w;
n++;
}
}
dxMean /= sw;
dxVar = (dxVar/sw)-(dxMean*dxMean);
// compute median step height, when NOT in ground contact
double[] yStep = new double[G.length-nActiveSteps];
i = stopIdx+1;
if (i >= G.length) i=0;
n=0;
while (i != startIdx) {
double y = G[i][1];
if (y==0) // error code
return;
yStep[n++] = y-yBar;
i++;
if (i >= G.length) i=0;
}
Arrays.sort(yStep);
stepMedian = yStep[(G.length-nActiveSteps)/2];
}
// Perimeter of the closed foot path (includes the closing segment).
private double orbitLength(double[][] G) {
double dTot = dist(G[0],G[G.length-1]);
for (int i=1; i < G.length; i++)
dTot += dist(G[i-1],G[i]);
return dTot;
}
// Euclidean distance between two points; returns -1.0 on dimension mismatch
// (NOTE(review): callers here never check for that sentinel — confirm intent).
public double dist(double[] a, double[] b) {
if (a.length != b.length)
return -1.0;
double d = 0;
for (int i=0; i < a.length; i++) {
double di = a[i] - b[i];
d += di*di;
}
d = Math.sqrt(d);
return d;
}
// Computes central-difference dx per sample and sets startIdx/stopIdx to the
// contact phase (foot below yThresh AND moving in the dominant walking
// direction). Sets both to -1 for degenerate cycles.
private double[] findDutyCycle(double[][] G, double yThresh) {
// let's define duty cycle to part of cycle where foot is
// moving in the "forward" direction, AND is below yThresh.
int n = G.length;
double[] dx = new double[n];
dx[0] = G[1][0] - G[n-1][0]; // use both neighbors, actually 2*dx
dx[n-1] = G[0][0] - G[n-2][0];
double dxWalking = 0; // average of dx while foot is low, assume this is "walking" direction
int i;
for (i=1; i < n-1; i++) {
dx[i] = G[i+1][0] - G[i-1][0];
if (G[i][1] < yThresh) dxWalking += dx[i];
}
stopIdx = startIdx = -1;
// start in a non-walking part of cycle
for (i=0; i < n; i++) {
if (G[i][1] > yThresh) {
startIdx = i;
break;
}
}
if (i >= n) {
startIdx = stopIdx = -1; // degenerate cycle
return dx;
}
// search forward until we see a walking sample
for (i=0; i < n; i++) {
int i0 = moduloCount(i+startIdx,n);
if ( (dx[i0] * dxWalking > 0) && (G[i0][1] < yThresh) ) {
startIdx = i0;
break;
}
}
if (i >= n) {
startIdx = stopIdx = -1; // degenerate cycle
return dx;
}
// keep scanning forward until we see a non-walking sample
for (i=0; i < n; i++) {
int i0 = moduloCount(i+startIdx,n);
if ( (dx[i0] * dxWalking > 0) && (G[i0][1] < yThresh) ) {
stopIdx = i0;
} else {
break;
}
}
return dx;
}
// Wraps ii into [0, n) by repeated addition/subtraction.
private int moduloCount(int ii, int n) {
int i = ii;
while (i >= n) i -= n;
while (i < 0) i += n;
return i;
}
}
/**
 * Evaluates linkage quality for the parameter vector x (larger is better).
 * Samples one crank revolution, collects StrideStats, fits the inner
 * tank-track model, and multiplies/divides the weighted factors together.
 * Also fills in `desc` with a human-readable summary. A foot position of
 * (0,0) from the linkage is treated as a non-physical error code.
 */
public double metric(double[] x)
{
setState(x);
double nonPhysicalPenalty = 1.0;//_linkage.check(); // >1 if paramaters (nearly) exceed physical limits
boolean nonPhysical = false; // true if THIS linkage found to be non-physical
double GyMin = 999;
double GyMax =-999;
double GxMin = 999;
double GxMax =-999;
double undesirableGeometry = 0;
// This was added specificly to keep Jansen CH link away from B axle.
// if there is not a similar bumping-into-other links on other linkages, just have this method
// always return 1.0
double linkClearanceDist = 99; // how far links are from bumping into each other
double[][] G = new double[_nSteps][2];
int i;
for (i=0; i < _nSteps; i++) {
double theta = (i * 2 * Math.PI) / _nSteps;
_linkage.update(theta);
double cd = _linkage.clearanceDistance();
if (cd < linkClearanceDist)
linkClearanceDist = cd; // note closest (worst) clearance
G[i] = _linkage.getPos('G');
if (G[i][1] == 0) { // non-physical
nonPhysicalPenalty *= 1.1;
if (!nonPhysical) { // check if already added
// nonPhysical = true; _linkage._badList.add(getState());
}
} else {
if (G[i][1] < GyMin) GyMin = G[i][1];
if (G[i][1] > GyMax) GyMax = G[i][1];
if (G[i][0] < GxMin) GxMin = G[i][0];
if (G[i][0] > GxMax) GxMax = G[i][0];
// Check for "undesirable" geometry at current crank angle
double de = _linkage.undesirable();
if (de > 0)
undesirableGeometry += de; //Math.pow(de,.25)*0.01;
}
}
//double stridePeak = GyMax-GyMin;
double thresh = GyMin;
if (thresh > -20) {
// for these walkers, anything higher than that
// must be from an error, that reports 0,0 foot location
thresh = -20;
}
StrideStats stat = new StrideStats(G,thresh,_contactThresh);
double trackQ = 99;
double len = 1;
double reach = 1;
if (stat.startIdx >= 0) {
// use a metric of how tank-like the stride is instead
// of the footSpeed/flatness measures
_stepMetric.setMeasurement(stat._stepPath);
double[] stepStats = _stepMetric.getState();
stepStats = _stepOptimizer.optimize(_stepMetric,stepStats);
trackQ = _stepMetric.metric(stepStats);
// try to make metric a "quality" metric, the larger the better
len = stat.xHi-stat.xLo; // stride length
//len = Math.abs(stepStats[2]); // flat part of track-model fit
// for now, default linkages are always LHP.
// so the worst reach position is: (we want this far from axis)
reach = Math.abs(stat.xHi / GyMin);
// we should try to add a metric for how simple/smooth
// the return path is, to avoid those over-fits with odd foot motion
}
double reachFactor = (reach < .01) ? 1 : Math.pow(reach*100,_reachWeight);
double dutyCycleFactor = Math.pow(stat.dutyCycle,_dutyCycleWeight);
//double footSpeedFactor = Math.pow(stat.dxVar,_footSpeedWeight);
//double flatnessFactor = Math.pow(stat.yVar,_flatnessWeight);
double stepHeightFactor = Math.pow(stat.stepMedian,_stepHeightWeight); // need to weight this heavily for Jansen, which tends toward a flat step.
double lenFactor = Math.pow(len, _lenWeight);
double trackQfactor = Math.pow(trackQ,_trackWeight);
double orbitLenFactor = Math.pow(stat.orbitLen, _orbitLenWeight); // attempt at avoiding odd, high foot motions... worked only somewhat.
double clearanceFactor = Math.pow(linkClearanceDist, _clearanceDistanceWeight);
double q = lenFactor * reachFactor;
q *= stepHeightFactor;
q *= dutyCycleFactor;
//q /= footSpeedFactor; // consistency of stride speed
//q /= flatnessFactor; // flatness of stride
q /= trackQfactor;
q /= orbitLenFactor;
q *= clearanceFactor;
// provide a metric "Description" which might be
// helpful to humans needing to interpret linkage "quality"
// is a less rigorous manner
//desc = String.format("duty=%.1f speedSD=%.1f flatSD=%.2f reach=%.1f stepHeight=%.1f len=%.1f",
// stat.dutyCycle*100,
// Math.sqrt(stat.dxVar)*_nSteps, // variance/cycle (not step)
// Math.sqrt(stat.yVar),reach*100,stat.stepMedian,len);
desc = String.format("duty=%.1f trackFit=%.2f reach=%.1f stepHeight=%.1f len=%.1f",
stat.dutyCycle*100,trackQ,reach*100,stat.stepMedian,len);
if (nonPhysicalPenalty > 1.0) {
double pp = intPow(nonPhysicalPenalty, 4);
q /= pp;
}
if (undesirableGeometry > 1.0) {
double pp = Math.pow(undesirableGeometry,.2);
q /= pp;
}
//System.out.printf("%g %f %f %f %f %f %f %f %f %f %f %f %f%n",
// q,x[0],x[1],x[2],x[3],x[4],x[5],x[6],x[7],x[8],x[9],x[10],x[11]);
return(q);
}
/** Integer power x^k by repeated multiplication; returns 1.0 for k <= 0. */
public static double intPow(double x, int k)
{
if (k<=0) return(1.0);
double z = x;
for (int i=1; i < k; i++) z *= x;
return z;
}
/** Pushes the optimizer's parameter vector into the linkage. */
public void setState(double[] x) {
_linkage.setState(x);
}
/** Returns the linkage's current parameter vector. */
public double[] getState() {
double[] x = _linkage.getState();
return x;
}
}
|
#!/bin/sh
# Torque/PBS batch job: run FreeSurfer xhemi registration (xhemireg + surfreg)
# for one subject selected from the IDs array.
#PBS -q batch
#PBS -l walltime=500:00:00
#PBS -l nodes=1:ppn=1
#PBS -l pmem=14gbs
# NOTE(review): "14gbs" is not a standard PBS memory unit — looks like a typo
# for "14gb"; confirm against the scheduler's accepted units.
# -- run in the current working (submission) directory --
cd $PBS_O_WORKDIR
chmod g=wx $PBS_JOBNAME
export SUBJECTS_DIR=/home/pmurphy/meg_data/surprise/MRIs/fs_converted/
IDs=( 'KSV' 'EXJ' 'TSJ' 'JTB' 'EXF' 'ECB' 'EMB' 'TFD' 'GSB' 'EXG' 'OMF' 'NIF' 'DHB' 'HBC' 'QNV' 'DCB' 'TNB' 'PDP')
# NOTE(review): "var11" below is presumably substituted with a subject index by
# a submission wrapper (e.g. qsub -v) — verify; as written it is an unset name.
# All three commands redirect to the SAME ...1.out/1.err files with ">", so
# each step overwrites the previous step's log — confirm that is intended.
xhemireg --s ${IDs[var11]} 1> /home/pmurphy/meg_data/surprise/MRIs/fs_converted/Log_files/"$PBS_JOBID"1.out 2> /home/pmurphy/meg_data/surprise/MRIs/fs_converted/Log_files/"$PBS_JOBID"1.err
surfreg --s ${IDs[var11]} --t fsaverage_sym --lh 1> /home/pmurphy/meg_data/surprise/MRIs/fs_converted/Log_files/"$PBS_JOBID"1.out 2> /home/pmurphy/meg_data/surprise/MRIs/fs_converted/Log_files/"$PBS_JOBID"1.err
surfreg --s ${IDs[var11]} --t fsaverage_sym --lh --xhemi 1> /home/pmurphy/meg_data/surprise/MRIs/fs_converted/Log_files/"$PBS_JOBID"1.out 2> /home/pmurphy/meg_data/surprise/MRIs/fs_converted/Log_files/"$PBS_JOBID"1.err
|
/**
* Launcher classes to start up new HUD windows.
*/
package io.opensphere.hud.launcher;
|
#!/bin/sh
# Deploys the TechCo sample app to Oracle Java Cloud Service:
# reads credentials from environment.properties, looks up JCS/DBCS public IPs,
# prepares the database, templates the WLST deploy script, and runs it remotely.
file="../cloud-utils/environment.properties"
if [ -f "$file" ]
then
    echo "$file found."
    # Parse key=value pairs from the properties file into shell variables.
    while IFS="=" read -r key value; do
        case "$key" in
            "opc.identity.domain") OPC_DOMAIN="$value" ;;
            "dbcs.dba.password") DBA_PASS="$value" ;;
            "ssh.privatekey") PK_FILE="$value" ;;
            "jcs.instance.admin.user.1") WLS_USER="$value" ;;
            "jcs.instance.admin.password.1") WLS_PASSWORD="$value" ;;
            "jcs.instance.1") JCS_INSTANCE="$value" ;;
            "dbcs.instance.1") DBCS_INSTANCE="$value" ;;
        esac
    done < "$file"
else
    echo "$file not found."
    # Fixed: plain `exit` returned status 0 here, so callers could not detect
    # the missing-configuration failure.
    exit 1
fi
echo "Identity domain: $OPC_DOMAIN"
cd ../cloud-utils
# Scrape the public IPs from the maven helper goals' output.
JCS_IP=$(mvn -Dgoal=jcs-get-ip | awk '/Public IP address of the JCS instance: /{print $NF}')
echo "Java Cloud Service Public IP address: $JCS_IP"
DBCS_IP=$(mvn -Dgoal=dbcs-get-ip | awk '/Public IP address of the DBCS instance: /{print $NF}')
echo "Database Cloud Service Public IP address: $DBCS_IP"
cd ../techco-app
mvn install
./init-dbcs-pdb.sh system $DBA_PASS ../$PK_FILE $DBCS_IP
echo "Database Cloud Service has been prepared."
# Fill the WLST template's @placeholders@ with the values read above.
cp etc/wlst/deploy-TechCo.py.template etc/wlst/deploy-TechCo.py
sed "s|@wls.admin.user@|$WLS_USER|g; s|@wls.admin.password@|$WLS_PASSWORD|g; s|@database.dba.pass@|$DBA_PASS|g; s|@jcs.instance@|$JCS_INSTANCE|g; s|@database.instance.1@|$DBCS_INSTANCE|g; s|@identity.domain@|$OPC_DOMAIN|g;" -i etc/wlst/deploy-TechCo.py
# Ship the war and deploy scripts to the JCS host and run the deployment there.
scp -oStrictHostKeyChecking=no -i ../$PK_FILE target/TechCo-ECommerce-1.0-SNAPSHOT.war etc/wlst/deploy-TechCo.py etc/wlst/deploy-TechCo.sh opc@$JCS_IP:/tmp
ssh -oStrictHostKeyChecking=no -i ../$PK_FILE opc@$JCS_IP "chmod 755 /tmp/deploy-TechCo.py /tmp/deploy-TechCo.sh"
ssh -t -oStrictHostKeyChecking=no -i ../$PK_FILE opc@$JCS_IP "sudo su - oracle -c /tmp/deploy-TechCo.sh"
# Remove the templated copy so credentials do not linger in the working tree.
rm etc/wlst/deploy-TechCo.py
echo "TechCo application has been deployed."
|
<gh_stars>10-100
package com.roadrover.btservice.bluetooth;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
/**
 * Bluetooth device value object. <br/>
 * 1) Returned when the user calls the searchNewDevice interface. <br/>
 * 2) Returned as a list when the user calls the getPairedDevice interface. <br/>
 */
public class BluetoothDevice implements Parcelable {
    public String name = "";
    public String addr = "";

    protected BluetoothDevice(Parcel in) {
        readFromParcel(in);
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Order must match readFromParcel: name first, then address.
        dest.writeString(name);
        dest.writeString(addr);
    }

    public void readFromParcel(Parcel source) {
        if (null != source) {
            name = source.readString();
            addr = source.readString();
        }
    }

    @Override
    public int describeContents() {
        return 0;
    }

    public static final Creator<BluetoothDevice> CREATOR = new Creator<BluetoothDevice>() {
        @Override
        public BluetoothDevice createFromParcel(Parcel in) {
            return new BluetoothDevice(in);
        }

        @Override
        public BluetoothDevice[] newArray(int size) {
            return new BluetoothDevice[size];
        }
    };

    /**
     * Creates a Bluetooth device with the given address and name.
     *
     * @param addr device MAC address
     * @param name device display name
     * @return a new {@link BluetoothDevice}
     */
    public static BluetoothDevice createDevice(String addr, String name) {
        // createFromParcel(null) leaves the fields at their "" defaults
        // (readFromParcel tolerates a null source), then we fill them in.
        BluetoothDevice device = CREATOR.createFromParcel(null);
        device.name = name;
        device.addr = addr;
        return device;
    }

    @Override
    public boolean equals(Object o) {
        if (null == o || !(o instanceof BluetoothDevice)) {
            return false;
        }
        BluetoothDevice bluetoothDevice = (BluetoothDevice) o;
        if (bluetoothDevice == this || TextUtils.equals(bluetoothDevice.addr, addr)) {
            return true;
        }
        return false;
    }

    @Override
    public int hashCode() {
        // Fixed: equals() was overridden without hashCode(), breaking the
        // Object contract (equal devices could land in different hash
        // buckets). equals() compares by address, so hash by address only.
        return (addr != null) ? addr.hashCode() : 0;
    }
}
|
<gh_stars>1-10
"use strict";
/**
* Utility module for validating email addresses
*
* @module email-validator
*/
module.exports = {
messages: {
format: "Invalid email format.",
localPart: "Invalid local-part.",
hostname: "Invalid hostname.",
required: "Email address is required.",
},
/**
* # Email Validation
*
* ## Overview
*
* Returns and error object describing the validity of the input as an email address.
* - If the input represents a valid email address, the object returned should be empty.
* - If the input represents an invalid email address, the object returned should have a single attribute named `email`
* containing the message provided by this module that best describes the failure.
*
* An email is considered valid if it contains a properly formatted `local-part`,
* and a properly formatted domain (hostname) joined by an `@` symbol.
*
* The input parameter can not be null or empty, and can only contain on `@` symbol.
*
* ## Format
* The input should fail with the `format` message if
* - The input does not contain just one `@` character.
*
* ## Local-Part
* The input should fail with the `localPart` message if the `local-part`:
* - begins or ends with a dot (`.`).
* - contains single or double quotes.
* - consists of only whitespace.
*
* ## Domain
* The input should fail with the `hostname` message if the hostname part:
* - begins or ends with a hyphen.
* - begins or ends with a dot.
* - exceeds 253 characters in length.
* - consists of only whitespace.
*
* @returns {Object}
* @param {String} input
*/
validate: function (input) {
return null;
},
};
|
#include <iostream>
using namespace std;
// Interactive integer array: reads its length and elements from std::cin and
// owns a heap buffer for them.
class array {
private:
    int length; // number of elements entered by the user
    int size;   // mirrors length (kept as-is for interface compatibility)
    int *a;     // owned heap buffer, released in the destructor

    // Rule of three: this class owns raw memory, so copying is forbidden —
    // a compiler-generated shallow copy would double-delete `a` in ~array().
    // Declared private and left undefined so any copy fails to link/compile.
    array(const array &);
    array &operator=(const array &);

public:
    // Prompts for a length and that many elements on std::cin.
    array() {
        cout << "enter length: ";
        // Guard against extraction failure or a negative count, which would
        // previously have passed garbage / a negative size to new[].
        if (!(cin >> length) || length < 0) {
            length = 0;
        }
        size = length;
        a = new int[length];
        cout << "enter the elements: ";
        for (int i = 0; i < length; i++) {
            cin >> a[i];
        }
    }

    // Prints all elements on one line.
    void display() {
        cout << "Elements of the array: ";
        for (int i = 0; i < length; i++) {
            cout << a[i] << " ";
        }
        cout << endl;
    }

    ~array() {
        delete[] a;
    }
};
int main() {
array arr;
arr.display();
return 0;
} |
"""Render every file under ./codes to a syntax-highlighted PNG under ./images."""
from picode import picode
from os import listdir
from os.path import isfile, join
import os

# Resolve directories relative to this script, not the current working dir.
dir = os.path.dirname(__file__)
codes_dir = os.path.join(dir, "codes")
images_dir = os.path.join(dir, "images")
if not os.path.exists(images_dir):
    os.makedirs(images_dir)
code_files = [f for f in listdir(codes_dir) if isfile(join(codes_dir, f))]
for code_file in code_files:
    im = picode.to_pic(file_path=os.path.join(codes_dir, code_file))
    # Dots become underscores so "foo.py" -> "foo_py.png".
    out_name = code_file.replace(".", "_") + ".png"
    # Fixed: the output path previously concatenated with a hard-coded "\\"
    # separator, producing broken filenames on non-Windows systems.
    im.save(os.path.join(images_dir, out_name))
|
<filename>src/main/scala/Algorithms/Sorting/CountingSort.scala<gh_stars>0
package Algorithms.Sorting
/**
* Created by MikBac on 31.08.2020
*/
object CountingSort {
  /**
   * Counting sort for non-negative ints: histogram over [0, max].
   *
   * Fixed: the histogram was previously replayed in ascending index order
   * while PREPENDING to the result, which produced a DESCENDING list despite
   * the documented "sorted list" contract; and an empty input crashed on
   * `list.max`.
   *
   * @param list : List[Int] (non-negative values)
   * @return the elements in ascending order
   */
  def sort(list: List[Int]): List[Int] = {
    if (list.isEmpty) return Nil
    val max = list.max
    val h = Array.fill(max + 1)(0)
    list.foreach(numb => h(numb) += 1)
    var ans: List[Int] = Nil
    // Replay counts from the largest value down, prepending, so the
    // resulting list comes out ascending.
    for (v <- h.indices.reverse; _ <- 0 until h(v)) ans = v :: ans
    ans
  }

  /**
   * Counting sort with an offset histogram over [min, max] — smaller array
   * when values are large but tightly clustered; also supports negatives.
   *
   * @param list : List[Int]
   * @return the elements in ascending order
   */
  def sortSmallArray(list: List[Int]): List[Int] = {
    if (list.isEmpty) return Nil
    val min = list.min
    val max = list.max
    val h = Array.fill(max + 1 - min)(0)
    list.foreach(numb => h(numb - min) += 1)
    var ans: List[Int] = Nil
    for (i <- h.indices.reverse; _ <- 0 until h(i)) ans = (i + min) :: ans
    ans
  }

  def main(args: Array[String]): Unit = {
    val list = List(3, 2, 4, 3, 2, 4, 5, 23, 23, 5, 232, 2, 100, 11, 6, 3, 4, 2)
    println(sort(list))
    println(sortSmallArray(list))
  }
}
|
<filename>src/authoring/panels/tabbable/ConsolePanel.java
package authoring.panels.tabbable;
import authoring.Panel;
import authoring.PanelController;
import javafx.scene.control.TextArea;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import util.Console;
public class ConsolePanel implements Panel{
private static final String PANEL = "panel";
private static final String CONSOLE = "Console";
private Pane myPane;
private TextArea consoleField = new TextArea();
private Console myConsole;
public ConsolePanel() {
myPane = new Pane();
myPane.getStyleClass().add(PANEL);
consoleField.setEditable(false);
}
@Override
public Region getRegion() {
return myPane;
}
@Override
public void setController(PanelController controller) {
//TODO: Create controller
}
@Override
public String title(){
return CONSOLE;
}
} |
<filename>node_modules/react-icons-kit/ionicons/alertCircled.js
"use strict";
// Auto-generated icon descriptor (react-icons-kit, Ionicons "alert-circled").
// The object mirrors the SVG structure: a viewBox plus nested path nodes whose
// "d" attributes carry the icon geometry. Do not hand-edit the path data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.alertCircled = void 0;
var alertCircled = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M476.7,422.2L270.1,72.7c-2.9-5-8.3-8.7-14.1-8.7c-5.9,0-11.3,3.7-14.1,8.7L35.3,422.2c-2.8,5-4.8,13-1.9,17.9\r\n\t\tc2.9,4.9,8.2,7.9,14,7.9h417.1c5.8,0,11.1-3,14-7.9C481.5,435.2,479.5,427.1,476.7,422.2z M288,400h-64v-48h64V400z M288,320h-64\r\n\t\tV176h64V320z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M476.7,422.2L270.1,72.7c-2.9-5-8.3-8.7-14.1-8.7c-5.9,0-11.3,3.7-14.1,8.7L35.3,422.2c-2.8,5-4.8,13-1.9,17.9\r\n\t\tc2.9,4.9,8.2,7.9,14,7.9h417.1c5.8,0,11.1-3,14-7.9C481.5,435.2,479.5,427.1,476.7,422.2z M288,400h-64v-48h64V400z M288,320h-64\r\n\t\tV176h64V320z"
        },
        "children": []
      }]
    }]
  }]
};
exports.alertCircled = alertCircled;
#!/bin/bash
set -e
source $(dirname $0)/helpers.sh
it_can_check_from_head() {
local repo=$(init_repo)
local ref=$(make_commit $repo)
check_uri $repo | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
}
it_can_check_from_head_only_fetching_single_branch() {
local repo=$(init_repo)
local ref=$(make_commit $repo)
local cachedir="$TMPDIR/git-resource-repo-cache"
check_uri_with_branch $repo "master" | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
! git -C $cachedir rev-parse origin/bogus
}
it_fails_if_key_has_password() {
local repo=$(init_repo)
local ref=$(make_commit $repo)
local key=$TMPDIR/key-with-passphrase
ssh-keygen -f $key -N some-passphrase
local failed_output=$TMPDIR/failed-output
if check_uri_with_key $repo $key 2>$failed_output; then
echo "checking should have failed"
return 1
fi
grep "Private keys with passphrases are not supported." $failed_output
}
it_configures_forward_agent() {
local repo=$(init_repo)
local key=$TMPDIR/key-no-passphrase
ssh-keygen -f $key
check_uri_with_key_and_ssh_agent $repo $key true
grep "ForwardAgent yes" $HOME/.ssh/config
}
it_skips_forward_agent_configuration() {
local repo=$(init_repo)
local key=$TMPDIR/key-no-passphrase
ssh-keygen -f $key
check_uri_with_key_and_ssh_agent $repo $key false
! grep "ForwardAgent" $HOME/.ssh/config
}
it_can_check_with_credentials() {
local repo=$(init_repo)
local ref=$(make_commit $repo)
check_uri_with_credentials $repo "user1" "pass1" | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
# only check that the expected credential helper is set
# because it is not easily possible to simulate a git http backend that needs credentials
local expected_netrc="default login user1 password pass1"
[ "$(cat $HOME/.netrc)" = "$expected_netrc" ]
# make sure it clears out .netrc for this request without credentials
check_uri_with_credentials $repo "" "" | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
[ ! -f "$HOME/.netrc" ]
}
it_can_check_with_submodule_credentials() {
local repo=$(init_repo)
local ref=$(make_commit "$repo")
local expected_netrc
expected_netrc=$(cat <<EOF
machine host1 login user2 password pass2
default login user1 password pass1
EOF
)
check_uri_with_submodule_credentials "$repo" "user1" "pass1" "host1" "user2" "pass2" | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
echo "Generated netrc $(cat ${HOME}/.netrc)"
echo "Expected netrc $expected_netrc"
[ "$(cat $HOME/.netrc)" = "$expected_netrc" ]
check_uri_with_credentials $repo "" "" | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
[ ! -f "$HOME/.netrc" ]
}
it_clears_netrc_even_after_errors() {
local repo=$(init_repo)
local ref=$(make_commit $repo)
if check_uri_with_credentials "non_existent_repo" "user1" "pass1" ; then
exit 1
fi
local expected_netrc="default login user1 password pass1"
[ "$(cat $HOME/.netrc)" = "$expected_netrc" ]
# make sure it clears out .netrc for this request without credentials
if check_uri_with_credentials "non_existent_repo" "" "" ; then
exit 1
fi
[ ! -f "$HOME/.netrc" ]
}
it_can_check_from_a_ref() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo)
local ref3=$(make_commit $repo)
check_uri_from $repo $ref1 | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)}
]
"
}
it_can_check_from_a_ref_and_only_show_merge_commit() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo)
local branch_ref1=$(make_commit_to_file_on_branch $repo some-branch-file some-branch)
local ref3=$(make_commit $repo)
check_uri_from $repo $ref1 | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)}
]
"
local ref4=$(merge_branch $repo master some-branch)
check_uri_from $repo $ref1 | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)},
{ref: $(echo $ref4 | jq -R .)}
]
"
}
it_can_check_from_a_ref_with_paths_merged_in() {
local repo=$(init_repo)
local ref1=$(make_commit_to_file $repo some-master-file)
local ref2=$(make_commit $repo)
local branch_ref1=$(make_commit_to_file_on_branch $repo some-branch-file some-branch)
local ref3=$(make_commit_to_file $repo some-master-file)
check_uri_from_paths $repo $ref1 some-master-file some-branch-file | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)}
]
"
local ref4=$(merge_branch $repo master some-branch)
check_uri_from_paths $repo $ref1 some-master-file some-branch-file | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)},
{ref: $(echo $ref4 | jq -R .)}
]
"
}
it_can_check_from_a_first_commit_in_repo() {
local repo=$(init_repo)
local initial_ref=$(get_initial_ref $repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo)
local ref3=$(make_commit $repo)
check_uri_from $repo $initial_ref | jq -e "
. == [
{ref: $(echo $initial_ref | jq -R .)},
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)}
]
"
}
it_can_check_from_a_bogus_sha() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo)
check_uri_from $repo "bogus-ref" | jq -e "
. == [{ref: $(echo $ref2 | jq -R .)}]
"
}
it_skips_ignored_paths() {
local repo=$(init_repo)
local ref1=$(make_commit_to_file $repo file-a)
local ref2=$(make_commit_to_file $repo file-b)
local ref3=$(make_commit_to_file $repo file-c)
check_uri_ignoring $repo "file-c" | jq -e "
. == [{ref: $(echo $ref2 | jq -R .)}]
"
check_uri_from_ignoring $repo $ref1 "file-a" | jq -e "
. == [
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)}
]
"
check_uri_from_ignoring $repo $ref1 "file-c" | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)}
]
"
local ref4=$(make_commit_to_file $repo file-b)
check_uri_ignoring $repo "file-c" | jq -e "
. == [{ref: $(echo $ref4 | jq -R .)}]
"
check_uri_from_ignoring $repo $ref1 "file-c" | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref4 | jq -R .)}
]
"
}
it_checks_given_paths() {
local repo=$(init_repo)
local ref1=$(make_commit_to_file $repo file-a)
local ref2=$(make_commit_to_file $repo file-b)
local ref3=$(make_commit_to_file $repo file-c)
check_uri_paths $repo "file-c" | jq -e "
. == [{ref: $(echo $ref3 | jq -R .)}]
"
check_uri_from_paths $repo $ref1 "file-c" | jq -e "
. == [{ref: $(echo $ref3 | jq -R .)}]
"
local ref4=$(make_commit_to_file $repo file-b)
check_uri_paths $repo "file-c" | jq -e "
. == [{ref: $(echo $ref3 | jq -R .)}]
"
local ref5=$(make_commit_to_file $repo file-c)
check_uri_from_paths $repo $ref1 "file-c" | jq -e "
. == [
{ref: $(echo $ref3 | jq -R .)},
{ref: $(echo $ref5 | jq -R .)}
]
"
}
it_checks_given_paths_ci_skip_disabled() {
local repo=$(init_repo)
local ref1=$(make_commit_to_file $repo file-a)
local ref2=$(make_commit_to_file $repo file-a)
local ref3=$(make_commit_to_file $repo file-a)
check_uri_from_paths_disable_ci_skip $repo $ref1 "file-a" | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)}
]
"
}
# Verifies that a `paths` filter combined with an explicit branch only
# reports commits made on that branch.
it_checks_given_paths_on_branch() {
  local repo=$(init_repo)

  # Two commits touching dummy/* land on master, one on newbranch.
  local ref1=$(make_commit_to_file_on_branch_with_path $repo dummy file-b master)
  echo $ref1
  local ref2=$(make_commit_to_file_on_branch_with_path $repo dummy file-b master)
  echo $ref2
  local ref3=$(make_commit_to_file_on_branch_with_path $repo dummy file-b newbranch)
  echo $ref3

  local result=$(check_uri_from_paths_with_branch $repo newbranch "dummy/*")
  # BUG FIX: this previously ran `echo result`, printing the literal word
  # "result" instead of the captured check output.
  echo "$result"

  # Reuse the captured output instead of running the check a second time;
  # only the newbranch commit should match.
  echo "$result" | jq -e "
  . == [{ref: $(echo $ref3 | jq -R .)}]
  "
}
# Regression test for issue gh-120: a "**" glob in `paths` must match files
# at any directory depth, not just one level down.
it_checks_given_glob_paths() { # issue gh-120
local repo=$(init_repo)
mkdir -p $repo/a/b
# Depth-1 commit is made first (output discarded); the depth-2 commit is the
# one the check is expected to report.
make_commit_to_file $repo a/file > /dev/null
local ref1=$(make_commit_to_file $repo a/b/file)
check_uri_paths $repo "**/file" | jq -e "
. == [{ref: $(echo $ref1 | jq -R .)}]
"
}
it_checks_given_ignored_paths() {
local repo=$(init_repo)
local ref1=$(make_commit_to_file $repo file-a)
local ref2=$(make_commit_to_file $repo file-b)
local ref3=$(make_commit_to_file $repo some-file)
check_uri_paths_ignoring $repo 'file-*' 'file-b' | jq -e "
. == [{ref: $(echo $ref1 | jq -R .)}]
"
check_uri_from_paths_ignoring $repo $ref1 'file-*' 'file-b' | jq -e "
. == [{ref: $(echo $ref1 | jq -R .)}]
"
check_uri_from_paths_ignoring $repo $ref1 'file-*' 'file-a' | jq -e "
. == [{ref: $(echo $ref2 | jq -R .)}]
"
local ref4=$(make_commit_to_file $repo file-b)
check_uri_paths_ignoring $repo 'file-*' 'file-b' | jq -e "
. == [{ref: $(echo $ref1 | jq -R .)}]
"
local ref5=$(make_commit_to_file $repo file-a)
check_uri_paths_ignoring $repo 'file-*' 'file-b' | jq -e "
. == [{ref: $(echo $ref5 | jq -R .)}]
"
local ref6=$(make_commit_to_file $repo file-c)
local ref7=$(make_commit_to_file $repo some-file)
check_uri_from_paths_ignoring $repo $ref1 'file-*' 'file-b' | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref5 | jq -R .)},
{ref: $(echo $ref6 | jq -R .)}
]
"
check_uri_from_paths_ignoring $repo $ref1 'file-*' 'file-b' 'file-c' | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref5 | jq -R .)}
]
"
local ref8=$(make_commit_to_file $repo another-file)
check_uri_paths_ignoring $repo '*-file' 'another-file' | jq -e "
. == [
{ref: $(echo $ref7 | jq -R .)}
]
"
check_uri_paths_ignoring $repo '.' 'file-*' | jq -e "
. == [
{ref: $(echo $ref8 | jq -R .)}
]
"
}
it_can_check_when_not_ff() {
local repo=$(init_repo)
local other_repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo)
local ref3=$(make_commit $other_repo)
check_uri $other_repo
cd "$TMPDIR/git-resource-repo-cache"
# do this so we get into a situation that git can't resolve by rebasing
git config branch.autosetuprebase never
# set my remote to be the other git repo
git remote remove origin
git remote add origin $repo/.git
# fetch so we have master available to track
git fetch
# setup tracking for my branch
git branch -u origin/master HEAD
check_uri $other_repo | jq -e "
. == [{ref: $(echo $ref2 | jq -R .)}]
"
}
it_skips_marked_commits() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit_to_be_skipped $repo)
local ref3=$(make_commit $repo "not ci skipped")
local ref4=$(make_commit_to_be_skipped2 $repo)
local ref5=$(make_commit $repo)
check_uri_from $repo $ref1 | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)},
{ref: $(echo $ref5 | jq -R .)}
]
"
}
it_skips_marked_commits_with_no_version() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit_to_be_skipped $repo)
local ref3=$(make_commit_to_be_skipped2 $repo)
check_uri $repo | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)}
]
"
}
it_skips_excluded_commits() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo "not skipped")
local ref3=$(make_commit $repo "should skip this commit")
local ref4=$(make_commit $repo)
check_uri_with_filter $repo $ref1 "exclude" "should skip" | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref4 | jq -R .)}
]
"
}
it_skips_non_included_commits() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo "not skipped commit")
local ref3=$(make_commit $repo "should skip this commit")
local ref4=$(make_commit $repo)
check_uri_with_filter $repo $ref1 "include" "not skipped" | jq -e "
. == [
{ref: $(echo $ref2 | jq -R .)}
]
"
}
it_skips_excluded_commits_conventional() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo "chore: update a thing")
local ref3=$(make_commit $repo "chore(release): auto-publish")
local ref4=$(make_commit $repo "fix: a bug")
local ref5=$(make_commit $repo)
check_uri_with_filter $repo $ref1 "exclude" "chore(release):" | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref4 | jq -R .)},
{ref: $(echo $ref5 | jq -R .)}
]
"
}
it_skips_non_included_and_excluded_commits() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit $repo "not skipped commit")
local ref3=$(make_commit $repo "not skipped sometimes")
local ref4=$(make_commit $repo)
check_uri_with_filters $repo $ref1 "not skipped" "sometimes" | jq -e "
. == [
{ref: $(echo $ref2 | jq -R .)}
]
"
}
it_does_not_skip_marked_commits_when_disable_skip_configured() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_commit_to_be_skipped $repo)
local ref3=$(make_commit $repo)
local ref4=$(make_commit_to_be_skipped2 $repo)
check_uri_disable_ci_skip $repo $ref1 | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)},
{ref: $(echo $ref3 | jq -R .)},
{ref: $(echo $ref4 | jq -R .)}
]
"
}
it_can_check_empty_commits() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_empty_commit $repo)
check_uri_from $repo $ref1 | jq -e "
. == [
{ref: $(echo $ref1 | jq -R .)},
{ref: $(echo $ref2 | jq -R .)}
]
"
}
it_can_check_from_head_with_empty_commits() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_empty_commit $repo)
check_uri $repo | jq -e "
. == [{ref: $(echo $ref2 | jq -R .)}]
"
}
it_can_check_with_tag_filter() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_annotated_tag $repo "1.0-staging" "tag 1")
local ref3=$(make_commit $repo)
local ref4=$(make_annotated_tag $repo "1.0-production" "tag 2")
local ref5=$(make_annotated_tag $repo "2.0-staging" "tag 3")
local ref6=$(make_commit $repo)
local ref7=$(make_annotated_tag $repo "2.0-staging" "tag 5")
local ref8=$(make_commit $repo)
local ref9=$(make_annotated_tag $repo "2.0-production" "tag 4")
local ref10=$(make_commit $repo)
check_uri_with_tag_filter $repo "*-staging" | jq -e "
. == [{ref: \"2.0-staging\", commit: \"$ref6\"}]
"
}
it_can_check_with_tag_filter_with_cursor() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_annotated_tag $repo "1.0-staging" "a tag")
local ref3=$(make_commit $repo)
local ref4=$(make_annotated_tag $repo "1.0-production" "another tag")
local ref5=$(make_commit $repo)
local ref6=$(make_annotated_tag $repo "2.0-staging" "tag 3")
local ref7=$(make_commit $repo)
local ref8=$(make_annotated_tag $repo "2.0-production" "tag 4")
local ref9=$(make_commit $repo)
local ref10=$(make_annotated_tag $repo "3.0-staging" "tag 5")
local ref11=$(make_commit $repo)
local ref12=$(make_annotated_tag $repo "3.0-production" "tag 6")
local ref13=$(make_commit $repo)
x=$(check_uri_with_tag_filter_from $repo "*-staging" "2.0-staging")
check_uri_with_tag_filter_from $repo "*-staging" "2.0-staging" | jq -e "
. == [{ref: \"2.0-staging\", commit: \"$ref5\"}, {ref: \"3.0-staging\", commit: \"$ref9\"}]
"
}
it_can_check_with_tag_filter_over_all_branches() {
local repo=$(init_repo)
local ref1=$(make_commit_to_branch $repo branch-a)
local ref2=$(make_annotated_tag $repo "1.0-staging" "a tag")
local ref3=$(make_commit_to_branch $repo branch-a)
local ref4=$(make_annotated_tag $repo "1.0-production" "another tag")
local ref5=$(make_commit_to_branch $repo branch-a)
local ref6=$(make_annotated_tag $repo "2.0-staging" "tag 3")
local ref7=$(make_commit_to_branch $repo branch-a)
local ref8=$(make_annotated_tag $repo "2.0-production" "tag 4")
local ref9=$(make_commit_to_branch $repo branch-a)
local ref10=$(make_annotated_tag $repo "3.0-staging" "tag 5")
local ref11=$(make_commit_to_branch $repo branch-a)
local ref12=$(make_annotated_tag $repo "3.0-production" "tag 6")
local ref13=$(make_commit_to_branch $repo branch-a)
check_uri_with_tag_filter $repo "*-staging" | jq -e "
. == [{ref: \"3.0-staging\", commit: \"$ref9\"}]
"
}
it_can_check_with_tag_filter_over_all_branches_with_cursor() {
local repo=$(init_repo)
local ref1=$(make_commit_to_branch $repo branch-a)
local ref2=$(make_annotated_tag $repo "1.0-staging" "a tag")
local ref3=$(make_commit_to_branch $repo branch-a)
local ref4=$(make_annotated_tag $repo "1.0-production" "another tag")
local ref5=$(make_annotated_tag $repo "2.0-staging" "tag 3")
local ref6=$(make_commit_to_branch $repo branch-a)
local ref7=$(make_annotated_tag $repo "2.0-staging" "tag 3")
local ref8=$(make_commit_to_branch $repo branch-a)
local ref9=$(make_annotated_tag $repo "2.0-production" "tag 4")
local ref10=$(make_commit_to_branch $repo branch-a)
local ref11=$(make_annotated_tag $repo "3.0-staging" "tag 5")
local ref12=$(make_commit_to_branch $repo branch-a)
local ref13=$(make_annotated_tag $repo "3.0-production" "tag 6")
local ref14=$(make_commit_to_branch $repo branch-a)
check_uri_with_tag_filter_from $repo "*-staging" "2.0-staging" | jq -e "
. == [{ref: \"2.0-staging\", commit: \"$ref6\"}, {ref: \"3.0-staging\", commit: \"$ref10\"}]
"
}
it_can_check_with_tag_filter_with_bogus_ref() {
local repo=$(init_repo)
local ref1=$(make_commit $repo)
local ref2=$(make_annotated_tag $repo "1.0-staging" "tag 1")
local ref3=$(make_commit $repo)
local ref4=$(make_annotated_tag $repo "1.0-production" "tag 2")
local ref5=$(make_commit $repo)
local ref6=$(make_annotated_tag $repo "2.0-staging" "tag 3")
local ref7=$(make_commit $repo)
local ref8=$(make_annotated_tag $repo "2.0-production" "tag 4")
local ref9=$(make_commit $repo)
check_uri_with_tag_filter_from $repo "*-staging" "bogus-ref" | jq -e "
. == [{ref: \"2.0-staging\", commit: \"$ref5\"}]
"
}
it_can_check_with_tag_filter_with_replaced_tags() {
local repo=$(init_repo)
local ref1=$(make_commit_to_branch $repo branch-a)
local ref2=$(make_annotated_tag $repo "staging" "tag branch-a")
# see that the tag is initially ref1
check_uri_with_tag_filter $repo "staging" | jq -e "
. == [{ref: \"staging\", commit: \"$ref1\"}]
"
local ref3=$(make_commit_to_branch $repo branch-a)
local ref4=$(make_annotated_tag $repo "staging" "tag branch-a")
check_uri_with_tag_filter $repo "staging" | jq -e "
. == [{ref: \"staging\", commit: \"$ref3\"}]
"
}
it_can_check_with_tag_filter_given_branch_first_ref() {
local repo=$(init_repo)
local ref1=$(make_commit_to_branch $repo branch-a)
local ref2=$(make_annotated_tag $repo "test.tag.1" "tag branch-a")
# see that the tag on non-master branch doesn't get picked up
check_uri_with_tag_filter_given_branch $repo "test.tag.*" "master" | jq -e "
. == []
"
# make a new tag on master, ensure it gets picked up
local ref3=$(make_commit_to_branch $repo master)
local ref4=$(make_annotated_tag $repo "test.tag.2" "tag branch-a")
check_uri_with_tag_filter_given_branch $repo "test.tag.*" "master" | jq -e "
. == [{ref: \"test.tag.2\", commit: \"$ref3\"}]
"
}
# Verifies that `check` applies git_config entries from the source
# configuration to the global git config, then restores the original config.
it_can_check_and_set_git_config() {
local repo=$(init_repo)
local ref=$(make_commit $repo)
# Back up the user's real ~/.gitconfig; restored at the end of the test.
cp ~/.gitconfig ~/.gitconfig.orig
check_uri_with_config $repo | jq -e "
. == [{ref: $(echo $ref | jq -R .)}]
"
# The configured values must now be visible in the global git config.
test "$(git config --global core.pager)" == 'true'
test "$(git config --global credential.helper)" == '!true long command with variables $@'
# NOTE(review): if an assertion above aborts the test, this restore never
# runs and ~/.gitconfig stays modified — confirm the harness cleans up.
mv ~/.gitconfig.orig ~/.gitconfig
}
run it_can_check_from_head
run it_can_check_from_a_ref
run it_can_check_from_a_first_commit_in_repo
run it_can_check_from_a_bogus_sha
run it_skips_ignored_paths
run it_checks_given_paths
run it_checks_given_paths_ci_skip_disabled
run it_checks_given_paths_on_branch
run it_checks_given_glob_paths
run it_checks_given_ignored_paths
run it_can_check_when_not_ff
run it_skips_marked_commits
run it_skips_marked_commits_with_no_version
run it_skips_excluded_commits
run it_skips_excluded_commits_conventional
run it_skips_non_included_commits
run it_skips_non_included_and_excluded_commits
run it_does_not_skip_marked_commits_when_disable_skip_configured
run it_fails_if_key_has_password
run it_configures_forward_agent
run it_skips_forward_agent_configuration
run it_can_check_with_credentials
run it_can_check_with_submodule_credentials
run it_clears_netrc_even_after_errors
run it_can_check_empty_commits
run it_can_check_with_tag_filter
run it_can_check_with_tag_filter_with_cursor
run it_can_check_with_tag_filter_over_all_branches
run it_can_check_with_tag_filter_over_all_branches_with_cursor
run it_can_check_with_tag_filter_with_bogus_ref
run it_can_check_with_tag_filter_with_replaced_tags
run it_can_check_from_head_only_fetching_single_branch
run it_can_check_and_set_git_config
run it_can_check_from_a_ref_and_only_show_merge_commit
run it_can_check_from_a_ref_with_paths_merged_in
run it_can_check_with_tag_filter_given_branch_first_ref
|
#
# DIST.sh
#
# David Janes
# IOTDB
# 2016-10-30
#
# Builds the homestar-onvif NPM package (version-bumped), publishes it, and
# commits/pushes the updated package.json.
#
# NOTE: the `exit 0` below intentionally short-circuits the entire script —
# publishing is currently disabled. Remove it to re-enable.
exit 0

PACKAGE=homestar-onvif
DIST_ROOT=/var/tmp/.dist.$$

# Per-PID scratch area for the packed tree.
if [ ! -d "$DIST_ROOT" ]
then
    mkdir "$DIST_ROOT"
fi

echo "=================="
echo "NPM Package: $PACKAGE"
echo "=================="
(
    NPM_DST="$DIST_ROOT/$PACKAGE"
    echo "NPM_DST=$NPM_DST"

    # Start from a clean destination directory.
    if [ -d ${NPM_DST} ]
    then
        rm -rf "${NPM_DST}"
    fi
    mkdir "${NPM_DST}" || exit 1

    # Bump the package version before packing.
    update-package --increment-version --package "$PACKAGE" --homestar || exit 1

    # Pack only the files that belong in the published package, unpack them
    # in the scratch dir, and publish from there.
    tar cf - \
        --exclude "xx*" \
        --exclude "yy*" \
        --exclude "node_modules" \
        README.md LICENSE \
        homestar.json package.json \
        ONVIFBridge.js index.js \
        models/*/*.js models/*/*.json \
        |
    ( cd "${NPM_DST}" && tar xvf - && npm publish ) || exit 1

    git commit -m "new release" package.json || exit 1
    git push || exit 1

    echo "end"
)
|
<reponame>tbo72787/bamazon
var mysql = require("mysql");
var inquirer = require("inquirer");
var idNum = 0;
var quantity = 0;
var newQuantity = 0;
var moneySpent = 0;
var connection = mysql.createConnection({
host: "localhost",
// Your port; if not 3306
port: 3306,
// Your username
user: "root",
// Your password
password: "<PASSWORD>",
database: "bamazon"
});
connection.connect(function(err) {
if (err) throw err;
console.log("connected as id " + connection.threadId + "\n");
readProducts();
});
// Prints every row of the products table as a readable listing, then hands
// control to the purchase prompt. The DB connection stays open for the
// follow-up queries (the commented-out end() is intentional).
function readProducts() {
console.log("Selecting all products...\n");
connection.query("SELECT * FROM products", function(err, results) {
if (err) throw err;
// One block per product: ID, name, department, price, stock on hand.
for (var i = 0; i < results.length; i++) {
var item_id = results[i].item_id;
var product_name = results[i].product_name;
var department = results[i].department;
var price = results[i].price;
var stock_quantity = results[i].stock_quantity;
console.log("--------------------");
console.log("ID: " + item_id);
console.log("Product: " + product_name);
console.log("Department: " + department);
console.log("Price: " + price);
console.log("Quantity: " + stock_quantity);
}
console.log("--------------------");
// connection.end();
bamazonBuy();
});
}
// Prompts the user for an item ID and a quantity, validates them against the
// products table, and either completes the purchase (stockDrop) or re-prompts.
function bamazonBuy() {
inquirer.prompt([
{
type: "input",
name: "idNum",
message: "Please enter the ID number of the item you wish to purchase"
},
{
type: "input",
name: "quantity",
message: "Please enter the quantity of items you would like to purchase"
}
])
.then(function(product) {
connection.query("SELECT * FROM products", function (err, result) {
if (err) throw err;
// Scan every row for the requested ID. The module-level globals are
// recomputed on each iteration, so when a row matches, stockDrop() sees
// that row's newQuantity/moneySpent values.
for(var i = 0; i < result.length; i++) {
// NOTE(review): parseInt on free-form input can yield NaN, which fails
// the === below and falls through to "ID doesn't exist" — confirm
// that is the intended handling for non-numeric input.
idNum = parseInt(product.idNum);
quantity = parseInt(product.quantity);
newQuantity = result[i].stock_quantity - quantity;
moneySpent = quantity * result[i].price;
if(idNum === result[i].item_id) {
console.log("Checking quantity...");
// Enough stock: finalize the purchase and update the table.
if(quantity <= result[i].stock_quantity) {
console.log("Thank you for your purchase!");
return stockDrop();
}
// Not enough stock: start the prompt over.
else {
console.log("Sorry, we don't have enough. Try again.");
return bamazonBuy();
}
}
}
// No row matched the entered ID.
console.log("Sorry, but that ID doesn't exist. Try again.")
bamazonBuy();
})
})
}
// Applies the purchase: writes the new stock quantity for the bought item,
// reports the total spent, then shows the remaining stock and closes the
// connection. Relies on the module-level globals set in bamazonBuy().
function stockDrop() {
connection.query(
"UPDATE products SET ? WHERE ?",
[
{
stock_quantity: newQuantity
},
{
item_id: idNum
}
],
function(err, res) {
if (err) throw err;
console.log("You spent $" + moneySpent);
}
)
// NOTE(review): this SELECT is issued immediately after the UPDATE; the
// mysql driver queues queries on a single connection in order, so it should
// observe the updated quantity — confirm against the driver version used.
connection.query(
"SELECT * FROM products WHERE ?",
{item_id : idNum},
function (err, result) {
if (err) throw err;
console.log("There are " + result[0].stock_quantity + " of this item left in stock. Have a great day!")
connection.end();
})}
#include <bits/stdc++.h>
#include <cctype>
using namespace std;
class Solution
{
public:
    // Returns a copy of s with all ASCII upper-case letters lower-cased.
    string toLowerCase(string s)
    {
        // Cast through unsigned char: calling ::tolower with a negative
        // plain-char value (possible for bytes >= 0x80 on signed-char
        // platforms) is undefined behavior.
        transform(s.begin(), s.end(), s.begin(),
                  [](unsigned char c) { return static_cast<char>(std::tolower(c)); });
        return s;
    }
};
// Standalone build stub: the judge/driver calls Solution::toLowerCase
// directly, so main() has nothing to do here.
int main()
{
return 0;
}
#!/bin/bash
# Installs freshly-built artifacts and repoints the "latest" build symlink.

TOP="/path/to/source"
DEST="/path/to/destination"
SIDECAR="/path/to/sidecar"

# Copy env-init.sh to DEST directory
cp "${TOP}/env-init.sh" "${DEST}/" || { echo "Failed to install env-init.sh"; exit 1; }

# Copy sidecar.sh to SIDECAR directory
cp "${TOP}/sidecar.sh" "${SIDECAR}/" || { echo "Failed to install sidecar.sh"; exit 1; }

# Update the 'latest' symbolic link to point to the new build directory
rm -f "${SIDECAR}/builds/latest"
ln -s "${DEST}" "${SIDECAR}/builds/latest"

# If missing, create a top-level symbolic link to the latest data directory.
# Check both -e and -L: a *dangling* symlink fails -e (and the old -d test)
# but still makes `ln -s` fail with "File exists", so it must be detected.
if [[ ! -e "${SIDECAR}/data" && ! -L "${SIDECAR}/data" ]]; then
    ln -s "${SIDECAR}/builds/latest/data" "${SIDECAR}/data"
fi
#!/bin/tcsh
#PBS -A NTDD0005
#PBS -N testb
#PBS -q regular
#PBS -l walltime=12:00:00
#PBS -j oe
#PBS -M apinard@ucar.edu
#PBS -l select=1:ncpus=1

# Fixed: the shebang was duplicated onto a single line ("#!/bin/tcsh
# #!/bin/tcsh"), leaving trailing garbage on line 1.

# Batch job: run ldcpy DSSIM calculations for the CLOUD variable.
module load conda
conda activate ldcpy_env

# Keep temp files on scratch rather than the shared default TMPDIR.
setenv TMPDIR /glade/scratch/$USER/temp
mkdir -p $TMPDIR

python ./compute_batch.py -o '/glade/scratch/apinard/3D/CLOUD_calcs.csv' -j './batch_scripts/3d_dssim_scripts/CLOUD.json' -ts 270 -tt 285 -v -ld
#import "SBHUDView.h"

// Reverse-engineered interface for SpringBoard's private HUD controller.
// NOTE(review): semantics inferred from selector names — confirm against the
// SpringBoard version being targeted.
@interface SBHUDController : NSObject
// Shared singleton instance.
+ (SBHUDController *)sharedHUDController;
// Presents `hud` and auto-dismisses it after `delay` seconds.
- (void)presentHUDView:(SBHUDView *)hud autoDismissWithDelay:(double)delay;
@end
|
<reponame>jing-si/plant
package kr.co.gardener.admin.service.user;
import java.util.List;
import kr.co.gardener.admin.model.user.Bookmark;
import kr.co.gardener.admin.model.user.list.BookmarkList;
import kr.co.gardener.util.Pager;
/**
 * Service-layer CRUD and listing operations for user {@link Bookmark} entries.
 *
 * <p>NOTE(review): per-method semantics below are inferred from names and
 * signatures only — confirm against the backing DAO/mapper implementation.
 */
public interface BookmarkService {
/** Returns every bookmark. */
List<Bookmark> list();
/** Adds a single bookmark. */
void add(Bookmark item);
/** Returns the bookmark with the given id. */
Bookmark item(int bookmarkId);
/** Updates a single bookmark. */
void update(Bookmark item);
/** Deletes the bookmark with the given id. */
void delete(int bookmarkId);
/** Returns one page of bookmarks according to the pager settings. */
BookmarkList list_pager(Pager pager);
/** Inserts all bookmarks contained in the list. */
void insert_list(BookmarkList list);
/** Deletes all bookmarks contained in the list. */
void delete_list(BookmarkList list);
/** Updates all bookmarks contained in the list. */
void update_list(BookmarkList list);
/** Deletes a single bookmark (overload of {@link #delete(int)}). */
void delete(Bookmark item);
/** Returns the given user's bookmarks, presumably ordered by date. */
List<Bookmark> list_date(String userId);
/** Returns the given user's bookmarks, presumably ordered by name. */
List<Bookmark> list_name(String userId);
}
|
#!/bin/sh
# Launches the application.
# Fixed: this file contained unresolved git merge-conflict markers
# (<<<<<<< / ======= / >>>>>>>); both sides held the identical command,
# so the conflict is resolved to the single shared line.
python3 main.py
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { rgbToHex } from './rgb_to_hex';
describe('rgbToHex ', () => {
describe('validation', () => {
it('should return an empty string for malformed input', () => {
expect(rgbToHex('fred')).toEqual('');
expect(rgbToHex('rgb(fred')).toEqual('');
expect(rgbToHex('rgb(fred, bob, banana')).toEqual('');
expect(rgbToHex('rgb(0, 3, 5')).toEqual('');
expect(rgbToHex('rgba(0, 3, 5')).toEqual('');
expect(rgbToHex('rgba(0, 3, 5, 99)')).toEqual('');
});
});
describe('rgb()', () => {
it('should handle rgb() without whitespace', () => {
expect(rgbToHex('rgb(12,34,56)')).toEqual('#0c2238');
});
it('should handle rgb() with whitespace', () => {
expect(rgbToHex('rgb ( 12 , 34 , 56 )')).toEqual('#0c2238');
});
});
describe('rgba()', () => {
it('should handle no whitespace', () => {
expect(rgbToHex('rgba(12,34,56,0.4)')).toEqual('#0c2238');
});
it('should handle whitespace', () => {
expect(rgbToHex('rgba ( 12 , 34 , 56 , 0.4 )')).toEqual('#0c2238');
});
it('should handle integer maximum alpha', () => {
expect(rgbToHex('rgba(12,34,56,1)')).toEqual('#0c2238');
});
it('should handle decimal maximum alpha', () => {
expect(rgbToHex('rgba(12,34,56,1.00000)')).toEqual('#0c2238');
});
it('should handle integer zero alpha', () => {
expect(rgbToHex('rgba(12,34,56,0)')).toEqual('#0c2238');
});
it('should handle decimal zero alpha', () => {
expect(rgbToHex('rgba(12,34,56,0.0000)')).toEqual('#0c2238');
});
});
});
|
import React from "react";
import TweetHeader from "./tweet-header";
import TweetInfo from "./tweet-info";
const TweetContext = React.createContext();
export const useTweet = () => React.useContext(TweetContext);
// Tweet renders one embedded tweet: header, caller-provided body (children),
// and footer info. `data` is also published through TweetContext so nested
// tweet components can read it. Styling uses scoped styled-jsx driven by
// CSS custom properties (--tweet-*).
export default function Tweet({ children, data }) {
return (
<div className="tweet">
<blockquote>
<TweetHeader tweet={data} />
<TweetContext.Provider value={data}>{children}</TweetContext.Provider>
<TweetInfo tweet={data} />
</blockquote>
<style jsx>{`
.tweet {
color: var(--tweet-font-color);
font: var(--tweet-font);
overflow: hidden;
background: var(--tweet-bg-color);
border: var(--tweet-border);
border-radius: 5px;
margin: var(--container-margin);
}
@media (any-hover: hover) {
.tweet:hover {
border: var(--tweet-border-hover);
}
}
blockquote {
position: relative;
padding: 1.25rem 1.25rem 0.625rem 1.25rem;
}
`}</style>
<style jsx global>{`
.tweet :global(.icon) {
display: inline-block;
height: 1.25em;
vertical-align: text-bottom;
background-size: contain;
background-repeat: no-repeat;
}
`}</style>
</div>
);
}
|
#!/bin/bash
#------------------------------------------------------------------------------
# [Syspixel] Check ELK Methods
# Nagios check for ELK API
#
# Dependencies: https://stedolan.github.io/jq/
#
#
#------------------------------------------------------------------------------
VERSION=0.2
# Exit codes
STATE_OK=0
STATE_WARNING=1
STATE_CRITICAL=2
STATE_UNKNOWN=3
# Print help
_usage() {
echo "Usage: check_cms_services.sh [-h help] -H <host> [-P port] -m <method> -w <warning> -c <critical> [-p path]
-h Print this help message
-H Host where is ELK API
-m HTTP's method (GET, POST...)
-P Port where ELK listen
-p Path for GET method
-w Warning value
-c Critical value
Exit status:
0 if OK
1 if minor problems (e.g., cannot create a temp file)
2 if serious trouble (e.g., cannot access command-line argument)
3 unknown"
}
# Arguments
# BUG FIX: `h` must NOT be followed by ':' in the optstring — it is a plain
# flag, not an option taking a value. The old "h:..." made `-h` consume the
# next argument (or error out when given last) instead of printing help.
while getopts "hP:p:H:m:w:c:" opt; do
case $opt in
h)
_usage
exit $STATE_OK
;;
H)
HOST=$OPTARG
;;
m)
METHOD=$OPTARG
;;
p)
GET_PATH=$OPTARG
;;
c)
CRITICAL=$OPTARG
;;
w)
WARNING=$OPTARG
;;
P)
# Port is optional; only override the default when a value was given.
if [ ! -z "$OPTARG" ]; then
PORT=$OPTARG
fi
;;
\?)
echo "Invalid option: -$OPTARG" >&2
_usage
exit $STATE_CRITICAL
;;
:)
echo "Requires an argument: -$OPTARG" >&2
_usage
exit $STATE_CRITICAL
;;
esac
done
# Save temp value for counters
# Seed the persisted counter file on first run so later runs can compute a
# delta against it. POST counters are keyed by host+method; GET counters also
# include the cleaned request path (CLEAN_PATH).
# NOTE(review): "persistance" is a typo for "persistence", kept because the
# function is called by name elsewhere in this script (_returnValue).
_persistanceValue() {
if [ "$METHOD" == "POST" ]; then
if [ ! -f /tmp/${HOST}_${METHOD} ]; then
echo $VALUE > /tmp/${HOST}_${METHOD}
fi
else
if [ ! -f /tmp/${HOST}_${METHOD}_${CLEAN_PATH} ]; then
echo $VALUE > /tmp/${HOST}_${METHOD}_${CLEAN_PATH}
fi
fi
}
# Less than zero
_lessZero() {
  # Delta between the current counter (VALUE) and the value persisted by the
  # previous run. POST counters omit the path component from the state file.
  local prev_file
  if [ "$METHOD" == "POST" ]; then
    prev_file="/tmp/${HOST}_${METHOD}"
  else
    prev_file="/tmp/${HOST}_${METHOD}_${CLEAN_PATH}"
  fi
  OPERATION=$(expr $VALUE - $(<"$prev_file"))
  # A counter reset upstream would produce a negative delta; clamp to zero.
  if [ $OPERATION -lt 0 ]; then
    OPERATION=0
  fi
  echo $OPERATION
}
# Calculate the result
# Computes the request delta since the previous run (via _lessZero), then
# persists the current raw counter for the next invocation. Echoes the
# clamped, non-negative delta.
_returnValue() {
_persistanceValue
VALUE_TEMP=$(_lessZero)
if [ "$METHOD" == "POST" ]; then
echo $VALUE > /tmp/${HOST}_${METHOD}
else
echo $VALUE > /tmp/${HOST}_${METHOD}_${CLEAN_PATH}
fi
echo $VALUE_TEMP
}
_main(){
  # Compare the computed request delta (RESULT) against the thresholds and
  # emit a Nagios-formatted status line with perfdata, exiting with the
  # matching Nagios state code. Guard-style: critical first, then warning.
  if [ "$RESULT" -ge "$CRITICAL" ]; then
    echo "METHOD $METHOD CRITICAL - $RESULT $METHOD $PERF_DATA"
    exit $STATE_CRITICAL
  fi
  if [ "$RESULT" -ge "$WARNING" ]; then
    echo "METHOD $METHOD WARNING - $RESULT $METHOD $PERF_DATA"
    exit $STATE_WARNING
  fi
  echo "METHOD $METHOD OK - $RESULT $METHOD $PERF_DATA"
  exit $STATE_OK
}
# Check empty arguments
if [[ -z $HOST || -z $METHOD || -z $WARNING || -z $CRITICAL ]]; then
echo "Empty obligatory arguments"
_usage
exit $STATE_WARNING
elif [ -z $PORT ]; then
PORT=9200
fi
# Check if jq is installed
if [ ! $(which jq) ]; then
echo "jq isn't installed. Please install it"
exit $STATE_CRITICAL
fi
# Clean path for GET methods
if [ ! -z "$GET_PATH" ]; then
CLEAN_PATH=$(echo ${GET_PATH##*/})
fi
# Vars
if [ "$METHOD" == "POST" ]; then
JSON=$(curl -sN -X$METHOD http://${HOST}:${PORT}/_search -d "{\"query\":{\"bool\":{\"must\":[{\"match_phrase\":{\"method\":{\"query\":\"$METHOD\"}}},{\"match_phrase\":{\"http.request.headers.content-type\":{\"query\":\"application/json\"}}},{\"match_phrase\":{\"port\":{\"query\":\"8181\"}}}]}}}")
else
JSON=$(curl -sN -X$METHOD http://${HOST}:${PORT}/_search -d "{\"query\":{\"bool\":{\"must\":[{\"match_phrase\":{\"method\":{\"query\":\"$METHOD\"}}},{\"match_phrase\":{\"path\":{\"query\":\"$GET_PATH\"}}}]}}}")
fi
VALUE=$(echo $JSON | jq '.hits.total')
RESULT=$(_returnValue)
DESCRIPTION="requests"
PERF_DATA="$DESCRIPTION| $METHOD $DESCRIPTION=$RESULT;$WARNING;$CRITICAL;0"
# Main #####################################################
if [ -z "$JSON" ]; then
exit $STATE_CRITICAL
else
_main
fi
|
<filename>cmd/dialect-import/main.go<gh_stars>10-100
package main
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"text/template"
"gopkg.in/alecthomas/kingpin.v2"
)
var (
reMsgName = regexp.MustCompile("^[A-Z0-9_]+$")
reTypeIsArray = regexp.MustCompile(`^(.+?)\[([0-9]+)\]$`)
)
var tplDialect = template.Must(template.New("").Parse(
`//autogenerated:yes
//nolint:golint,misspell,govet,lll
{{- if .Comment }}
// {{ .Comment }}
{{- end }}
package {{ .PkgName }}
import (
{{- if .Enums }}
"errors"
"strconv"
{{- end }}
"github.com/aler9/gomavlib/pkg/msg"
"github.com/aler9/gomavlib/pkg/dialect"
)
// Dialect contains the dialect object that can be passed to the library.
var Dialect = dial
// dialect is not exposed directly such that it is not displayed in godoc.
var dial = &dialect.Dialect{ {{.Version}}, []msg.Message{
{{- range .Defs }}
// {{ .Name }}
{{- range .Messages }}
&Message{{ .Name }}{},
{{- end }}
{{- end }}
} }
{{ range .Enums }}
// {{ .Description }}
type {{ .Name }} int
const (
{{- $pn := .Name }}
{{- range .Values }}
// {{ .Description }}
{{ .Name }} {{ $pn }} = {{ .Value }}
{{- end }}
)
// MarshalText implements the encoding.TextMarshaler interface.
func (e {{ .Name }}) MarshalText() ([]byte, error) {
switch e { //nolint:gocritic
{{- range .Values }}
case {{ .Name }}:
return []byte("{{ .Name }}"), nil
{{- end }}
}
return nil, errors.New("invalid value")
}
// UnmarshalText implements the encoding.TextUnmarshaler interface.
func (e *{{ .Name }}) UnmarshalText(text []byte) error {
switch string(text) { //nolint:gocritic
{{- range .Values }}
case "{{ .Name }}":
*e = {{ .Name }}
return nil
{{- end }}
}
return errors.New("invalid value")
}
// String implements the fmt.Stringer interface.
func (e {{ .Name }}) String() string {
byts, err := e.MarshalText()
if err == nil {
return string(byts)
}
return strconv.FormatInt(int64(e), 10)
}
{{ end }}
{{ range .Defs }}
// {{ .Name }}
{{ range .Messages }}
// {{ .Description }}
type Message{{ .Name }} struct {
{{- range .Fields }}
// {{ .Description }}
{{ .Line }}
{{- end }}
}
// GetID implements the msg.Message interface.
func (*Message{{ .Name }}) GetID() uint32 {
return {{ .ID }}
}
{{ end }}
{{- end }}
`))
var dialectTypeToGo = map[string]string{
"double": "float64",
"uint64_t": "uint64",
"int64_t": "int64",
"float": "float32",
"uint32_t": "uint32",
"int32_t": "int32",
"uint16_t": "uint16",
"int16_t": "int16",
"uint8_t": "uint8",
"int8_t": "int8",
"char": "string",
}
// dialectFieldGoToDef converts a Go CamelCase field name back into the
// snake_case form used by dialect XML definitions (e.g. "TargetSystem" ->
// "target_system"). fieldProcess uses it to detect when the forward
// conversion was lossy and a mavname tag is required.
// NOTE(review): the regexp is recompiled on every call (could be a package
// var), and in[1:] panics on an empty name — callers appear to pass only
// non-empty names; confirm.
func dialectFieldGoToDef(in string) string {
re := regexp.MustCompile("([A-Z])")
in = re.ReplaceAllString(in, "_${1}")
return strings.ToLower(in[1:])
}
func dialectFieldDefToGo(in string) string {
return dialectMsgDefToGo(in)
}
func dialectMsgDefToGo(in string) string {
re := regexp.MustCompile("_[a-z]")
in = strings.ToLower(in)
in = re.ReplaceAllStringFunc(in, func(match string) string {
return strings.ToUpper(match[1:2])
})
return strings.ToUpper(in[:1]) + in[1:]
}
// filterDesc removes all newline characters from a description string so it
// can be emitted as a single-line Go comment in the generated code.
func filterDesc(in string) string {
	var b strings.Builder
	b.Grow(len(in))
	for _, r := range in {
		if r != '\n' {
			b.WriteRune(r)
		}
	}
	return b.String()
}
type outEnumValue struct {
Value string
Name string
Description string
}
type outEnum struct {
Name string
Description string
Values []*outEnumValue
}
type outField struct {
Description string
Line string
}
type outMessage struct {
Name string
Description string
ID int
Fields []*outField
}
type outDefinition struct {
Name string
Enums []*outEnum
Messages []*outMessage
}
func definitionProcess(
version *string,
defsProcessed map[string]struct{},
isRemote bool,
defAddr string) ([]*outDefinition, error) {
// skip already processed
if _, ok := defsProcessed[defAddr]; ok {
return nil, nil
}
defsProcessed[defAddr] = struct{}{}
fmt.Fprintf(os.Stderr, "definition %s\n", defAddr)
content, err := definitionGet(isRemote, defAddr)
if err != nil {
return nil, err
}
def, err := definitionDecode(content)
if err != nil {
return nil, fmt.Errorf("unable to decode: %s", err)
}
addrPath, addrName := filepath.Split(defAddr)
var outDefs []*outDefinition
// version
if def.Version != "" {
if *version != "" && *version != def.Version {
return nil, fmt.Errorf("version defined twice (%s and %s)", def.Version, *version)
}
*version = def.Version
}
// includes
for _, inc := range def.Includes {
// prepend url to remote address
if isRemote {
inc = addrPath + inc
}
subDefs, err := definitionProcess(version, defsProcessed, isRemote, inc)
if err != nil {
return nil, err
}
outDefs = append(outDefs, subDefs...)
}
outDef := &outDefinition{
Name: addrName,
}
// enums
for _, enum := range def.Enums {
oute := &outEnum{
Name: enum.Name,
Description: filterDesc(enum.Description),
}
for _, val := range enum.Values {
oute.Values = append(oute.Values, &outEnumValue{
Value: val.Value,
Name: val.Name,
Description: filterDesc(val.Description),
})
}
outDef.Enums = append(outDef.Enums, oute)
}
// messages
for _, msg := range def.Messages {
outMsg, err := messageProcess(msg)
if err != nil {
return nil, err
}
outDef.Messages = append(outDef.Messages, outMsg)
}
outDefs = append(outDefs, outDef)
return outDefs, nil
}
func definitionGet(isRemote bool, defAddr string) ([]byte, error) {
if isRemote {
byt, err := download(defAddr)
if err != nil {
return nil, fmt.Errorf("unable to download: %s", err)
}
return byt, nil
}
byt, err := ioutil.ReadFile(defAddr)
if err != nil {
return nil, fmt.Errorf("unable to open: %s", err)
}
return byt, nil
}
// download fetches desturl over HTTP(S) and returns the full response body,
// failing on transport errors or any non-200 status code.
func download(desturl string) ([]byte, error) {
res, err := http.Get(desturl)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return nil, fmt.Errorf("bad return code: %v", res.StatusCode)
}
byt, err := ioutil.ReadAll(res.Body)
if err != nil {
return nil, err
}
return byt, nil
}
func messageProcess(msg *definitionMessage) (*outMessage, error) {
if m := reMsgName.FindStringSubmatch(msg.Name); m == nil {
return nil, fmt.Errorf("unsupported message name: %s", msg.Name)
}
outMsg := &outMessage{
Name: dialectMsgDefToGo(msg.Name),
Description: filterDesc(msg.Description),
ID: msg.ID,
}
for _, f := range msg.Fields {
outField, err := fieldProcess(f)
if err != nil {
return nil, err
}
outMsg.Fields = append(outMsg.Fields, outField)
}
return outMsg, nil
}
// fieldProcess converts a single dialect field definition into its generated
// Go struct field: exported name, Go type (optionally an array or an enum
// type) and the mav* struct tags.
func fieldProcess(field *dialectField) (*outField, error) {
	outF := &outField{
		Description: filterDesc(field.Description),
	}
	tags := make(map[string]string)

	newname := dialectFieldDefToGo(field.Name)

	// name conversion is not univoque: add tag
	if dialectFieldGoToDef(newname) != field.Name {
		tags["mavname"] = field.Name
	}

	outF.Line += newname

	typ := field.Type
	arrayLen := ""

	// normalize the special version marker to a plain uint8_t
	if typ == "uint8_t_mavlink_version" {
		typ = "uint8_t"
	}

	// string or array
	if matches := reTypeIsArray.FindStringSubmatch(typ); matches != nil {
		// string
		if matches[1] == "char" {
			tags["mavlen"] = matches[2]
			typ = "char"
			// array
		} else {
			arrayLen = matches[2]
			typ = matches[1]
		}
	}

	// extension
	if field.Extension {
		tags["mavext"] = "true"
	}

	// BUG FIX: map the dialect type to its Go type via a comma-ok lookup so
	// the error reports the *dialect* type name. The previous code assigned
	// `typ = dialectTypeToGo[typ]` first, so the "unknown type" error always
	// printed an empty string.
	goType, ok := dialectTypeToGo[typ]
	if !ok || goType == "" {
		return nil, fmt.Errorf("unknown type: %s", typ)
	}
	typ = goType

	outF.Line += " "
	if arrayLen != "" {
		outF.Line += "[" + arrayLen + "]"
	}
	if field.Enum != "" {
		outF.Line += field.Enum
		// enum fields use the enum type in the struct; record the wire type
		// in the mavenum tag instead.
		tags["mavenum"] = typ
	} else {
		outF.Line += typ
	}

	if len(tags) > 0 {
		var tmp []string
		for k, v := range tags {
			tmp = append(tmp, fmt.Sprintf("%s:\"%s\"", k, v))
		}
		// deterministic tag order for stable generated output
		sort.Strings(tmp)
		outF.Line += " `" + strings.Join(tmp, " ") + "`"
	}

	return outF, nil
}
// run parses the command line, recursively processes the requested Mavlink
// dialect definition (local path or URL), merges and completes the enum
// definitions, and renders the generated Go source to stdout.
func run() error {
	kingpin.CommandLine.Help = "Convert Mavlink dialects from XML format into Go format."

	argPkgName := kingpin.Flag("package", "Package name").Default("main").String()
	argComment := kingpin.Flag("comment", "comment to add before the package name").Default("").String()
	argMainDef := kingpin.Arg("xml", "Path or url pointing to a XML Mavlink dialect").Required().String()
	kingpin.Parse()

	mainDef := *argMainDef
	comment := *argComment
	pkgName := *argPkgName

	version := ""
	defsProcessed := make(map[string]struct{})
	// treat the definition as remote when it parses as a request URI
	isRemote := func() bool {
		_, err := url.ParseRequestURI(mainDef)
		return err == nil
	}()

	// parse all definitions recursively
	outDefs, err := definitionProcess(&version, defsProcessed, isRemote, mainDef)
	if err != nil {
		return err
	}

	// merge enums together: the same enum may be (partially) defined in
	// several included dialect files, so values are accumulated per name
	enums := make(map[string]*outEnum)
	for _, def := range outDefs {
		for _, defEnum := range def.Enums {
			if _, ok := enums[defEnum.Name]; !ok {
				enums[defEnum.Name] = &outEnum{
					Name:        defEnum.Name,
					Description: defEnum.Description,
				}
			}
			enum := enums[defEnum.Name]
			enum.Values = append(enum.Values, defEnum.Values...)
		}
	}

	// fill enum missing values: an entry without an explicit value gets
	// the previous explicit value plus one (starting from zero)
	for _, enum := range enums {
		nextVal := 0
		for _, v := range enum.Values {
			if v.Value != "" {
				nextVal, _ = strconv.Atoi(v.Value)
				nextVal++
			} else {
				v.Value = strconv.Itoa(nextVal)
				nextVal++
			}
		}
	}

	// dump the generated source via the template; Version falls back to 0
	// when the dialect declares no (or a non-numeric) version
	return tplDialect.Execute(os.Stdout, map[string]interface{}{
		"PkgName": pkgName,
		"Comment": comment,
		"Version": func() int {
			ret, _ := strconv.Atoi(version)
			return ret
		}(),
		"Defs":  outDefs,
		"Enums": enums,
	})
}
// main is the program entry point: it runs the conversion and, on failure,
// reports the error on stderr and exits with a non-zero status.
func main() {
	if err := run(); err != nil {
		fmt.Fprintf(os.Stderr, "ERR: %s\n", err)
		os.Exit(1)
	}
}
|
"use strict";

// NOTE(review): this looks like Babel-compiled CommonJS output for a single
// SVG icon definition ("skip forward") — presumably generated; prefer editing
// the upstream source rather than this file.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.skipForward = void 0;

// Plain-object description of the SVG: the viewBox plus its child nodes
// (an empty <style> element and the icon's <path>).
var skipForward = {
"viewBox": "0 0 512 512",
"children": [{
"name": "style",
"attribs": {
"type": "text/css"
},
"children": []
}, {
"name": "path",
"attribs": {
"class": "st0",
"d": "M436.3,96h-8.1c-6.7,0-12.2,5-12.2,11.7v113.5L228.9,98.7c-2.5-1.7-5.1-2.3-8.1-2.3c-8.3,0-15.4,7-15.4,17v63.1\r\n\tL86.9,98.3c-2.5-1.7-5.1-2.3-8.1-2.3c-8.3,0-14.9,7.4-14.9,17.4v286c0,10,6.7,16.5,15,16.5c3.1,0,5.4-1.2,8.2-2.9l118.3-77.6v64\r\n\tc0,10,7.2,16.5,15.5,16.5c3.1,0,5.5-1.2,8.2-2.9L416,290.8v113c0,6.7,5.4,12.2,12.2,12.2h8.1c6.7,0,11.7-5.5,11.7-12.2V107.7\r\n\tC448,101,443.1,96,436.3,96z"
},
"children": []
}]
};
exports.skipForward = skipForward;
// repository: nicobo/ciform
/*!
Copyright 2004 by <NAME>, www.haneWIN.de
Original code : http://www.hanewin.net/encrypt/rsa/rsa.js
About this package : http://ciform.google.com
*/
//
// NOTE : The original code is wrapped so that the defined functions don't collide with existing ones.
// See http://michaux.ca/articles/javascript-namespacing.
// See http://msdn.microsoft.com/en-us/library/259s7zc1%28v=vs.85%29.aspx
//
/** @namespace */
Crypto = typeof Crypto != 'undefined' ? Crypto : {};
/** @namespace */
Crypto.RSA = (function(/*window, undefined, $*/) {
//
// START OF ORIGINAL CODE
//
/* RSA public key encryption/decryption
* The following functions are (c) 2000 by <NAME> and are
* released under the terms of the Gnu Public License.
* You must freely redistribute them with their source -- see the
* GPL for details.
* -- Latest version found at http://sourceforge.net/projects/shop-js
*
* GnuPG multi precision integer (mpi) conversion added
* 2004 by <NAME>, www.haneWIN.de
*/
// --- Arbitrary Precision Math ---
// badd(a,b), bsub(a,b), bmul(a,b)
// bdiv(a,b), bmod(a,b), bmodexp(xx,y,m)
// Numbers are stored as arrays of "limbs" of bs bits each, least
// significant limb first.
// set the base... 32bit cpu -> bs=16, 64bit -> bs=32
// bs is the shift, bm is the mask; bd/bdm split a limb into half-limbs
// so products fit in a double without precision loss.
var bs=28;
var bx2=1<<bs, bm=bx2-1, bx=bx2>>1, bd=bs>>1, bdm=(1<<bd)-1;
var log2=Math.log(2);
// badd(a,b): arbitrary-precision addition of two little-endian limb arrays.
function badd(a,b)
{
  // Make a the longer operand so a single pass suffices.
  if(a.length < b.length) return badd(b,a);
  var result = new Array(a.length);
  var carry = 0;
  var i = 0;
  // Add the overlapping limbs, propagating the carry.
  while(i < b.length)
  {
    carry += a[i] + b[i];
    result[i] = carry & bm;
    carry >>>= bs;
    i++;
  }
  // Copy the remaining limbs of a, still propagating the carry.
  while(i < a.length)
  {
    carry += a[i];
    result[i] = carry & bm;
    carry >>>= bs;
    i++;
  }
  // A final carry grows the result by one limb.
  if(carry) result[i] = carry;
  return result;
}
// bsub(a,b): arbitrary-precision subtraction a - b of little-endian limb
// arrays. A negative result is signalled by returning the empty array [].
function bsub(a,b)
{
  var al=a.length, bl=b.length;
  // A longer subtrahend always means a negative result.
  if(bl > al) return [];
  if(bl == al)
  {
    if(b[bl-1] > a[bl-1]) return [];
    if(bl==1) return [a[0]-b[0]];
  }
  var diff = new Array(al);
  var borrow = 0;
  var i;
  // Subtract the overlapping limbs, propagating the borrow.
  for(i=0; i<bl; i++)
  {
    borrow += a[i] - b[i];
    diff[i] = borrow & bm;
    borrow >>= bs;
  }
  // Propagate the borrow through the remaining limbs of a.
  for(; i<al; i++)
  {
    borrow += a[i];
    diff[i] = borrow & bm;
    borrow >>= bs;
  }
  // An outstanding borrow means the result went negative.
  if(borrow) return [];
  if(diff[i-1]) return diff;
  // Trim leading zero limbs (but keep at least one limb).
  while(i>1 && diff[i-1]==0) i--;
  return diff.slice(0,i);
}
// zeros(n): return a new array of n limbs, all initialised to 0.
function zeros(n)
{
  var arr = [];
  for(var i = 0; i < n; i++) arr.push(0);
  return arr;
}
// bmul(a,b): arbitrary-precision multiplication of little-endian limb
// arrays, using schoolbook multiplication with each limb split into two
// half-limbs so intermediate products stay exactly representable.
function bmul(a,b) // binary multiply
{
b=b.concat([0]);
var al=a.length, bl=b.length;
var n,nn,aa,c,m, g,gg,h,hh,ghh,ghhb;
var r=zeros(al+bl+1);
for(n=0; n<al; n++)
{
aa=a[n];
// skip zero limbs of a entirely
if(aa)
{
c=0;
// split the current limb of a into high/low half-limbs
hh=aa>>bd; h=aa & bdm;
m=n;
for(nn=0; nn<bl; nn++, m++)
{
// split the current limb of b the same way
g = b[nn]; gg=g>>bd; g=g & bdm;
// (gg*2^15 + g) * (hh*2^15 + h) = (gghh*2^30 + (ghh+hgg)*2^15 +hg)
ghh = g * hh + h * gg;
ghhb= ghh >> bd; ghh &= bdm;
// accumulate the low part into result limb m, carry the rest
c += r[m] + h * g + (ghh << bd);
r[m] = c & bm;
c = (c >> bs) + gg * hh + ghhb;
}
}
}
// trim leading zero limbs (but keep at least one limb)
n=r.length;
if(r[n-1]) return r;
while(n>1 && r[n-1]==0) n--;
return r.slice(0,n);
}
// toppart(x,start,len): combine up to `len` limbs of x — ending at index
// `start` and walking downward — into a single ordinary number. Used by the
// division routine to estimate quotient digits.
function toppart(x,start,len)
{
  var acc = 0;
  for(var i = start; i >= 0 && len > 0; i--, len--)
  {
    acc = acc * bx2 + x[i];
  }
  return acc;
}
// ----------------------------------------------------
// 14.20 Algorithm Multiple-precision division from HAC
// Constructor-style: `new bdiv(x,y)` yields an object with `.q` (quotient)
// and `.mod` (remainder), both little-endian limb arrays.
function bdiv(x,y)
{
var n=x.length-1, t=y.length-1, nmt=n-t;
// trivial cases; x < y
if(n < t || n==t && (x[n]<y[n] || n>0 && x[n]==y[n] && x[n-1]<y[n-1]))
{
this.q=[0]; this.mod=x;
return this;
}
// trivial cases; q < 4 — resolve by repeated subtraction
if(n==t && toppart(x,t,2)/toppart(y,t,2) <4)
{
var qq=0, xx;
for(;;)
{
xx=bsub(x,y);
if(xx.length==0) break;
x=xx; qq++;
}
this.q=[qq]; this.mod=x;
return this;
}
var shift, shift2
// normalize: shift both operands left so the top limb of y has its high
// bit set, which keeps the quotient-digit estimates accurate (HAC 14.23)
shift2=Math.floor(Math.log(y[t])/log2)+1;
shift=bs-shift2;
if(shift)
{
// copy before shifting in place — the caller's arrays must not change
// (NOTE: `i` below is covered by the hoisted `var i` declared further down)
x=x.concat(); y=y.concat()
for(i=t; i>0; i--) y[i]=((y[i]<<shift) & bm) | (y[i-1] >> shift2);
y[0]=(y[0]<<shift) & bm;
if(x[n] & ((bm <<shift2) & bm))
{
x[++n]=0; nmt++;
}
for(i=n; i>0; i--) x[i]=((x[i]<<shift) & bm) | (x[i-1] >> shift2);
x[0]=(x[0]<<shift) & bm;
}
var i, j, x2;
var q=zeros(nmt+1);
// y2 = y shifted up by nmt limbs (y * b^nmt)
var y2=zeros(nmt).concat(y);
// HAC 14.20 step 2: take out multiples of y*b^nmt first
for(;;)
{
x2=bsub(x,y2);
if(x2.length==0) break;
q[nmt]++;
x=x2;
}
// main loop: estimate each quotient digit from the top limbs, then correct
var yt=y[t], top=toppart(y,t,2)
for(i=n; i>t; i--)
{
var m=i-t-1;
if(i >= x.length) q[m]=1;
else if(x[i] == yt) q[m]=bm;
else q[m]=Math.floor(toppart(x,i,2)/yt);
// refine the estimate downward (HAC 14.20 step 3.2)
var topx=toppart(x,i,3);
while(q[m] * top > topx) q[m]--;
//x-=q[m]*y*b^m
y2=y2.slice(1);
x2=bsub(x,bmul([q[m]],y2));
// bsub returning [] means the estimate was one too high — fix up
if(x2.length==0)
{
q[m]--;
x2=bsub(x,bmul([q[m]],y2));
}
x=x2;
}
// de-normalize the remainder (undo the initial shift)
if(shift)
{
for(i=0; i<x.length-1; i++) x[i]=(x[i]>>shift) | ((x[i+1] << shift2) & bm);
x[x.length-1]>>=shift;
}
// trim leading zero limbs of quotient and remainder
n = q.length;
while(n > 1 && q[n-1]==0) n--;
this.q=q.slice(0,n);
n = x.length;
while(n > 1 && x[n-1]==0) n--;
this.mod=x.slice(0,n);
return this;
}
// simplemod(i,m): remainder of a limb array divided by a scalar m, where m
// must be below 2^bd so the shifted intermediate values cannot overflow.
function simplemod(i,m) // returns the mod where m < 2^bd
{
  var rem = 0;
  for(var n = i.length - 1; n >= 0; n--)
  {
    var v = i[n];
    // Fold in the high half-limb, then the low half-limb, reducing mod m
    // after each step (classic Horner evaluation in base 2^bd).
    rem = ((v >> bd) + (rem << bd)) % m;
    rem = ((v & bdm) + (rem << bd)) % m;
  }
  return rem;
}
// bmod(p,m): p modulo m. Small moduli take a fast scalar path; anything
// larger falls back to full multiple-precision division.
function bmod(p,m) // binary modulo
{
  if(m.length==1)
  {
    if(p.length==1) return [p[0] % m[0]];
    if(m[0] < bdm) return [simplemod(p,m[0])];
  }
  return new bdiv(p,m).mod;
}
// ------------------------------------------------------
// Barrett's modular reduction from HAC, 14.42, CRC Press
// bmod2(x,m,mu): reduce x mod m using the precomputed Barrett constant
// mu = floor(b^(2k) / m), avoiding a full division per reduction.
function bmod2(x,m,mu)
{
var xl=x.length - (m.length << 1);
// inputs longer than 2k limbs are reduced piecewise, top part first
if(xl > 0) return bmod2(x.slice(0,xl).concat(bmod2(x.slice(xl),m,mu)),m,mu);
var ml1=m.length+1, ml2=m.length-1,rr;
//var q1=x.slice(ml2)
//var q2=bmul(q1,mu)
// q3 = floor(floor(x / b^(k-1)) * mu / b^(k+1))  (HAC 14.42 step 1)
var q3=bmul(x.slice(ml2),mu).slice(ml1);
var r1=x.slice(0,ml1);
var r2=bmul(q3,m).slice(0,ml1);
var r=bsub(r1,r2);
//var s=('x='+x+'\nm='+m+'\nmu='+mu+'\nq1='+q1+'\nq2='+q2+'\nq3='+q3+'\nr1='+r1+'\nr2='+r2+'\nr='+r);
// negative intermediate: add b^(k+1) back (HAC 14.42 step 3)
if(r.length==0)
{
r1[ml1]=1;
r=bsub(r1,r2);
}
// at most a few subtractions of m remain; bail to a fresh reduction if
// the estimate was unusually far off
for(var n=0;;n++)
{
rr=bsub(r,m);
if(rr.length==0) break;
r=rr;
if(n>=3) return bmod2(r,m,mu);
}
return r;
}
// bmodexp(xx,y,m): modular exponentiation xx^y mod m via least-significant-
// bit-first square-and-multiply, with Barrett reduction for each product.
function bmodexp(xx,y,m) // binary modular exponentiation
{
var r=[1], an,a, x=xx.concat();
// precompute the Barrett constant mu = floor(b^(2k) / m)
var n=m.length*2;
var mu=new Array(n+1);
mu[n--]=1;
for(; n>=0; n--) mu[n]=0; mu=new bdiv(mu,m).q;
// scan the exponent limb by limb, bit by bit (LSB first)
for(n=0; n<y.length; n++)
{
for(a=1, an=0; an<bs; an++, a<<=1)
{
// multiply the result in for each set exponent bit, square every step
if(y[n] & a) r=bmod2(bmul(r,x),m,mu);
x=bmod2(bmul(x,x),m,mu);
}
}
return r;
}
// -----------------------------------------------------
// RSAencrypt(s,e,m): the RSA public-key operation, s**e mod m.
function RSAencrypt(s, e, m)
{
  return bmodexp(s, e, m);
}
// Compute m**d mod p*q for RSA private key operations.
// Uses the Chinese Remainder Theorem (Garner's recombination) with
// u = p^-1 mod q, so the two half-size exponentiations replace one
// full-size one.
function RSAdecrypt(m, d, p, q, u)
{
// xp = m^(d mod p-1) mod p ; xq = m^(d mod q-1) mod q  (Fermat reduction)
var xp = bmodexp(bmod(m,p), bmod(d,bsub(p,[1])), p);
var xq = bmodexp(bmod(m,q), bmod(d,bsub(q,[1])), q);
// t = (xq - xp); bsub returns [] when xq < xp, handled by the branch below
var t=bsub(xq,xp);
if(t.length==0)
{
// xq <= xp: compute q - ((xp - xq) * u mod q) instead
t=bsub(xp,xq);
t=bmod(bmul(t, u), q);
t=bsub(q,t);
}
else
{
t=bmod(bmul(t, u), q);
}
// recombine: result = t*p + xp
return badd(bmul(t,p), xp);
}
// -----------------------------------------------------------------
// conversion functions: num array <-> multi precision integer (mpi)
// mpi: 2 octets with length in bits + octets in big endian order
// mpi2b(s): parse a GnuPG MPI string into a little-endian limb array.
// On malformed input it returns the number 0 (not an array).
// NOTE(review): alert() is browser-only — this will throw in non-browser
// environments on bad input; confirm intended runtime.
function mpi2b(s)
{
var bn=1, r=[0], rn=0, sb=256;
var c, sn=s.length;
if(sn < 2)
{
alert('string too short, not a MPI');
return 0;
}
// the payload after the 2-byte header carries (sn-2)*8 bits at most
var len=(sn-2)*8;
// first two octets: declared bit length, big endian
var bits=s.charCodeAt(0)*256+s.charCodeAt(1);
if(bits > len || bits < len-8)
{
alert('not a MPI, bits='+bits+",len="+len);
return 0;
}
// walk the octets from the end (least significant first), copying bits
// into limbs of bs bits each
for(var n=0; n<len; n++)
{
// source bit mask exhausted: fetch the next (more significant) octet
if((sb<<=1) > 255)
{
sb=1; c=s.charCodeAt(--sn);
}
// destination limb full: start a new one
if(bn > bm)
{
bn=1;
r[++rn]=0;
}
if(c & sb) r[rn]|=bn;
bn<<=1;
}
return r;
}
// b2mpi(b): serialise a little-endian limb array into a GnuPG MPI string:
// 2 octets with the length in bits (big endian) followed by the magnitude
// octets in big-endian order.
function b2mpi(b)
{
var bn=1, bc=0, r=[0], rb=1, rn=0;
var bits=b.length*bs;
var n, rr='';
// copy bits from bs-bit limbs into 8-bit octets, least significant first
for(n=0; n<bits; n++)
{
if(b[bc] & bn) r[rn]|=rb;
// destination octet full: start a new one
if((rb<<=1) > 255)
{
rb=1; r[++rn]=0;
}
// source limb exhausted: move to the next limb
if((bn<<=1) > bm)
{
bn=1; bc++;
}
}
// drop leading zero octets
while(rn && r[rn]==0) rn--;
// find the position of the highest set bit in the top octet
bn=256;
for(bits=8; bits>0; bits--) if(r[rn] & (bn>>=1)) break;
bits+=rn*8;
// emit the 2-octet big-endian bit count (fromCharCode truncates bits/256
// to a code unit), then the octets most significant first
rr+=String.fromCharCode(bits/256)+String.fromCharCode(bits%256);
if(bits) for(n=rn; n>=0; n--) rr+=String.fromCharCode(r[n]);
return rr;
}
//
// END OF ORIGINAL CODE
//
// Public API of the Crypto.RSA namespace: MPI serialisation plus the raw
// public/private key operations (no padding is applied here).
return {
b2mpi: b2mpi,
encrypt: RSAencrypt,
decrypt: RSAdecrypt
};
})(/*window, undefined, $*/);
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Core build/debug dependencies for the Travis build.
sudo apt-get install -y -q \
    gdb ccache libboost-dev libboost-filesystem-dev \
    libboost-system-dev libjemalloc-dev

# Valgrind is only required when the build is configured to run under it.
if [ "${ARROW_TRAVIS_VALGRIND}" == "1" ]; then
    sudo apt-get install -y -q valgrind
fi
|
/***************************************************************************
* (C) Copyright 2003-2016 - Marauroa *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package marauroa.common.net.message;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
import marauroa.common.net.Channel;
import marauroa.common.net.InputSerializer;
import marauroa.common.net.OutputSerializer;
/**
* This message indicate the server to create an account.
*
* @see marauroa.common.net.message.Message
*/
public class MessageC2SCreateAccount extends Message {

	/** Desired username. */
	private String username;

	/** Desired password. */
	private String password;

	/** Email address for whatever thing it may be needed. */
	private String email;

	/** Client language; defaults to English for older clients. */
	private String language = Locale.ENGLISH.getLanguage();

	/** Constructor for allowing creation of an empty message */
	public MessageC2SCreateAccount() {
		super(MessageType.C2S_CREATEACCOUNT, null);
	}

	/**
	 * Constructor with a TCP/IP source/destination of the message and username, password
	 * and email associated to the account to be created.
	 *
	 * @param source
	 *            The TCP/IP address associated to this message
	 * @param username
	 *            desired username
	 * @param password
	 *            desired password
	 * @param email
	 *            email of the player
	 * @param language
	 *            client language
	 */
	public MessageC2SCreateAccount(Channel source, String username, String password,
			String email, String language) {
		super(MessageType.C2S_CREATEACCOUNT, source);
		this.username = username;
		this.password = password;
		this.email = email;
		this.language = language;
	}

	/**
	 * Returns desired account's username
	 * @return desired account's username
	 */
	public String getUsername() {
		return username;
	}

	/**
	 * Returns desired account's password
	 * @return desired account's password
	 */
	public String getPassword() {
		return password;
	}

	/**
	 * Returns the account associated email.
	 * @return the account associated email.
	 */
	public String getEmail() {
		return email;
	}

	/**
	 * gets the language
	 *
	 * @return language
	 */
	public String getLanguage() {
		return language;
	}

	/**
	 * This method returns a String that represent the object.
	 * SECURITY FIX: the plaintext password is deliberately masked so that
	 * logging or debugging a message can never leak user credentials.
	 *
	 * @return a string representing the object.
	 */
	@Override
	public String toString() {
		return "Message (C2S CreateAccount) from (" + getAddress() + ") CONTENTS: (" + username
				+ ";*hidden*;" + email + ")";
	}

	/**
	 * Serialises username, password, email and language to the wire.
	 *
	 * @param out destination serializer
	 * @throws IOException on serialization failure
	 */
	@Override
	public void writeObject(OutputSerializer out) throws IOException {
		super.writeObject(out);
		out.write(username);
		out.write(password);
		out.write(email);
		out.write255LongString(language);
	}

	/**
	 * Deserialises the account data from the wire.
	 *
	 * @param in source serializer
	 * @throws IOException on deserialization failure or wrong message type
	 */
	@Override
	public void readObject(InputSerializer in) throws IOException {
		super.readObject(in);
		username = in.readString();
		password = in.readString();
		email = in.readString();
		// the language field was added later: older clients do not send it,
		// in which case the English default declared above is kept
		if (in.available() > 0) {
			language = in.read255LongString();
		}

		if (type != MessageType.C2S_CREATEACCOUNT) {
			throw new IOException();
		}
	}

	/**
	 * Reads the account data from a key-value map (abbreviated keys:
	 * u=username, p=password, e=email, l=language). Absent keys leave the
	 * corresponding field unchanged.
	 *
	 * @param in source map
	 * @throws IOException on wrong message type
	 */
	@Override
	public void readFromMap(Map<String, Object> in) throws IOException {
		super.readFromMap(in);
		if (in.get("u") != null) {
			username = in.get("u").toString();
		}
		if (in.get("p") != null) {
			password = in.get("p").toString();
		}
		if (in.get("e") != null) {
			email = in.get("e").toString();
		}
		if (in.get("l") != null) {
			language = in.get("l").toString();
		}

		if (type != MessageType.C2S_CREATEACCOUNT) {
			throw new IOException();
		}
	}
}
|
package com.scand.realmbrowser;
import android.view.View;
import com.scand.realmbrowser.view.DragOverlayView;
import com.scand.realmbrowser.view.RowView;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Slabodeniuk on 3/31/16.
*/
public class ColumnWidthMediator implements
RowView.OnColumnWidthChangeListener,
DragOverlayView.OnDragFinished {
interface ColumnWidthProvider {
int getColumnWidth(int position);
}
private int colPosition;
private int colLeft;
private DragOverlayView dragOverlayView;
private ColumnWidthProvider mColumnWidthProvider;
private List<RowView> mViews = new ArrayList<>();
ColumnWidthMediator(DragOverlayView view, ColumnWidthProvider widthProvider) {
dragOverlayView = view;
dragOverlayView.setOnDragFinishedListener(this);
mColumnWidthProvider = widthProvider;
}
@Override
public void startColumnWidthChange(int minX, int currentLeft, int currentRight, int position) {
colLeft = currentLeft;
colPosition = position;
dragOverlayView.setMinLeft(minX);
dragOverlayView.setShadowPosition(currentRight);
dragOverlayView.startDrag(null,
// create empty drag shadow
// real shadow will be drawn by DragOverlayView itself
new View.DragShadowBuilder(),
null,
0);
}
@Override
public void onDragFinished(int position) {
int newWidth = position - colLeft;
for (RowView v : mViews) {
v.setColumnWidth(newWidth, colPosition);
}
}
public int getColWidth(int position) {
return mColumnWidthProvider.getColumnWidth(position);
}
public void addView(RowView v) {
mViews.add(v);
}
public void removeAllViews() {
mViews.clear();
}
} |
<filename>hwpy_modules/i2c_rapi.py
"""
Raspberry Pi hardware i2c
part of hwpy: an OO hardware interface library
home: https://www.github.com/wovo/hwpy
"""
from hwpy_modules.i2c_interface import *
import smbus
class _rapi_i2c_hardware(i2c_interface):
    """Hardware i2c interface.

    This is the hardware i2c interface.
    It is much faster than the bit banged (software) version,
    but it must be enabled
    (sudo raspi-config; select 5 Interfacing Options; enable i2c),
    and it can only use the hardware i2c pins.
    """

    def __init__(self, interface: int = 1):
        """Create an interface to the hardware i2c.

        Recent Pi's use interface 1, which is the default.
        For older Pi's, if you get the error
        'IOError: [Errno 2] No such file or directory'
        try with interface=0.
        """
        # NOTE: the redundant function-local ``import smbus`` was removed;
        # the module already imports smbus at the top level.
        try:
            self._bus = smbus.SMBus(interface)
        except FileNotFoundError:
            # the i2c device node is missing when the kernel driver is
            # disabled; tell the user how to enable it and bail out
            print(
                "To use the hardware i2c, enable i2c in the kernel using "
                "\"sudo raspi-config\"." )
            print("Exiting...")
            exit()

    def write(self, address: int, data: list):
        """An i2c write transaction.

        Perform an i2c write transaction, writing the values
        in the data list to the device at the specified address.
        """
        # smbus needs a leading command byte: use the first data byte
        self._bus.write_i2c_block_data(address, data[0], data[1:])

    def read(self, address: int, n: int) -> list:
        """An i2c read transaction.

        Perform an i2c read transaction, reading and returning
        n bytes from the device at the specified address.
        """
        # command byte 0 — assumes the device treats it as a register
        # offset of zero; TODO confirm per device
        return self._bus.read_i2c_block_data(address, 0, n)

    def read_command(self, address: int, command: int, n: int) -> list:
        """Read n bytes from the device after sending a command byte."""
        return self._bus.read_i2c_block_data(address, command, n)

    def write_command(self, address: int, command: int, data: list):
        """Write the data bytes to the device, prefixed by a command byte."""
        self._bus.write_i2c_block_data(address, command, data)
# ===========================================================================
#
# hardware i2c
#
# ===========================================================================
# Hardware i2c interface: the implementation supported by this target.
# (Previously described by a stray module-level string literal, which is a
# no-op expression statement — converted to a comment.)
i2c_hardware = _rapi_i2c_hardware
#!/bin/bash

# Fetch the WISTR model directories (recursive mirror) and the Wikipedia
# log files used by the Fieldspring experiments.
base=http://web.corral.tacc.utexas.edu/utcompling/fieldspring-data

for suffix in cwardev-gt cwartest-gt trdev-gt trtest-gt; do
    wget -r "${base}/wistr-models-${suffix}"
done

for log in enwiki-cwardev-20spd-100.log enwiki-cwartest-20spd-100.log \
           enwiki-trconlldev-100.log enwiki-trconlltest-100.log; do
    wget "${base}/wiki/${log}"
done
/**
* Licensed under the Artistic License; you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://displaytag.sourceforge.net/license.html
*
* THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
* WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
*/
/**
* Per the conditions of the Artistic License,
* Hexagon Safety & Infrastructure states that it has
* made the following changes to this source file:
*
* 25 July 2014 - Added a property for secured row label
* 18 October 2019 - Added support for alternate data encodings
*
*/
package org.displaytag.properties;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.text.Collator;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.PageContext;
import javax.servlet.jsp.tagext.Tag;
import org.apache.commons.lang.ClassUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.UnhandledException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.displaytag.Messages;
import org.displaytag.model.DefaultComparator;
import org.displaytag.decorator.DecoratorFactory;
import org.displaytag.decorator.DefaultDecoratorFactory;
import org.displaytag.exception.FactoryInstantiationException;
import org.displaytag.exception.TablePropertiesLoadException;
import org.displaytag.localization.I18nResourceProvider;
import org.displaytag.localization.LocaleResolver;
import org.displaytag.util.DefaultRequestHelperFactory;
import org.displaytag.util.ReflectHelper;
import org.displaytag.util.RequestHelperFactory;
/**
* The properties used by the Table tags. The properties are loaded in the following order, in increasing order of
* priority. The locale of getInstance() is used to determine the locale of the property file to use; if the key
* required does not exist in the specified file, the key will be loaded from a more general property file.
* <ol>
* <li>First, from the TableTag.properties included with the DisplayTag distribution.</li>
* <li>Then, from the file displaytag.properties, if it is present; these properties are intended to be set by the user
* for sitewide application. Messages are gathered according to the Locale of the property file.</li>
* <li>Finally, if this class has a userProperties defined, all of the properties from that Properties object are
* copied in as well.</li>
* </ol>
* @author <NAME>
* @author rapruitt
* @version $Revision: 1096 $ ($Author: rapruitt $)
*/
public final class TableProperties implements Cloneable
{
/**
* name of the default properties file name ("displaytag.properties").
*/
public static final String DEFAULT_FILENAME = "displaytag.properties"; //$NON-NLS-1$
/**
* The name of the local properties file that is searched for on the classpath. Settings in this file will override
* the defaults loaded from TableTag.properties.
*/
public static final String LOCAL_PROPERTIES = "displaytag"; //$NON-NLS-1$
/**
* property <code>export.banner</code>.
*/
public static final String PROPERTY_STRING_EXPORTBANNER = "export.banner"; //$NON-NLS-1$
/**
* property <code>export.banner.sepchar</code>.
*/
public static final String PROPERTY_STRING_EXPORTBANNER_SEPARATOR = "export.banner.sepchar"; //$NON-NLS-1$
/**
* property <code>export.decorated</code>.
*/
public static final String PROPERTY_BOOLEAN_EXPORTDECORATED = "export.decorated"; //$NON-NLS-1$
/**
* property <code>export.amount</code>.
*/
public static final String PROPERTY_STRING_EXPORTAMOUNT = "export.amount"; //$NON-NLS-1$
/**
* property <code>sort.amount</code>.
*/
public static final String PROPERTY_STRING_SORTAMOUNT = "sort.amount"; //$NON-NLS-1$
/**
* property <code>basic.show.header</code>.
*/
public static final String PROPERTY_BOOLEAN_SHOWHEADER = "basic.show.header"; //$NON-NLS-1$
/**
* property <code>basic.msg.empty_list</code>.
*/
public static final String PROPERTY_STRING_EMPTYLIST_MESSAGE = "basic.msg.empty_list"; //$NON-NLS-1$
/**
* property <code>basic.msg.empty_list_row</code>.
*/
public static final String PROPERTY_STRING_EMPTYLISTROW_MESSAGE = "basic.msg.empty_list_row"; //$NON-NLS-1$
/**
* property <code>basic.empty.showtable</code>.
*/
public static final String PROPERTY_BOOLEAN_EMPTYLIST_SHOWTABLE = "basic.empty.showtable"; //$NON-NLS-1$
/**
* property <code>paging.banner.placement</code>.
*/
public static final String PROPERTY_STRING_BANNER_PLACEMENT = "paging.banner.placement"; //$NON-NLS-1$
/**
* property <code>error.msg.invalid_page</code>.
*/
public static final String PROPERTY_STRING_PAGING_INVALIDPAGE = "error.msg.invalid_page"; //$NON-NLS-1$
/**
* property <code>paging.banner.item_name</code>.
*/
public static final String PROPERTY_STRING_PAGING_ITEM_NAME = "paging.banner.item_name"; //$NON-NLS-1$
/**
* property <code>paging.banner.items_name</code>.
*/
public static final String PROPERTY_STRING_PAGING_ITEMS_NAME = "paging.banner.items_name"; //$NON-NLS-1$
/**
* property <code>paging.banner.no_items_found</code>.
*/
public static final String PROPERTY_STRING_PAGING_NOITEMS = "paging.banner.no_items_found"; //$NON-NLS-1$
/**
* property <code>paging.banner.one_item_found</code>.
*/
public static final String PROPERTY_STRING_PAGING_FOUND_ONEITEM = "paging.banner.one_item_found"; //$NON-NLS-1$
/**
* property <code>paging.banner.all_items_found</code>.
*/
public static final String PROPERTY_STRING_PAGING_FOUND_ALLITEMS = "paging.banner.all_items_found"; //$NON-NLS-1$
/**
* property <code>paging.banner.some_items_found</code>.
*/
public static final String PROPERTY_STRING_PAGING_FOUND_SOMEITEMS = "paging.banner.some_items_found"; //$NON-NLS-1$
/**
* property <code>paging.banner.group_size</code>.
*/
public static final String PROPERTY_INT_PAGING_GROUPSIZE = "paging.banner.group_size"; //$NON-NLS-1$
/**
* property <code>paging.banner.onepage</code>.
*/
public static final String PROPERTY_STRING_PAGING_BANNER_ONEPAGE = "paging.banner.onepage"; //$NON-NLS-1$
/**
* property <code>paging.banner.first</code>.
*/
public static final String PROPERTY_STRING_PAGING_BANNER_FIRST = "paging.banner.first"; //$NON-NLS-1$
/**
* property <code>paging.banner.last</code>.
*/
public static final String PROPERTY_STRING_PAGING_BANNER_LAST = "paging.banner.last"; //$NON-NLS-1$
/**
* property <code>paging.banner.full</code>.
*/
public static final String PROPERTY_STRING_PAGING_BANNER_FULL = "paging.banner.full"; //$NON-NLS-1$
/**
* property <code>paging.banner.page.link</code>.
*/
public static final String PROPERTY_STRING_PAGING_PAGE_LINK = "paging.banner.page.link"; //$NON-NLS-1$
/**
* property <code>paging.banner.page.selected</code>.
*/
public static final String PROPERTY_STRING_PAGING_PAGE_SELECTED = "paging.banner.page.selected"; //$NON-NLS-1$
/**
* property <code>paging.banner.page.separator</code>.
*/
public static final String PROPERTY_STRING_PAGING_PAGE_SPARATOR = "paging.banner.page.separator"; //$NON-NLS-1$
/**
* property <code>factory.requestHelper</code>.
*/
public static final String PROPERTY_CLASS_REQUESTHELPERFACTORY = "factory.requestHelper"; //$NON-NLS-1$
/**
* property <code>factory.decorators</code>.
*/
public static final String PROPERTY_CLASS_DECORATORFACTORY = "factory.decorator"; //$NON-NLS-1$
/**
* property <code>locale.provider</code>.
*/
public static final String PROPERTY_CLASS_LOCALEPROVIDER = "locale.provider"; //$NON-NLS-1$
/**
* property <code>locale.resolver</code>.
*/
public static final String PROPERTY_CLASS_LOCALERESOLVER = "locale.resolver"; //$NON-NLS-1$
/**
* property <code>css.tr.even</code>: holds the name of the css class for even rows. Defaults to
* <code>even</code>.
*/
public static final String PROPERTY_CSS_TR_EVEN = "css.tr.even"; //$NON-NLS-1$
/**
* property <code>css.tr.odd</code>: holds the name of the css class for odd rows. Defaults to <code>odd</code>.
*/
public static final String PROPERTY_CSS_TR_ODD = "css.tr.odd"; //$NON-NLS-1$
/**
* property <code>css.table</code>: holds the name of the css class added to the main table tag. By default no
* css class is added.
*/
public static final String PROPERTY_CSS_TABLE = "css.table"; //$NON-NLS-1$
/**
* property <code>css.th.sortable</code>: holds the name of the css class added to the the header of a sortable
* column. By default no css class is added.
*/
public static final String PROPERTY_CSS_TH_SORTABLE = "css.th.sortable"; //$NON-NLS-1$
/**
* property <code>css.th.sorted</code>: holds the name of the css class added to the the header of a sorted
* column. Defaults to <code>sorted</code>.
*/
public static final String PROPERTY_CSS_TH_SORTED = "css.th.sorted"; //$NON-NLS-1$
/**
* property <code>css.th.ascending</code>: holds the name of the css class added to the the header of a column
* sorted in ascending order. Defaults to <code>order1</code>.
*/
public static final String PROPERTY_CSS_TH_SORTED_ASCENDING = "css.th.ascending"; //$NON-NLS-1$
/**
* property <code>css.th.descending</code>: holds the name of the css class added to the the header of a column
* sorted in descending order. Defaults to <code>order2</code>.
*/
public static final String PROPERTY_CSS_TH_SORTED_DESCENDING = "css.th.descending"; //$NON-NLS-1$
/**
* prefix used for all the properties related to export ("export"). The full property name is <code>export.</code>
* <em>[export type]</em><code>.</code><em>[property name]</em>
*/
public static final String PROPERTY_EXPORT_PREFIX = "export"; //$NON-NLS-1$
/**
* prefix used to set the media decorator property name. The full property name is
* <code>decorator.media.</code><em>[export type]</em>.
*/
public static final String PROPERTY_DECORATOR_SUFFIX = "decorator"; //$NON-NLS-1$
/**
* used to set the media decorator property name. The full property name is
* <code>decorator.media.</code><em>[export type]</em>
*/
public static final String PROPERTY_DECORATOR_MEDIA = "media"; //$NON-NLS-1$
/**
* property <code>export.types</code>: holds the list of export available export types.
*/
public static final String PROPERTY_EXPORTTYPES = "export.types"; //$NON-NLS-1$
/**
* property <code>export.direction.rtl</code>: If this property is true, it means the exported table is rendered
* in right-to-left direction.
*/
public static final String PROPERTY_EXPORT_DIRECTION_RTL = "export.direction.rtl"; //$NON-NLS-1$
/**
* export property <code>label</code>.
*/
public static final String EXPORTPROPERTY_STRING_LABEL = "label"; //$NON-NLS-1$
/**
* export property <code>class</code>.
*/
public static final String EXPORTPROPERTY_STRING_CLASS = "class"; //$NON-NLS-1$
/**
* export property <code>include_header</code>.
*/
public static final String EXPORTPROPERTY_BOOLEAN_EXPORTHEADER = "include_header"; //$NON-NLS-1$
/**
* export property <code>filename</code>.
*/
public static final String EXPORTPROPERTY_STRING_FILENAME = "filename"; //$NON-NLS-1$
/**
 * export property <code>file.encoding</code>.
 */
public static final String EXPORTPROPERTY_STRING_FILE_ENCODING = "file.encoding"; //$NON-NLS-1$
/**
 * export property <code>font.path</code>.
 */
public static final String EXPORTPROPERTY_STRING_FONT_PATH = "font.path"; //$NON-NLS-1$
/**
 * Property <code>pagination.sort.param</code>. If external pagination and sorting is used, it holds the name of
 * the parameter used to hold the sort criterion in generated links.
 */
public static final String PROPERTY_STRING_PAGINATION_SORT_PARAM = "pagination.sort.param"; //$NON-NLS-1$
/**
 * Property <code>pagination.sortdirection.param</code>. If external pagination and sorting is used, it holds the
 * name of the parameter used to hold the sort direction in generated links (asc or desc).
 */
public static final String PROPERTY_STRING_PAGINATION_SORT_DIRECTION_PARAM = "pagination.sortdirection.param"; //$NON-NLS-1$
/**
 * Property <code>pagination.pagenumber.param</code>. If external pagination and sorting is used, it holds the
 * name of the parameter used to hold the page number in generated links.
 */
public static final String PROPERTY_STRING_PAGINATION_PAGE_NUMBER_PARAM = "pagination.pagenumber.param"; //$NON-NLS-1$
/**
 * Property <code>pagination.searchid.param</code>. If external pagination and sorting is used, it holds the name
 * of the parameter used to hold the search ID in generated links.
 */
public static final String PROPERTY_STRING_PAGINATION_SEARCH_ID_PARAM = "pagination.searchid.param"; //$NON-NLS-1$
/**
 * Property <code>pagination.sort.asc.value</code>. If external pagination and sorting is used, it holds the
 * value of the sort direction parameter for "ascending".
 */
public static final String PROPERTY_STRING_PAGINATION_ASC_VALUE = "pagination.sort.asc.value"; //$NON-NLS-1$
/**
 * Property <code>pagination.sort.desc.value</code>. If external pagination and sorting is used, it holds the
 * value of the sort direction parameter for "descending".
 */
public static final String PROPERTY_STRING_PAGINATION_DESC_VALUE = "pagination.sort.desc.value"; //$NON-NLS-1$
/**
 * Property <code>secured.row.label</code>. Presumably the label shown in place of rows the current user may not
 * view (defaults to "SECURED" in <code>getSecuredRowLabel()</code>) — confirm against callers.
 */
public static final String PROPERTY_STRING_SECURED_ROW_LABEL = "secured.row.label";
/**
 * Property <code>pagination.sort.skippagenumber</code>. If external pagination and sorting is used, it
 * determines if the current page number must be added in sort links or not. If this property is true, it means that
 * each click on a generated sort link will re-sort the list, and go back to the default page number. If it is
 * false, each click on a generated sort link will re-sort the list, and ask the current page number.
 */
public static final String PROPERTY_BOOLEAN_PAGINATION_SKIP_PAGE_NUMBER_IN_SORT = "pagination.sort.skippagenumber"; //$NON-NLS-1$
/**
 * Property <code>comparator.default</code>. If present, will be used as the classname of the default comparator.
 * Will be overridden by column level comparators.
 */
public static final String PROPERTY_DEFAULT_COMPARATOR = "comparator.default"; //$NON-NLS-1$
// </JBN>
/**
 * Separator char used in property names.
 */
private static final char SEP = '.';
/**
 * logger.
 */
private static Log log = LogFactory.getLog(TableProperties.class);
/**
 * The userProperties are local, non-default properties; these settings override the defaults from
 * displaytag.properties and TableTag.properties.
 */
private static Properties userProperties = new Properties();
/**
 * Configured resource provider. If no ResourceProvider is configured, a no-op one is used. This instance is
 * initialized at first use and shared.
 */
private static I18nResourceProvider resourceProvider;
/**
 * Configured locale resolver. Lazily initialized by getLocaleResolverInstance() and shared.
 */
private static LocaleResolver localeResolver;
/**
 * TableProperties for each locale are loaded as needed, and cloned for public usage.
 * NOTE(review): plain HashMap accessed from static methods without synchronization — confirm usage is safe
 * under concurrent requests.
 */
private static Map prototypes = new HashMap();
/**
 * Loaded properties (defaults from defaultProperties + custom from bundle).
 */
private Properties properties;
/**
 * The locale for these properties.
 */
private Locale locale;
/**
 * Cache for dynamically instantiated objects (request factory, decorator factory).
 */
private Map objectCache = new HashMap();
/**
 * Overrides the configured I18nResourceProvider. Providers are normally configured through displaytag
 * properties; this accessor exists to support unit tests.
 * @param newProvider I18nResourceProvider instance to install
 */
protected static void setResourceProvider(I18nResourceProvider newProvider)
{
    resourceProvider = newProvider;
}
/**
 * Overrides the configured LocaleResolver. Resolvers are normally configured through displaytag properties;
 * this accessor exists to support unit tests.
 * @param newResolver LocaleResolver instance to install
 */
protected static void setLocaleResolver(LocaleResolver newResolver)
{
    localeResolver = newResolver;
}
/**
 * Loads the built-in default properties (the TableTag.properties file bundled with the library).
 * @return loaded default properties, never <code>null</code>
 * @throws TablePropertiesLoadException if the default properties file can't be found or read
 */
private static Properties loadBuiltInProperties() throws TablePropertiesLoadException
{
    Properties defaultProperties = new Properties();
    InputStream is = TableProperties.class.getResourceAsStream(DEFAULT_FILENAME);
    if (is == null)
    {
        throw new TablePropertiesLoadException(TableProperties.class, DEFAULT_FILENAME, null);
    }
    try
    {
        defaultProperties.load(is);
    }
    catch (IOException e)
    {
        throw new TablePropertiesLoadException(TableProperties.class, DEFAULT_FILENAME, e);
    }
    finally
    {
        // always close the classloader stream: the previous implementation leaked it
        try
        {
            is.close();
        }
        catch (IOException ignored)
        {
            // nothing useful can be done if close fails
        }
    }
    return defaultProperties;
}
/**
 * Loads user properties (displaytag.properties) according to the given locale. User properties are not
 * guaranteed to exist, so the method can return <code>null</code> (no exception will be thrown).
 * @param locale requested Locale
 * @return loaded properties bundle, or <code>null</code> when none is found
 */
private static ResourceBundle loadUserProperties(Locale locale)
{
    ResourceBundle bundle = null;
    try
    {
        // first attempt: the classloader that loaded this class
        bundle = ResourceBundle.getBundle(LOCAL_PROPERTIES, locale);
    }
    catch (MissingResourceException e)
    {
        // if no resource bundle is found, try using the context classloader
        // (needed e.g. when displaytag lives in a shared/parent classloader)
        try
        {
            bundle = ResourceBundle.getBundle(LOCAL_PROPERTIES, locale, Thread
                .currentThread()
                .getContextClassLoader());
        }
        catch (MissingResourceException mre)
        {
            // user properties are optional: just trace the miss at debug level
            if (log.isDebugEnabled())
            {
                log.debug(Messages.getString("TableProperties.propertiesnotfound", //$NON-NLS-1$
                    new Object[]{mre.getMessage()}));
            }
        }
    }
    return bundle;
}
/**
 * Returns the configured Locale Resolver, lazily instantiating it on first use. This method is called before
 * the loading of localized properties, so the configuration is read directly from the default-locale user
 * properties first, then from the built-in defaults.
 * NOTE(review): the lazy initialization of the static field is not synchronized — presumably first use happens
 * during single-threaded startup; confirm before relying on this under concurrent requests.
 * @return LocaleResolver instance.
 * @throws TablePropertiesLoadException if the default <code>TableTag.properties</code> file is not found.
 */
public static LocaleResolver getLocaleResolverInstance() throws TablePropertiesLoadException
{
    if (localeResolver == null)
    {
        // special handling, table properties is not yet instantiated
        String className = null;
        ResourceBundle defaultUserProperties = loadUserProperties(Locale.getDefault());
        // if available, user properties have higher precedence
        if (defaultUserProperties != null)
        {
            try
            {
                className = defaultUserProperties.getString(PROPERTY_CLASS_LOCALERESOLVER);
            }
            catch (MissingResourceException e)
            {
                // no problem: the key is simply not overridden by the user
            }
        }
        // still null? load defaults
        if (className == null)
        {
            Properties defaults = loadBuiltInProperties();
            className = defaults.getProperty(PROPERTY_CLASS_LOCALERESOLVER);
        }
        if (className != null)
        {
            try
            {
                Class classProperty = ReflectHelper.classForName(className);
                localeResolver = (LocaleResolver) classProperty.newInstance();
                log.info(Messages.getString("TableProperties.classinitializedto", //$NON-NLS-1$
                    new Object[]{ClassUtils.getShortClassName(LocaleResolver.class), className}));
            }
            catch (Throwable e)
            {
                // configuration problem: log it and fall through to the default resolver below
                log.warn(Messages.getString("TableProperties.errorloading", //$NON-NLS-1$
                    new Object[]{
                    ClassUtils.getShortClassName(LocaleResolver.class),
                    e.getClass().getName(),
                    e.getMessage()}));
            }
        }
        else
        {
            log.info(Messages.getString("TableProperties.noconfigured", //$NON-NLS-1$
                new Object[]{ClassUtils.getShortClassName(LocaleResolver.class)}));
        }
        // still null? nothing configured or instantiation failed: use the request locale
        if (localeResolver == null)
        {
            // fallback locale resolver
            localeResolver = new LocaleResolver()
            {
                public Locale resolveLocale(HttpServletRequest request)
                {
                    return request.getLocale();
                }
            };
        }
    }
    return localeResolver;
}
/**
 * Initialize a new TableProperties loading the default properties file and the user defined one. There is no
 * caching used here, caching is assumed to occur in the getInstance factory method.
 * @param myLocale the locale we are in
 * @throws TablePropertiesLoadException for errors during loading of properties files
 */
private TableProperties(Locale myLocale)
{
    this.locale = myLocale;
    // default properties will not change unless this class is reloaded
    Properties defaultProperties = loadBuiltInProperties();
    // built-in defaults become the fallback layer of the Properties chain
    properties = new Properties(defaultProperties);
    addProperties(myLocale);
    // Now copy in the user properties (properties file set by calling setUserProperties()).
    // note setUserProperties() MUST BE CALLED before the first TableProperties instantiation
    Enumeration<Object> keys = userProperties.keys();
    while (keys.hasMoreElements())
    {
        String key = (String) keys.nextElement();
        if (key != null)
        {
            properties.setProperty(key, (String) userProperties.get(key));
        }
    }
}
/**
 * Tries to load the locale-specific user properties bundle (displaytag.properties) and merges its entries into
 * the existing properties, overriding any value already present.
 * @param userLocale the locale for which the user properties are to be loaded
 */
private void addProperties(Locale userLocale)
{
    ResourceBundle bundle = loadUserProperties(userLocale);
    // the user bundle is optional: keep the current properties when none is found
    if (bundle != null)
    {
        Enumeration<String> keys = bundle.getKeys();
        while (keys.hasMoreElements())
        {
            // getKeys() already yields Strings; the previous (String) cast was redundant
            String key = keys.nextElement();
            properties.setProperty(key, bundle.getString(key));
        }
    }
}
/**
 * Clones this instance, deep-copying the properties as well so that per-instance setProperty calls on the clone
 * do not leak back into the cached prototype.
 * @return a new clone of oneself
 */
protected Object clone()
{
    TableProperties twin;
    try
    {
        twin = (TableProperties) super.clone();
    }
    catch (CloneNotSupportedException e)
    {
        // should never happen: this class is cloneable by construction
        throw new UnhandledException(e);
    }
    // replace the shared Properties reference with a private copy
    twin.properties = (Properties) this.properties.clone();
    return twin;
}
/**
 * Returns a new TableProperties instance for the given locale. A prototype per locale is built lazily and
 * cached; callers always receive a clone so local modifications don't affect the cache.
 * NOTE(review): the prototypes HashMap is read and written without synchronization — under concurrent first
 * access two prototypes may be built (harmless duplication, but confirm no stronger guarantee is expected).
 * @param request HttpServletRequest needed to extract the locale to use. If null the default locale will be used.
 * @return TableProperties instance
 */
public static TableProperties getInstance(HttpServletRequest request)
{
    Locale locale;
    if (request != null)
    {
        locale = getLocaleResolverInstance().resolveLocale(request);
    }
    else
    {
        // for some configuration parameters locale doesn't matter
        locale = Locale.getDefault();
    }
    TableProperties props = (TableProperties) prototypes.get(locale);
    if (props == null)
    {
        TableProperties lprops = new TableProperties(locale);
        prototypes.put(locale, lprops);
        props = lprops;
    }
    return (TableProperties) props.clone();
}
/**
 * Unload all cached per-locale prototypes. This will not clear properties set by setUserProperties(); you must
 * clear those manually.
 */
public static void clearProperties()
{
    prototypes.clear();
}
/**
 * Local, non-default properties; these settings override the defaults from displaytag.properties and
 * TableTag.properties. Please note that the values are copied in, so that multiple calls with non-overlapping
 * properties will be merged, not overwritten. Note: setUserProperties() MUST BE CALLED before the first
 * TableProperties instantiation.
 * @param overrideProperties - The local, non-default properties
 */
public static void setUserProperties(Properties overrideProperties)
{
    // copy the entries instead of replacing the Properties instance, so that repeated
    // calls from different sources merge rather than overwrite.
    // if default properties are not yet loaded they will be copied in the constructor
    for (Enumeration<Object> keyEnum = overrideProperties.keys(); keyEnum.hasMoreElements();)
    {
        String propertyName = (String) keyEnum.nextElement();
        if (propertyName != null)
        {
            userProperties.setProperty(propertyName, (String) overrideProperties.get(propertyName));
        }
    }
}
/**
 * The locale for which these properties are intended.
 * @return the locale
 */
public Locale getLocale()
{
    return locale;
}
// --- paging message / banner configuration getters: each simply reads one configured string
// (or int) property; string values are null when the property is not configured ---
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_INVALIDPAGE</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingInvalidPage()
{
    return getProperty(PROPERTY_STRING_PAGING_INVALIDPAGE);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_ITEM_NAME</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingItemName()
{
    return getProperty(PROPERTY_STRING_PAGING_ITEM_NAME);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_ITEMS_NAME</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingItemsName()
{
    return getProperty(PROPERTY_STRING_PAGING_ITEMS_NAME);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_NOITEMS</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingFoundNoItems()
{
    return getProperty(PROPERTY_STRING_PAGING_NOITEMS);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_FOUND_ONEITEM</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingFoundOneItem()
{
    return getProperty(PROPERTY_STRING_PAGING_FOUND_ONEITEM);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_FOUND_ALLITEMS</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingFoundAllItems()
{
    return getProperty(PROPERTY_STRING_PAGING_FOUND_ALLITEMS);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_FOUND_SOMEITEMS</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingFoundSomeItems()
{
    return getProperty(PROPERTY_STRING_PAGING_FOUND_SOMEITEMS);
}
/**
 * Getter for the <code>PROPERTY_INT_PAGING_GROUPSIZE</code> property.
 * @return int (8 when the property is missing or invalid)
 */
public int getPagingGroupSize()
{
    // default size is 8
    return getIntProperty(PROPERTY_INT_PAGING_GROUPSIZE, 8);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_BANNER_ONEPAGE</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingBannerOnePage()
{
    return getProperty(PROPERTY_STRING_PAGING_BANNER_ONEPAGE);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_BANNER_FIRST</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingBannerFirst()
{
    return getProperty(PROPERTY_STRING_PAGING_BANNER_FIRST);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_BANNER_LAST</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingBannerLast()
{
    return getProperty(PROPERTY_STRING_PAGING_BANNER_LAST);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_BANNER_FULL</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingBannerFull()
{
    return getProperty(PROPERTY_STRING_PAGING_BANNER_FULL);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_PAGE_LINK</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingPageLink()
{
    return getProperty(PROPERTY_STRING_PAGING_PAGE_LINK);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_PAGE_SELECTED</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingPageSelected()
{
    return getProperty(PROPERTY_STRING_PAGING_PAGE_SELECTED);
}
/**
 * Getter for the <code>PROPERTY_STRING_PAGING_PAGE_SPARATOR</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getPagingPageSeparator()
{
    return getProperty(PROPERTY_STRING_PAGING_PAGE_SPARATOR);
}
/**
 * Is the given export option enabled?
 * @param exportType instance of MediaTypeEnum
 * @return boolean true if export is enabled
 */
public boolean getAddExport(MediaTypeEnum exportType)
{
    // the configuration key has the form "<export prefix>.<media name>"
    String key = PROPERTY_EXPORT_PREFIX + SEP + exportType.getName();
    return getBooleanProperty(key);
}
/**
 * Should headers be included in given export type?
 * @param exportType instance of MediaTypeEnum
 * @return boolean true if export should include headers
 */
public boolean getExportHeader(MediaTypeEnum exportType)
{
    // key has the form "<export prefix>.<media name>.<export-header suffix>"
    StringBuilder key = new StringBuilder();
    key.append(PROPERTY_EXPORT_PREFIX);
    key.append(SEP);
    key.append(exportType.getName());
    key.append(SEP);
    key.append(EXPORTPROPERTY_BOOLEAN_EXPORTHEADER);
    return getBooleanProperty(key.toString());
}
// --- per-media export configuration getters: the key is "<export prefix>.<media name>.<suffix>" ---
/**
 * Returns the label for the given export option.
 * @param exportType instance of MediaTypeEnum
 * @return String label (may be <code>null</code> if not configured)
 */
public String getExportLabel(MediaTypeEnum exportType)
{
    return getProperty(PROPERTY_EXPORT_PREFIX + SEP + exportType.getName() + SEP + EXPORTPROPERTY_STRING_LABEL);
}
/**
 * Returns the file name for the given media. Can be null.
 * @param exportType instance of MediaTypeEnum
 * @return String filename
 */
public String getExportFileName(MediaTypeEnum exportType)
{
    return getProperty(PROPERTY_EXPORT_PREFIX + SEP + exportType.getName() + SEP + EXPORTPROPERTY_STRING_FILENAME);
}
/**
 * Returns the file encoding type for the given media. Can be null.
 * @param exportType instance of MediaTypeEnum
 * @return String file encoding type
 */
public String getExportFileEncoding(MediaTypeEnum exportType)
{
    return getProperty(PROPERTY_EXPORT_PREFIX + SEP + exportType.getName() + SEP + EXPORTPROPERTY_STRING_FILE_ENCODING);
}
/**
 * Returns the font file path for the given media. Can be null.
 * @param exportType instance of MediaTypeEnum
 * @return String font file path
 */
public String getExportFontPath(MediaTypeEnum exportType)
{
    return getProperty(PROPERTY_EXPORT_PREFIX + SEP + exportType.getName() + SEP + EXPORTPROPERTY_STRING_FONT_PATH);
}
/**
 * Getter for the <code>PROPERTY_BOOLEAN_EXPORTDECORATED</code> property.
 * @return boolean <code>true</code> if decorators should be used in exporting
 */
public boolean getExportDecorated()
{
    return getBooleanProperty(PROPERTY_BOOLEAN_EXPORTDECORATED);
}
/**
 * Getter for the <code>PROPERTY_STRING_EXPORTBANNER</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getExportBanner()
{
    return getProperty(PROPERTY_STRING_EXPORTBANNER);
}
/**
 * Getter for the <code>PROPERTY_STRING_EXPORTBANNER_SEPARATOR</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getExportBannerSeparator()
{
    return getProperty(PROPERTY_STRING_EXPORTBANNER_SEPARATOR);
}
/**
 * Getter for the <code>PROPERTY_BOOLEAN_SHOWHEADER</code> property.
 * @return boolean
 */
public boolean getShowHeader()
{
    return getBooleanProperty(PROPERTY_BOOLEAN_SHOWHEADER);
}
/**
 * Getter for the <code>PROPERTY_STRING_EMPTYLIST_MESSAGE</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getEmptyListMessage()
{
    return getProperty(PROPERTY_STRING_EMPTYLIST_MESSAGE);
}
/**
 * Getter for the <code>PROPERTY_STRING_EMPTYLISTROW_MESSAGE</code> property.
 * @return String (may be <code>null</code> if not configured)
 */
public String getEmptyListRowMessage()
{
    return getProperty(PROPERTY_STRING_EMPTYLISTROW_MESSAGE);
}
/**
 * Getter for the <code>PROPERTY_BOOLEAN_EMPTYLIST_SHOWTABLE</code> property.
 * @return boolean <code>true</code> if table should be displayed also if no items are found
 */
public boolean getEmptyListShowTable()
{
    return getBooleanProperty(PROPERTY_BOOLEAN_EMPTYLIST_SHOWTABLE);
}
/**
 * Getter for the <code>PROPERTY_STRING_EXPORTAMOUNT</code> property.
 * @return boolean <code>true</code> if <code>export.amount</code> is <code>list</code> (export the whole list,
 * not only the current page)
 */
public boolean getExportFullList()
{
    return "list".equals(getProperty(PROPERTY_STRING_EXPORTAMOUNT)); //$NON-NLS-1$
}
/**
 * Getter for the <code>PROPERTY_STRING_SORTAMOUNT</code> property.
 * @return boolean <code>true</code> if <code>sort.amount</code> is <code>list</code> (sort the whole list, not
 * only the current page)
 */
public boolean getSortFullList()
{
    return "list".equals(getProperty(PROPERTY_STRING_SORTAMOUNT)); //$NON-NLS-1$
}
/**
 * Should paging banner be added before the table? True when the placement property is "top" or "both".
 * @return boolean
 */
public boolean getAddPagingBannerTop()
{
    String placement = getProperty(PROPERTY_STRING_BANNER_PLACEMENT);
    return "top".equals(placement) || "both".equals(placement); //$NON-NLS-1$ //$NON-NLS-2$
}
/**
 * Should paging banner be added after the table? True when the placement property is "bottom" or "both".
 * @return boolean
 */
public boolean getAddPagingBannerBottom()
{
    String placement = getProperty(PROPERTY_STRING_BANNER_PLACEMENT);
    return "bottom".equals(placement) || "both".equals(placement); //$NON-NLS-1$ //$NON-NLS-2$
}
/**
 * Returns the appropriate css class for a table row.
 * @param rowNumber row number
 * @return the value of <code>PROPERTY_CSS_TR_ODD</code> when rowNumber is even and
 * <code>PROPERTY_CSS_TR_EVEN</code> when rowNumber is odd (the previous javadoc stated the opposite of what the
 * code does).
 * NOTE(review): the inversion suggests rowNumber is 0-based while css classes are named after 1-based display
 * rows — confirm against the caller before changing this mapping.
 */
public String getCssRow(int rowNumber)
{
    return getProperty((rowNumber % 2 == 0) ? PROPERTY_CSS_TR_ODD : PROPERTY_CSS_TR_EVEN);
}
/**
 * Returns the appropriate css class for a sorted column header.
 * @param ascending <code>true</code> if column is sorted in ascending order.
 * @return the value of <code>PROPERTY_CSS_TH_SORTED_ASCENDING</code> if column is sorted in ascending order or
 * <code>PROPERTY_CSS_TH_SORTED_DESCENDING</code> if column is sorted in descending order.
 */
public String getCssOrder(boolean ascending)
{
    return getProperty(ascending ? PROPERTY_CSS_TH_SORTED_ASCENDING : PROPERTY_CSS_TH_SORTED_DESCENDING);
}
/**
 * Returns the configured css class for a sorted column header.
 * @return the value of <code>PROPERTY_CSS_TH_SORTED</code>
 */
public String getCssSorted()
{
    return getProperty(PROPERTY_CSS_TH_SORTED);
}
/**
 * Returns the configured css class for the main table tag.
 * @return the value of <code>PROPERTY_CSS_TABLE</code>
 */
public String getCssTable()
{
    return getProperty(PROPERTY_CSS_TABLE);
}
/**
 * Returns the configured css class for a sortable column header.
 * @return the value of <code>PROPERTY_CSS_TH_SORTABLE</code>
 */
public String getCssSortable()
{
    return getProperty(PROPERTY_CSS_TH_SORTABLE);
}
/**
 * Returns the configured list of media, split on whitespace.
 * @return the value of <code>PROPERTY_EXPORTTYPES</code> as an array; empty (never null) when the property is
 * not configured
 */
public String[] getExportTypes()
{
    String configuredList = getProperty(PROPERTY_EXPORTTYPES);
    return (configuredList == null) ? new String[0] : StringUtils.split(configuredList);
}
/**
 * Getter for the <code>PROPERTY_EXPORT_DIRECTION_RTL</code> property.
 * @return boolean <code>true</code> when right-to-left export direction is configured
 */
public boolean getExportDirectionRtl()
{
    return getBooleanProperty(PROPERTY_EXPORT_DIRECTION_RTL);
}
/**
 * Returns the class responsible for the given export.
 * @param exportName export name
 * @return String classname (may be <code>null</code> if not configured)
 */
public String getExportClass(String exportName)
{
    return getProperty(PROPERTY_EXPORT_PREFIX + SEP + exportName + SEP + EXPORTPROPERTY_STRING_CLASS);
}
/**
 * Returns an instance of configured requestHelperFactory.
 * @return RequestHelperFactory instance.
 * @throws FactoryInstantiationException if unable to load or instantiate the configured class.
 */
public RequestHelperFactory getRequestHelperFactoryInstance() throws FactoryInstantiationException
{
    Object loadedObject = getClassPropertyInstance(PROPERTY_CLASS_REQUESTHELPERFACTORY);
    // should not be null, but avoid errors just in case... see DISPL-148
    if (loadedObject == null)
    {
        return new DefaultRequestHelperFactory();
    }
    try
    {
        return (RequestHelperFactory) loadedObject;
    }
    catch (ClassCastException e)
    {
        // the configured class exists but has the wrong type: report it with the actual class name
        throw new FactoryInstantiationException(getClass(), PROPERTY_CLASS_REQUESTHELPERFACTORY, loadedObject
            .getClass()
            .getName(), e);
    }
}
/**
 * Returns an instance of configured DecoratorFactory.
 * @return DecoratorFactory instance.
 * @throws FactoryInstantiationException if unable to load or instantiate the configured class.
 */
public DecoratorFactory getDecoratorFactoryInstance() throws FactoryInstantiationException
{
    Object loadedObject = getClassPropertyInstance(PROPERTY_CLASS_DECORATORFACTORY);
    // fall back to the default factory when nothing is configured
    if (loadedObject == null)
    {
        return new DefaultDecoratorFactory();
    }
    try
    {
        return (DecoratorFactory) loadedObject;
    }
    catch (ClassCastException e)
    {
        // the configured class exists but has the wrong type: report it with the actual class name
        throw new FactoryInstantiationException(getClass(), PROPERTY_CLASS_DECORATORFACTORY, loadedObject
            .getClass()
            .getName(), e);
    }
}
// --- external pagination/sorting parameter getters: each returns the configured value or a
// hard-coded default when the property is not set ---
/**
 * Name of the request parameter that carries the sort criterion in generated links.
 * @return the configured parameter name, or "sort" when not configured
 */
public String getPaginationSortParam()
{
    String result = getProperty(PROPERTY_STRING_PAGINATION_SORT_PARAM);
    if (result == null)
    {
        result = "sort";
    }
    return result;
}
/**
 * Name of the request parameter that carries the page number in generated links.
 * @return the configured parameter name, or "page" when not configured
 */
public String getPaginationPageNumberParam()
{
    String result = getProperty(PROPERTY_STRING_PAGINATION_PAGE_NUMBER_PARAM);
    if (result == null)
    {
        result = "page";
    }
    return result;
}
/**
 * Name of the request parameter that carries the sort direction in generated links.
 * @return the configured parameter name, or "dir" when not configured
 */
public String getPaginationSortDirectionParam()
{
    String result = getProperty(PROPERTY_STRING_PAGINATION_SORT_DIRECTION_PARAM);
    if (result == null)
    {
        result = "dir";
    }
    return result;
}
/**
 * Name of the request parameter that carries the search ID in generated links.
 * @return the configured parameter name, or "searchId" when not configured
 */
public String getPaginationSearchIdParam()
{
    String result = getProperty(PROPERTY_STRING_PAGINATION_SEARCH_ID_PARAM);
    if (result == null)
    {
        result = "searchId";
    }
    return result;
}
/**
 * Value of the sort direction parameter meaning "ascending".
 * @return the configured value, or "asc" when not configured
 */
public String getPaginationAscValue()
{
    String result = getProperty(PROPERTY_STRING_PAGINATION_ASC_VALUE);
    if (result == null)
    {
        result = "asc";
    }
    return result;
}
/**
 * Value of the sort direction parameter meaning "descending".
 * @return the configured value, or "desc" when not configured
 */
public String getPaginationDescValue()
{
    String result = getProperty(PROPERTY_STRING_PAGINATION_DESC_VALUE);
    if (result == null)
    {
        result = "desc";
    }
    return result;
}
/**
 * Label used for secured rows (see <code>PROPERTY_STRING_SECURED_ROW_LABEL</code>).
 * @return the configured label, or "SECURED" when not configured
 */
public String getSecuredRowLabel()
{
    String result = getProperty(PROPERTY_STRING_SECURED_ROW_LABEL);
    if (result == null)
    {
        result = "SECURED";
    }
    return result;
}
/**
 * Getter for the <code>PROPERTY_BOOLEAN_PAGINATION_SKIP_PAGE_NUMBER_IN_SORT</code> property. Only meaningful
 * when external pagination/sorting is used: when <code>true</code> generated sort links drop the current page
 * number (re-sorting goes back to the default page), when <code>false</code> they keep it.
 * @return the configured value; defaults to <code>true</code> when the property is not set
 */
public boolean getPaginationSkipPageNumberInSort()
{
    // single lookup (the previous implementation read the property twice);
    // missing property defaults to true, otherwise only the literal "true" enables it
    String value = getProperty(PROPERTY_BOOLEAN_PAGINATION_SKIP_PAGE_NUMBER_IN_SORT);
    return value == null || Boolean.TRUE.toString().equals(value);
}
// </JBN>
/**
 * Returns the configured resource provider instance, lazily instantiating it from the
 * <code>PROPERTY_CLASS_LOCALEPROVIDER</code> configuration on first use; the instance is kept in a static field
 * and shared. Note: the method name ("geResourceProvider") is misspelled but kept for backward compatibility
 * with existing callers.
 * @return I18nResourceProvider instance (a no-op provider when none is configured or loading fails).
 * @see I18nResourceProvider
 */
public I18nResourceProvider geResourceProvider()
{
    if (resourceProvider == null)
    {
        // only read the configuration when we actually need to instantiate the provider
        // (the previous implementation did this lookup on every call, even with a cached instance)
        String className = getProperty(PROPERTY_CLASS_LOCALEPROVIDER);
        if (className != null)
        {
            try
            {
                Class classProperty = ReflectHelper.classForName(className);
                resourceProvider = (I18nResourceProvider) classProperty.newInstance();
                log.info(Messages.getString("TableProperties.classinitializedto", //$NON-NLS-1$
                    new Object[]{ClassUtils.getShortClassName(I18nResourceProvider.class), className}));
            }
            catch (Throwable e)
            {
                // configuration problem: log and fall through to the no-op provider below
                log.warn(Messages.getString("TableProperties.errorloading", //$NON-NLS-1$
                    new Object[]{
                    ClassUtils.getShortClassName(I18nResourceProvider.class),
                    e.getClass().getName(),
                    e.getMessage()}));
            }
        }
        else
        {
            log.info(Messages.getString("TableProperties.noconfigured", //$NON-NLS-1$
                new Object[]{ClassUtils.getShortClassName(I18nResourceProvider.class)}));
        }
        // still null?
        if (resourceProvider == null)
        {
            // fallback provider, no i18n
            resourceProvider = new I18nResourceProvider()
            {
                // Always returns null
                public String getResource(String titleKey, String property, Tag tag, PageContext context)
                {
                    return null;
                }
            };
        }
    }
    return resourceProvider;
}
/**
 * Reads a String property.
 * @param key property name
 * @return property value or <code>null</code> if property is not found
 */
private String getProperty(String key)
{
    return this.properties.getProperty(key);
}
/**
 * Sets a property on this instance only (the cached prototype and other instances are unaffected).
 * @param key property name
 * @param value property value
 */
public void setProperty(String key, String value)
{
    this.properties.setProperty(key, value);
}
/**
 * Reads a boolean property.
 * @param key property name
 * @return <code>true</code> only if the property value is exactly the string "true"; <code>false</code> for any
 * other value, including <code>null</code> (missing property).
 */
private boolean getBooleanProperty(String key)
{
    return Boolean.TRUE.toString().equals(getProperty(key));
}
/**
 * Returns an instance of a configured Class. Returns a configured Class instantiated via
 * ReflectHelper.classForName([configuration value]).newInstance(), caching the created instance per key in the
 * (per-TableProperties-instance) objectCache map so subsequent calls reuse it.
 * @param key configuration key
 * @return instance of configured class, or <code>null</code> when the key is not configured
 * @throws FactoryInstantiationException if unable to load or instantiate the configured class.
 */
private Object getClassPropertyInstance(String key) throws FactoryInstantiationException
{
    // cache hit: reuse the previously created instance
    Object instance = objectCache.get(key);
    if (instance != null)
    {
        return instance;
    }
    String className = getProperty(key);
    // shouldn't be null, but better check it
    if (className == null)
    {
        return null;
    }
    try
    {
        Class classProperty = ReflectHelper.classForName(className);
        instance = classProperty.newInstance();
        objectCache.put(key, instance);
        return instance;
    }
    catch (Exception e)
    {
        throw new FactoryInstantiationException(getClass(), key, className, e);
    }
}
/**
 * Reads an int property.
 * @param key property name
 * @param defaultValue default value returned if property is not found or not a valid int value
 * @return property value, or <code>defaultValue</code> when the property is missing or invalid
 */
private int getIntProperty(String key, int defaultValue)
{
    // single lookup (the previous implementation re-read the property in the catch block)
    String value = getProperty(key);
    try
    {
        return Integer.parseInt(value);
    }
    catch (NumberFormatException e)
    {
        // don't fail on a bad value: warn and fall back to the default.
        // parseInt(null) also lands here when the property is missing.
        log.warn(Messages.getString("TableProperties.invalidvalue", //$NON-NLS-1$
            new Object[]{key, value, Integer.valueOf(defaultValue)}));
    }
    return defaultValue;
}
/**
 * Obtain the name of the decorator configured for a given media type, using the legacy
 * "<export prefix>.<media>.<decorator suffix>" key layout.
 * @param thatEnum A media type
 * @return The name of the decorator configured for a given media type.
 * @deprecated Use getMediaTypeDecoratorName instead.
 */
public String getExportDecoratorName(MediaTypeEnum thatEnum)
{
    return getProperty(PROPERTY_EXPORT_PREFIX + SEP + thatEnum + SEP + PROPERTY_DECORATOR_SUFFIX);
}
/**
 * Obtain the name of the decorator configured for a given media type, using the
 * "<decorator suffix>.<media prefix>.<media>" key layout.
 * @param thatEnum A media type
 * @return The name of the decorator configured for a given media type.
 */
public String getMediaTypeDecoratorName(MediaTypeEnum thatEnum)
{
    return getProperty(PROPERTY_DECORATOR_SUFFIX + SEP + PROPERTY_DECORATOR_MEDIA + SEP + thatEnum);
}
/**
 * Returns an instance of the configured default comparator (<code>comparator.default</code> property), falling
 * back to a <code>DefaultComparator</code> built on a Collator for this instance's locale when the property is
 * not set or the configured class can't be instantiated.
 * @return Comparator instance, never <code>null</code>
 */
public Comparator getDefaultComparator()
{
    String className = getProperty(PROPERTY_DEFAULT_COMPARATOR);
    if (className != null)
    {
        try
        {
            Class classProperty = ReflectHelper.classForName(className);
            return (Comparator) classProperty.newInstance();
        }
        catch (Throwable e)
        {
            // configuration problem: log it and fall back to the locale-based default below
            log.warn(Messages.getString("TableProperties.errorloading", //$NON-NLS-1$
                new Object[]{
                ClassUtils.getShortClassName(Comparator.class),
                e.getClass().getName(),
                e.getMessage()}));
        }
    }
    return new DefaultComparator(Collator.getInstance(getLocale()));
}
} |
'use strict';
var binary = require('binary');
var jspack = require('jspack').jspack;
// Read a little-endian 64-bit IEEE-754 double from buf at offset o (defaults to 0).
// NOTE(review): jspack.Unpack returns an *array* of unpacked values, so callers appear to
// receive [value] rather than the bare number — confirm against call sites.
exports.flt64 = function(buf, o) {
  return jspack.Unpack("<d", buf, o | 0);
};
// Read an 8-bit value from buf at offset o (defaults to 0).
// NOTE(review): word8lu is an *unsigned* read despite the "int8" name — confirm intent.
exports.int8 = function(buf, o) {
  return binary.parse(buf.slice(o | 0)).word8lu('n').vars.n;
};
// Read a little-endian 16-bit value from buf at offset o (defaults to 0).
// NOTE(review): word16lu is an *unsigned* read despite the "int16" name — confirm intent.
exports.int16 = function(buf, o) {
  return binary.parse(buf.slice(o | 0)).word16lu('n').vars.n;
};
exports.int24 = function(buf, o) {
o = o | 0;
return buf[o] | buf[o + 1] << 8 | buf[o + 2] << 16;
};
// Read a little-endian 32-bit value from buf at offset o (defaults to 0).
// NOTE(review): word32lu is an *unsigned* read despite the "int32" name — confirm intent.
exports.int32 = function(buf, o) {
  return binary.parse(buf.slice(o | 0)).word32lu('n').vars.n;
};
exports.mkint32 = function(i) {
return new Buffer([i & 255, (i >> 8) & 255, (i >> 16) & 255, (i >> 24) & 255]);
};
exports.mkint24 = function(i) {
return new Buffer([i & 255, (i >> 8) & 255, (i >> 16) & 255]);
}; |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#include "pal_x509.h"
#include <assert.h>
#include <openssl/pem.h>
#include <openssl/x509v3.h>
// Keep the PAL_X509_V_* constants (pal_x509.h) numerically identical to OpenSSL's X509_V_*
// verification error codes, so values can be passed through to managed code untranslated.
static_assert(PAL_X509_V_OK == X509_V_OK, "");
static_assert(PAL_X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT == X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT, "");
static_assert(PAL_X509_V_ERR_UNABLE_TO_GET_CRL == X509_V_ERR_UNABLE_TO_GET_CRL, "");
static_assert(PAL_X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE == X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE, "");
static_assert(PAL_X509_V_ERR_CERT_SIGNATURE_FAILURE == X509_V_ERR_CERT_SIGNATURE_FAILURE, "");
static_assert(PAL_X509_V_ERR_CRL_SIGNATURE_FAILURE == X509_V_ERR_CRL_SIGNATURE_FAILURE, "");
static_assert(PAL_X509_V_ERR_CERT_NOT_YET_VALID == X509_V_ERR_CERT_NOT_YET_VALID, "");
static_assert(PAL_X509_V_ERR_CERT_HAS_EXPIRED == X509_V_ERR_CERT_HAS_EXPIRED, "");
static_assert(PAL_X509_V_ERR_CRL_NOT_YET_VALID == X509_V_ERR_CRL_NOT_YET_VALID, "");
static_assert(PAL_X509_V_ERR_CRL_HAS_EXPIRED == X509_V_ERR_CRL_HAS_EXPIRED, "");
static_assert(PAL_X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD == X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD, "");
static_assert(PAL_X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD == X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD, "");
static_assert(PAL_X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD == X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD, "");
static_assert(PAL_X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD == X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD, "");
static_assert(PAL_X509_V_ERR_OUT_OF_MEM == X509_V_ERR_OUT_OF_MEM, "");
static_assert(PAL_X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT == X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT, "");
static_assert(PAL_X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN == X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN, "");
static_assert(PAL_X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY == X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY, "");
static_assert(PAL_X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE == X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE, "");
static_assert(PAL_X509_V_ERR_CERT_CHAIN_TOO_LONG == X509_V_ERR_CERT_CHAIN_TOO_LONG, "");
static_assert(PAL_X509_V_ERR_CERT_REVOKED == X509_V_ERR_CERT_REVOKED, "");
static_assert(PAL_X509_V_ERR_INVALID_CA == X509_V_ERR_INVALID_CA, "");
static_assert(PAL_X509_V_ERR_PATH_LENGTH_EXCEEDED == X509_V_ERR_PATH_LENGTH_EXCEEDED, "");
static_assert(PAL_X509_V_ERR_INVALID_PURPOSE == X509_V_ERR_INVALID_PURPOSE, "");
static_assert(PAL_X509_V_ERR_CERT_UNTRUSTED == X509_V_ERR_CERT_UNTRUSTED, "");
static_assert(PAL_X509_V_ERR_CERT_REJECTED == X509_V_ERR_CERT_REJECTED, "");
static_assert(PAL_X509_V_ERR_KEYUSAGE_NO_CERTSIGN == X509_V_ERR_KEYUSAGE_NO_CERTSIGN, "");
static_assert(PAL_X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER == X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER, "");
static_assert(PAL_X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION == X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION, "");
static_assert(PAL_X509_V_ERR_KEYUSAGE_NO_CRL_SIGN == X509_V_ERR_KEYUSAGE_NO_CRL_SIGN, "");
static_assert(PAL_X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION == X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION, "");
static_assert(PAL_X509_V_ERR_INVALID_NON_CA == X509_V_ERR_INVALID_NON_CA, "");
static_assert(PAL_X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE == X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE, "");
static_assert(PAL_X509_V_ERR_INVALID_EXTENSION == X509_V_ERR_INVALID_EXTENSION, "");
static_assert(PAL_X509_V_ERR_INVALID_POLICY_EXTENSION == X509_V_ERR_INVALID_POLICY_EXTENSION, "");
static_assert(PAL_X509_V_ERR_NO_EXPLICIT_POLICY == X509_V_ERR_NO_EXPLICIT_POLICY, "");
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
// Legacy alias: simply forwards to CryptoNative_GetX509EvpPublicKey.
extern "C" EVP_PKEY* GetX509EvpPublicKey(X509* x509)
{
    return CryptoNative_GetX509EvpPublicKey(x509);
}
// Extracts the public key of the certificate as an EVP_PKEY.
// Returns nullptr when x509 is null.
extern "C" EVP_PKEY* CryptoNative_GetX509EvpPublicKey(X509* x509)
{
    if (x509 == nullptr)
    {
        return nullptr;
    }

    // X509_get_X509_PUBKEY returns an interior pointer, so it must not be freed;
    // X509_PUBKEY_get returns a reference owned by the caller.
    return X509_PUBKEY_get(X509_get_X509_PUBKEY(x509));
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
// Legacy alias: simply forwards to CryptoNative_DecodeX509Crl.
extern "C" X509_CRL* DecodeX509Crl(const uint8_t* buf, int32_t len)
{
    return CryptoNative_DecodeX509Crl(buf, len);
}
extern "C" X509_CRL* CryptoNative_DecodeX509Crl(const uint8_t* buf, int32_t len)
{
if (!buf || !len)
{
return nullptr;
}
return d2i_X509_CRL(nullptr, &buf, len);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509* DecodeX509(const uint8_t* buf, int32_t len)
{
return CryptoNative_DecodeX509(buf, len);
}
extern "C" X509* CryptoNative_DecodeX509(const uint8_t* buf, int32_t len)
{
if (!buf || !len)
{
return nullptr;
}
return d2i_X509(nullptr, &buf, len);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t GetX509DerSize(X509* x)
{
    return CryptoNative_GetX509DerSize(x);
}

// Returns the number of bytes required to DER-encode x
// (i2d with a null output buffer only computes the length).
extern "C" int32_t CryptoNative_GetX509DerSize(X509* x)
{
    return i2d_X509(x, nullptr);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t EncodeX509(X509* x, uint8_t* buf)
{
    return CryptoNative_EncodeX509(x, buf);
}

// DER-encodes x into buf, which the caller must have sized via
// CryptoNative_GetX509DerSize. Returns the number of bytes written.
extern "C" int32_t CryptoNative_EncodeX509(X509* x, uint8_t* buf)
{
    return i2d_X509(x, &buf);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" void X509Destroy(X509* a)
{
    return CryptoNative_X509Destroy(a);
}

// Releases a certificate reference; safe to call with nullptr.
extern "C" void CryptoNative_X509Destroy(X509* a)
{
    if (a != nullptr)
    {
        X509_free(a);
    }
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509* X509Duplicate(X509* x509)
{
    return CryptoNative_X509Duplicate(x509);
}

// Deep-copies a certificate; the caller owns the returned X509.
extern "C" X509* CryptoNative_X509Duplicate(X509* x509)
{
    return X509_dup(x509);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509* PemReadX509FromBio(BIO* bio)
{
    return CryptoNative_PemReadX509FromBio(bio);
}

// Reads the next PEM certificate from the BIO (AUX variant also consumes
// any trusted-certificate auxiliary data). Returns nullptr on failure/EOF.
extern "C" X509* CryptoNative_PemReadX509FromBio(BIO* bio)
{
    return PEM_read_bio_X509_AUX(bio, nullptr, nullptr, nullptr);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" ASN1_INTEGER* X509GetSerialNumber(X509* x509)
{
    return CryptoNative_X509GetSerialNumber(x509);
}

// Returns the certificate's serial number as an interior pointer
// (owned by the X509; callers must not free it).
extern "C" ASN1_INTEGER* CryptoNative_X509GetSerialNumber(X509* x509)
{
    return X509_get_serialNumber(x509);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509_NAME* X509GetIssuerName(X509* x509)
{
    return CryptoNative_X509GetIssuerName(x509);
}

// Returns the issuer name (interior pointer owned by the X509).
extern "C" X509_NAME* CryptoNative_X509GetIssuerName(X509* x509)
{
    return X509_get_issuer_name(x509);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509_NAME* X509GetSubjectName(X509* x509)
{
    return CryptoNative_X509GetSubjectName(x509);
}

// Returns the subject name (interior pointer owned by the X509).
extern "C" X509_NAME* CryptoNative_X509GetSubjectName(X509* x509)
{
    return X509_get_subject_name(x509);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509CheckPurpose(X509* x, int32_t id, int32_t ca)
{
    return CryptoNative_X509CheckPurpose(x, id, ca);
}

// Thin shim over X509_check_purpose: checks whether x is valid for
// purpose `id` (as a CA when `ca` is non-zero). Returns OpenSSL's result.
extern "C" int32_t CryptoNative_X509CheckPurpose(X509* x, int32_t id, int32_t ca)
{
    return X509_check_purpose(x, id, ca);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509CheckIssued(X509* issuer, X509* subject)
{
    return CryptoNative_X509CheckIssued(issuer, subject);
}

// Checks whether `issuer` could have issued `subject`.
// Returns X509_V_OK (0) on success, an X509_V_ERR_* code otherwise.
extern "C" int32_t CryptoNative_X509CheckIssued(X509* issuer, X509* subject)
{
    return X509_check_issued(issuer, subject);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" uint64_t X509IssuerNameHash(X509* x)
{
    return CryptoNative_X509IssuerNameHash(x);
}

// Returns OpenSSL's hash of the issuer name (unsigned long widened to
// uint64_t so the managed side gets a fixed-width value).
extern "C" uint64_t CryptoNative_X509IssuerNameHash(X509* x)
{
    return X509_issuer_name_hash(x);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509GetExtCount(X509* x)
{
    return CryptoNative_X509GetExtCount(x);
}

// Returns the number of X.509 extensions on the certificate.
extern "C" int32_t CryptoNative_X509GetExtCount(X509* x)
{
    return X509_get_ext_count(x);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509_EXTENSION* X509GetExt(X509* x, int32_t loc)
{
    return CryptoNative_X509GetExt(x, loc);
}

// Returns the extension at index `loc` (interior pointer owned by the X509),
// or nullptr when the index is out of range.
extern "C" X509_EXTENSION* CryptoNative_X509GetExt(X509* x, int32_t loc)
{
    return X509_get_ext(x, loc);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" ASN1_OBJECT* X509ExtensionGetOid(X509_EXTENSION* x)
{
    return CryptoNative_X509ExtensionGetOid(x);
}

// Returns the extension's OID object (interior pointer).
extern "C" ASN1_OBJECT* CryptoNative_X509ExtensionGetOid(X509_EXTENSION* x)
{
    return X509_EXTENSION_get_object(x);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" ASN1_OCTET_STRING* X509ExtensionGetData(X509_EXTENSION* x)
{
    return CryptoNative_X509ExtensionGetData(x);
}

// Returns the extension's raw DER payload (interior pointer).
extern "C" ASN1_OCTET_STRING* CryptoNative_X509ExtensionGetData(X509_EXTENSION* x)
{
    return X509_EXTENSION_get_data(x);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509ExtensionGetCritical(X509_EXTENSION* x)
{
    return CryptoNative_X509ExtensionGetCritical(x);
}

// Returns non-zero when the extension is marked critical.
extern "C" int32_t CryptoNative_X509ExtensionGetCritical(X509_EXTENSION* x)
{
    return X509_EXTENSION_get_critical(x);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509_STORE* X509StoreCreate()
{
    return CryptoNative_X509StoreCreate();
}

// Allocates a new, empty certificate store. Caller owns the result.
extern "C" X509_STORE* CryptoNative_X509StoreCreate()
{
    return X509_STORE_new();
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
// NOTE(review): "Destory" is a typo, but it is the exported symbol the managed
// side binds to — renaming it would break the interface.
extern "C" void X509StoreDestory(X509_STORE* v)
{
    return CryptoNative_X509StoreDestory(v);
}

// Releases a certificate store; safe to call with nullptr.
extern "C" void CryptoNative_X509StoreDestory(X509_STORE* v)
{
    if (v != nullptr)
    {
        X509_STORE_free(v);
    }
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509StoreAddCert(X509_STORE* ctx, X509* x)
{
    return CryptoNative_X509StoreAddCert(ctx, x);
}

// Adds a certificate to the store (the store takes its own reference).
// Returns 1 on success, 0 on failure.
extern "C" int32_t CryptoNative_X509StoreAddCert(X509_STORE* ctx, X509* x)
{
    return X509_STORE_add_cert(ctx, x);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509StoreAddCrl(X509_STORE* ctx, X509_CRL* x)
{
    return CryptoNative_X509StoreAddCrl(ctx, x);
}

// Adds a CRL to the store (the store takes its own reference).
// Returns 1 on success, 0 on failure.
extern "C" int32_t CryptoNative_X509StoreAddCrl(X509_STORE* ctx, X509_CRL* x)
{
    return X509_STORE_add_crl(ctx, x);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509StoreSetRevocationFlag(X509_STORE* ctx, X509RevocationFlag revocationFlag)
{
    return CryptoNative_X509StoreSetRevocationFlag(ctx, revocationFlag);
}

// Enables CRL checking on the store: the leaf is always checked, and any
// value other than EndCertificateOnly also checks the rest of the chain.
// Returns OpenSSL's result from X509_STORE_set_flags.
// NOTE(review): X509_STORE_set_flags ORs into the existing flag set and never
// clears bits, so narrowing a store from full-chain to EndCertificateOnly
// later would have no effect — confirm stores are never reconfigured.
extern "C" int32_t CryptoNative_X509StoreSetRevocationFlag(X509_STORE* ctx, X509RevocationFlag revocationFlag)
{
    unsigned long verifyFlags = X509_V_FLAG_CRL_CHECK;

    if (revocationFlag != X509RevocationFlag::EndCertificateOnly)
    {
        verifyFlags |= X509_V_FLAG_CRL_CHECK_ALL;
    }

    return X509_STORE_set_flags(ctx, verifyFlags);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509_STORE_CTX* X509StoreCtxCreate()
{
    return CryptoNative_X509StoreCtxCreate();
}

// Allocates a new verification context. Caller owns the result.
extern "C" X509_STORE_CTX* CryptoNative_X509StoreCtxCreate()
{
    return X509_STORE_CTX_new();
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" void X509StoreCtxDestroy(X509_STORE_CTX* v)
{
    return CryptoNative_X509StoreCtxDestroy(v);
}

// Releases a verification context; safe to call with nullptr.
extern "C" void CryptoNative_X509StoreCtxDestroy(X509_STORE_CTX* v)
{
    if (v != nullptr)
    {
        X509_STORE_CTX_free(v);
    }
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509StoreCtxInit(X509_STORE_CTX* ctx, X509_STORE* store, X509* x509)
{
    return CryptoNative_X509StoreCtxInit(ctx, store, x509);
}

// Binds the context to a trust store and the certificate under test
// (no extra untrusted chain). Returns 1 on success, 0 on failure.
extern "C" int32_t CryptoNative_X509StoreCtxInit(X509_STORE_CTX* ctx, X509_STORE* store, X509* x509)
{
    return X509_STORE_CTX_init(ctx, store, x509, nullptr);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509VerifyCert(X509_STORE_CTX* ctx)
{
    return CryptoNative_X509VerifyCert(ctx);
}

// Runs chain building/verification. Returns 1 on success; on failure the
// error is retrievable via CryptoNative_X509StoreCtxGetError.
extern "C" int32_t CryptoNative_X509VerifyCert(X509_STORE_CTX* ctx)
{
    return X509_verify_cert(ctx);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509Stack* X509StoreCtxGetChain(X509_STORE_CTX* ctx)
{
    return CryptoNative_X509StoreCtxGetChain(ctx);
}

// Returns the verified chain. get1_chain hands back a NEW stack with
// up-ref'd certificates, so the caller owns (and must free) the result.
extern "C" X509Stack* CryptoNative_X509StoreCtxGetChain(X509_STORE_CTX* ctx)
{
    return X509_STORE_CTX_get1_chain(ctx);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509Stack* X509StoreCtxGetSharedUntrusted(X509_STORE_CTX* ctx)
{
    return CryptoNative_X509StoreCtxGetSharedUntrusted(ctx);
}

// Returns the context's untrusted stack as an interior pointer ("shared":
// the ctx retains ownership, callers must not free it).
extern "C" X509Stack* CryptoNative_X509StoreCtxGetSharedUntrusted(X509_STORE_CTX* ctx)
{
    return ctx ? ctx->untrusted : nullptr;
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509* X509StoreCtxGetTargetCert(X509_STORE_CTX* ctx)
{
    return CryptoNative_X509StoreCtxGetTargetCert(ctx);
}

// Returns the certificate being verified (interior pointer, ctx-owned).
extern "C" X509* CryptoNative_X509StoreCtxGetTargetCert(X509_STORE_CTX* ctx)
{
    return ctx ? ctx->cert : nullptr;
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509VerifyStatusCode X509StoreCtxGetError(X509_STORE_CTX* ctx)
{
    return CryptoNative_X509StoreCtxGetError(ctx);
}

// Returns the last verification error, cast to the PAL enum (the numeric
// values are pinned equal by the static_asserts at the top of this section).
extern "C" X509VerifyStatusCode CryptoNative_X509StoreCtxGetError(X509_STORE_CTX* ctx)
{
    return static_cast<X509VerifyStatusCode>(X509_STORE_CTX_get_error(ctx));
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" void X509StoreCtxSetVerifyCallback(X509_STORE_CTX* ctx, X509StoreVerifyCallback callback)
{
    return CryptoNative_X509StoreCtxSetVerifyCallback(ctx, callback);
}

// Installs the per-certificate verify callback on the context.
// The callback must remain valid for the lifetime of the verification.
extern "C" void CryptoNative_X509StoreCtxSetVerifyCallback(X509_STORE_CTX* ctx, X509StoreVerifyCallback callback)
{
    X509_STORE_CTX_set_verify_cb(ctx, callback);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t X509StoreCtxGetErrorDepth(X509_STORE_CTX* ctx)
{
    return CryptoNative_X509StoreCtxGetErrorDepth(ctx);
}

// Returns the chain depth at which the last verification error occurred
// (0 is the end-entity certificate).
extern "C" int32_t CryptoNative_X509StoreCtxGetErrorDepth(X509_STORE_CTX* ctx)
{
    return X509_STORE_CTX_get_error_depth(ctx);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" const char* X509VerifyCertErrorString(X509VerifyStatusCode n)
{
    return CryptoNative_X509VerifyCertErrorString(n);
}

// Maps a verify status code to OpenSSL's static human-readable string
// (the returned pointer must not be freed).
extern "C" const char* CryptoNative_X509VerifyCertErrorString(X509VerifyStatusCode n)
{
    return X509_verify_cert_error_string(n);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" void X509CrlDestroy(X509_CRL* a)
{
    return CryptoNative_X509CrlDestroy(a);
}

// Releases a CRL reference; safe to call with nullptr.
extern "C" void CryptoNative_X509CrlDestroy(X509_CRL* a)
{
    if (a != nullptr)
    {
        X509_CRL_free(a);
    }
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t PemWriteBioX509Crl(BIO* bio, X509_CRL* crl)
{
    return CryptoNative_PemWriteBioX509Crl(bio, crl);
}

// PEM-encodes the CRL into the BIO. Returns 1 on success, 0 on failure.
extern "C" int32_t CryptoNative_PemWriteBioX509Crl(BIO* bio, X509_CRL* crl)
{
    return PEM_write_bio_X509_CRL(bio, crl);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" X509_CRL* PemReadBioX509Crl(BIO* bio)
{
    return CryptoNative_PemReadBioX509Crl(bio);
}

// Reads the next PEM CRL from the BIO; caller owns the returned CRL.
// Returns nullptr on failure/EOF.
extern "C" X509_CRL* CryptoNative_PemReadBioX509Crl(BIO* bio)
{
    return PEM_read_bio_X509_CRL(bio, nullptr, nullptr, nullptr);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t GetX509SubjectPublicKeyInfoDerSize(X509* x509)
{
    return CryptoNative_GetX509SubjectPublicKeyInfoDerSize(x509);
}

// Returns the DER length of the certificate's SubjectPublicKeyInfo,
// or 0 when x509 is null (i2d with a null buffer only measures).
extern "C" int32_t CryptoNative_GetX509SubjectPublicKeyInfoDerSize(X509* x509)
{
    if (!x509)
    {
        return 0;
    }

    // X509_get_X509_PUBKEY returns an interior pointer, so should not be freed
    return i2d_X509_PUBKEY(X509_get_X509_PUBKEY(x509), nullptr);
}

// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t EncodeX509SubjectPublicKeyInfo(X509* x509, uint8_t* buf)
{
    return CryptoNative_EncodeX509SubjectPublicKeyInfo(x509, buf);
}

// DER-encodes the SubjectPublicKeyInfo into buf, which must be sized via
// CryptoNative_GetX509SubjectPublicKeyInfoDerSize. Returns bytes written,
// or 0 when x509 is null.
extern "C" int32_t CryptoNative_EncodeX509SubjectPublicKeyInfo(X509* x509, uint8_t* buf)
{
    if (!x509)
    {
        return 0;
    }

    // X509_get_X509_PUBKEY returns an interior pointer, so should not be freed
    return i2d_X509_PUBKEY(X509_get_X509_PUBKEY(x509), &buf);
}
|
# GitHub Labels
#
# A simple shell script that loops through an array of repositories in the
# ProctorU organization, removes the default labels (if applicable), then
# adds our Organization-wide labels.
#
# Usage:
#
#   Add your repository to the variable, REPOS, below.
#   Example:
#     => REPOS=("repository-one" "repository-two")
#
#   Run the script.
#   Example:
#     => ./main.sh
#
#   Voila!

echo ''
echo 'Adding ProctorU labels to your repository...'
echo ''
echo -n 'GitHub Personal Access Token: '
# -r keeps backslashes literal; -s keeps the secret off the terminal.
read -rs TOKEN
echo ''

# Repositories to update, e.g. REPOS=("repository-one" "repository-two")
REPOS=()

# delete_label <url-encoded-name> -- remove one label from $ENDPOINT.
# All expansions are quoted so tokens/names with special characters survive.
delete_label() {
  curl -u "$TOKEN:x-oauth-basic" --request DELETE "$ENDPOINT/$1"
}

# add_label <name> <color> -- create one label on $ENDPOINT.
add_label() {
  curl -u "$TOKEN:x-oauth-basic" --include --request POST \
    --data "{\"name\":\"$1\",\"color\":\"$2\"}" "$ENDPOINT"
}

for i in "${REPOS[@]}"
do
  ENDPOINT="https://api.github.com/repos/ProctorU/$i/labels"

  # Delete default labels
  delete_label bug
  delete_label duplicate
  delete_label enhancement
  delete_label 'help%20wanted'
  delete_label 'good%20first%20issue'
  delete_label invalid
  delete_label question
  delete_label wontfix

  # Needs
  add_label 'Needs: Tests' 'F3BF51'
  add_label 'Needs: Follow Up' 'F3BF51'
  add_label 'Needs: Revision' 'F3BF51'

  # Deployed
  add_label 'Deployed: Staging' '868E96'
  add_label 'Deployed: Demo' '868E96'

  # Status
  add_label 'Status: In Review' 'FAD2B1'
  add_label 'Status: Reviewed' 'B7DAD5'
  add_label 'Status: Review Complete' '008761'
  add_label 'Status: Revised' 'BFDBF7'
  add_label 'Status: Ready To Ship' '6610f2'
  add_label 'Status: Blocked' 'E21C2C'

  # Type
  add_label 'Type: Design' '0072b1'
  add_label 'Type: Technical Debt' '0072b1'
  add_label 'Type: Feature' '0072b1'
  add_label 'Type: Bug' '0072b1'
  add_label 'Type: Database' '0072b1'

  # Dependency
  add_label 'Dependency: Parent' 'DBC4FF'
  add_label 'Dependency: Child' 'DBC4FF'
done
|
/*
* Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#ifndef __CSV_READER_HPP_
#define __CSV_READER_HPP_
#include "csvReader.h"

#include <cerrno>
#include <climits>
#include <cstdlib>
// toInt()
// Parses the cell as an integer (base auto-detected by strtol).
// Returns true on success; stores the value through `value` when non-null.
inline bool csvData::toInt( int* value ) const
{
    char* e;
    errno = 0;

    // Parse as long, then range-check: the previous code truncated strtol's
    // result straight to int, silently mangling out-of-range values on LP64.
    const long x = strtol(string.c_str(), &e, 0);

    if( *e != '\0' || errno != 0 || x < INT_MIN || x > INT_MAX )
        return false;

    if( value != NULL )
        *value = (int)x;

    return true;
}
// toFloat()
// Parses the cell as a float. Returns true on success and, when `value`
// is non-null, stores the parsed result through it.
inline bool csvData::toFloat( float* value ) const
{
    errno = 0;

    char* end;
    const float parsed = strtof(string.c_str(), &end);

    // Fail on trailing characters or a conversion/range error.
    if( *end != '\0' || errno != 0 )
        return false;

    if( value != NULL )
        *value = parsed;

    return true;
}
// toDouble()
// Parses the cell as a double. Returns true on success and, when `value`
// is non-null, stores the parsed result through it.
inline bool csvData::toDouble( double* value ) const
{
    char* e;
    errno = 0;

    // Use strtod (the previous code used strtof, throwing away the extra
    // precision and range a double is supposed to provide).
    const double x = strtod(string.c_str(), &e);

    if( *e != '\0' || errno != 0 )
        return false;

    if( value != NULL )
        *value = x;

    return true;
}
// toInt()
// Convenience overload: returns the parsed integer (0 on failure) and
// optionally reports validity through `valid`.
inline int csvData::toInt( bool* valid ) const
{
    int result = 0;
    const bool ok = toInt(&result);

    if( valid != NULL )
        *valid = ok;

    return result;
}
// toFloat()
// Convenience overload: returns the parsed float (0.0f on failure) and
// optionally reports validity through `valid`.
inline float csvData::toFloat( bool* valid ) const
{
    float result = 0.0f;
    const bool ok = toFloat(&result);

    if( valid != NULL )
        *valid = ok;

    return result;
}
// toDouble()
// Convenience overload: returns the parsed double (0.0 on failure) and
// optionally reports validity through `valid`.
inline double csvData::toDouble( bool* valid ) const
{
    double result = 0.0;
    const bool ok = toDouble(&result);

    if( valid != NULL )
        *valid = ok;

    return result;
}
// operator >>
// Stream extraction: reads one whitespace-delimited token into the cell.
inline std::istream& operator >> (std::istream& in, csvData& obj)
{
    in >> obj.string;
    return in;
}
// operator <<
// Stream insertion: writes the cell's raw text.
inline std::ostream& operator << (std::ostream& out, const csvData& obj)
{
    out << obj.string;
    return out;
}
// Parse()
// Convenience overload: tokenizes `str` with `delimiters` and returns the
// resulting cells by value (empty vector on failure).
inline std::vector<csvData> csvData::Parse( const char* str, const char* delimiters )
{
    std::vector<csvData> result;
    Parse(result, str, delimiters);
    return result;
}
// Parse
// Splits `str` on any of `delimiters` (strtok semantics: runs of delimiters
// collapse, empty fields are dropped) into `tokens`.
// Returns true when at least one token was produced.
inline bool csvData::Parse( std::vector<csvData>& tokens, const char* str, const char* delimiters )
{
    if( !str || !delimiters )
        return false;

    tokens.clear();

    const size_t str_length = strlen(str);

    // strtok modifies its input, so work on a private copy.
    char* str_tokens = (char*)malloc(str_length + 1);

    if( !str_tokens )
        return false;

    strcpy(str_tokens, str);

    // Strip the trailing newline (and a preceding '\r' from CRLF input).
    // The previous code tested str_tokens[str_length], which is always the
    // terminating '\0', and indexed [str_length-1] even for an empty string.
    size_t end = str_length;

    while( end > 0 && (str_tokens[end-1] == '\n' || str_tokens[end-1] == '\r') )
        str_tokens[--end] = '\0';

    char* token = strtok(str_tokens, delimiters);

    while( token != NULL )
    {
        tokens.push_back(token);
        token = strtok(NULL, delimiters);
    }

    free(str_tokens);
    return tokens.size() > 0;
}
//-------------------------------------------------------------------------------------
// constructor
csvReader::csvReader( const char* filename, const char* delimiters ) : mFile(NULL)
{
if( !filename || !delimiters )
return;
mFile = fopen(filename, "r");
if( !mFile )
{
printf("csvReader -- failed to open file %s\n", filename);
perror("csvReader -- error");
return;
}
mFilename = filename;
mDelimiters = delimiters;
}
// destructor
// Closes the underlying file if it is still open.
csvReader::~csvReader()
{
    Close();
}
// open
// Factory: returns a heap-allocated reader, or NULL when the arguments are
// invalid or the file could not be opened. The caller owns the result.
inline csvReader* csvReader::Open( const char* filename, const char* delimiters )
{
    if( filename == NULL || delimiters == NULL )
        return NULL;

    csvReader* reader = new csvReader(filename, delimiters);

    if( reader->IsOpen() )
        return reader;

    delete reader;
    return NULL;
}
// close
// Closes the underlying file; a no-op when already closed.
inline void csvReader::Close()
{
    if( IsClosed() )
        return;

    fclose(mFile);
    mFile = NULL;
}
// isOpen
// True while the underlying FILE* is valid.
inline bool csvReader::IsOpen() const
{
    return mFile != NULL;
}

// isClosed
// Convenience negation of IsOpen().
inline bool csvReader::IsClosed() const
{
    return !IsOpen();
}
// readLine
// Reads the next line using the delimiters configured at construction.
inline std::vector<csvData> csvReader::Read()
{
    return Read(mDelimiters.c_str());
}
// readLine
// Reads the next line, split on `delimiters`, returning the tokens by value
// (empty vector at EOF or on error).
inline std::vector<csvData> csvReader::Read( const char* delimiters )
{
    std::vector<csvData> tokens;
    Read(tokens, delimiters);
    return tokens;   // was missing — falling off a value-returning function is UB
}
// readLine
// Reads the next line into `data` using the configured delimiters.
// Returns true when at least one token was read.
inline bool csvReader::Read( std::vector<csvData>& data )
{
    return Read(data, mDelimiters.c_str());
}
// readLine
// Reads and tokenizes the next non-comment line into `data`.
// Closes the reader on EOF or on a read error; lines starting with '#'
// are skipped by recursing to the next line.
// Returns true when at least one token was produced.
inline bool csvReader::Read( std::vector<csvData>& data, const char* delimiters )
{
    if( IsClosed() )
        return false;

    // read the next line
    char str[MaxLineLength];

    if( fgets(str, sizeof(str), mFile) == NULL )
    {
        if( ferror(mFile) )
        {
            printf("csvReader -- error reading file %s\n", mFilename.c_str());
            perror("csvReader -- error");
        }

        Close();
        return false;
    }

    // close the file once EOF has been reached
    // (feof() returns non-zero at EOF; the previous code compared it to EOF,
    // so the file was never closed on end-of-file)
    if( feof(mFile) )
        Close();

    // disregard comments
    if( str[0] == '#' )
        return Read(data, delimiters);

    return csvData::Parse(data, str, delimiters);
}
// SetDelimiters
// Replaces the delimiter set used by subsequent Read() calls.
inline void csvReader::SetDelimiters( const char* delimiters )
{
    mDelimiters = delimiters;
}

// GetDelimiters
// Returns the current delimiter set (valid until the next SetDelimiters).
inline const char* csvReader::GetDelimiters() const
{
    return mDelimiters.c_str();
}

// GetFilename
// Returns the filename this reader was opened with (empty if open failed).
inline const char* csvReader::GetFilename() const
{
    return mFilename.c_str();
}
#endif
|
/**
 * Options supplied when sending a message to a queue.
 *
 * NOTE(review): field semantics inferred from standard messaging conventions
 * (reply-address + request/reply correlation) — confirm against the sender.
 */
export interface ISendQueueMessageOptions {
    /** Address/queue name a reply should be sent to. */
    readonly replyTo: string;
    /** Identifier used to match a reply with its originating request. */
    readonly correlationId: string;
}
|
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
from skimage.feature import hog
from skimage import color, exposure
import random
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC
import time
from moviepy.editor import VideoFileClip
from scipy.ndimage.measurements import label
from IPython.display import HTML
def load_data(my_list):
    """Read every image path in ``my_list`` with OpenCV and return RGB images.

    cv2.imread yields BGR channel order, so each image is converted to RGB
    before being collected.
    """
    return [cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2RGB)
            for path in my_list]
# Define a function to return HOG features and visualization
def get_hog_features(img, orient, pix_per_cell, cell_per_block, vis=False, feature_vec=True):
    """Compute HOG features for a single-channel image.

    When ``vis`` is True, returns ``(features, hog_image)`` with the features
    left unflattened; otherwise returns only the features, flattened according
    to ``feature_vec``.

    NOTE(review): the ``visualise`` keyword was renamed ``visualization`` in
    newer scikit-image releases — confirm the pinned skimage version.
    """
    if vis == True:
        features, hog_image = hog(img, orientations=orient, pixels_per_cell=(pix_per_cell, pix_per_cell),
                                  cells_per_block=(cell_per_block, cell_per_block), transform_sqrt=False,
                                  visualise=True, feature_vector=False)
        return features, hog_image
    else:
        features = hog(img, orientations=orient, pixels_per_cell=(pix_per_cell, pix_per_cell),
                       cells_per_block=(cell_per_block, cell_per_block), transform_sqrt=False,
                       visualise=False, feature_vector=feature_vec)
        return features
def bin_spatial(img, size=(32, 32)):
    """Downsample each color channel to ``size`` and stack the flattened
    channels into one 1-D spatial feature vector."""
    channels = [cv2.resize(img[:, :, c], size).ravel() for c in range(3)]
    return np.hstack(channels)
def color_hist(img, nbins=32):  # bins_range=(0, 256)
    """Histogram each of the three color channels and concatenate the bin
    counts into a single 1-D feature vector of length ``3 * nbins``."""
    # np.histogram returns (counts, bin_edges); only the counts are features.
    counts = [np.histogram(img[:, :, c], bins=nbins)[0] for c in range(3)]
    return np.concatenate(counts)
# Define a function to extract features from a list of images
# Have this function call bin_spatial() and color_hist()
def extract_features(imgs,
                     color_space='RGB',
                     spatial_size=(32, 32),
                     hist_bins=32, orient=9,
                     pix_per_cell=8,
                     cell_per_block=2,
                     hog_channel=0,
                     spatial_feat=True,
                     hist_feat=True,
                     hog_feat=True):
    """Build one concatenated feature vector per (already decoded, RGB) image.

    For each image: optionally convert the color space, then append up to
    three feature groups in a fixed order — spatial binning (bin_spatial),
    color histograms (color_hist), and HOG (get_hog_features, where
    hog_channel is a channel index or 'ALL').

    Returns a list of 1-D numpy arrays, one per input image.

    NOTE(review): an unrecognized color_space other than 'RGB' leaves
    feature_image unassigned and raises NameError — confirm callers only pass
    the listed spaces.
    """
    # Create a list to append feature vectors to
    features = []
    # Iterate through the list of images
    for image in imgs:
        file_features = []
        # Read in each one by one
        #image = mpimg.imread(file)
        # apply color conversion if other than 'RGB'
        if color_space != 'RGB':
            if color_space == 'HSV':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
            elif color_space == 'LUV':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2LUV)
            elif color_space == 'HLS':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2HLS)
            elif color_space == 'YUV':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2YUV)
            elif color_space == 'YCrCb':
                feature_image = cv2.cvtColor(image, cv2.COLOR_RGB2YCrCb)
        else: feature_image = np.copy(image)

        if spatial_feat == True:
            spatial_features = bin_spatial(feature_image, size=spatial_size)
            file_features.append(spatial_features)
        if hist_feat == True:
            # Apply color_hist()
            hist_features = color_hist(feature_image, nbins=hist_bins)
            file_features.append(hist_features)
        if hog_feat == True:
            # Call get_hog_features() with vis=False, feature_vec=True
            if hog_channel == 'ALL':
                hog_features = []
                for channel in range(feature_image.shape[2]):
                    hog_features.append(get_hog_features(feature_image[:,:,channel],
                                                         orient, pix_per_cell, cell_per_block,
                                                         vis=False, feature_vec=True))
                hog_features = np.ravel(hog_features)
            else:
                hog_features = get_hog_features(feature_image[:,:,hog_channel], orient,
                                                pix_per_cell, cell_per_block, vis=False, feature_vec=True)
            # Append the new feature vector to the features list
            file_features.append(hog_features)
        features.append(np.concatenate(file_features))
    # Return list of feature vectors
    return features
def add_heat(heatmap, bbox_list):
    """Increment ``heatmap`` in place by 1 over the area of every box.

    Each box takes the form ((x1, y1), (x2, y2)). Returns the same heatmap.
    """
    for (x1, y1), (x2, y2) in bbox_list:
        heatmap[y1:y2, x1:x2] += 1
    return heatmap
def apply_threshold(heatmap, threshold):
    """Zero out (in place) every heatmap pixel at or below ``threshold``,
    then return the same heatmap."""
    cold = heatmap <= threshold
    heatmap[cold] = 0
    return heatmap
def draw_labeled_bboxes(img, labels):
    """Draw one blue box per labeled region onto ``img`` (in place).

    ``labels`` is the (label_array, count) pair produced by
    scipy.ndimage label(). Returns the same image.
    """
    label_array, count = labels[0], labels[1]
    for car_number in range(1, count + 1):
        # Pixel coordinates belonging to this car's label.
        ys, xs = (label_array == car_number).nonzero()
        ys = np.array(ys)
        xs = np.array(xs)
        top_left = (np.min(xs), np.min(ys))
        bottom_right = (np.max(xs), np.max(ys))
        cv2.rectangle(img, top_left, bottom_right, (0, 0, 255), 6)
    return img
# Module-level accumulator for detected boxes.
# NOTE(review): not referenced anywhere in this chunk — confirm a later
# section (e.g. the frame-processing pipeline) appends to it.
img_boxes = []

def convert_color(img, conv='RGB2YCrCb'):
    # Convert between color spaces. An unrecognized ``conv`` string falls off
    # the end of the function and returns None.
    if conv == 'RGB2YCrCb':
        return cv2.cvtColor(img, cv2.COLOR_RGB2YCrCb)
    if conv == 'BGR2YCrCb':
        return cv2.cvtColor(img, cv2.COLOR_BGR2YCrCb)
    if conv == 'RGB2LUV':
        return cv2.cvtColor(img, cv2.COLOR_RGB2LUV)
# Define a single function that can extract features using hog sub-sampling and make predictions
def find_cars(img, ystart, ystop, scale, svc, X_scaler, orient, pix_per_cell, cell_per_block, spatial_size, hist_bins):
    """Detect cars in one frame using a single HOG pass with sub-sampling.

    HOG is computed once per channel over the (scaled) search band
    ``img[ystart:ystop]`` and each sliding 64x64 window is scored by
    sub-sampling those HOG arrays, instead of recomputing HOG per window.

    Returns (annotated copy of img, per-pixel heat map of detections).
    Side effect: every positive window is appended to the module-level
    ``img_boxes`` list.

    Bug fix: the ``np.int`` / ``np.float`` aliases used previously were
    removed in NumPy 1.20+/1.24+ and raise AttributeError on current
    NumPy; the builtins ``int`` / ``float`` are exact replacements.
    """
    draw_img = np.copy(img)
    heat_map = np.zeros_like(img[:,:,0]).astype(float)
    # Restrict the search to the configured horizontal band.
    img_tosearch = img[ystart:ystop,:,:]
    ctrans_tosearch = convert_color(img_tosearch, conv='RGB2YCrCb')
    if scale != 1:
        # Searching a scaled-down image is equivalent to scaling the window up.
        imshape = ctrans_tosearch.shape
        ctrans_tosearch = cv2.resize(ctrans_tosearch, (int(imshape[1]/scale), int(imshape[0]/scale)))
    ch1 = ctrans_tosearch[:,:,0]
    ch2 = ctrans_tosearch[:,:,1]
    ch3 = ctrans_tosearch[:,:,2]
    # Define blocks and steps as above
    nxblocks = (ch1.shape[1] // pix_per_cell) - cell_per_block + 1
    nyblocks = (ch1.shape[0] // pix_per_cell) - cell_per_block + 1
    nfeat_per_block = orient*cell_per_block**2
    # 64 was the original sampling rate, with 8 cells and 8 pix per cell
    window = 64
    nblocks_per_window = (window // pix_per_cell) - cell_per_block + 1
    cells_per_step = 2  # Instead of overlap, define how many cells to step
    nxsteps = (nxblocks - nblocks_per_window) // cells_per_step + 1
    nysteps = (nyblocks - nblocks_per_window) // cells_per_step + 1
    # Compute individual channel HOG features for the entire image
    hog1 = get_hog_features(ch1, orient, pix_per_cell, cell_per_block, feature_vec=False)
    hog2 = get_hog_features(ch2, orient, pix_per_cell, cell_per_block, feature_vec=False)
    hog3 = get_hog_features(ch3, orient, pix_per_cell, cell_per_block, feature_vec=False)
    for xb in range(nxsteps):
        for yb in range(nysteps):
            ypos = yb*cells_per_step
            xpos = xb*cells_per_step
            # Extract HOG for this patch
            hog_feat1 = hog1[ypos:ypos+nblocks_per_window, xpos:xpos+nblocks_per_window].ravel()
            hog_feat2 = hog2[ypos:ypos+nblocks_per_window, xpos:xpos+nblocks_per_window].ravel()
            hog_feat3 = hog3[ypos:ypos+nblocks_per_window, xpos:xpos+nblocks_per_window].ravel()
            hog_features = np.hstack((hog_feat1, hog_feat2, hog_feat3))
            xleft = xpos*pix_per_cell
            ytop = ypos*pix_per_cell
            # Extract the image patch
            subimg = cv2.resize(ctrans_tosearch[ytop:ytop+window, xleft:xleft+window], (64,64))
            # Get color features
            spatial_features = bin_spatial(subimg, size=spatial_size)
            hist_features = color_hist(subimg, nbins=hist_bins)
            # Scale features and make a prediction
            test_features = X_scaler.transform(np.hstack((spatial_features, hist_features, hog_features)).reshape(1, -1))
            test_prediction = svc.predict(test_features)
            if test_prediction == 1:
                # Map window coordinates back to the original image scale.
                xbox_left = int(xleft*scale)
                ytop_draw = int(ytop*scale)
                win_draw = int(window*scale)
                cv2.rectangle(draw_img,(xbox_left, ytop_draw+ystart),(xbox_left+win_draw,ytop_draw+win_draw+ystart),(0,0,255),6)
                img_boxes.append(((xbox_left, ytop_draw+ystart),(xbox_left+win_draw, ytop_draw+win_draw+ystart)))
                heat_map[ytop_draw+ystart:ytop_draw+win_draw+ystart, xbox_left:xbox_left+win_draw] += 1
    return draw_img, heat_map
def process_image(img):
    """Run the detection pipeline on one video frame.

    Uses the module-level tuned parameters and trained classifier: the heat
    map produced by find_cars is labelled into connected regions, and a box
    is drawn around each region on a copy of the input frame.
    """
    _search_img, heatmap = find_cars(img,
                                     ystart=YSTART,
                                     ystop=YSTOP,
                                     scale=SCALE,
                                     svc=SVC,
                                     X_scaler=X_scaler,
                                     orient=ORIENTATION,
                                     pix_per_cell=PIX_PER_CELL,
                                     cell_per_block=CELL_PER_BLOCK,
                                     spatial_size=SPATIAL_SIZE,
                                     hist_bins=HIST_BINS)
    car_regions = label(heatmap)
    return draw_labeled_bboxes(np.copy(img), car_regions)
if __name__ == "__main__":
vehicles_images = glob.glob('../../../vehicles/vehicles/*/*.png')
non_vehicles_images = glob.glob('../../../non-vehicles/non-vehicles/*/*.png')
cars = load_data(vehicles_images)
non_cars = load_data(non_vehicles_images)
"""Parameters"""
COLOR_SPACE = 'YCrCb' # Can be RGB, HSV, LUV, HLS, YUV, YCrCb
ORIENTATION = 9 # HOG orientations
PIX_PER_CELL = 8 # HOG pixels per cell
CELL_PER_BLOCK = 2 # HOG cells per block
HOG_CHANNEL = "ALL" # Can be 0, 1, 2, or "ALL"
SPATIAL_SIZE = (16, 16) # Spatial binning dimensions
HIST_BINS = 16 # Number of histogram bins
IS_SPATIAL_FEAT = True # Spatial features on or off
IS_HIST_FEAT = True # Histogram features on or off
IS_HOG_FEAT = True # HOG features on or off
t=time.time()
car_features = extract_features(cars,
color_space = COLOR_SPACE,
spatial_size= SPATIAL_SIZE,
hist_bins = HIST_BINS,
orient = ORIENTATION,
pix_per_cell = PIX_PER_CELL,
cell_per_block = CELL_PER_BLOCK,
hog_channel = HOG_CHANNEL,
spatial_feat = IS_SPATIAL_FEAT ,
hist_feat = IS_HIST_FEAT,
hog_feat = IS_HOG_FEAT)
notcar_features = extract_features(non_cars,
color_space = COLOR_SPACE,
spatial_size= SPATIAL_SIZE,
hist_bins = HIST_BINS,
orient = ORIENTATION,
pix_per_cell = PIX_PER_CELL,
cell_per_block = CELL_PER_BLOCK,
hog_channel = HOG_CHANNEL,
spatial_feat = IS_SPATIAL_FEAT ,
hist_feat = IS_HIST_FEAT,
hog_feat = IS_HOG_FEAT)
print(time.time()-t, 'Seconds to compute features...')
X = np.vstack((car_features, notcar_features)).astype(np.float64)
# Fit a per-column scaler
X_scaler = StandardScaler().fit(X)
# Apply the scaler to X
scaled_X = X_scaler.transform(X)
# Define the labels vector
y = np.hstack((np.ones(len(car_features)), np.zeros(len(notcar_features))))
# Split up data into randomized training and test sets
rand_state = np.random.randint(0, 100)
X_train, X_test, y_train, y_test = train_test_split(scaled_X, y, test_size=0.2, random_state=rand_state)
print('Using:',orient,'orientations',pix_per_cell,'pixels per cell and', cell_per_block,'cells per block')
print('Feature vector length:', len(X_train[0]))
# Use a linear SVC
SVC = LinearSVC()
# Check the training time for the SVC
SVC.fit(X_train, y_train)
t2 = time.time()
print(round(t2-t, 2), 'Seconds to train SVC...')
# Check the score of the SVC
print('Test Accuracy of SVC = ', round(SVC.score(X_test, y_test), 4))
clip1 = VideoFileClip('../project_video.mp4')
video_clip = clip1.fl_image(process_image) #NOTE: this function expects color images!!
video_output = '../output_videos/project_video.mp4' |
# Evaluate all fairseq models for the en_ja / ja_en language pairs:
# translate the valid and test splits with each model's best checkpoint,
# detokenize with SentencePiece, re-segment Japanese output with MeCab,
# and append sacrebleu scores to ${BASE_SAVE_PATH}/results.txt.
SPM_MODEL_PATH="/data/10/litong/NICT-MT/all-4-sentencepiece-en_ja-32000.model"
BASE_PATH="/data/temp/litong/fairseq/data-bin"
#CUDA_VISIBLE_DEVICES=${1}
# First CLI argument: directory for per-model outputs and results.txt.
BASE_SAVE_PATH=${1}
RESULT_FILE=${BASE_SAVE_PATH}/results.txt
mkdir -p ${BASE_SAVE_PATH}
for LANG_PAIR in $(echo "en_ja ja_en" | tr " " "\n")
do
    echo ${LANG_PAIR} >> ${RESULT_FILE}
    echo "------------------------------" >> ${RESULT_FILE}
    SAVE_PATH=${BASE_SAVE_PATH}/${LANG_PAIR}
    mkdir -p ${SAVE_PATH}
    # for MODEL in $(find ${BASE_PATH} -maxdepth 1 | grep "${LANG_PAIR}" | sort -u | grep filtered)
    # Every data-bin directory whose name mentions this language pair.
    for MODEL in $(find ${BASE_PATH} -maxdepth 1 | grep "${LANG_PAIR}" | sort -u)
    do
        MODEL_NAME=$(echo ${MODEL} | rev | cut -d"/" -f 1 | rev)
        echo ${MODEL_NAME} >> ${RESULT_FILE}
        SCORE=""
        for SPLIT in $(echo "valid test" | tr " " "\n")
        do
            TARGET=${SAVE_PATH}/${MODEL_NAME}_${SPLIT}
            REFERENCE=${TARGET}.ref
            # Translate the split with this model's best checkpoint.
            fairseq-generate ${MODEL} --path ${MODEL}/../../${MODEL_NAME}/checkpoint_best.pt --beam 6 \
                --user-dir context_nmt --batch-size 500 --gen-subset ${SPLIT} | tee /tmp/gen.out
            # fairseq-generate output: H lines are hypotheses, T lines are references.
            grep ^H /tmp/gen.out | cut -f3- > /tmp/gen.out.sys
            grep ^T /tmp/gen.out | cut -f2- > /tmp/gen.out.ref
            # Undo SentencePiece segmentation before scoring.
            spm_decode --model=${SPM_MODEL_PATH} --input_format=piece < /tmp/gen.out.sys > /tmp/gen.out.sys.retok
            spm_decode --model=${SPM_MODEL_PATH} --input_format=piece < /tmp/gen.out.ref > /tmp/gen.out.ref.retok
            if [ "$LANG_PAIR" == "en_ja" ]; then
                # Japanese targets: re-tokenize with MeCab so BLEU is word-based.
                mecab -O wakati < /tmp/gen.out.sys.retok > ${TARGET}
                mecab -O wakati < /tmp/gen.out.ref.retok > ${REFERENCE}
                # jumanpp --segment -o ${TARGET} /tmp/gen.out.sys.retok
                # jumanpp --segment -o ${REFERENCE} /tmp/gen.out.ref.retok
            else
                cp /tmp/gen.out.sys.retok ${TARGET}
                cp /tmp/gen.out.ref.retok ${REFERENCE}
            fi
            # "@" is a temporary field separator, converted to tabs below.
            SCORE="${SCORE}@${SPLIT}: $(sacrebleu -b -w 2 ${REFERENCE} < ${TARGET})"
        done
        echo ${SCORE} | tr "@" "\t" >> ${RESULT_FILE}
    done
    echo >> ${RESULT_FILE}
done
|
#!/bin/bash
# Build the iPRP daemons and CLI tools from scratch.
# Bug fix: abort immediately when any compile step fails, instead of
# silently continuing and leaving a partial bin/ tree behind.
set -e

rm -rf bin
mkdir -p bin

# Common flags: C99, project headers in inc/, netfilter stack libraries.
CFLAGS="-w -std=c99 -I inc/ -Wfatal-errors"
NETLIBS="-lnfnetlink -lnetfilter_queue"

# Daemons (all need pthreads; icd additionally links libm).
gcc $CFLAGS src/icd/* src/lib/* -o bin/icd -lpthread $NETLIBS -lm
gcc $CFLAGS src/isd/* src/lib/* -o bin/isd -lpthread $NETLIBS
gcc $CFLAGS src/ird/* src/lib/* -o bin/ird -lpthread $NETLIBS
gcc $CFLAGS src/imd/* src/lib/* -o bin/imd -lpthread $NETLIBS

# CLI tools, one source file each.
mkdir -p bin/tools
for tool in iprptest iprptracert iprpping iprprecvstats iprpsendstats; do
    gcc $CFLAGS tools/${tool}.c src/lib/* -o bin/tools/${tool} $NETLIBS
done
#!/bin/bash
# Package JACK2 (+ QjackCtl) for the given PawPaw target: a Windows
# InnoSetup installer built under Wine, or a macOS .pkg plus app tarball.
set -e
cd $(dirname ${0})
PAWPAW_ROOT="${PWD}"
# Versions may be overridden from the environment; defaults below.
JACK2_VERSION=${JACK2_VERSION:=git}
JACK_ROUTER_VERSION=${JACK_ROUTER_VERSION:=6c2e532bb05d2ba59ef210bef2fe270d588c2fdf}
QJACKCTL_VERSION=${QJACKCTL_VERSION:=0.9.5}
# ---------------------------------------------------------------------------------------------------------------------
target="${1}"
if [ -z "${target}" ]; then
    echo "usage: ${0} <target> [package-build?]"
    exit 1
fi
# TODO check that bootstrap-jack.sh has been run
# ---------------------------------------------------------------------------------------------------------------------
source setup/check_target.sh
source setup/env.sh
source setup/functions.sh
source setup/versions.sh
# ---------------------------------------------------------------------------------------------------------------------
jack2_repo="https://github.com/jackaudio/jack2.git"
jack2_prefix="${PAWPAW_PREFIX}-jack2"
# On macOS jack2 is installed under /usr/local inside the prefix.
if [ "${MACOS}" -eq 1 ]; then
    jack2_extra_prefix="/usr/local"
fi
# ---------------------------------------------------------------------------------------------------------------------
if [ ! -e jack2 ]; then
    ln -s "${PAWPAW_BUILDDIR}/jack2-${JACK2_VERSION}" jack2
fi
# ---------------------------------------------------------------------------------------------------------------------
if [ "${WIN32}" -eq 1 ]; then
    # setup innosetup
    dlfile="${PAWPAW_DOWNLOADDIR}/innosetup-6.0.5.exe"
    innodir="${PAWPAW_BUILDDIR}/innosetup-6.0.5"
    iscc="${innodir}/drive_c/InnoSetup/ISCC.exe"
    # download it
    if [ ! -f "${dlfile}" ]; then
        # FIXME proper dl version
        curl -L https://jrsoftware.org/download.php/is.exe?site=2 -o "${dlfile}"
    fi
    # initialize wine
    if [ ! -d "${innodir}"/drive_c ]; then
        env WINEPREFIX="${innodir}" wineboot -u
    fi
    # install innosetup in custom wineprefix
    if [ ! -f "${innodir}"/drive_c/InnoSetup/ISCC.exe ]; then
        env WINEPREFIX="${innodir}" wine "${dlfile}" /allusers /dir=C:\\InnoSetup /nocancel /norestart /verysilent
    fi
    # copy jackrouter binaries
    mkdir -p "${jack2_prefix}/jack-router/win32"
    mkdir -p "${jack2_prefix}/jack-router/win64"
    copy_file jack-router "${JACK_ROUTER_VERSION}" "README-win" "${jack2_prefix}/jack-router/README.txt"
    copy_file jack-router "${JACK_ROUTER_VERSION}" "binaries/win32/JackRouter.dll" "${jack2_prefix}/jack-router/win32/JackRouter.dll"
    copy_file jack-router "${JACK_ROUTER_VERSION}" "binaries/win32/JackRouter.ini" "${jack2_prefix}/jack-router/win32/JackRouter.ini"
    if [ "${WIN64}" -eq 1 ]; then
        copy_file jack-router "${JACK_ROUTER_VERSION}" "binaries/win64/JackRouter.dll" "${jack2_prefix}/jack-router/win64/JackRouter.dll"
        copy_file jack-router "${JACK_ROUTER_VERSION}" "binaries/win64/JackRouter.ini" "${jack2_prefix}/jack-router/win64/JackRouter.ini"
    fi
    # finally create the installer file
    pushd "${PAWPAW_BUILDDIR}/jack2-${JACK2_VERSION}/windows/inno"
    echo "#define VERSION \"${JACK2_VERSION}\"" > "version.iss"
    # Qt runtime DLLs and the platform plugin must sit next to the .iss script.
    ln -sf "${PAWPAW_PREFIX}/bin/Qt5"{Core,Gui,Network,Widgets,Xml}".dll" .
    ln -sf "${PAWPAW_PREFIX}/lib/qt5/plugins/platforms/qwindows.dll" .
    ln -sf "${jack2_prefix}" "${PAWPAW_TARGET}"
    env WINEPREFIX="${innodir}" wine "${iscc}" "${PAWPAW_TARGET}.iss"
    popd
    # and move installer file where CI expects it to be
    mv "${PAWPAW_BUILDDIR}/jack2-${JACK2_VERSION}/windows/inno/"*.exe .
elif [ "${MACOS}" -eq 1 ]; then
    # Fix up install names of every jack2 binary/dylib before packaging.
    for f in $(ls "${jack2_prefix}${jack2_extra_prefix}/bin"/* \
               "${jack2_prefix}${jack2_extra_prefix}/lib"/*.dylib \
               "${jack2_prefix}${jack2_extra_prefix}/lib/jack"/*); do
        patch_osx_binary_libs "${f}"
    done
    # Extract the VERSION=... value from jack2's wscript to name the .pkg.
    jack2_lastversion=$(cat jack2/wscript | awk 'sub("VERSION=","")' | tr -d "'")
    ./jack2/macosx/generate-pkg.sh "${jack2_prefix}${jack2_extra_prefix}/"
    qjackctl_app="${PAWPAW_PREFIX}/bin/QjackCtl.app"
    qjackctl_dir="${qjackctl_app}/Contents/MacOS"
    patch_osx_qtapp qjackctl "${QJACKCTL_VERSION}" "${qjackctl_app}"
    patch_osx_binary_libs "${qjackctl_dir}/QjackCtl"
    rm -rf jack2/macosx/QjackCtl.app
    cp -rv "${qjackctl_app}" jack2/macosx/QjackCtl.app
    if [ "${MACOS_UNIVERSAL}" -eq 1 ]; then
        variant="universal"
    else
        variant="intel"
    fi
    rm -f jack2-macOS-${variant}-${JACK2_VERSION}.tar.gz
    tar czf jack2-macOS-${variant}-${JACK2_VERSION}.tar.gz -C jack2/macosx jack2-osx-${jack2_lastversion}.pkg QjackCtl.app
fi
# ---------------------------------------------------------------------------------------------------------------------
|
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import textwrap
from random import randint
# Module and function under test, resolved by the tmc loader utilities.
exercise = 'src.longest_string'
function = 'longest'
@points('5.longest_string')
class LongestStringTest(unittest.TestCase):
    """TMC grader tests for the longest(strings) exercise.

    Cleanups: ``self.assertTrue(False, msg)`` replaced with the idiomatic
    ``self.fail(msg)``, and bare ``except:`` clauses (which also swallow
    SystemExit / KeyboardInterrupt) narrowed to ``except Exception``.
    """

    @classmethod
    def setUpClass(cls):
        # Importing the exercise must not prompt the user: any input() call
        # raises immediately instead of blocking the grader.
        with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
            cls.module = load_module(exercise, 'en')

    def test_0_main_program_ok(self):
        # Top-level test code in the exercise must live under a __main__ guard.
        ok, line = check_source(self.module)
        message = """The code for testing the functions should be placed inside
if __name__ == "__main__":
block. The following row should be moved:
"""
        self.assertTrue(ok, message+line)

    def test_1_function_exists(self):
        try:
            from src.longest_string import longest
        except Exception:
            self.fail('Your code should contain function named as longest(strings: list)')
        try:
            from src.longest_string import longest
            longest(["ab","a"])
        except Exception:
            self.fail('Make sure, that function can be called as follows\nlongest(["ab","a"])')

    def test_2_type_of_return_value(self):
        longest = load(exercise, function, 'en')
        val = longest(["ab","a"])
        self.assertTrue(type(val) == str, f'Function {function} does not return value of string type when calling longest(["ab","a"])')

    def test_3_lists(self):
        test_cases = ("first second third", "ab abcd abc acbdefg a abcd aa", "orange apple milkshake banana pear", "sheila sells seashells on the seashore")
        for tc in test_cases:
            test_case = tc.split()
            # Reload the module per case, again forbidding input() prompts.
            with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
                reload_module(self.module)
            output_alussa = get_stdout()
            longest = load(exercise, function, 'en')
            # Model solution: max() with len as the key keeps the first of
            # equally long strings, matching the exercise specification.
            correct = max(test_case, key=len)
            try:
                test_result = longest(test_case)
            except Exception:
                self.fail(f"Make sure, that the function works when the list is\n{test_case}")
            self.assertEqual(correct, test_result, f"The result '{test_result}' does not match with the model solution '{correct}' when the list is {test_case}")
# Allow running this test module directly (TMC normally drives it).
if __name__ == '__main__':
    unittest.main()
<filename>src/apps/opc/src/com/sun/j2ee/blueprints/opc/transitions/QueueHelper.java
/*
* Copyright 2002 Sun Microsystems, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any
* kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND
* WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY
* EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR
* DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN
* OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR
* FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR
* PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF
* LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that Software is not designed, licensed or intended
* for use in the design, construction, operation or maintenance of
* any nuclear facility.
*/
package com.sun.j2ee.blueprints.opc.transitions;
import javax.jms.*;
/**
 * Helper that publishes XML payloads to a JMS queue as text messages.
 *
 * <p>A fresh connection, session and sender are created for every send and
 * the connection is always closed afterwards in a {@code finally} block.
 */
public class QueueHelper implements java.io.Serializable {

    // Field names kept as-is: this class is Serializable and renaming
    // fields would change its serialized form.
    private Queue q;
    private QueueConnectionFactory qFactory;

    /**
     * @param qFactory connection factory used to obtain queue connections
     * @param q destination queue for outgoing messages
     */
    public QueueHelper(QueueConnectionFactory qFactory, Queue q) {
        this.qFactory = qFactory;
        this.q = q;
    }

    /**
     * Wraps the given XML document in a JMS text message and sends it.
     *
     * @param xmlMessage the xml message to put inside the JMS text message
     * @throws JMSException if the message cannot be created or sent
     */
    public void sendMessage(String xmlMessage) throws JMSException {
        QueueConnection connection = null;
        try {
            connection = qFactory.createQueueConnection();
            QueueSession queueSession = connection.createQueueSession(true, 0);
            QueueSender sender = queueSession.createSender(q);
            TextMessage textMessage = queueSession.createTextMessage();
            textMessage.setText(xmlMessage);
            sender.send(textMessage);
        } finally {
            // Closing the connection releases the session and sender too;
            // a close failure is reported but deliberately not rethrown.
            try {
                if (connection != null) {
                    connection.close();
                }
            } catch (Exception e) {
                System.err.println("OPC.QueueHelper GOT EXCEPTION closing connection" + e);
            }
        }
    }
}
|
#!/bin/bash
# End-to-end Envoy hot restart test: for each generated config (IPv4, IPv6,
# UDS) start an Envoy, hot-restart it twice (epochs 1 and 2), and check
# hot-restart-version consistency, listener-address stability and clean
# shutdown ordering.
source "$TEST_SRCDIR/envoy/test/integration/test_utility.sh"
# TODO(htuch): In this test script, we are duplicating work done in test_environment.cc via sed.
# Instead, we can add a simple C++ binary that links against test_environment.cc and uses the
# substitution methods provided there.
JSON_TEST_ARRAY=()
# Ensure that the runtime watch root exist.
mkdir -p "${TEST_TMPDIR}"/test/common/runtime/test_data/current/envoy
mkdir -p "${TEST_TMPDIR}"/test/common/runtime/test_data/current/envoy_override
# Parameterize IPv4 and IPv6 testing.
if [[ -z "${ENVOY_IP_TEST_VERSIONS}" ]] || [[ "${ENVOY_IP_TEST_VERSIONS}" == "all" ]] \
  || [[ "${ENVOY_IP_TEST_VERSIONS}" == "v4only" ]]; then
  HOT_RESTART_JSON_V4="${TEST_TMPDIR}"/hot_restart_v4.yaml
  echo building ${HOT_RESTART_JSON_V4} ...
  # Substitute the template placeholders in the server config for IPv4.
  cat "${TEST_SRCDIR}/envoy"/test/config/integration/server.yaml |
  sed -e "s#{{ upstream_. }}#0#g" | \
  sed -e "s#{{ test_rundir }}#$TEST_SRCDIR/envoy#" | \
  sed -e "s#{{ test_tmpdir }}#$TEST_TMPDIR#" | \
  sed -e "s#{{ ip_loopback_address }}#127.0.0.1#" | \
  sed -e "s#{{ dns_lookup_family }}#V4_ONLY#" | \
  cat > "${HOT_RESTART_JSON_V4}"
  JSON_TEST_ARRAY+=("${HOT_RESTART_JSON_V4}")
fi
if [[ -z "${ENVOY_IP_TEST_VERSIONS}" ]] || [[ "${ENVOY_IP_TEST_VERSIONS}" == "all" ]] \
  || [[ "${ENVOY_IP_TEST_VERSIONS}" == "v6only" ]]; then
  HOT_RESTART_JSON_V6="${TEST_TMPDIR}"/hot_restart_v6.yaml
  cat "${TEST_SRCDIR}/envoy"/test/config/integration/server.yaml |
  sed -e "s#{{ upstream_. }}#0#g" | \
  sed -e "s#{{ test_rundir }}#$TEST_SRCDIR/envoy#" | \
  sed -e "s#{{ test_tmpdir }}#$TEST_TMPDIR#" | \
  sed -e "s#{{ ip_loopback_address }}#::1#" | \
  sed -e "s#{{ dns_lookup_family }}#v6_only#" | \
  cat > "${HOT_RESTART_JSON_V6}"
  JSON_TEST_ARRAY+=("${HOT_RESTART_JSON_V6}")
fi
# Also test for listening on UNIX domain sockets. We use IPv4 for the
# upstreams to avoid too much wild sedding.
HOT_RESTART_JSON_UDS="${TEST_TMPDIR}"/hot_restart_uds.yaml
SOCKET_DIR="$(mktemp -d /tmp/envoy_test_hotrestart.XXXXXX)"
cat "${TEST_SRCDIR}/envoy"/test/config/integration/server_unix_listener.yaml |
sed -e "s#{{ socket_dir }}#${SOCKET_DIR}#" | \
sed -e "s#{{ ip_loopback_address }}#127.0.0.1#" | \
cat > "${HOT_RESTART_JSON_UDS}"
JSON_TEST_ARRAY+=("${HOT_RESTART_JSON_UDS}")
# Enable this test to work with --runs_per_test
if [[ -z "${TEST_RANDOM_SEED}" ]]; then
  BASE_ID=1
else
  BASE_ID="${TEST_RANDOM_SEED}"
fi
echo "Hot restart test using --base-id ${BASE_ID}"
TEST_INDEX=0
for HOT_RESTART_JSON in "${JSON_TEST_ARRAY[@]}"
do
  # TODO(jun03): instead of setting the base-id, the validate server should use the nop hot restart
  start_test validation
  check "${ENVOY_BIN}" -c "${HOT_RESTART_JSON}" --mode validate --service-cluster cluster \
    --max-obj-name-len 500 --service-node node --base-id "${BASE_ID}"
  # Now start the real server, hot restart it twice, and shut it all down as a basic hot restart
  # sanity test.
  start_test Starting epoch 0
  ADMIN_ADDRESS_PATH_0="${TEST_TMPDIR}"/admin.0."${TEST_INDEX}".address
  run_in_background_saving_pid "${ENVOY_BIN}" -c "${HOT_RESTART_JSON}" \
    --restart-epoch 0 --base-id "${BASE_ID}" --service-cluster cluster --service-node node \
    --max-obj-name-len 500 --admin-address-path "${ADMIN_ADDRESS_PATH_0}"
  FIRST_SERVER_PID=$BACKGROUND_PID
  start_test Updating original config listener addresses
  sleep 3
  UPDATED_HOT_RESTART_JSON="${TEST_TMPDIR}"/hot_restart_updated."${TEST_INDEX}".yaml
  "${TEST_SRCDIR}/envoy"/tools/socket_passing "-o" "${HOT_RESTART_JSON}" "-a" "${ADMIN_ADDRESS_PATH_0}" \
    "-u" "${UPDATED_HOT_RESTART_JSON}"
  # Send SIGUSR1 signal to the first server, this should not kill it. Also send SIGHUP which should
  # get eaten.
  echo "Sending SIGUSR1/SIGHUP to first server"
  kill -SIGUSR1 ${FIRST_SERVER_PID}
  kill -SIGHUP ${FIRST_SERVER_PID}
  sleep 3
  disableHeapCheck
  # To ensure that we don't accidentally change the /hot_restart_version
  # string, compare it against a hard-coded string.
  start_test Checking for consistency of /hot_restart_version
  CLI_HOT_RESTART_VERSION=$("${ENVOY_BIN}" --hot-restart-version --base-id "${BASE_ID}" 2>&1)
  EXPECTED_CLI_HOT_RESTART_VERSION="11.104"
  echo "The Envoy's hot restart version is ${CLI_HOT_RESTART_VERSION}"
  echo "Now checking that the above version is what we expected."
  check [ "${CLI_HOT_RESTART_VERSION}" = "${EXPECTED_CLI_HOT_RESTART_VERSION}" ]
  # TODO(fredlas) max-obj-name-len is a deprecated no-op; can probably remove this test soon.
  start_test Checking for consistency of /hot_restart_version with --max-obj-name-len 500
  CLI_HOT_RESTART_VERSION=$("${ENVOY_BIN}" --hot-restart-version --base-id "${BASE_ID}" \
    --max-obj-name-len 500 2>&1)
  EXPECTED_CLI_HOT_RESTART_VERSION="11.104"
  check [ "${CLI_HOT_RESTART_VERSION}" = "${EXPECTED_CLI_HOT_RESTART_VERSION}" ]
  start_test Checking for match of --hot-restart-version and admin /hot_restart_version
  ADMIN_ADDRESS_0=$(cat "${ADMIN_ADDRESS_PATH_0}")
  echo fetching hot restart version from http://${ADMIN_ADDRESS_0}/hot_restart_version ...
  ADMIN_HOT_RESTART_VERSION=$(curl -sg http://${ADMIN_ADDRESS_0}/hot_restart_version)
  echo "Fetched ADMIN_HOT_RESTART_VERSION is ${ADMIN_HOT_RESTART_VERSION}"
  CLI_HOT_RESTART_VERSION=$("${ENVOY_BIN}" --hot-restart-version --base-id "${BASE_ID}" \
    --max-obj-name-len 500 2>&1)
  check [ "${ADMIN_HOT_RESTART_VERSION}" = "${CLI_HOT_RESTART_VERSION}" ]
  # Verify we can see server.live in the admin port.
  SERVER_LIVE_0=$(curl -sg http://${ADMIN_ADDRESS_0}/stats | grep server.live)
  check [ "$SERVER_LIVE_0" = "server.live: 1" ];
  enableHeapCheck
  start_test Starting epoch 1
  ADMIN_ADDRESS_PATH_1="${TEST_TMPDIR}"/admin.1."${TEST_INDEX}".address
  run_in_background_saving_pid "${ENVOY_BIN}" -c "${UPDATED_HOT_RESTART_JSON}" \
    --restart-epoch 1 --base-id "${BASE_ID}" --service-cluster cluster --service-node node \
    --max-obj-name-len 500 --admin-address-path "${ADMIN_ADDRESS_PATH_1}"
  SECOND_SERVER_PID=$BACKGROUND_PID
  # Wait for stat flushing
  sleep 7
  ADMIN_ADDRESS_1=$(cat "${ADMIN_ADDRESS_PATH_1}")
  SERVER_LIVE_1=$(curl -sg http://${ADMIN_ADDRESS_1}/stats | grep server.live)
  check [ "$SERVER_LIVE_1" = "server.live: 1" ];
  start_test Checking that listener addresses have not changed
  HOT_RESTART_JSON_1="${TEST_TMPDIR}"/hot_restart.1."${TEST_INDEX}".yaml
  "${TEST_SRCDIR}/envoy"/tools/socket_passing "-o" "${UPDATED_HOT_RESTART_JSON}" "-a" "${ADMIN_ADDRESS_PATH_1}" \
    "-u" "${HOT_RESTART_JSON_1}"
  CONFIG_DIFF=$(diff "${UPDATED_HOT_RESTART_JSON}" "${HOT_RESTART_JSON_1}")
  [[ -z "${CONFIG_DIFF}" ]]
  ADMIN_ADDRESS_PATH_2="${TEST_TMPDIR}"/admin.2."${TEST_INDEX}".address
  start_test Starting epoch 2
  run_in_background_saving_pid "${ENVOY_BIN}" -c "${UPDATED_HOT_RESTART_JSON}" \
    --restart-epoch 2 --base-id "${BASE_ID}" --service-cluster cluster --service-node node \
    --max-obj-name-len 500 --admin-address-path "${ADMIN_ADDRESS_PATH_2}"
  THIRD_SERVER_PID=$BACKGROUND_PID
  sleep 3
  start_test Checking that listener addresses have not changed
  HOT_RESTART_JSON_2="${TEST_TMPDIR}"/hot_restart.2."${TEST_INDEX}".yaml
  "${TEST_SRCDIR}/envoy"/tools/socket_passing "-o" "${UPDATED_HOT_RESTART_JSON}" "-a" "${ADMIN_ADDRESS_PATH_2}" \
    "-u" "${HOT_RESTART_JSON_2}"
  CONFIG_DIFF=$(diff "${UPDATED_HOT_RESTART_JSON}" "${HOT_RESTART_JSON_2}")
  [[ -z "${CONFIG_DIFF}" ]]
  # First server should already be gone.
  start_test Waiting for epoch 0
  wait ${FIRST_SERVER_PID}
  [[ $? == 0 ]]
  #Send SIGUSR1 signal to the second server, this should not kill it
  start_test Sending SIGUSR1 to the second server
  kill -SIGUSR1 ${SECOND_SERVER_PID}
  sleep 3
  # Now term the last server, and the other one should exit also.
  start_test Killing and waiting for epoch 2
  kill ${THIRD_SERVER_PID}
  wait ${THIRD_SERVER_PID}
  [[ $? == 0 ]]
  start_test Waiting for epoch 1
  wait ${SECOND_SERVER_PID}
  [[ $? == 0 ]]
  TEST_INDEX=$((TEST_INDEX+1))
done
start_test disabling hot_restart by command line.
CLI_HOT_RESTART_VERSION=$("${ENVOY_BIN}" --hot-restart-version --disable-hot-restart 2>&1)
check [ "disabled" = "${CLI_HOT_RESTART_VERSION}" ]
echo "PASS"
|
import Foundation
func bubbleSort(_ numbers: [Int]) -> [Int] {
    // Classic bubble sort: repeatedly swap adjacent out-of-order elements.
    // After pass `pass` the largest `pass + 1` values have settled at the
    // end, so each inner sweep can stop that many elements earlier.
    var result = numbers
    for pass in 0..<result.count {
        for index in 1..<result.count - pass where result[index] < result[index - 1] {
            result.swapAt(index, index - 1)
        }
    }
    return result
}
// Demo: sort a fixed sample array and print the result.
let array = [4, 1, 7, 5, 2, 6, 3]
let sortedArray = bubbleSort(array)
print(sortedArray) // Prints [1, 2, 3, 4, 5, 6, 7]
import test from "ava";
import m from ".";
// m() wraps un-sized delimiter pairs in \left ... \right so that they
// scale with their contents in rendered LaTeX.
test("format brackets", t => {
	t.is(m("(x)"), String.raw`\left(x\right)`);
	t.is(m("[x]"), String.raw`\left[x\right]`);
	// Mismatched delimiters (half-open intervals) are preserved as-is.
	t.is(m("(x]"), String.raw`\left(x\right]`);
	t.is(m("[x)"), String.raw`\left[x\right)`);
});
test("format absolute value", t => {
	t.is(m("|x|"), String.raw`\left|x\right|`);
});
test("format floor and ceil", t => {
	// Note the trailing space after \rfloor / \rceil in the expected output.
	t.is(m(String.raw`\lfloor x\rfloor`), String.raw`\left\lfloor x\right\rfloor `);
	t.is(m(String.raw`\lceil x\rceil`), String.raw`\left\lceil x\right\rceil `);
});
test("format set builder notation", t => {
	// The interior "|" must not be converted to \left| / \right|.
	t.is(m(String.raw`\{x|x\in\mathbb{N}\}`), String.raw`\left\{x|x\in\mathbb{N}\right\}`);
});
|
<filename>src/main/java/com/aveng/wapp/service/StringDiffer.java
package com.aveng.wapp.service;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import org.springframework.lang.NonNull;
import org.springframework.stereotype.Service;
import com.aveng.wapp.service.dto.StringDiff;
import com.aveng.wapp.service.dto.StringDiffResult;
/**
* A Service for comparing two strings
*
* @author apaydin
*/
@Service
public class StringDiffer {

    /**
     * Compares two strings and, if they are matching in length, finds their diffs.
     *
     * @param leftText left input
     * @param rightText right input
     * @return Message indicating the diff result and, if present, a list of diffs
     */
    public StringDiffResult compare(@NonNull String leftText, @NonNull String rightText) {
        if (Objects.equals(leftText, rightText)) {
            return StringDiffResult.builder().message("Provided strings are equal.").build();
        }
        if (leftText.length() != rightText.length()) {
            return StringDiffResult.builder().message("Provided strings are not equal in length.").build();
        }
        List<StringDiff> stringDiffs = findDiffs(leftText, rightText);
        return StringDiffResult.builder().message("Provided strings have diffs.").stringDiffs(stringDiffs).build();
    }

    /**
     * Walks both strings one char at a time, opening a diff at the first
     * mismatching position and closing it at the next matching one.
     *
     * <p>Bug fix: the previous implementation only recorded a still-open diff
     * when it had <em>started</em> on the last character, so any
     * multi-character diff running to the end of the strings (e.g. "abc" vs
     * "axy") was silently dropped. The open diff is now flushed after the
     * loop, which also covers the old single-trailing-char special case.
     *
     * @param leftText left input (same length as rightText — checked by caller)
     * @param rightText right input
     * @return list of (offset, length) diff regions in order of appearance
     */
    private List<StringDiff> findDiffs(@NonNull String leftText, @NonNull String rightText) {
        List<StringDiff> stringDiffs = new ArrayList<>();
        StringDiff currentDiff = null;
        for (int i = 0; i < leftText.length(); i++) {
            boolean charsDiffer = leftText.charAt(i) != rightText.charAt(i);
            if (currentDiff == null && charsDiffer) {
                // open a new diff region at this offset
                currentDiff = new StringDiff();
                currentDiff.setOffset(i);
            } else if (currentDiff != null && !charsDiffer) {
                // first matching char after a run of mismatches: close the diff
                currentDiff.setLength(i - currentDiff.getOffset());
                stringDiffs.add(currentDiff);
                currentDiff = null;
            }
        }
        if (currentDiff != null) {
            // the final diff extends to the end of the strings — flush it
            currentDiff.setLength(leftText.length() - currentDiff.getOffset());
            stringDiffs.add(currentDiff);
        }
        return stringDiffs;
    }
}
|
#!/usr/bin/env bash
DIR=$(dirname "${BASH_SOURCE[0]}")
MACHINE_TYPE=n1-standard-4
GCP_PROJECT=$(gcloud config list --format='value(core.project)')
NETWORK="projects/${GCP_PROJECT}/global/networks/default"
NODE_NUMBER=1
USER=0track@gmail.com
create_cluster() {
    # Create a preemptible GKE cluster with autoscaling (0 .. 2*NODE_NUMBER
    # nodes) after an initial delay used to stagger concurrent invocations.
    if [ $# -ne 3 ]; then
        echo "usage: create_cluster cluster_name cluster_zone sleep_time"
        # Bug fix: previously execution fell through and invoked gcloud
        # with empty arguments; abort the call instead.
        return 1
    fi
    local name=$1
    local zone=$2
    local seconds=$3

    sleep ${seconds}
    gcloud container clusters create ${name} \
          --disk-size 15\
          --zone ${zone} \
          --num-nodes ${NODE_NUMBER} --enable-autoscaling --min-nodes 0 --max-nodes $((2*NODE_NUMBER)) \
          --machine-type ${MACHINE_TYPE} \
          --network ${NETWORK} \
          --preemptible \
          --scopes "cloud-platform,service-control,service-management,https://www.googleapis.com/auth/ndev.clouddns.readwrite"
}
fetch_credentials() {
    # Fetch kubeconfig credentials for the cluster and poll until kubectl can
    # actually reach it (an empty cluster answers "No resources found."),
    # backing off one extra second per attempt.
    if [ $# -ne 2 ]; then
        echo "usage: fetch_credentials name zone"
        # Bug fix: previously execution continued with empty name/zone.
        return 1
    fi
    local name=$1
    local zone=$2
    local count=0
    local seconds=0
    while [ ${count} != 1 ]; do
        # get credential
        gcloud container clusters get-credentials ${name} \
              --zone ${zone}
        # create alias
        kubectl config set-context ${name} \
                --cluster=gke_${GCP_PROJECT}_${zone}_${name} \
                --user=gke_${GCP_PROJECT}_${zone}_${name}
        # check connection now
        count=$(kubectl --context=${name} get pods 2>&1 |
                    grep "No resources found." |
                    wc -l |
                    xargs
             )
        # sleep backoff mechanism
        sleep ${seconds}
        seconds=$((seconds + 1))
    done
}
name="dso"
zone="us-east4"
# NOTE(review): "$i" is never defined in this script, so sleep_time always
# evaluates to 0 here — presumably a leftover from a loop over clusters;
# confirm the intent.
sleep_time=$((i * 2)) # avoid "gcp db locked" error
create_cluster ${name} ${zone} ${sleep_time}
fetch_credentials ${name} ${zone}
# RBAC
kubectl create clusterrolebinding cluster-admin-binding --clusterrole cluster-admin --user ${USER}
kubectl apply -f ${DIR}/../templates/role.yaml
kubectl apply -f ${DIR}/../templates/service.yaml
|
#!/bin/bash
# Provision a new Apache vhost plus a vsftpd virtual FTP user for DOMAIN.TLD.
set -e
ADMIN_EMAIL="info@matteomattei.com"
DOMAIN=${1}
FTP_PASSWORD="$(apg -n 1 -m 8 -d)"
if [ $# -lt 1 ]; then
    echo "USAGE: ${0} DOMAIN.TLD"
    exit 1
fi
if [ -d /var/www/vhosts/${DOMAIN} ]
then
    echo "DOMAIN ALREADY PRESENT!"
    exit 1
fi
# Derive a system user name: must not start with a digit and may not
# contain "." or "-".
DOMAINUSER="${DOMAIN}"
if echo "${DOMAIN}" | egrep -q "^[0-9]+"
then
    DOMAINUSER="a${DOMAIN}"
fi
DOMAINUSER=$(echo ${DOMAINUSER} | sed "{s/[.-]//g}")
mkdir -p /var/www/vhosts/${DOMAIN}/{httpdocs,logs}
useradd --home-dir=/var/www/vhosts/${DOMAIN} --gid=www-data --no-create-home --no-user-group --shell=/bin/false ${DOMAINUSER}
echo ${DOMAINUSER}:"${FTP_PASSWORD}" | chpasswd
# Bug fix: use the portable USER:GROUP separator (the historical "USER.GROUP"
# form is deprecated in GNU coreutils and ambiguous when user names contain
# dots) and drop the duplicated -R flag.
chown -R ${DOMAINUSER}:www-data /var/www/vhosts/${DOMAIN}/httpdocs
chmod 750 /var/www/vhosts/${DOMAIN}/httpdocs
# Register the vsftpd virtual user and its chroot.
echo ${DOMAINUSER} >> /etc/vsftpd.user_list
echo "guest_username=${DOMAINUSER}" > /etc/vsftpd/users/${DOMAINUSER}
echo "local_root=/var/www/vhosts/${DOMAIN}" >> /etc/vsftpd/users/${DOMAINUSER}
# Write the Apache virtual host (behind a proxy on port 8080).
CONF_FILE="/etc/apache2/sites-available/${DOMAIN}.conf"
echo "<VirtualHost *:8080>" > ${CONF_FILE}
echo "    ServerAdmin ${ADMIN_EMAIL}" >> ${CONF_FILE}
echo "    ServerName ${DOMAIN}" >> ${CONF_FILE}
echo "    #ALIAS DO-NOT-REMOVE-OR-ALTER-THIS-LINE" >> ${CONF_FILE}
echo "    ServerAlias www.${DOMAIN}" >> ${CONF_FILE}
echo "    DocumentRoot /var/www/vhosts/${DOMAIN}/httpdocs/" >> ${CONF_FILE}
echo "    <Directory /var/www/vhosts/${DOMAIN}/httpdocs>" >> ${CONF_FILE}
echo "        Options FollowSymLinks" >> ${CONF_FILE}
echo "        AllowOverride All" >> ${CONF_FILE}
echo "        Allow from All" >> ${CONF_FILE}
echo "    </Directory>" >> ${CONF_FILE}
echo "    ErrorLog /var/www/vhosts/${DOMAIN}/logs/error.log" >> ${CONF_FILE}
echo "    CustomLog /var/www/vhosts/${DOMAIN}/logs/access.log combined" >> ${CONF_FILE}
echo "</VirtualHost>" >> ${CONF_FILE}
a2ensite ${DOMAIN} > /dev/null
/etc/init.d/apache2 reload > /dev/null
/etc/init.d/vsftpd reload > /dev/null
# Print the generated credentials for the operator.
echo ""
echo "**********************************"
echo "FTP DATA"
echo "**********************************"
echo "DOMAIN: ${DOMAIN}"
echo "USER: ${DOMAINUSER}"
echo "PASS: ${FTP_PASSWORD}"
echo "PORT: 21"
echo "PHPMYADMIN: http://${DOMAIN}/phpmyadmin"
echo ""
|
#ifndef INCLUDED_CORE_AUDIBLE_EVENT_H
#define INCLUDED_CORE_AUDIBLE_EVENT_H

#include "platform/event.h"

namespace core {

// Event describing a sound associated with an actor.
struct AudibleEvent : public Event
{
    int32_t const mActorGUID;   // GUID of the actor the sound belongs to
    int32_t const mId;          // identifier of the audible resource
    bool const mIsOneShot;      // presumably one-shot vs. looping playback -- confirm
    bool const mIsAddition;     // presumably true = sound started, false = stopped -- confirm

    AudibleEvent( int32_t actorId, int32_t id, bool isOneShot, bool isAddition )
        : mActorGUID( actorId )
        , mId( id )
        , mIsOneShot( isOneShot )
        , mIsAddition( isAddition )
    {
    }
};

} // namespace core

#endif // INCLUDED_CORE_AUDIBLE_EVENT_H
|
#!/bin/bash
set -o errexit
set -o nounset
set -o pipefail

# Look up a package's line in the given flavor file.
# $1: "Package|Installed|Candidate" line produced by the list-newest-version step
# $2: flavor file (or directory, searched recursively)
LIST_NEWEST_VERSION_OUTPUT=$1 # Package|Installed|Candidate
FLAVOR=$2

# Quote the expansion: unquoted, the "|"-separated line would be subject to
# word splitting and glob expansion before reaching cut.
PACKAGE=$(echo "$LIST_NEWEST_VERSION_OUTPUT" | cut -f 1 -d "|")
grep -E -R "^$PACKAGE\|" "$FLAVOR"
|
#!/bin/bash
#source /broad/software/scripts/useuse
#reuse Perl-5.8
#reuse .samtools-0.1.19
#reuse GCC-4.9

# Delegate to run_Trinity_eval.pl located next to this script, forwarding
# every argument verbatim. "$@" preserves argument boundaries; the previous
# `eval` of an unquoted $* re-split any argument containing whitespace.
"$(dirname "$0")/run_Trinity_eval.pl" "$@"
exit $?
|
<reponame>ContentPI/ui-k
import React from 'react'
import Radio from './index'
const stories = {
component: 'Radio',
props: [
{
name: 'children',
type: 'Node',
default: '',
description: 'The content of the component',
},
{
name: 'color',
type: 'Color',
default: 'primary',
description: 'The color of the badge',
},
],
stories: [
{
name: 'Radio',
description: 'Simple Radio',
render: (
<>
<Radio name="radio" label="My Radio Option 1" />
<Radio name="radio" label="My Radio Option 2" />
</>
),
prop: false,
code: `
<Radio name="radio" label="My Radio Option 1" />
<Radio name="radio" label="My Radio Option 2" />
`,
},
],
}
export default stories
|
# Distributed training on 4 GPUs, then validation and feature extraction;
# each step's stdout is captured to a matching log file.
python -m torch.distributed.launch --nproc_per_node=4 train0.py > train0.txt
python valid0.py > valid0.txt
python save_valid_features0.py > save_valid_features0.txt
python save_train_features0.py > save_train_features0.txt
|
import { Component } from '@angular/core';
@Component({
selector:'login-component',
template:`
<h1>ogin</h1>
`
})
export class loginComponent { } |
import axios from "axios";
import { API_URL, TOKEN_TTL } from "../constants";
import { storageSet, storageGet } from "../store";
import { getTokenDuration } from "./utils";
// Returns true while the token's age is still within the allowed TTL.
export const validateToken = token => {
  const age = getTokenDuration(token.created_at, token.expiry);
  return age <= TOKEN_TTL;
};
// Returns the cached API token, fetching a fresh one when the cache is
// empty or the cached token is no longer valid.
export const getToken = async () => {
  let token = await storageGet("token");
  if (!token || !token.value) {
    token = await fetchApiToken();
  }
  return validateToken(token) ? token : await fetchApiToken();
};
// Requests a fresh API token, caches it in storage under "token" and
// returns the stored record { value, expiry, created_at }.
// Throws when the endpoint does not answer with HTTP 200.
// (Removed the try/catch that only re-threw the caught error unchanged.)
export const fetchApiToken = async () => {
  const response = await axios.get("/token", { baseURL: API_URL });
  if (response.status !== 200) {
    throw new Error("Unable to fetch api token");
  }
  const { token, expiry_time } = response.data;
  const storedToken = {
    value: token,
    expiry: expiry_time,
    created_at: Date.now()
  };
  await storageSet("token", storedToken);
  return storedToken;
};
// Fetches the scores for a domain (authenticated via the cached API token),
// stores them under the domain key and returns them.
// Throws when the endpoint does not answer with HTTP 200.
// (Removed the try/catch that only re-threw the caught error, and the
// unnecessary `await` on the already-resolved response.data.)
export const fetchAndCacheScores = async domain => {
  const token = await getToken();
  const headers = {
    "Content-Type": "application/json",
    "X-Auth-Token": token.value
  };
  const response = await axios.post(`${API_URL}/scores`, { url: domain }, { headers: headers });
  if (response.status !== 200) {
    throw new Error("Unable to fetch scores");
  }
  const scores = response.data;
  await storageSet(domain, scores);
  return scores;
};
|
#!/bin/bash
#----------------------------------------------------
# Sample SLURM job script
#   for TACC Stampede2 KNL nodes
#
#   *** Serial Job on Normal Queue ***
#
# Last revised: 27 Jun 2017
#
# Notes:
#
#  -- Copy/edit this script as desired.  Launch by executing
#     "sbatch knl.serial.slurm" on a Stampede2 login node.
#
#  -- Serial codes run on a single node (upper case N = 1).
#       A serial code ignores the value of lower case n,
#       but slurm needs a plausible value to schedule the job.
#
#  -- For a good way to run multiple serial executables at the
#       same time, execute "module load launcher" followed
#       by "module help launcher".
#----------------------------------------------------
#SBATCH -J mash-n2v-200          # Job name
#SBATCH -o mash-n2v-200.o%j      # Name of stdout output file
#SBATCH -e mash-n2v-200.e%j      # Name of stderr error file
#SBATCH -p normal         # Queue (partition) name
#SBATCH -N 1              # Total # of nodes (must be 1 for serial)
#SBATCH -n 1              # Total # of mpi tasks (should be 1 for serial)
#SBATCH -t 00:10:00       # Run time (hh:mm:ss)
#SBATCH --mail-user=jklynch@email.arizona.edu
#SBATCH --mail-type=all   # Send email at begin and end of job
#SBATCH -A iPlant-Collabs # Allocation name (req'd if you have more than 1)
# Record the environment for post-mortem debugging of the job.
module list
pwd
date
# Activate the project virtualenv that provides the node2vec CLI.
module load python3
source $WORK/venv/n2v/bin/activate
# Embed the 200-neighbor similarity graph (weighted edges) with node2vec.
node2vec \
  --input ../all-imicrobe-dist_similarity_limit_200.edgelist \
  --output all-imicrobe-dist_similarity_limit_200.emb \
  --weighted
# ---------------------------------------------------
class Node
{
int data;
Node next;
Node(int d) {data = d; next = null; }
}
public class LinkedList
{
Node head;
// Function to reverse the linked list
Node reverse(Node node)
{
Node prev = null;
Node current = node;
Node next = null;
while (current != null) {
next = current.next;
current.next = prev;
prev = current;
current = next;
}
node = prev;
return node;
}
} |
<gh_stars>0
// Entry point: exercises the two functions declared in f.hpp.
#include "f.hpp"
int main() {
f1();
f2();
}
use PHPUnit\Framework\TestCase;
use PHPUnit\Framework\ExpectationFailedException;

/**
 * Tests the assertXPathCount() assertion helper against a small XML fixture.
 *
 * NOTE(review): assertXPathCount() is expected to be provided by a base
 * class or trait elsewhere in the suite -- confirm it is in scope.
 */
class XPathCountTest extends TestCase
{
    /** @var DOMDocument XML fixture with exactly three <element> nodes. */
    protected $xmlDocument;

    protected function setUp(): void
    {
        // Three <element> children, so //element matches exactly 3 nodes.
        $xmlString = '<root><element>1</element><element>2</element><element>3</element></root>';
        $this->xmlDocument = new DOMDocument();
        $this->xmlDocument->loadXML($xmlString);
    }

    public function testAssertXPathCount()
    {
        // Matching count: must not raise.
        // (Removed the unused instantiation of the undefined YourTestClass;
        // the assertion is invoked on $this, and the imported
        // ExpectationFailedException is now fully resolvable.)
        $this->assertXPathCount($this->xmlDocument, 3, '//element');

        // Non-matching count: must fail with PHPUnit's assertion exception.
        $this->expectException(ExpectationFailedException::class);
        $this->assertXPathCount($this->xmlDocument, 2, '//element');
    }
}
package io.cattle.platform.condition.deployment;
import io.cattle.platform.core.addon.DependsOn;
import io.cattle.platform.core.addon.metadata.InstanceInfo;
import io.cattle.platform.core.addon.metadata.ServiceInfo;
import io.cattle.platform.core.addon.metadata.StackInfo;
/**
 * Deployment condition tracking whether a service's declared dependencies
 * are met.
 */
public interface ServiceDependency {
    /**
     * Returns whether {@code dependsOn} is currently satisfied for the given
     * account and stack (and optionally a specific host).
     * NOTE(review): "satified" is a typo for "satisfied"; renaming would break
     * every implementor and caller, so it is documented rather than changed.
     * The {@code callback} is presumably invoked when the dependency state
     * changes later -- confirm with the implementations.
     */
    boolean satified(long accountId, long stackId, Long hostId, DependsOn dependsOn, Runnable callback);
    /** Records the current state of the given instance for its stack/service. */
    void setState(StackInfo stack, ServiceInfo service, InstanceInfo instance);
}
/**
* Contains classes for tool tip and call out bubbles.
*/
package io.opensphere.core.callout;
|
<gh_stars>1-10
package jua.ast;
import jua.evaluator.LuaRuntimeException;
import jua.evaluator.Scope;
import jua.objects.LuaNumber;
import jua.token.TokenOperator;
/** Binary "/" expression: evaluates both operands as Lua numbers and divides. */
public class ExpressionDivision extends ExpressionBinary {
  ExpressionDivision(TokenOperator token, Expression lhs, Expression rhs) {
    super(token, lhs, rhs);
  }

  /** Evaluates {@code lhs / rhs} in the given scope. */
  @Override
  public LuaNumber evaluate(Scope scope) throws LuaRuntimeException {
    double dividend = LuaNumber.valueOf(lhs.evaluate(scope)).getValue();
    double divisor = LuaNumber.valueOf(rhs.evaluate(scope)).getValue();
    return new LuaNumber(dividend / divisor);
  }
}
|
<reponame>sorah/binpkgbot
# Gem entry point: load the version constant and the CLI.
require 'binpkgbot/version'
require 'binpkgbot/cli'
# Replace the system redis config with the project's copy, then restart
# redis and show its status (errors from a missing old config are ignored).
rm /etc/redis/redis.conf 2> /dev/null
cp /var/www/svv-handball/redis.conf /etc/redis/
systemctl restart redis
systemctl status redis
<gh_stars>0
class User < ActiveRecord::Base
  include Clearance::User

  after_create :setup_timetable
  before_validation :set_default_name_if_blank

  has_many :timetables, dependent: :destroy

  validates :name, presence: true

  private

  # Create the initial timetable right after signup, but only when the user
  # has none yet. Fixed: the previous guard was `if self.timetables.count`,
  # and in Ruby every Integer (including 0) is truthy, so the guard never
  # prevented creation.
  def setup_timetable
    self.timetables.create(name: "<default>") if self.timetables.none?
  end

  # Give the record a placeholder name so the presence validation passes.
  def set_default_name_if_blank
    self.name = "<Name me>" if self.name.blank?
  end
end
|
#!/bin/sh
# Copy all vimiv icons to the correct icon directory.
DESTDIR=$1
# Quote expansions so a DESTDIR containing spaces cannot break install(1)
# or scatter files into unintended paths.
for i in 16 32 64 128 256 512; do
    install -Dm644 "icons/vimiv_${i}x${i}.png" "${DESTDIR}/usr/share/icons/hicolor/${i}x${i}/apps/vimiv.png"
done
install -Dm644 icons/vimiv.svg "${DESTDIR}/usr/share/icons/hicolor/scalable/apps/vimiv.svg"
|
import glob
import heapq
from typing import List, IO
import tempfile
import shutil
from constants import PAGE_SIZE
def _partition(arr: List[int], low: int, high: int):
i = (low-1)
pivot = arr[high]
for j in range(low, high):
if arr[j] <= pivot:
i = i+1
arr[i], arr[j] = arr[j], arr[i]
arr[i+1], arr[high] = arr[high], arr[i+1]
return (i+1)
def qsort(arr: List[int], low: int, high: int):
    """
    Sort arr in place between indices low and high (inclusive) via quicksort.

    Reference: https://www.geeksforgeeks.org/python-program-for-quicksort/
    """
    if len(arr) == 1:
        # A single-element list is trivially sorted.
        return arr
    if low < high:
        split = _partition(arr, low, high)
        qsort(arr, low, split - 1)
        qsort(arr, split + 1, high)
def read_page(fp: IO, memory: List[int], idx: int):
    """
    Load one page of integers from fp into memory starting at address idx.

    Arguments:
        fp: The input file (rewound to the start before reading).
        memory: The memory.
        idx: The memory address receiving the page.
    """
    fp.seek(0)
    values = map(int, fp.read().split())
    for offset, value in enumerate(values):
        memory[idx + offset] = value
def write_page(fp: IO, memory: List[int], idx: int):
    """
    Write the page stored at memory address idx out to fp.

    Arguments:
        fp: The output file (rewound to the start before writing).
        memory: The memory.
        idx: The memory address of the page to be written.
    """
    fp.seek(0)
    page = memory[idx:idx + PAGE_SIZE]
    fp.write(" ".join(str(value) for value in page))
def merge(memory: List[int],
          k: int,
          run_len: int,
          input_pages: List[IO],
          output_pages: List[IO]):
    """
    k-way merge.
    Merge pages, run_len is the number of pages in a sorted run.
    Read from input_pages and write to output_pages.

    Memory layout: the first k pages of `memory` are input buffers (one per
    run), the last page is the output buffer.
    """
    assert len(memory) == (k + 1) * PAGE_SIZE
    assert len(input_pages) == len(output_pages)
    h = heapq
    n = len(input_pages)
    # curs[g]: index of the next page to read for run g;
    # ends[g]: one past the last page belonging to run g.
    curs = [i for i in range(0, n, run_len)]
    ends = [min(n, i + run_len) for i in range(0, n, run_len)]
    assert len(curs) <= k
    assert len(ends) <= k
    assert len(curs) == len(ends)
    # used[g]: how many values of run g's input buffer were consumed;
    # PAGE_SIZE means the buffer is exhausted and a new page is needed.
    used = [PAGE_SIZE for i in range(len(curs))]
    groups = len(used)
    outpages = 0  # number of output pages written so far
    outused = 0   # fill level of the output buffer
    pq = []       # min-heap of (value, run index)
    def push(x): return h.heappush(pq, x)
    def pop(): return h.heappop(pq)
    # Push the first value of each group into pq
    for i in range(groups):
        if used[i] == PAGE_SIZE and curs[i] != ends[i]:
            read_page(input_pages[curs[i]], memory, i * PAGE_SIZE)
            used[i] = 0
            curs[i] += 1
        assert used[i] == 0
        push((memory[i * PAGE_SIZE], i))
        used[i] += 1
    # Repeatedly emit the global minimum, refilling the source run's buffer
    # from its next page when its current page is exhausted.
    while outpages < len(output_pages):
        [val, from_group] = pop()
        # Write val into output page
        memory[k * PAGE_SIZE + outused] = val
        outused += 1
        # Write output page out if full
        if outused == PAGE_SIZE:
            write_page(output_pages[outpages], memory, k * PAGE_SIZE)
            outused = 0
            outpages += 1
        # Read a new page if this page is empty
        if used[from_group] == PAGE_SIZE and \
                curs[from_group] != ends[from_group]:
            read_page(input_pages[curs[from_group]], memory,
                      from_group * PAGE_SIZE)
            used[from_group] = 0
            curs[from_group] += 1
        # Push a new value into page if there's a value
        if used[from_group] != PAGE_SIZE:
            push((memory[from_group * PAGE_SIZE + used[from_group]],
                  from_group))
            used[from_group] += 1
def phase1_sorting(memory: List[int], data_folder: str):
    """
    Phase 1 of external merge sort: read input pages B at a time (B = memory
    capacity in pages), sort each batch in memory, and spill every batch out
    as one temporary file per page. Returns the list of open temp files.
    """
    n = len(memory)
    B = n // PAGE_SIZE  # number of pages that fit in memory
    i = 0  # pages currently buffered in memory
    tempfiles: List[IO] = list()
    # sorted() makes the page order deterministic across filesystems.
    for filename in sorted(glob.glob(f"{data_folder}/*.txt")):
        with open(filename) as fin:
            read_page(fin, memory, i * PAGE_SIZE)
        if i + 1 == B:
            # Memory is full: sort the whole buffer and spill it page by page.
            memory.sort()
            for idx in range(0, len(memory), PAGE_SIZE):
                fp = tempfile.TemporaryFile("w+")
                write_page(fp, memory, idx)
                tempfiles.append(fp)
            i = 0
        else:
            i += 1
    # Spill the final partial batch, sorting only the occupied prefix.
    if i != 0:
        qsort(memory, 0, i * PAGE_SIZE - 1)
        for idx in range(0, i * PAGE_SIZE, PAGE_SIZE):
            fp = tempfile.TemporaryFile("w+")
            write_page(fp, memory, idx)
            tempfiles.append(fp)
    return tempfiles
def phase2_merging(memory: List[int], tempfiles: List[IO]):
    """
    Phase 2 of external merge sort: repeatedly (B-1)-way merge sorted runs of
    `run_len` pages into runs k times longer, replacing the entries of
    `tempfiles` in place, until a single sorted run remains.
    """
    n = len(memory)
    B = n // PAGE_SIZE   # pages that fit in memory
    num_pages = len(tempfiles)
    run_len = B          # phase 1 produced sorted runs of B pages
    k = B - 1            # merge fan-in (one page is reserved for output)
    while run_len < num_pages:
        # Merge each group of k consecutive runs into one longer run.
        for i in range(0, num_pages, k * run_len):
            start = i
            end = min(i + k * run_len, num_pages)
            pages = tempfiles[start:end]
            out_pages = [tempfile.TemporaryFile("w+")
                         for _ in range(len(pages))]
            merge(memory, k, run_len, pages, out_pages)
            # Swap the merged output files in for the consumed input files.
            for j in range(start, end):
                tempfiles[j].close()
                tempfiles[j] = out_pages[j - start]
        run_len *= k
def external_merge_sort(n: int, data_folder: str = "."):
    """
    External merge sort.
    Arguments:
        n: memory size
        data_folder: The folder containing the input text files

    Sorts the integers spread across the folder's *.txt page files and
    writes the result back as 1.txt, 2.txt, ... in the same folder.
    """
    memory = [0] * n
    # Phase 1: sorting
    tempfiles = phase1_sorting(memory, data_folder)
    # Phase 2: merging
    phase2_merging(memory, tempfiles)
    # Output and Close temporary files
    for i, fp in enumerate(tempfiles):
        fp.seek(0)
        with open(f"{data_folder}/{i+1}.txt", "w") as fout:
            shutil.copyfileobj(fp, fout)
        fp.close()
def main():
    # Read the memory size from stdin and sort the *.txt pages in the cwd.
    n = int(input("n = "))
    external_merge_sort(n)
if __name__ == "__main__":
    main()
|
import string

# Remove all ASCII punctuation from the sample text.
# Renamed the variable from `str`, which shadowed the built-in str type and
# would have broken any later use of str() in this module.
text = "This, is a test-string."
text = text.translate(str.maketrans('', '', string.punctuation))
print(text)
-- Customers who placed at least one order in the last month.
-- NOTE(review): a customer with several recent orders appears once per
-- order; add DISTINCT if a unique customer list is intended -- confirm.
SELECT c.customer_id, c.name
FROM customers c
INNER JOIN orders o
ON o.customer_id = c.customer_id
WHERE o.date_ordered > NOW() - INTERVAL 1 MONTH;
// Reads the total amount from the form and shows, per coin denomination,
// how many coins it would take to pay the WHOLE amount in that coin alone
// (1 real, 50c, 25c, 10c, 5c), plus the leftover fraction in centavos.
// NOTE(review): the multipliers (x2, x4, x10, x20) match "entire amount in
// one denomination", not a minimal change breakdown -- confirm intent.
function contar() {
    var valor = document.querySelector('input#valor_total')
    var total = document.querySelector('span.total')
    var umReal = document.querySelector('span#um_real')
    var cinquentaCentavos = document.querySelector('span#cinquenta_centavos')
    var vinteECinco = document.querySelector('span#vinte-e-cinco_centavos')
    var dezCentavos = document.querySelector('span#dez_centavos')
    var cincoCentavos = document.querySelector('span#cinco_centavos')
    var real = document.querySelector('p.real')
    var cinquenta = document.querySelector('p.cinquenta')
    var vinteeCinco = document.querySelector('p.vinteecinco')
    // Input value (implicit global; formatted with a decimal comma for display)...
    valorTotal = Number(valor.value)
    total.textContent = `${valorTotal.toFixed(2).replace('.', ',')}`
    // Simple per-coin conversion of the full amount...
    const conta1 = valorTotal * 2
    const conta2 = valorTotal * 4
    const conta3 = valorTotal * 10
    const conta4 = valorTotal * 20
    // Leftover fractional part of the amount, as a string like "0.25"...
    const quantidadeMais = (valorTotal - parseInt(valorTotal)).toFixed(2)
    // Coin counts...
    umReal.textContent = `${parseInt(valorTotal)}`
    cinquentaCentavos.textContent = `${parseInt(conta1)}`
    vinteECinco.textContent = `${parseInt(conta2)}`
    dezCentavos.textContent = `${parseInt(conta3)}`
    cincoCentavos.textContent = `${parseInt(conta4)}`
    // Extra (remainder) coin messages; hidden for exact or zero amounts...
    if (quantidadeMais == 0 || quantidadeMais == 0.50 || parseInt(valorTotal) == 0) {
        real.textContent = ' '
        cinquenta.textContent = ' '
        vinteeCinco.textContent = ' '
    } else {
        real.textContent = `E R$ ${quantidadeMais} Centavos.`
        if (quantidadeMais > 0.50) {
            formula = quantidadeMais - 0.50
            cinquenta.textContent = `E R$ ${(formula).toFixed(2)} Centavos.`
        }
        if (quantidadeMais > 0.75) {
            formula2 = quantidadeMais - 0.75
            vinteeCinco.textContent = `E R$ ${(formula2).toFixed(2)} Centavos.`
        }
    }
}
|
// Copyright 2019 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//+build !test
package ospatch
import (
"os"
"github.com/GoogleCloudPlatform/guest-logging-go/logger"
"github.com/GoogleCloudPlatform/osconfig/packages"
"golang.org/x/sys/windows/registry"
)
// DisableAutoUpdates disables system auto updates.
//
// It sets NoAutoUpdate=1 under the Windows Update policy registry key and,
// if the legacy GCE auto-updater script is present, removes its package.
func DisableAutoUpdates() {
	k, openedExisting, err := registry.CreateKey(registry.LOCAL_MACHINE, `SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU`, registry.ALL_ACCESS)
	if err != nil {
		logger.Errorf("error disabling Windows auto updates, error: %v", err)
		// Fixed: previously execution fell through after a CreateKey failure,
		// deferring Close on (and writing to) an invalid key handle.
		return
	}
	defer k.Close()
	if openedExisting {
		// Policy already set: nothing to do.
		val, _, err := k.GetIntegerValue("NoAutoUpdate")
		if err == nil && val == 1 {
			return
		}
	}
	logger.Debugf("Disabling Windows Auto Updates")
	if err := k.SetDWordValue("NoAutoUpdate", 1); err != nil {
		logger.Errorf("error disabling Windows auto updates, error: %v", err)
	}
	// Remove the legacy auto-updater package when its script is installed.
	if _, err := os.Stat(`C:\Program Files\Google\Compute Engine\tools\auto_updater.ps1`); err == nil {
		logger.Debugf("Removing google-compute-engine-auto-updater package")
		if err := packages.RemoveGooGetPackages([]string{"google-compute-engine-auto-updater"}); err != nil {
			logger.Errorf(err.Error())
		}
	}
}
|
package engine;
import database.ObjectFactory;
import database.firebase.TrackableObject;
import database.jsonhelpers.JSONDataFolders;
import engine.camera.Camera;
import engine.entities.Entity;
import engine.events.*;
import javafx.collections.FXCollections;
import javafx.collections.MapChangeListener;
import javafx.collections.ObservableMap;
import javafx.scene.layout.StackPane;
import util.ErrorDisplay;
import util.math.num.Vector;
import util.pubsub.PubSub;
import util.pubsub.messages.BGMessage;
import util.pubsub.messages.NonBGMessage;
import java.util.HashMap;
import java.util.Map;
/**
 * Owns the entity tree used by the editor/game: the root entity's children
 * are levels, each level's children are layers, and layers hold game
 * entities. Tracks the selected level/layer and publishes level sizes.
 */
public class EntityManager {
    private Entity root;
    private Map<String, Entity> levels; // level name -> level entity
    private Entity currentLevel;
    // Selected layer index: 0 = background layer, >0 = entity layer, -1 = all.
    private int mode = -1;
    // Currently selected background tile type (image identifier).
    private String BGType;
    // Grid cell size used when placing entities.
    private int grid;
    private ObjectFactory BGObjectFactory;
    private ObjectFactory layerFactory;
    private ObjectFactory levelFactory;
    // Level name -> (map width, map height); observable so panels can react.
    private ObservableMap<String, Vector> levelSize;
    private Camera camera;
    private String currentLevelName;
    // True while running the game, false while editing.
    private boolean isGaming;
    public EntityManager(Entity root, int gridSize, boolean gaming) {
        this.root = root;
        this.levels = new HashMap<>();
        this.grid = gridSize;
        this.isGaming = gaming;
        this.levelSize = FXCollections.observableMap(new HashMap<>());
        BGType = "";
        setRoot(root);
        //writeRootToDatabase(root);
    }
    /** Replaces the entity tree and rebuilds level bookkeeping from it. */
    public void setRoot(Entity root) {
        this.root = root;
        levels.clear();
        levelSize.clear();
        setupPubSub();
        setupFactories();
        addLevels();
    }
    // Registers each existing level from the root, or creates a default
    // "level 1" when the root has none; selects the first level.
    private void addLevels() {
        if (root.getChildren().isEmpty()) {
            addLevel("level 1", 2000, 2000);
            currentLevel = levels.get("level 1");
            currentLevelName = "level 1";
        } else {
            root.getChildren().forEach(e -> {
                levels.put((String) e.getProperty("levelname"), e);
                levelSize.put((String) e.getProperty("levelname"), new Vector(0.0 + (int) e.getProperty("mapwidth"), 0.0 + (int) e.getProperty("mapheight")));
                for (Entity each: e.getChildren()) {
                    new AddLayerEvent(each).fire(e);
                    recursiveAdd(each);
                }
                new MouseDragEvent(isGaming).fire(e);
                new MapSetupEvent().fire(e);
            });
            currentLevel = root.getChildren().get(0);
            currentLevelName = (String) currentLevel.getProperty("levelname");
        }
    }
    // Factories create entities from the default-user JSON templates.
    private void setupFactories() {
        BGObjectFactory = new ObjectFactory("BGEntity", JSONDataFolders.DEFAULT_USER_ENTITY);
        layerFactory = new ObjectFactory("layer", JSONDataFolders.DEFAULT_USER_ENTITY);
        levelFactory = new ObjectFactory("level", JSONDataFolders.DEFAULT_USER_ENTITY);
    }
    // Subscribes to placement messages published by the library panel.
    private void setupPubSub(){
        PubSub.getInstance().subscribe("ADD_BG", message -> {
            BGMessage bgMessage = (BGMessage) message;
            addBG(bgMessage.getPos());
        });
        PubSub.getInstance().subscribe("ADD_NON_BG", message -> {
            NonBGMessage nonBGMessage = (NonBGMessage) message;
            addNonBG(nonBGMessage.getPos(), nonBGMessage.getUID());
        });
    }
    // Re-attaches each descendant to its parent so their nodes are rebuilt.
    private void recursiveAdd(Entity layer){
        for(int i = 0; i < layer.getChildren().size(); i++){
            layer.getChildren().get(i).addTo(layer);
            recursiveAdd(layer.getChildren().get(i));
        }
    }
    public void setCamera(Camera c) {
        camera = c;
    }
    /**
     * add background block from the current selected BGType
     * BGtype is stored as a field inside manager, can be changed by library panel calling setBGType
     * @param pos
     */
    public void addBG(Vector pos) {
        // System.out.println(root.getChildren().size());
        // System.out.println(currentLevel.getChildren().get(0));
        if (mode == 0 && !isGaming) {
            Entity BGblock = BGObjectFactory.newObject();
            BGblock.addTo(currentLevel.getChildren().get(0));
            new InitialImageEvent(new Vector(grid, grid), pos).fire(BGblock);
            new ImageViewEvent(BGType).fire(BGblock);
        }
    }
    /**
     * add nonbg user-defined entity
     * @param pos
     * @param uid
     */
    public void addNonBG(Vector pos, String uid) {
        Entity entity = (Entity) TrackableObject.objectForUID(uid);
        addNonBGPrivate(pos, entity);
    }
    // Places the entity on the selected (non-background) layer, creating the
    // layer first when the selected index does not exist yet.
    private void addNonBGPrivate(Vector pos, Entity entity) {
        if (mode > 0 && !isGaming) {
            if (mode > currentLevel.getChildren().size() - 1) {
                addLayer();
            }
            entity.addTo(currentLevel.getChildren().get(mode));
            new InitialImageEvent(new Vector(grid, grid), pos).fire(entity);
            entity.substitute();
            //new MouseDragEvent(false).fire(entity);
            //the BGType here should not be applied to the image, mode should check for it
        }
    }
    /**
     * change background type for clicking
     * @param type
     */
    public void setMyBGType (String type) { BGType = type; }
    /**
     * select BG layer
     */
    public void selectBGLayer() {
        selectLayer(0);
    }
    /**
     * select any layer
     * @param layer
     */
    public void selectLayer(int layer) {
        mode = layer;
        currentLevel.getChildren().forEach(e -> deselect(e));
        viewOnly(currentLevel.getChildren().get(0));
        select(currentLevel.getChildren().get(layer));
    }
    /**
     * select all layer
     */
    public void allLayer() {
        mode= -1;
        currentLevel.getChildren().forEach(e -> viewOnly(e));
    }
    /**
     * clear entities on current layer
     */
    public void clearOnLayer() {
        if (mode == 0) currentLevel.getChildren().get(0).clearLayer();
        else if(mode == -1) currentLevel.getChildren().forEach(e -> e.clearLayer());
        else currentLevel.getChildren().get(mode).clearLayer();
    }
    // Makes a layer's entities visible and mouse-interactive.
    private void select(Entity layer) {
        TransparentMouseEvent viewTrans = new TransparentMouseEvent(false);
        ViewVisEvent viewVis = new ViewVisEvent(true);
        layer.getChildren().forEach(e -> {
            viewTrans.fire(e);
            viewVis.fire(e);
        });
    }
    // Hides a layer's entities and disables their mouse interaction.
    private void deselect(Entity layer) {
        TransparentMouseEvent viewTrans = new TransparentMouseEvent(true);
        ViewVisEvent viewVis = new ViewVisEvent(false);
        layer.getChildren().forEach(e -> {
            viewTrans.fire(e);
            viewVis.fire(e);
        });
    }
    // Keeps a layer visible but mouse-transparent (view-only).
    private void viewOnly(Entity layer) {
        TransparentMouseEvent viewTrans = new TransparentMouseEvent(true);
        ViewVisEvent viewVis = new ViewVisEvent(true);
        layer.getChildren().forEach(e -> {
            viewTrans.fire(e);
            viewVis.fire(e);
        });
    }
    /**
     * add layer to current level
     */
    public void addLayer() {
        addLayer(currentLevel);
    }
    // Removes the selected non-background layer (its node and its entity),
    // then falls back to the background layer.
    public void deleteLayer() {
        if (mode > 0) {
            ((StackPane)currentLevel.getNodes().getChildren().get(0)).getChildren().remove(currentLevel.getChildren().get(mode).getNodes());
            currentLevel.remove(currentLevel.getChildren().get(mode));
            mode = 0;
        }
    }
    // Creates a layer entity from the template and attaches it to the level.
    private void addLayer(Entity level) {
        Entity layer = layerFactory.newObject();
        layer.addTo(level);
        layer.setProperty("gridsize", grid);
        layer = layer.substitute();
        AddLayerEvent addLayer = new AddLayerEvent(layer);
        addLayer.fire(level);
    }
    /**
     * add new level
     * @param name
     * @param mapWidth
     * @param mapHeight
     */
    public void addLevel(String name, int mapWidth, int mapHeight) {
        if (levels.containsKey(name)) {
            new ErrorDisplay("Level Name", "Level name already exists").displayError();
            return;
        }
        Entity level = levelFactory.newObject();
        level.addTo(root);
        level.setProperty("gridsize", grid);
        level.setProperty("mapwidth", mapWidth);
        level.setProperty("mapheight", mapHeight);
        level = level.substitute();
        new MouseDragEvent(isGaming).fire(level);
        new MapSetupEvent().fire(level);
        levels.put(name, level);
        levelSize.put(name, new Vector(mapWidth, mapHeight));
        level.setProperty("levelname", name);
        level.setProperty("mapwidth", mapWidth);
        level.setProperty("mapheight", mapHeight);
        addLayer(level);
    }
    /**
     * Change current level
     *
     * @param level: new level
     */
    public Entity changeLevel(String level) {
        if (!levels.containsKey(level)) {
            new ErrorDisplay("Level Doesn't Exist", "Oops 😧 !! Level " + level + " does not exist").displayError();
            return currentLevel;
        }
        if (currentLevel.equals(levels.get(level))) {
            camera.changeLevel(currentLevel);
            return currentLevel;
        }
        currentLevel = levels.get(level);
        currentLevelName = level;
        camera.changeLevel(currentLevel);
        return currentLevel;
    }
    public Entity getCurrentLevel() {
        return currentLevel;
    }
    public String getCurrentLevelName() {
        return currentLevelName;
    }
    public Entity getRoot() {
        return root;
    }
    /** Registers a listener on the observable level-size map. */
    public void addMapListener(MapChangeListener<String, Vector> listener) {
        levelSize.addListener(listener);
    }
    // Renames a level in both lookup maps, tracking the current selection.
    public void changeLevelName(String oldName, String newName) {
        if (oldName.equals(currentLevelName)) currentLevelName = newName;
        Entity ent = levels.get(oldName);
        levels.remove(oldName);
        levels.put(newName, ent);
        Vector temp = levelSize.get(oldName);
        levelSize.remove(oldName);
        levelSize.put(newName, temp);
    }
    // NOTE(review): removes the level from the maps only; the level entity is
    // not detached from root here -- confirm callers handle that.
    public void deleteLevel(String name) {
        levels.remove(name);
        levelSize.remove(name);
    }
    public void setIsGaming(boolean gaming) {
        isGaming = gaming;
    }
    public boolean isGaming() { return isGaming; }
    public Map<String, Vector> getMap() {
        return levelSize;
    }
}
|
/*jshint -W053 */
import mx from '../../multiplex';
import { qmodule, qtest } from '../../qunit';
qmodule('equals');
// Null/undefined handling: equal only to themselves, never to each other.
qtest('basic equals', function (assert) {
    assert.ok(mx.equals(null, null), 'null values are equals');
    assert.ok(mx.equals(undefined, undefined), 'undefined values are equal');
    assert.ok(mx.equals(null, undefined) === false, 'null and undefined values are not equal');
    assert.ok(mx.equals(undefined, null) === false, 'undefined and null values are not equal');
});
// Numeric semantics, including the deliberate deviation that NaN equals NaN
// and that primitive numbers never equal boxed Number objects.
qtest('numeric equals', function (assert) {
    var POSITIVE_INFINITY = Number.POSITIVE_INFINITY || Infinity;
    var NEGATIVE_INFINITY = Number.NEGATIVE_INFINITY || -Infinity;
    var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 0x1FFFFFFFFFFFFF;
    var MIN_SAFE_INTEGER = Number.MIN_SAFE_INTEGER || -0x1FFFFFFFFFFFFF;
    assert.ok(mx.equals(NaN, NaN), 'NaN values are equal');
    assert.ok(mx.equals(0, NaN) === false, 'no numeric value equals NaN');
    assert.ok(mx.equals(NaN, 0) === false, 'NaN equals no numeric value');
    assert.ok(mx.equals(1, 1), 'simple numeric values are equal');
    assert.ok(mx.equals(0, 1) === false, 'simple numeric values not equal');
    assert.ok(mx.equals(0.1, 0.1), 'simple float numeric values are equal');
    assert.ok(mx.equals(0.1, 0.2) === false, 'simple float numeric values not equal');
    assert.ok(mx.equals(1e-100, 1e-100), 'high precision float number equals');
    assert.ok(mx.equals(1e-100, 5e-100) === false, 'high precision float number non equals');
    assert.ok(mx.equals(1, new Number(1)) === false, 'number value not equals number object');
    assert.ok(mx.equals(new Number(1), 1) === false, 'number object not equals number value');
    assert.ok(mx.equals(MAX_SAFE_INTEGER, MAX_SAFE_INTEGER), 'MAX_SAFE_INTEGER equals MAX_SAFE_INTEGER');
    assert.ok(mx.equals(MIN_SAFE_INTEGER, MIN_SAFE_INTEGER), 'MAX_SAFE_INTEGER equals MAX_SAFE_INTEGER');
    assert.ok(mx.equals(MAX_SAFE_INTEGER, MIN_SAFE_INTEGER) === false, 'MAX_SAFE_INTEGER not equals MIN_SAFE_INTEGER');
    assert.ok(mx.equals(MIN_SAFE_INTEGER, MAX_SAFE_INTEGER) === false, 'MIN_SAFE_INTEGER not equals MAX_SAFE_INTEGER');
    assert.ok(mx.equals(POSITIVE_INFINITY, POSITIVE_INFINITY), 'POSITIVE_INFINITY equals POSITIVE_INFINITY');
    assert.ok(mx.equals(NEGATIVE_INFINITY, NEGATIVE_INFINITY), 'NEGATIVE_INFINITY equals NEGATIVE_INFINITY');
    assert.ok(mx.equals(POSITIVE_INFINITY, NEGATIVE_INFINITY) === false, 'POSITIVE_INFINITY not equals NEGATIVE_INFINITY');
    assert.ok(mx.equals(NEGATIVE_INFINITY, POSITIVE_INFINITY) === false, 'NEGATIVE_INFINITY not equals POSITIVE_INFINITY');
    // Sweep powers of two across the 64-bit-double integer range.
    for (var i = 0; i < 64; i++) {
        var num1 = Math.pow(2, i);
        var num2 = Math.pow(2, i + 1);
        assert.ok(mx.equals(num1, num1), 'equal numbers: ' + num1);
        assert.ok(mx.equals(num1, num2) === false, 'non equal numbers: "' + num1 + '", "' + num2 + '"');
        assert.ok(mx.equals(num2, num1) === false, 'non equal numbers: "' + num2 + '", "' + num1 + '"');
    }
});
// String semantics: value equality for primitives; string values never
// equal boxed String objects or other types.
qtest('string equals', function (assert) {
    // NOTE(review): these two use mx.compare (ordering), not mx.equals --
    // presumably intentional (strings sort after null/undefined); confirm.
    assert.equal(mx.compare('', null), 1, 'any string value not equals null');
    assert.equal(mx.compare('', undefined), 1, 'any string value not equals undefined');
    assert.ok(mx.equals('', ''), 'empty string are equal');
    assert.ok(mx.equals('a', 'a'), 'equal 1 character string values');
    // Fixed: the next two assertions passed a stray 0 as the assert.ok
    // message argument (assert.ok takes (state, message)), which hid the
    // intended message text.
    assert.ok(mx.equals('string', 'string'), 'equal multi character string');
    assert.ok(mx.equals(new Array(10000).join('A'), new Array(10000).join('A')), 'equal long string');
    assert.ok(mx.equals('b', 'a') === false, 'non equal 1 character string');
    assert.ok(mx.equals('string b', 'string a') === false, 'non equal multi character string');
    assert.ok(mx.equals(new Array(10000).join('A') + 'b', new Array(10000).join('A') + 'a') === false, 'non equal long string');
    assert.ok(mx.equals('a', new String('a')) === false, 'string value not equals string object');
    assert.ok(mx.equals(new String('a'), 'a') === false, 'string object not equals string value');
    assert.ok(mx.equals('1', 1) === false, 'string not equals numeric value');
    assert.ok(mx.equals('true', true) === false, 'string not equals boolean value');
    assert.ok(mx.equals('[object Object]', {}) === false, 'string not equals object value');
    var characters = '0123456789abcdefghijklmnopqrstuvwxyz';
    for (var i = 0; i < characters.length - 1; i++) {
        var char1 = characters.charAt(i);
        var char2 = characters.charAt(i + 1);
        assert.ok(mx.equals(char1, char1), 'equal ASCII characters: "' + char1 + '"!');
        assert.ok(mx.equals(char1, char2) === false, 'non equal ASCII characters: "' + char1 + '", "' + char2 + '"!');
        assert.ok(mx.equals(char2, char1) === false, 'non equal ASCII character: "' + char2 + '", "' + char1 + '"!');
    }
});
qtest('boolean equals', function (assert) {
assert.ok(mx.equals(true, true), 'true values are equal');
assert.ok(mx.equals(false, false), 'false values are equal');
assert.ok(mx.equals(true, false) === false, 'true and false values not equal');
assert.ok(mx.equals(false, true) === false, 'false and true values not equal');
assert.ok(mx.equals(true, new Boolean(true)) === false, 'true and Boolean(true) values are not equal');
assert.ok(mx.equals(false, new Boolean(false)) === false, 'false and Boolean(false) values are not equal');
});
qtest('Date equals', function (assert) {
assert.ok(mx.equals(new Date(2016, 0, 1), new Date(2016, 0, 1)), 'simple Date values are equal');
assert.ok(mx.equals(new Date(2016, 0, 1), new Date(2017, 0, 1)) === false, 'simple Date values not equal');
for (var i = 1; i <= 365; i++) {
var date1 = new Date(2016, 0, i);
var date2 = new Date(2016, 0, i + 1);
assert.ok(mx.equals(date1, date1), 'equal dates: ' + date1);
assert.ok(mx.equals(date1, date2) === false, 'non equal dates: "' + date1 + '", "' + date2 + '"');
assert.ok(mx.equals(date2, date1) === false, 'non equal dates: "' + date2 + '", "' + date1 + '"');
}
});
qtest('Other types equals', function (assert) {
if (typeof Symbol === 'function') {
assert.ok(mx.equals(Symbol('test'), Symbol('test')) === false, 'simple Symbols are not equal');
}
else {
assert.ok(mx.equals(1, 1), 'dummy test to pass by earlier versions of node');
}
});
qtest('equals using __eq__ method', function (assert) {
function SimpleEquals(val) {
this._val = val;
this[mx.runtime.equalsSymbol] = function (obj) {
return this._val === obj._val;
};
}
assert.equal(mx.equals(new SimpleEquals(1), new SimpleEquals(1)), true, 'equal objects overriding equals method');
assert.equal(mx.equals(new SimpleEquals(1), new SimpleEquals(0)), false, 'non-equal objects overriding equals method');
assert.equal(mx.equals(new SimpleEquals(0), new SimpleEquals(1)), false, 'non-equal objects overriding equals method');
});
// Object literals are compared structurally (property by property), including
// nested literals; equality is evaluated at call time, not cached.
qtest('equals using object literals', function (assert) {
    assert.equal(mx.equals({}, {}), true, 'equal empty objects literals');
    assert.equal(mx.equals({ val: 1 }, { val: 1 }), true, 'equal objects literals with properties');
    assert.equal(mx.equals({ val: 1, sum: { name: 'A' } }, { val: 1, sum: { name: 'A' } }), true, 'equal object literals with complex object literals as properties');
    assert.equal(mx.equals({ val: 1 }, { val: 2 }), false, 'non equal objects literals with properties');
    assert.equal(mx.equals({ val: 1, sum: { name: 'A' } }, { val: 1, sum: { name: 'B' } }), false, 'non equal object literals with complex object literals as properties');
    // Shared function-valued properties do not break structural equality.
    var v1 = { val: 1, toString: Object.prototype.toString };
    var v2 = { val: 1, toString: Object.prototype.toString };
    assert.equal(mx.equals(v1, v2), true, 'object literal equality works at runtime, meaning property change after testing equality might result in non equality');
    // Mutating v1 afterwards must flip the result.
    v1.name = 1;
    assert.equal(mx.equals(v1, v2), false, 'adding new property to an object literal does not change its hash code, but ruins equality');
});
// Class instances (non-literals without a custom equals) are compared by
// reference, not structurally.
qtest('class type equals', function (assert) {
    function SimpleClass(val) {
        this._val = val;
    }
    var o1 = new SimpleClass(1);
    var o2 = new SimpleClass(1);
    var o3 = new SimpleClass(2);
    assert.equal(mx.equals(o1, o1), true, 'objects with the same reference are equal');
    assert.equal(mx.equals(o1, o2), false, 'identical objects are not equal');
    assert.equal(mx.equals(o2, o3), false, 'non identical objects are not equal');
});
|
def num_occurrences(text, substring):
    """Count occurrences of ``substring`` in ``text``, including overlaps.

    Unlike ``str.count`` (which skips past each match), every starting
    position is checked, so ``num_occurrences("aaaa", "aa")`` is 3.
    An empty substring matches at every position, giving ``len(text) + 1``.
    """
    window = len(substring)
    return sum(
        1
        for start in range(len(text) - window + 1)
        if text[start:start + window] == substring
    )
<gh_stars>1-10
from math import floor
import copy
import genes
import genetic_tree as gt
from config import * # Dicen que hacer esto es feo, pero me parece necesario
from aliment import Aliment, AlimentSource
class Procedures():
    """Shared life-cycle behaviours (being eaten, reproducing, dying) for
    vegetable entities living in a territory."""

    def be_eaten(self, territory):
        """Carve one portion of food off this plant and return it as an Aliment.

        The portion's nutrient content is the plant's stored nutrients split
        across its food units, plus the stored energy converted half into
        carbohydrates and half into fats. The plant shrinks and, if its size
        drops below zero, it dies.
        """
        units = self.size / par_size_to_food_ratio
        # Half of the energy share goes to carbohydrates, half to fats.
        energy_share = (self.energy / units) / 2
        portion = {
            'carbohydrates': floor((self.nutrients['carbohydrates'] / units) + energy_share / par_energy_by_carbohydrate),
            # If the protein cost of growing ever changes, add that value here.
            'proteins': floor(self.nutrients['proteins'] / units),
            'fats': floor((self.nutrients['fats'] / units) + energy_share / par_energy_by_fat),
            'vitamins': floor(self.nutrients['vitamins'] / units),
            'minerals': floor(self.nutrients['minerals'] / units),
        }
        # Reflect the loss of nutrients and size on this plant.
        for nutrient, amount in portion.items():
            self.nutrients[nutrient] -= amount
        self.size -= self.genome['constitution_growth_rate']
        if self.size < 0:
            self.to_die(territory)
        return Aliment(self.genetic_node.species, portion)

    def to_reproduce(self, territory):
        """Spawn `fertility` offspring into the territory's vegetable list."""
        for _ in range(self.genome['fertility']):
            offspring = copy.copy(self)
            # copy + re-__init__ keeps the concrete subclass type while
            # resetting state (if there is a better way, the original author
            # did not know it either).
            offspring.__init__(100, 100, 100, 100, 100, 100, 0, 1,
                               self.genetic_node,
                               genes.genomes_crossing(self.genome, self.genome))
            territory.elements['vegetables'].append(offspring)

    def to_die(self, territory):
        """Remove this plant from the territory and zero out its viability."""
        territory.elements['vegetables'].remove(self)
        self.energy = -9000
from typing import List, Optional
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from rich.table import Table
from rich import box
from rich.align import Align
class TaskStatus(Enum):
    """Enum for setting the state of tasks"""
    WAITING = 0   # not started yet
    RUNNING = 1   # currently executing
    DONE = 2      # finished successfully
    ERROR = -1    # finished with a failure
class ObjectStatus(Enum):
    """Enum for tracking the status of an object in the object store"""
    ERROR = -1          # a failure occurred while checking or transferring
    UNKNOWN = 0         # status has not been determined yet
    DOES_NOT_EXIST = 1  # confirmed absent from the store
    IN_PROGRESS = 2     # upload/download currently running
    EXISTS = 3          # confirmed present in the store
@dataclass
class ObjectState:
    """Dataclass for tracking the state of a file to be uploaded or downloaded"""
    path: Path              # local filesystem location of the object
    uri: str                # remote address of the object in the store
    status: ObjectStatus    # where the object currently is in its lifecycle
    msg: str                # human-readable detail accompanying the status
    size: Optional[int] = None  # size in bytes, when known
    is_dir: bool = False        # True when `path` refers to a directory
def get_task_status(tasks: List[str], status: List[TaskStatus]) -> Align:
    """Render a two-column emoji/status table for the given tasks.

    Args:
        tasks: task descriptions, one per row.
        status: ``TaskStatus`` for each task, paired positionally with
            ``tasks`` (``zip`` silently drops extra entries of the longer
            list).

    Returns:
        the table wrapped in a centered :class:`~rich.align.Align`
        renderable (not the bare ``Table`` — callers render the wrapper).
    """
    # One icon per known status; anything unexpected falls back to ":question:".
    icons = {
        TaskStatus.WAITING: ":hourglass:",
        TaskStatus.RUNNING: ":face_with_monocle:",
        TaskStatus.DONE: ":white_check_mark:",
        TaskStatus.ERROR: ":x:",
    }
    table = Table(box=box.SIMPLE_HEAVY, show_footer=False)
    table_centered = Align.center(table)
    table.add_column("Status", no_wrap=True)
    table.add_column("Tasks", no_wrap=True)
    for task, state in zip(tasks, status):
        table.add_row(icons.get(state, ":question:"), task)
    return table_centered
|
<reponame>ferclaverino/bots<gh_stars>0
'use strict';
/**
 * Express-style endpoint handler that redirects the client straight to the
 * camera's stream URL.
 */
function streamEndPoint(req, res, camera) {
  const { url } = camera;
  res.redirect(url);
}
module.exports = streamEndPoint;
|
# frozen_string_literal: true
module Clinvoice
  # Renders the invoice line-item table (header, items, blank footer row)
  # into a Prawn PDF document.
  module RenderItems
    # Draw the items table into +pdf+.
    #
    # @param pdf        [Prawn::Document] document to draw into
    # @param data_items [Array<Array>] rows of [description, unit_cost, quantity, line_total]
    def self.call(pdf, data_items)
      pdf.move_down 20
      pdf.table(items(data_items), width: pdf.bounds.width) do
        style(row(1..-1).columns(0..-1), padding: [4, 5, 4, 5], borders: [:bottom], border_color: 'dddddd')
        style(row(0), background_color: 'e9e9e9', border_color: 'dddddd', font_style: :bold)
        style(row(0).columns(0..-1), borders: %i[top bottom])
        style(row(0).columns(0), borders: %i[top left bottom])
        style(row(0).columns(-1), borders: %i[top right bottom])
        style(row(-1), border_width: 2)
        style(column(2..-1), align: :right)
        style(columns(0), width: 280)
      end
      pdf.move_down 10
    end

    # Build the full table body: header, formatted items, trailing blank row.
    def self.items(data_items)
      header = ['Description', 'Unit Cost', 'Quantity', 'Line Total']
      blank_line = [' ', ' ', ' ', ' ']
      [header] + formatted_items(data_items) + [blank_line]
    end

    # Format the money columns of each item row.
    def self.formatted_items(items)
      items.map do |item|
        [
          item[0],
          Clinvoice::Helper.format_currency(item[1]),
          item[2],
          Clinvoice::Helper.format_currency(item[3])
        ]
      end
    end

    # FIX: a bare `private` has no effect on singleton (`def self.`) methods,
    # so the helpers were accidentally public. `private_class_method` is the
    # correct way to hide them.
    private_class_method :items, :formatted_items
  end
end
|
<filename>tests/engine/FileWatcher/tst_FileWatcherTest.cpp
/**
* @author <NAME> <<EMAIL>>
*
* @section LICENSE
* See LICENSE for more informations.
*
*/
#include <QString>
#include <QtTest>
#include <QCoreApplication>
#include <QScopedPointer>
#include <QSignalSpy>
#include <QFile>
#include <QTextStream>
#include <engine/FileWatcher.h>
#include <TestCommon.h>
// Unit tests for FileWatcher: path handling, update-interval configuration,
// size-change signalling (via filesystem watcher and via timer) and the
// size-offset feature.
class FileWatcherTest : public QObject
{
    Q_OBJECT

public:
    FileWatcherTest();

private Q_SLOTS:
    void initTestCase();
    void cleanupTestCase();
    void testBaseImplementation();
    void testSetUpdateInterval();
    void testAddEmptyPath();
    void testSetGetFilePath();
    void testSizeChanged();
    void testFileCheckTimer();
    void testSizeOffset();
};
FileWatcherTest::FileWatcherTest()
{
}

// No suite-wide setup is required for these tests.
void FileWatcherTest::initTestCase()
{
}

// No suite-wide teardown is required for these tests.
void FileWatcherTest::cleanupTestCase()
{
}
void FileWatcherTest::testBaseImplementation()
{
FileWatcher watcher;
watcher.setFilePath("kdkdkdkd");
QVERIFY2(!watcher.filePath().isEmpty(), "Base implementation of setFilePath wasn't called");
}
void FileWatcherTest::testSetUpdateInterval()
{
FileWatcher watcher;
watcher.setUpdateInterval(34215);
QVERIFY2(watcher.updateInterval() == 34215, "Failed setting update interval. Maybe parent implementation not called.");
}
// Setting an empty path must be accepted and leave the stored path empty.
void FileWatcherTest::testAddEmptyPath()
{
    FileWatcher watcher;
    watcher.setFilePath("");
    QVERIFY2(watcher.filePath().isEmpty(), "Failed set empty filePath");
}
// setFilePath must round-trip an existing file's path through filePath().
void FileWatcherTest::testSetGetFilePath()
{
    QString filePath = TestCommon::generateExistingFileInPath(QStringLiteral("testSetGetFilePath.log"));
    FileWatcher watcher;
    watcher.setFilePath(filePath);
    QVERIFY2(watcher.filePath() == filePath, "Failed set/get filePath");
}
// Writing to the watched file must emit exactly one sizeChanged signal.
void FileWatcherTest::testSizeChanged()
{
    QString filePath = TestCommon::generateExistingFileInPath(QStringLiteral("testSizeChanged.log"));
    QScopedPointer<FileWatcher> fileWatcher(new FileWatcher);
    QSignalSpy spy(fileWatcher.data(), SIGNAL(sizeChanged(qint64, qint64)));
    fileWatcher->setFilePath(filePath);
    // Grow the file after the watcher is attached.
    QFile outFile(filePath);
    QVERIFY(outFile.open(QIODevice::WriteOnly));
    QTextStream stream(&outFile);
    QString testLine("This is the first line");
    stream << testLine;
    stream.flush();
    outFile.close();
    spy.wait();
    QVERIFY2(spy.count() == 1, "Signal for changed size wasn't emitted.");
}
// Verify that the periodic timer (not QFileSystemWatcher) detects size
// changes, both with the default interval and after setUpdateInterval().
void FileWatcherTest::testFileCheckTimer()
{
    // FIX: use a test-specific fixture name. The original reused
    // "testSizeChanged.log", so this test and testSizeChanged() wrote to the
    // same file and could interfere with each other.
    QString filePath = TestCommon::generateExistingFileInPath(QStringLiteral("testFileCheckTimer.log"));
    QScopedPointer<FileWatcher> fileWatcher(new FileWatcher);
    // Block QFileSystemWatcher's signal so the sizeChanged signal of FileWatcher will only be
    // emitted by the timer event
    fileWatcher->m_fileSystemWatcher->blockSignals(true);
    QSignalSpy spy(fileWatcher.data(), SIGNAL(sizeChanged(qint64, qint64)));
    fileWatcher->setFilePath(filePath);
    QFile outFile(filePath);
    QVERIFY(outFile.open(QIODevice::WriteOnly));
    QTextStream stream(&outFile);
    QString testLine("This is the first line");
    stream << testLine;
    stream.flush();
    outFile.close();
    spy.wait(5000);
    QVERIFY2(spy.count() == 1, "Signal for changed size wasn't emitted by timer event.");
    // Test after setting update interval explicitly. First kill timer to check if it is
    // started with setUpdateInterval.
    fileWatcher->killTimer(fileWatcher->m_timerId);
    fileWatcher->m_timerId = 0;
    spy.clear();
    fileWatcher->setUpdateInterval(100);
    QVERIFY(outFile.open(QIODevice::WriteOnly));
    testLine = QStringLiteral("This is the second line");
    stream << testLine;
    stream.flush();
    outFile.close();
    spy.wait(5000);
    QVERIFY2(spy.count() == 1, "Signal wasn't emitted, after setting update interval explicitly.");
}
// Verify that a pre-set size offset is reported as the "old size" in the
// first sizeChanged emission.
void FileWatcherTest::testSizeOffset()
{
    QString filePath = TestCommon::generateExistingFileInPath(QStringLiteral("testSizeOffset.log"));
    // Create a file with content so there is already an offset
    QFile outFile(filePath);
    QVERIFY(outFile.open(QIODevice::WriteOnly));
    QTextStream stream(&outFile);
    stream << "This is the first line\n" << "This is the second line\n";
    stream.flush();
    outFile.close();
    qint64 offsetFileSize = outFile.size();
    Q_ASSERT(offsetFileSize != 0);
    // Now simulate that the file gets new content before FileWatcher is initialized
    QVERIFY(outFile.open(QIODevice::Append | QIODevice::Text));
    stream << "This is the last line";
    stream.flush();
    outFile.close();
    Q_ASSERT(outFile.size() > offsetFileSize);
    QScopedPointer<FileWatcher> fileWatcher(new FileWatcher);
    fileWatcher->setSizeOffset(offsetFileSize);
    // Block QFileSystemWatcher's signal so the sizeChanged signal of FileWatcher will only be
    // emitted by the timer event
    fileWatcher->m_fileSystemWatcher->blockSignals(true);
    QSignalSpy spy(fileWatcher.data(), SIGNAL(sizeChanged(qint64, qint64)));
    fileWatcher->setFilePath(filePath);
    QVERIFY(outFile.open(QIODevice::Append | QIODevice::Text));
    stream << "This is the last line";
    stream.flush();
    outFile.close();
    spy.wait(5000);
    QVERIFY2(spy.count() == 1, "Signal for changed size wasn't emitted by timer event.");
    QVariantList parameters = spy.at(0);
    Q_ASSERT(parameters.count() == 2);
    // FIX: the signal parameters are qint64; toInt() silently truncated them
    // to 32 bits (wrong for files > 2 GiB). Use toLongLong() instead.
    qint64 oldSize = parameters.at(0).toLongLong();
    qint64 newSize = parameters.at(1).toLongLong();
    QVERIFY2(oldSize == offsetFileSize, "Offset size wasn't used as old size");
    QVERIFY2(newSize == outFile.size(), "New size isn't file's new size");
}
QTEST_MAIN(FileWatcherTest)
#include "tst_FileWatcherTest.moc"
|
<reponame>FernanddoSalas/blog-api
"""Users Permissions."""
# Django REST Framework
from rest_framework.permissions import BasePermission
class IsAccountOwner(BasePermission):
    """Object-level permission granting access only to the account's owner."""

    def has_object_permission(self, request, view, obj):
        """Return True only when the requesting user *is* the object.

        NOTE(review): this compares the user to the object itself, so it is
        intended for views whose objects are user instances — confirm usage.
        """
        return request.user == obj
|
/*******************************************************************************
* Copyright 1993-2008 NVIDIA Corporation. All rights reserved.
*
* NOTICE TO USER:
*
* This source code is subject to NVIDIA ownership rights under U.S. and
* international Copyright laws.
*
* This software and the information contained herein is PROPRIETARY and
* CONFIDENTIAL to NVIDIA and is being provided under the terms and conditions
* of a Non-Disclosure Agreement. Any reproduction or disclosure to any third
* party without the express written consent of NVIDIA is prohibited.
*
* NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE CODE FOR
* ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF
* ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOURCE CODE,
* INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, AND
* FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL NVIDIA BE LIABLE FOR
* ANY SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOURCE CODE.
*
* U.S. Government End Users. This source code is a "commercial item" as that
* term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting of "commercial
* computer software" and "commercial computer software documentation" as such
* terms are used in 48 C.F.R. 12.212 (SEPT 1995) and is provided to the U.S.
* Government only as a commercial end item. Consistent with 48 C.F.R.12.212
* and 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all U.S. Government
* End Users acquire the source code with only those rights set forth herein.
*
******************************************************************************/
#include <sstream>
#include <oac/TextLogger.h>
#include <assert.h>
#ifdef WIN32
# pragma warning(disable:4018)
# pragma warning(disable:4996)
#endif
using namespace std;
// Construct a text logger that word-wraps output at `max_columns` characters.
oacTextLogger::oacTextLogger(int max_columns)
{
    pMaxColumns = max_columns;
}

oacTextLogger::~oacTextLogger()
{
}
// Return the file-name component of `cmd`: the text after the last '/' or
// '\\' separator. Falls back to the whole string when there is no separator,
// when the separator is the final character, or when it sits at index 0.
static const char *Basename(const char *cmd)
{
    const int len = (int)strlen(cmd);
    if(len <= 0)
        return cmd;
    int i = len - 1;
    while(i > 0)
    {
        const char c = cmd[i];
        if(c == '/' || c == '\\')
        {
            if(i + 1 < len)
                return cmd + i + 1;
            break;  // trailing separator: keep the full string
        }
        --i;
    }
    return cmd;
}
// Format a "file, line: " location prefix for log output, or an empty
// string when no file name is available.
static string FormatFileLine(const string &file, int line)
{
    if(file.empty())
        return(string(""));
    ostringstream out;
    out << Basename(file.c_str()) << ", " << line << ": ";
    return(out.str());
}
// True for the plain-space and tab characters the word-wrapper treats as
// breakable white space (newlines are handled separately).
static bool IsWhiteSpace(char c)
{
    switch(c)
    {
    case ' ':
    case '\t':
        return true;
    default:
        return false;
    }
}
// Choose where to break `str` for a wrapped output line of width
// `last_index`: prefer a '\n' inside the window, then the last run of white
// space (but only if it keeps >= 75% of the window filled), otherwise
// hard-break exactly at `last_index`.
static char *FindNewLinePoint(char *str, size_t last_index)
{
    assert(last_index > 0);
    char *Ptr = str + last_index;
    // An explicit newline inside the window wins: break just after it.
    for(int i=0; i < last_index; ++i)  // NOTE(review): signed/unsigned compare (int vs size_t)
        if(str[i] == '\n')
            return(str + i + 1);
    // Scan backwards from the window edge for white space to break on.
    Ptr = str + last_index;
    for(; Ptr > str && !IsWhiteSpace(*Ptr); --Ptr);
    if(IsWhiteSpace(*Ptr))
    {
        // Back up over the entire run of white space.
        for(; Ptr - 1 > str && IsWhiteSpace(Ptr[-1]); --Ptr);
        // Soft-break only if at least 750/1000 of the window is used.
        if(((Ptr - str) * 1000) / last_index >= 750)
            return(Ptr);
    }
    // No acceptable soft break: hard-break at the limit.
    return(str + last_index);
}
// Word-wrap `src` into `dst` so no output line exceeds `max_columns`
// characters. Every line after the first is prefixed with `indent`; the
// first chunk is indented by whatever `dst` already contains.
static ostringstream &FormatParagraph(const string &src,
                                      const string &indent,
                                      ostringstream &dst,
                                      int max_columns)
{
    assert(max_columns > indent.size());  // NOTE(review): int vs size_t compare
    // Mutable copy so chunks can be temporarily NUL-terminated in place.
    // NOTE(review): pre-C++11 std::string::data() is not guaranteed to be
    // NUL-terminated; c_str() would be the safe argument for strdup — confirm
    // the target toolchain.
    char *TmpSrc = strdup(src.data());
    size_t SrcLen = src.size();
    bool FirstFlag = true;
    char *Ptr = TmpSrc;
    while(SrcLen > 0)
    {
        // Skip the white space that precedes the next chunk.
        while(SrcLen > 0 && IsWhiteSpace(*Ptr))
        {
            Ptr++;
            SrcLen--;
        }
        if(SrcLen == 0)
            break;
        size_t IndentSize = indent.size();
        if(FirstFlag)
            IndentSize = dst.str().size();
        else
            dst << indent;
        // The remainder fits on one line: emit it and stop.
        if(IndentSize + SrcLen <= max_columns)
        {
            dst << Ptr;
            break;
        }
        // Wrap: temporarily terminate the chunk at the break point, emit it,
        // then restore the overwritten byte and advance.
        char *NewLinePoint = FindNewLinePoint(Ptr, max_columns - IndentSize);
        char Tmp = *NewLinePoint;
        *NewLinePoint = 0;
        size_t PtrLen = strlen(Ptr);
        dst << Ptr;
        assert(PtrLen > 0);
        if(Ptr[PtrLen - 1] != '\n')
            dst << "\n";
        *NewLinePoint = Tmp;
        SrcLen -= (NewLinePoint - Ptr);
        Ptr = NewLinePoint;
        FirstFlag = false;
    }
    free(TmpSrc);
    return(dst);
}
// Format one log entry and hand the wrapped text to EmitLine(): build an
// indentation prefix from the group nesting level, prepend a type-specific
// header, then word-wrap everything to pMaxColumns.
void oacTextLogger::Emit(const oacLog::Entry &entry, int group_level)
{
    ostringstream Str;
    string IndentStr(".");
    int i;
#if 0
    char Buf[1024];
    sprintf(Buf, "%d: ", group_level);
    IndentStr = Buf;
#endif
    // Two dots per nesting level, then a space.
    for(i=0; i < group_level; ++i)
        IndentStr += "..";
    IndentStr += " ";
    Str << IndentStr;
    // Header depends on the entry type; IndentStr also grows so that
    // continuation lines align under the header.
    switch(entry.Type)
    {
    case oacLog::TYPE_WARNING:
        Str << "Warning: " << entry.Msg;
        IndentStr += " ";
        break;
    case oacLog::TYPE_ERROR:
        Str << "ERROR: " << FormatFileLine(entry.File, entry.Line);
        IndentStr += " ";
        break;
    case oacLog::TYPE_TEST_COND:
        // Passing test conditions are not printed at all.
        if(entry.TestPassed)
            return;
        Str << "FAILED: ";
        IndentStr += " !! ";
        Str << FormatFileLine(entry.File, entry.Line)
            << entry.TestCond << "\n" << entry.Msg;
        break;
    case oacLog::TYPE_MESSAGE:
        Str << entry.Msg;
        IndentStr += " ";
        break;
    case oacLog::TYPE_GROUP_START:
        Str << "Test group: \"" << oacLog::CurrentGroupName() << "\"";
        IndentStr += " ";
        break;
    case oacLog::TYPE_GROUP_END:
        Str << oacLog::TestsSummary();
        break;
    default:
        assert("Shouldn't be here!!!" == NULL);
    }
    ostringstream FormattedStr;
    FormatParagraph(Str.str(),
                    IndentStr,
                    FormattedStr,
                    pMaxColumns);
    EmitLine(FormattedStr.str().data());
    // Placeholder for type-specific post-processing (currently none).
    switch(entry.Type)
    {
    case oacLog::TYPE_GROUP_START:
        break;
    }
}
|
<gh_stars>0
package io.hnfmr.tagless
import scala.language.higherKinds
import cats._
import cats.implicits._
import cats.data.Const
object OptimizingFinalTagless extends App {

  // Minimal key-value store algebra in tagless-final style.
  trait KVStore[F[_]] {
    def get(key: String): F[Option[String]]
    def put(key: String, a: String): F[Unit]
  }

  // Sequential puts around a parallel pair of gets whose results are
  // combined with the String semigroup (|+|).
  def program0[M[_]: FlatMap, F[_]](a: String)(K: KVStore[M])(implicit P: Parallel[M, F]) =
    for {
      _ <- K.put("A", a)
      x <- (K.get("B"), K.get("C")).parMapN(_ |+| _)
      _ <- K.put("X", x.getOrElse("-"))
    } yield x

  // This is essentially a Semigroupal Tuple4 mapped into F
  def program1[F[_] : Apply](K: KVStore[F]): F[List[String]] =
    (K.get("Cats"), K.get("Dogs"), K.put("Mice", "42"), K.get("Cats"))
      .mapN((f, s, _, t) => List(f, s, t).flatten)

  // Static-analysis interpreter: instead of running effects, Const
  // accumulates (keys read, key/value pairs written). The `?` is
  // kind-projector syntax for a partially-applied type.
  def analysisInterpreter: KVStore[Const[(Set[String], Map[String, String]), ?]] =
    new KVStore[Const[(Set[String], Map[String, String]), ?]] {
      def get(key: String) = Const((Set(key), Map.empty))
      def put(key: String, a: String) = Const((Set.empty, Map(key -> a)))
    }

  // Here the F[_] is Const[(Set[String], Map[String, String]), ?]
  // The result of Tuple4 Semigroup4 is Set("Cats", "Dogs"), due to analysisInterpreter having KVStore with F[_] defined above
  // But this is all done within the F[_] context and getConst
  println(program1(analysisInterpreter).getConst)

  // Optimisation: replay the statically-collected puts first, then batch the
  // de-duplicated gets.
  def optimizedProgram[F[_]: Applicative](K: KVStore[F]): F[List[String]] = {
    val (gets, puts) = program1(analysisInterpreter).getConst
    puts.toList.traverse { case (k, v) => K.put(k, v) } *> gets.toList.traverse(K.get).map(_.flatten)
  }

  // Monadic continuation layered on top of the optimised program.
  def monadicProgram[F[_] : Monad](K: KVStore[F]): F[Unit] = for {
    list <- optimizedProgram(K)
    _ <- K.put("Birds", list.headOption.getOrElse("128"))
  } yield ()
}
package com.umd.ece552.utils;
/**
*
* @author ingegarcia
*
*/
/**
 * A bidirectional-aware cursor over a fixed collection, with position and
 * size queries.
 *
 * <p>NOTE(review): this interface shares its simple name with
 * {@code java.util.Iterator}; when both are imported, one must be fully
 * qualified.
 */
public interface Iterator {
    /** @return {@code true} when another element is available. */
    public boolean hasNext();
    /** @return the next element, advancing the cursor. */
    public Object next();
    /** @return {@code true} when the cursor is at the last element. */
    public boolean isLast();
    /** @return {@code true} when the cursor is at the first element. */
    public boolean isFirst();
    /** @return the total number of elements being iterated. */
    public int size();
    /** @return the cursor's current index. */
    public int getIndex();
    /** Resets the cursor back to the start. */
    public void restart();
}
|
<reponame>cugg/BusinessParameters
package be.kwakeroni.evelyn.client;
import be.kwakeroni.evelyn.model.DatabaseAccessor;
import be.kwakeroni.evelyn.model.Event;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BinaryOperator;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * A {@link ClientTable} decorator that lazily materialises the delegate's
 * entities into an in-memory id-to-entity map and keeps that map in sync as
 * new events are appended.
 *
 * <p>Cache loading and mutation are {@code synchronized}; reads go through
 * the loaded map.
 */
public final class CachingClientTable<E> implements ClientTable<E> {

    private final ClientTable<E> delegate;
    /** Resolves an event's operation name to the operation that applies it to an entity. */
    private final Function<String, ? extends ClientOperation<E>> operationMap;
    /** Extracts the cache key (object id) from an entity. */
    private final Function<? super E, String> idGetter;
    /** Lazily loaded id-to-entity map; {@code null} until first access. */
    private Map<String, E> cache;

    public CachingClientTable(DatabaseAccessor accessor, Function<String, ? extends ClientOperation<E>> operationMap, Function<E, String> idGetter) {
        this(new DefaultClientTable<>(accessor, operationMap), operationMap, idGetter);
    }

    CachingClientTable(ClientTable<E> delegate, Function<String, ? extends ClientOperation<E>> operationMap, Function<? super E, String> idGetter) {
        this.delegate = delegate;
        this.operationMap = operationMap;
        this.idGetter = idGetter;
    }

    @Override
    public String getName() {
        return delegate.getName();
    }

    /** Streams all cached entities, loading the cache on first use. */
    @Override
    public Stream<E> findAll() {
        return getCache().values().stream();
    }

    @Override
    public E findById(String id) {
        return getCache().get(id);
    }

    /** Appends the event through the delegate, then folds it into the cache. */
    @Override
    public synchronized Event append(String user, String operation, String objectId, String data) {
        ensureCacheIsLoaded();
        Event event = this.delegate.append(user, operation, objectId, data);
        applyEvent(event);
        return event;
    }

    private Map<String, E> getCache() {
        ensureCacheIsLoaded();
        return cache;
    }

    // Loads the cache exactly once from the delegate's full scan; duplicate
    // ids are rejected via noDuplicates().
    private synchronized void ensureCacheIsLoaded() {
        if (cache == null) {
            cache = this.delegate.findAll().collect(Collectors.toMap(this.idGetter, Function.identity(), noDuplicates(), HashMap::new));
        }
    }

    // Applies one event to the entity it targets (null for a new entity) and
    // stores the result back in the cache.
    private synchronized void applyEvent(Event event) {
        E entity = operationMap.apply(event.getOperation()).operate(findById(event.getObjectId()), event);
        getCache().put(event.getObjectId(), entity);
    }

    // Merge function used at load time to fail fast on duplicate ids.
    private BinaryOperator<E> noDuplicates() {
        return (e1, e2) -> {
            throw new IllegalStateException("Duplicate key: " + this.idGetter.apply(e1));
        };
    }
}
|
// https://www.codechef.com/DEC19A/problems/CHFRAN
#include <bits/stdc++.h>
using namespace std;
using ii = tuple<int, int>;
using vii = vector<ii>;
using qii = priority_queue<ii, vii, greater<ii>>;
// For each test case: read n intervals; if they all share a common point
// print -1, otherwise sweep the endpoints to find the minimum number of
// intervals that must be removed so the rest split into two groups.
int main() {
    int t, n, l, r;
    cin >> t;
    while (t--) {
        cin >> n;
        // m: smallest right endpoint seen; M: largest left endpoint seen.
        int m = 2e9, M = 0;
        qii q;
        for (int i = 0; i < n; i++) {
            cin >> l >> r;
            if (l > M) M = l;
            if (r < m) m = r;
            // Min-heap of endpoints: -1 marks an opening, 1 a closing.
            q.push({l, -1});
            q.push({r, 1});
        }
        // All intervals overlap at one point: no valid split exists.
        // (FIX: removed a dead vector `a` that was declared and sorted but
        // never populated or read.)
        if (m >= M) {
            cout << "-1\n";
            continue;
        }
        int c = 0;
        m = 2e9;
        // Sweep endpoints left of M, tracking the running count of open
        // intervals; the minimum right after a closing is the answer.
        while (!q.empty()) {
            int x, y;
            tie(x, y) = q.top();
            q.pop();
            if (x >= M) break;
            y = -y;
            c += y;
            if (y < 0 && c < m) m = c;
        }
        cout << m << '\n';
    }
}
|
import os
def use_bigquery() -> bool:
    """Return True when all BigQuery-related environment variables are set.

    FIX: the original was annotated ``-> bool`` but returned the result of a
    chained ``and`` — the last environment-variable string, or ``None``.
    ``all(...)`` makes the return value an actual bool while preserving the
    truthiness callers relied on.
    """
    required = (
        'BIGQUERY',
        'GOOGLE_APPLICATION_CREDENTIALS',
        'BIGQUERY_LOCATION',
        'BIGQUERY_DATASET',
    )
    return all(os.environ.get(name) for name in required)
import pytest
import time
from selenium import webdriver
@pytest.fixture
def driver(request):
    """Provide a Chrome WebDriver that is quit automatically after the test."""
    wd = webdriver.Chrome()
    # Ensure the browser is closed even when the test fails.
    request.addfinalizer(wd.quit)
    return wd
def test_login_admin(driver):
    """Log in to the litecart admin panel with the default credentials."""
    driver.get('http://localhost/litecart/admin/')
    driver.find_element_by_name('username').send_keys('admin')
    # NOTE(review): '<PASSWORD>' looks like a scrubbed placeholder — substitute
    # the real password before running this test.
    driver.find_element_by_name('password').send_keys('<PASSWORD>')
    driver.find_element_by_name('login').click()
    # NOTE(review): a fixed sleep is flaky; an explicit WebDriverWait on a
    # post-login element would be more robust.
    time.sleep(1)
|
#!/usr/bin/env bash
# Vagrant provisioning step: enable the uploaded nginx vhosts, restart nginx
# and print connection hints for the developer.

# link the uploaded nginx config to enable it
echo -e "\e[0m--"
rm -rf /etc/nginx/sites-enabled/*
for vhost in default pma; do
    ln -sf /etc/nginx/sites-available/$vhost /etc/nginx/sites-enabled/020-$vhost
    test -L /etc/nginx/sites-enabled/020-$vhost && echo -e "\e[0mLinking nginx $vhost config: \e[1;32mOK\e[0m" || echo -e "Linking nginx $vhost config: \e[1;31mFAILED\e[0m";
done

# restart nginx
echo -e "\e[0m--"
service nginx restart

# Determine the public ip address and show a message
# NOTE(review): parsing `ifconfig` output is brittle and ifconfig is
# deprecated on modern distros; `ip -4 addr show eth1` would be sturdier —
# confirm availability in the box before changing.
IP_ADDR=`ifconfig eth1 | grep inet | grep -v inet6 | awk '{print $2}' | cut -c 6-`
PROJECT_NAME="cologne-phonetic"
echo -e "\e[0m--\nAdd to your /etc/hosts:\n\n\e[1;31m$IP_ADDR\twww.$PROJECT_NAME.de pma.$PROJECT_NAME.de\e[0m\n"
echo -e "\e[0m--\nRun \e[1;31mvagrant ssh\e[0m"
echo -e "\e[0m--\nRun \e[1;31mphp /vagrant/build/tools/composer.phar -d=/vagrant update -o -v\e[0m"
echo -e "\e[0m--\nBrowse application under \e[1;31mhttp://www.$PROJECT_NAME.de\e[0m"
echo -e "\e[0m--\nBrowse MySQL under \e[1;31mhttp://pma.$PROJECT_NAME.de\e[0m"
|
# Evaluate the 1024+0+512 N-VB-ADJ checkpoint on WikiText-103 validation data,
# keeping only named entities in the first third/sixth of the context and
# scoring perplexity on the last sixth.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-N-VB-ADJ/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-N-VB-ADJ/13-512+512+512-NER-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_named_entities_first_third_sixth --eval_function last_sixth_eval
#!/bin/sh
# Install YADR dotfiles on first run; do nothing if already present.
if [ ! -d "$HOME/.yadr" ]; then
  echo "Installing YADR for the first time"
  git clone --depth=1 https://github.com/stinoga/dotfiles-1.git "$HOME/.yadr"
  # FIX: guard the cd — if the clone failed, the original would run
  # `rake install` in whatever directory the script started from.
  cd "$HOME/.yadr" || exit 1
  [ "$1" = "ask" ] && export ASK="true"
  rake install
else
  echo "YADR is already installed"
fi
|
import { SearchResult } from "../models/home/reserves-lib";
// Empty search result used as a fixture for the reserves-library view.
// NOTE(review): the `as SearchResult` assertion bypasses full structural
// checking — a `: SearchResult` annotation would be stricter; confirm the
// type's required fields before switching.
export const MOCK_RESERVES_LIB_SEARCH = {
  bookCount: 0,
  pageCount: 0,
  data: [],
} as SearchResult;
# Get a high-level overview of your instances, optionally limited by VPC
# Get a high-level overview of your instances, optionally limited by VPC.
# Usage: instancenames [vpc-id]
function instancenames () {
    filter=""
    # FIX: quote "$1" — the original `[ ! -z $1 ]` breaks when the argument
    # is unset or contains spaces. POSIX-portable -n test.
    if [ -n "${1:-}" ]; then
        filter="Name=vpc-id,Values=${1}"
    fi
    # NOTE: $filter is intentionally left unquoted — when empty it must
    # expand to nothing at all; a quoted empty string would hand aws an
    # empty --filters argument and fail.
    aws ec2 describe-instances --filters $filter --query 'Reservations[*].Instances[*].{Name:Tags[?Key==`Name`].Value[]
    | [0],ID:InstanceId,State:State.Name,"Private IP":PrivateIpAddress, VPC:VpcId, AZ:Placement.AvailabilityZone}' --output table
}
#!/usr/bin/env bash
# Source: https://github.com/tttapa/RPi-Cpp-Toolchain/blob/103eb26a24a7e03b9672638ece72881311a6c9df/toolchain/toolchain.sh
# Fail fast on any error and run relative to this script's directory.
set -e
cd "$(dirname "${BASH_SOURCE[0]}")"
# Print the command-line usage (supported boards and options) for this script.
function print_usage {
    echo
    echo "Usage"
    echo "    $0 <board> [--push] [--pull] [--build-toolchain] [--export] [--export-toolchain]"
    echo
    echo "Boards"
    echo "    rpi"
    echo "        Raspberry Pi 1 and Zero, without development tools"
    echo
    echo "    rpi-dev"
    echo "        Raspberry Pi 1 and Zero, with development tools"
    echo
    echo "    rpi3-armv8"
    echo "        Raspberry Pi 3, 32-bit, without development tools"
    echo
    echo "    rpi3-armv8-dev"
    echo "        Raspberry Pi 3, 32-bit, with development tools"
    echo
    echo "    rpi3-aarch64"
    echo "        Raspberry Pi 3, 64-bit, without development tools"
    echo
    echo "    rpi3-aarch64-dev"
    echo "        Raspberry Pi 3, 64-bit, with development tools"
    echo
    echo "Options"
    echo "    --push"
    echo "        After building, push the resulting image to Docker Hub"
    echo
    echo "    --pull"
    echo "        Don't build the image locally, pull everything from Docker Hub"
    echo
    echo "    --build-toolchain"
    echo "        Build the toolchains locally instead of pulling them from Docker Hub"
    echo
    echo "    --export"
    echo "        Export the toolchain, sysroot and staging area to your computer"
    echo
    echo "    --export-toolchain"
    echo "        Export only the toolchain to your computer"
    echo
}
# Check the number of arguments
if [ "$#" -lt 1 ]; then
    echo
    echo "Build or pull the Raspberry Pi GCC toolchain and cross-compiled libraries."
    print_usage
    exit 0
fi

# Check the board name and map it onto the toolchain triple (target), the CPU
# architecture, the board family, and whether development tools are included.
name="$1"
case "$name" in
    rpi)
        target=armv6-rpi-linux-gnueabihf
        arch=armv6
        board=rpi
        dev=nodev
        ;;
    rpi-dev)
        target=armv6-rpi-linux-gnueabihf
        arch=armv6
        board=rpi
        dev=dev
        ;;
    rpi3-armv8)
        target=armv8-rpi3-linux-gnueabihf
        arch=armv8
        board=rpi3
        dev=nodev
        ;;
    rpi3-armv8-dev)
        target=armv8-rpi3-linux-gnueabihf
        arch=armv8
        board=rpi3
        dev=dev
        ;;
    rpi3-aarch64)
        target=aarch64-rpi3-linux-gnu
        arch=aarch64
        board=rpi3
        dev=nodev
        ;;
    rpi3-aarch64-dev)
        target=aarch64-rpi3-linux-gnu
        arch=aarch64
        board=rpi3
        dev=dev
        ;;
    *) echo; echo "Unknown board option '$1'"; print_usage; exit 1 ;;
esac
# Parse the other options
shift
build_toolchain=false
build=true
push=false
export=false
export_toolchain=false
docker_build_cpuset=
while (( "$#" )); do
    case "$1" in
        --push) push=true ;;
        --pull) build_toolchain=false; build=false ;;
        --build-toolchain) build_toolchain=true ;;
        --export) export=true ;;
        --export-toolchain) export_toolchain=true; build=false ;;
        --cpuset-cpus=*) docker_build_cpuset="$1" ;;
        *) echo; echo "Unknown option '$1'"; print_usage; exit 1 ;;
    esac
    shift
done

# Add -dev to tag if development build was selected; also pick the Docker
# build stage to target.
case "$dev" in
    nodev)
        docker_target=build
        tag=$target
        ;;
    dev)
        docker_target=developer-build
        tag=$target-dev
        ;;
esac
# Build or pull the Docker image with the cross-compilation toolchain
image=tttapa/rpi-cross-toolchain:$target
if [ $build_toolchain = true ]; then
    pushd docker/merged
    echo "Building Docker image $image"
    # Source the per-target environment and turn it into --build-arg flags.
    . env/$target.env
    build_args=$(./env/env2arg.py env/$target.env)
    pushd cross-toolchain
    docker build \
        --tag $image \
        ${build_args} \
        --build-arg HOST_TRIPLE=$target \
        ${docker_build_cpuset} .
    popd
    popd

    # Push the Docker image
    if [ $push = true ]; then
        echo "Pushing Docker image $image"
        docker push $image
    fi
else
    echo "Pulling Docker image $image"
    # NOTE(review): the unquoted $(docker images -q ...) makes this test
    # fragile; it happens to behave as "pull only when missing", but a quoted
    # -n test would be clearer — verify before changing.
    [ ! -z $(docker images -q $image) ] || docker pull $image
fi
: ' # Disabled for now
# Build or pull the Docker image with the cross-native toolchain
image=tttapa/rpi-cross-native-toolchain:$target
if [ $build_toolchain = true ] && [ $dev = dev ]; then
pushd docker/merged
echo "Building Docker image $image"
. env/$target.env
build_args=$(./env/env2arg.py env/$target.env)
pushd cross-native-toolchain
docker build \
--tag $image \
${build_args} \
--build-arg HOST_TRIPLE=$target \
${docker_build_cpuset} .
popd
popd
# Push the Docker image
if [ $push = true ]; then
echo "Pushing Docker image $image"
docker push $image
fi
elif [ $dev = dev ]; then
echo "Pulling Docker image $image"
[ ! -z $(docker images -q $image) ] || docker pull $image
fi
'
# Build or pull the Docker image with cross-compiled libraries
image=tttapa/rpi-cross:$tag
if [ $build = true ]; then
    pushd docker/merged
    echo "Building Docker image $image"
    # Source the per-target environment and turn it into --build-arg flags.
    . env/$target.env
    build_args=$(./env/env2arg.py env/$target.env)
    pushd cross-build
    docker build \
        --tag $image \
        ${build_args} \
        --target $docker_target \
        ${docker_build_cpuset} .
    popd
    popd

    # Push the Docker image
    if [ $push = true ]; then
        echo "Pushing Docker image $image"
        docker push $image
    fi
elif [ $export_toolchain = false ]; then
    echo "Pulling Docker image $image"
    [ ! -z $(docker images -q $image) ] || docker pull $image
fi

# Export the toolchain etc. from the Docker image to the computer
image=tttapa/rpi-cross:$tag
if [ $export = true ]; then
    . ./scripts/export.sh
    export_all $image $target $target
fi

# Export only the bare toolchain when requested.
image=tttapa/rpi-cross-toolchain:$target
if [ $export_toolchain = true ]; then
    . ./scripts/export-toolchain.sh
    export_toolchain $image $target $target
fi
<gh_stars>0
/*
* Copyright (c) 2010, <NAME>
* All rights reserved.
*
* Made available under the BSD license - see the LICENSE file
*/
package sim.stats;
public class Average {
int m_numSamples;
double m_x;
double m_sum;
double m_min;
double m_max;
double m_error;
public Average() {
clear();
}
public Average(double x) {
clear();
m_x = x;
}
/*
* Copy constructor
*
public Average(Average a) {
m_numSamples = a.numSamples();
m_x = a.x();
m_sum = a.sum();
m_min = a.min();
m_max = a.max();
}*/
/*
* Create an average of a set of averages
*
public Average(Average[] a) {
clear();
for(int i=0; i<a.length; i++) {
m_sum += a[i].average();
m_numSamples++;
if(a[i].max() > m_max)
m_max = a[i].max();
if(a[i].min() < m_min)
m_min = a[i].min();
}
}*/
public void addSample(double value) {
m_numSamples++;
m_sum += value;
if(value > m_max)
m_max = value;
if(value < m_min)
m_min = value;
}
public void clear() {
m_numSamples = 0;
m_sum = 0.0;
m_min = Double.MIN_VALUE;
m_max = Double.MAX_VALUE;
m_error = 0;
}
public String toString() {
String s = "";
return s;
}
public int numSamples() { return m_numSamples; }
public void setX(double x) { m_x = x; }
public double x() { return m_x; }
public double average() { return m_numSamples==0.0 ? 0.0 : m_sum / (double) m_numSamples; }
public double sum() { return m_sum; }
public double min() { return m_min; }
public double max() { return m_max; }
public void setError(double e) { m_error = e; }
public double error() { return m_error; }
}
|
<gh_stars>0
import { AnyExtension, EnableRules } from '../types'
/**
 * Decides whether the given extension's input/paste rules are enabled.
 *
 * When `enabled` is a boolean it applies globally; when it is an array it
 * enables only the extensions listed (by name string or extension object).
 */
export default function isExtensionRulesEnabled(extension: AnyExtension, enabled: EnableRules): boolean {
  if (!Array.isArray(enabled)) {
    // Global on/off switch.
    return enabled
  }

  // Per-extension allow-list: match either a plain name or an extension object.
  for (const entry of enabled) {
    const entryName = typeof entry === 'string' ? entry : entry.name

    if (entryName === extension.name) {
      return true
    }
  }

  return false
}
|
#!/usr/bin/env bash
# Install the bridgechain node: check dependencies, (re)create the Postgres
# database and role, clone the node repository, patch chain parameters into
# the source with jq/sed, and generate the genesis block.
# Relies on variables set by parse_node_args and the surrounding script
# ($SKIP_DEPS, $BRIDGECHAIN_PATH, $DATABASE_NAME, $PREFIX, $FORGERS, ...).
app_install_node()
{
parse_node_args "$@"
# Optionally verify required programs and node.js modules are installed.
if [[ "$SKIP_DEPS" != "Y" ]]; then
heading "Checking Dependencies..."
check_program_dependencies "${DEPENDENCIES_PROGRAMS[@]}"
check_nodejs_dependencies "${DEPENDENCIES_NODEJS[@]}"
fi
# Start from a clean slate: stop any running node and remove old artifacts.
app_uninstall_node "$@"
heading "Installing Node to $BRIDGECHAIN_PATH..."
# Look up the address-prefix value keyed by $PREFIX; fall back to the "M" key.
# NOTE(review): jq without -r returns the JSON-quoted value — confirm the
# consumer in networks.json expects it that way.
PREFIX=$(sh -c "jq '.$PREFIX' $__dir/prefixes.json")
if [[ -z "$PREFIX" ]]; then
PREFIX=$(sh -c "jq '.M' $__dir/prefixes.json")
fi
# Does the target database already exist? (\l lists it; the awk pipeline
# trims whitespace and keeps the first column, i.e. the database name.)
DB=$(sudo -u postgres psql -t -c "\l $DATABASE_NAME" | awk '{$1=$1};1' | awk '{print $1}')
if [[ "$DB" == "$DATABASE_NAME" ]]; then
# Recreate by default; in interactive mode ask first.
RECREATE_DATABASE="Y"
if [[ "$INTERACTIVE" == "Y" ]]; then
read -p "Database $DATABASE_NAME already exists. Recreate? [y/N]: " RECREATE_DATABASE
fi
# NOTE(review): the regex is case-sensitive, so an interactive "Y"/"Yes"
# answer does NOT match and aborts — confirm whether that is intended.
if [[ "$RECREATE_DATABASE" =~ ^(yes|y) ]]; then
dropdb "$DATABASE_NAME"
else
abort 1 "Database $DATABASE_NAME already exists."
fi
fi
# Ensure a Postgres role exists for the current OS user, optionally
# recreating it (with a hard-coded 'password' password and CREATEDB).
PQ_USER=$(sudo -u postgres psql -t -c "SELECT usename FROM pg_catalog.pg_user WHERE usename = '$USER'" | awk '{$1=$1};1')
if [[ "$PQ_USER" == "$USER" ]]; then
RECREATE_USER="N"
if [[ "$INTERACTIVE" == "Y" ]]; then
read -p "User $USER already exists. Recreate? [y/N]: " RECREATE_USER
fi
if [[ "$RECREATE_USER" =~ ^(yes|y) ]]; then
sudo -u postgres psql -c "DROP USER $USER"
sudo -u postgres psql -c "CREATE USER $USER WITH PASSWORD 'password' CREATEDB;"
else
echo "Skipping User Creation for $USER"
fi
else
sudo -u postgres psql -c "CREATE USER $USER WITH PASSWORD 'password' CREATEDB;"
fi
createdb "$DATABASE_NAME"
# Fresh clone of the node source (explorer branch) into the install path.
rm -rf "$BRIDGECHAIN_PATH"
git clone https://github.com/OckhamConsulting/ock-node.git -b explorer "$BRIDGECHAIN_PATH"
cd "$BRIDGECHAIN_PATH"
npm install libpq
npm install secp256k1
npm install bindings
npm install
# Capture "now" piecewise for the optional epoch rewrite below.
# %-m is 1-12 but Date.UTC months are 0-11, hence the "- 1".
local YEAR=$(date +"%-Y")
local MONTH=$(expr $(date +"%-m") - 1)
local DAY=$(date +"%-d")
local HOUR=$(date +"%-H")
local MINUTE=$(date +"%-M")
local SECOND=$(date +"%-S")
# Loop bound used below is exclusive, so offset the forger count by one.
local FORGERS_OFFSET=$(expr $FORGERS + 1)
# Add this chain's network definition to networks.json via jq.
mv "$BRIDGECHAIN_PATH/networks.json" "$BRIDGECHAIN_PATH/networks.json.orig"
jq ".$CHAIN_NAME = {\"messagePrefix\": \"$CHAIN_NAME message:\\n\", \"bip32\": {\"public\": 70617039, \"private\": 70615956}, \"pubKeyHash\": $PREFIX, \"wif\": 187, \"client\": {\"token\": \"$TOKEN\", \"symbol\": \"$SYMBOL\", \"explorer\": \"http://$EXPLORER_IP:$EXPLORER_PORT\"}}" "$BRIDGECHAIN_PATH/networks.json.orig" > "$BRIDGECHAIN_PATH/networks.json"
cd "$BRIDGECHAIN_PATH/tasks"
rm -rf demo
mkdir demo
# Patch the genesis-block generator: chain name, database, forger counts,
# premine and node port. The literal patterns must match the upstream
# repository's source exactly for these seds to take effect.
sed -i -e "s/bitcoin/$CHAIN_NAME/g" createGenesisBlock.js
sed -i -e "s/var db_name = \"ock_\" + network_name;/var db_name = \"$DATABASE_NAME\";/g" createGenesisBlock.js
sed -i -e "s/for(var i=1; i<52; i++){/for(var i=1; i<$FORGERS_OFFSET; i++){/g" createGenesisBlock.js
sed -i -e "s/for(var i=0;i<51;i++){/for(var i=0;i<$FORGERS;i++){/g" createGenesisBlock.js
sed -i -e "s/var totalpremine = 2100000000000000;/var totalpremine = $TOTAL_PREMINE;/g" createGenesisBlock.js
sed -i -e "s/4100/$NODE_PORT/g" createGenesisBlock.js
# Patch chain constants: fees, delegate/vote limits, block timing, rewards.
sed -i -e "s/send: 10000000/send: $FEE_SEND/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/vote: 100000000/vote: $FEE_VOTE/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/secondsignature: 500000000/secondsignature: $FEE_SECOND_PASSPHRASE/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/delegate: 2500000000/delegate: $FEE_DELEGATE/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/multisignature: 500000000/multisignature: $FEE_MULTISIG/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/activeDelegates: 51/activeDelegates: $FORGERS/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/maximumVotes: 1/maximumVotes: $MAX_VOTES/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/blocktime: 8/blocktime: $BLOCK_TIME/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/maxTxsPerBlock: 50/maxTxsPerBlock: $TXS_PER_BLOCK/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/offset: 75600/offset: $REWARD_HEIGHT_START/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/200000000, \/\//$REWARD_PER_BLOCK, \/\//g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/200000000 \/\//$REWARD_PER_BLOCK \/\//g" "$BRIDGECHAIN_PATH/helpers/constants.js"
sed -i -e "s/totalAmount: 12500000000000000,/totalAmount: $MAX_TOKENS_PER_ACCOUNT,/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
# Optionally reset the chain epoch to "now" using the date parts above.
if [[ "$UPDATE_EPOCH" == "Y" ]]; then
sed -i -e "s/epochTime: new Date(Date.UTC(2017, 2, 21, 13, 0, 0, 0))/epochTime: new Date(Date.UTC($YEAR, $MONTH, $DAY, $HOUR, $MINUTE, $SECOND, 0))/g" "$BRIDGECHAIN_PATH/helpers/constants.js"
fi
# Generate the genesis block, then wire the peer list and copy the
# generated config/genesis files into the install root.
node createGenesisBlock.js
jq ".peers.list = [{\"ip\":\"$NODE_IP\", \"port\":$NODE_PORT}]" "$BRIDGECHAIN_PATH/tasks/demo/config.$CHAIN_NAME.json" > "$BRIDGECHAIN_PATH/config.$CHAIN_NAME.json"
cp "$BRIDGECHAIN_PATH/tasks/demo/config.$CHAIN_NAME.autoforging.json" "$BRIDGECHAIN_PATH"
cp "$BRIDGECHAIN_PATH/tasks/demo/genesisBlock.$CHAIN_NAME.json" "$BRIDGECHAIN_PATH"
# Surface the generated genesis credentials to the operator.
local PASSPHRASE=$(sh -c "jq '.passphrase' $BRIDGECHAIN_PATH/tasks/demo/genesisPassphrase.$CHAIN_NAME.json")
local ADDRESS=$(sh -c "jq '.address' $BRIDGECHAIN_PATH/tasks/demo/genesisPassphrase.$CHAIN_NAME.json")
echo "Your Genesis Details are:"
echo "  Passphrase: $PASSPHRASE"
echo "  Address: $ADDRESS"
success "Bridgechain Installed!"
}
# Uninstall the bridgechain node: stop the running process, drop the chain
# database if it exists, and delete the installation directory.
app_uninstall_node()
{
process_node_stop "$@"
heading "Uninstalling..."
# First column of "\l $DATABASE_NAME" equals the db name when it exists.
DB=$(sudo -u postgres psql -t -c "\l $DATABASE_NAME" | awk '{$1=$1};1' | awk '{print $1}')
if [[ "$DB" == "$DATABASE_NAME" ]]; then
dropdb "$DATABASE_NAME"
fi
rm -rf "$BRIDGECHAIN_PATH"
success "Uninstall OK!"
}
|
<reponame>joef551/camel-cql
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.metis.cassandra;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.Predicate;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;
import org.junit.FixMethodOrder;
import org.junit.runners.MethodSorters;
import com.datastax.driver.core.utils.UUIDs;
import static org.metis.utils.Constants.*;
/**
 * Exercises the full insert/select/delete life-cycle of a video event for
 * user "jfernandez": testA inserts a record, testB verifies it exists,
 * testC deletes it and testD verifies it is gone.
 *
 * <p>(The previous class comment described a "tcodd" select test and was
 * copied from another test case; it did not match this class.)
 *
 * @author jfernandez
 */
// Test methods execute in ascending name order so the life-cycle steps run
// in sequence: insert -> verify -> delete -> verify.
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class InsertVideoTest extends BaseTest {

	/** Random video id shared by every step of the life-cycle. */
	private static String videoid = UUIDs.random().toString();

	/** Time-based UUID used as the event's timestamp column. */
	private static String event_timestamp = UUIDs.timeBased().toString();

	/**
	 * Inserts a "start" event row into the video event table.
	 *
	 * @throws Exception on route or Cassandra failure
	 */
	@Test
	public void testA() throws Exception {
		// the CASSANDRA_METHOD header of "insert" tells the endpoint which
		// CQL statement group to execute
		Map<String, String> map = new HashMap<String, String>();
		map.put("username", "jfernandez");
		map.put("videoid", videoid);
		map.put("event_timestamp", event_timestamp);
		map.put("event", "start");
		map.put("video_timestamp", "500000");
		template.requestBodyAndHeader("direct:start", map, CASSANDRA_METHOD,
				"insert");
	}

	/**
	 * Ensure user jfernandez was inserted
	 *
	 * @throws Exception
	 */
	@Test
	public void testB() throws Exception {
		String JSON = "{\"username\":\"jfernandez\",\"videoid\":\"" + videoid
				+ "\"}";
		// TestResult validates the select reply at the mock endpoint
		getMockEndpoint("mock:result")
				.expectedMessagesMatches(new TestResult());
		template.requestBodyAndHeader("direct:start", JSON, CASSANDRA_METHOD,
				"select");
		assertMockEndpointsSatisfied();
	}

	/**
	 * Delete user jfernandez
	 *
	 * @throws Exception
	 */
	@Test
	public void testC() throws Exception {
		// this is what we send to the CqlEndpoint
		String JSON = "{\"username\":\"jfernandez\",\"videoid\":\"" + videoid
				+ "\"}";
		template.requestBodyAndHeader("direct:start", JSON, CASSANDRA_METHOD,
				"delete");
	}

	/**
	 * Ensure user jfernandez was deleted
	 *
	 * @throws Exception
	 */
	@Test
	public void testD() throws Exception {
		// this is what we send to the CqlEndpoint
		String JSON = "{\"username\":\"jfernandez\",\"videoid\":\"" + videoid
				+ "\"}";
		getMockEndpoint("mock:result").expectedMessagesMatches(
				new TestResult2());
		// feed the route, which starts the test
		template.requestBodyAndHeader("direct:start", JSON, CASSANDRA_METHOD,
				"select");
		// ask the mock endpoint if it received the expected body and
		// value.
		assertMockEndpointsSatisfied();
	}

	@Override
	// this is the route used by this test case.
	protected RouteBuilder createRouteBuilder() {
		return new RouteBuilder() {
			public void configure() {
				// the message is read in from the direct:start endpoint,
				// sent to Cassandra component, then the reply is sent
				// on to the mock endpoint. The mock endpoint will then validate
				// it via the TestResult predicate.
				from("direct:start").to("cql:video").to("mock:result");
			}
		};
	}

	/**
	 * Validates testB's reply: exactly one row (a 5-column map) whose
	 * "username" column equals "jfernandez".
	 */
	private class TestResult implements Predicate {
		public boolean matches(Exchange exchange) {
			Object payLoad = exchange.getIn().getBody();
			// instanceof is false for null, so no separate null check needed
			if (!(payLoad instanceof List)) {
				return false;
			}
			// wildcard generics instead of raw types; elements are inspected
			// via instanceof before use
			List<?> list = (List<?>) payLoad;
			if (list.size() != 1) {
				return false;
			}
			payLoad = list.get(0);
			if (!(payLoad instanceof Map)) {
				return false;
			}
			Map<?, ?> map = (Map<?, ?>) payLoad;
			if (map.size() != 5) {
				return false;
			}
			// String.equals() is false for null or non-String values, which
			// covers the old instanceof-String pre-check
			return "jfernandez".equals(map.get("username"));
		}
	}

	/**
	 * Validates testD's reply: either no body at all or an empty result
	 * list, meaning the row was deleted.
	 */
	private class TestResult2 implements Predicate {
		public boolean matches(Exchange exchange) {
			Object payLoad = exchange.getIn().getBody();
			if (payLoad == null) {
				return true;
			}
			return payLoad instanceof List && ((List<?>) payLoad).isEmpty();
		}
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.