text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Colin Davenport, August 2018
# Keep all lines where col3 is greater than or equal to X (currently 20).
# Good for idxstats files eg metagenomics.
# Sorted descending in col3 for within-experiment clarity.
#
# Uses find -print0 / read -d '' instead of a backtick loop so that file
# names containing spaces or newlines are handled correctly, and quotes
# "$i" everywhere for the same reason.
find . -name "*.bam.txt" -print0 | while IFS= read -r -d '' i
do
    awk -F "\t" '$3>=20' "$i" | sort -t$'\t' -k3 -nr > "$i.filt.sort.csv"
done
# unsorted to maintain row comparability over experiments
#find . -name "*.bam.txt" -print0 | while IFS= read -r -d '' i
#do
#    awk -F "\t" '$3>=20' "$i" > "$i.filt.unsort.csv"
#done
|
const zeros = require('zeros')
/**
* Calculate local mean
*/
function localMean (pixels, size) {
  // pixels is expected to be grayscaled
  // Computes the mean of every fully-contained size x size window of
  // `pixels` (an ndarray-like object with .shape and .get) using two
  // 1-D sliding-window passes, so each output cell costs O(1).
  let [width, height] = pixels.shape
  // I like Shakutori method!
  // Pass 1: for each row y, rowSumsCol[y][x] holds the sum of the `size`
  // horizontally consecutive pixels starting at column x.
  let rowSumsCol = new Array(height)
  for (let y = 0; y < height; y++) {
    let rowSums = new Array(width).fill(0)
    // Seed the first horizontal window [0, size).
    for (let x = 0; x < size; x++) {
      rowSums[0] += pixels.get(x, y)
    }
    // Slide right: add the pixel entering the window, drop the one leaving.
    for (let xEnd = size; xEnd < width; xEnd++) {
      let xStart = xEnd - size + 1
      rowSums[xStart] = rowSums[xStart - 1] + pixels.get(xEnd, y) - pixels.get(xStart - 1, y)
    }
    rowSumsCol[y] = rowSums
  }
  // Only windows fully inside the image are produced, so the output is
  // smaller than the input by (size - 1) in each dimension.
  let mWidth = width - size + 1
  let mHeight = height - size + 1
  let mean = zeros([mWidth, mHeight])
  for (let x = 0; x < mWidth; x++) {
    // Set x, 0
    // Seed each column: mean[x][0] accumulates the first `size` row sums.
    for (let y = 0; y < size; y++) {
      let prev = mean.get(x, 0)
      mean.set(x, 0, prev + rowSumsCol[y][x])
    }
  }
  // Pass 2: slide the window down each column, reusing the sum above it.
  for (let x = 0; x < mWidth; x++) {
    for (let y = 1; y < mHeight; y++) {
      mean.set(x, y, mean.get(x, y - 1) - rowSumsCol[y - 1][x] + rowSumsCol[y + size - 1][x])
    }
  }
  // Divide each window sum by the window area to turn sums into means.
  for (let x = 0; x < mWidth; x++) {
    for (let y = 0; y < mHeight; y++) {
      mean.set(x, y, mean.get(x, y) / (size * size))
    }
  }
  return mean
}
module.exports = localMean
|
<filename>app/displayobjects/Sparkle/Sparkle.js
/**
* Test particle
*
* @exports PIXI.Sparkle
* @extends Particle
*/
import {Texture, BLEND_MODES} from 'pixi.js';
import Particle from '../Particle/Particle.js';
import WHITE from './sparkle01.png';
/**
 * A short-lived additive-blended sparkle particle.
 */
export default class Sparkle extends Particle {
  /**
   * @param {Texture} [texture] - optional texture; the bundled sparkle
   *   sprite is used when none is supplied.
   */
  constructor(texture) {
    const fallbackTexture = Texture.fromImage(WHITE);
    super(texture || fallbackTexture);

    // Start small, mostly opaque, at a random orientation.
    this.scale.x = 0.2;
    this.scale.y = 0.2;
    this.alpha = 0.8;
    this.rotation = Math.random() * Math.PI;
    this.blendMode = BLEND_MODES.ADD;
  }

  /**
   * Per-tick decay: fade out, spin slightly, and shrink.
   */
  postUpdate(e) {
    this.alpha -= 0.005;
    this.rotation += 0.01;
    this.scale.x -= 0.001;
    this.scale.y -= 0.001;
  }
}
|
#!/usr/bin/env python
# rectangle_area.py: Calculate the area of a rectangle
import argparse
def calculate_area(length, width):
    """Compute the area of a rectangle.

    Args:
        length: Length of the rectangle.
        width: Width of the rectangle.

    Returns:
        The product ``length * width``.
    """
    area = length * width
    return area
def main():
    """Parse command-line arguments and print the rectangle's area."""
    parser = argparse.ArgumentParser(
        description="Calculate the area of a rectangle"
    )
    parser.add_argument("length", type=float, help="length of the rectangle")
    parser.add_argument("width", type=float, help="width of the rectangle")
    args = parser.parse_args()

    result = calculate_area(args.length, args.width)
    print(
        f"The area of the rectangle with length {args.length} "
        f"and width {args.width} is {result}"
    )
# Run the CLI entry point only when executed as a script.
if __name__ == "__main__":
    main()
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.tmdd.emulation.entitydata;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.fhwa.c2cri.tmdd.emulation.exceptions.InvalidValueException;
/**
*
* @author TransCore ITS, LLC Created: Jan 31, 2016
*/
public class EntityDataValidator
{
    // Note we do not test for values of type AnyType (normally used for the mandatory extension data frame).

    /**
     * Dispatches validation of an element value to the type-specific validator
     * selected by {@code baseType} (case-insensitive). Unrecognized base types
     * are not validated and pass silently, preserving the original behavior.
     *
     * <p>By convention, a minInclusive/maxInclusive pair of "-1"/"-1" means
     * "use the datatype's own min/max".</p>
     *
     * @param baseType     the schema base type name (e.g. "string", "int")
     * @param minLength    minimum string length, or "-1" for unrestricted
     * @param maxLength    maximum string length, or "-1" for unrestricted
     * @param minInclusive lower bound for numeric types, or "-1" for default
     * @param maxInclusive upper bound for numeric types, or "-1" for default
     * @param enumeration  enumeration definition string for anySimpleType
     * @param elementValue the value to validate
     * @throws InvalidValueException if the value fails its type's validation
     */
    public static void validateValue(String baseType, String minLength, String maxLength, String minInclusive, String maxInclusive, String enumeration, String elementValue) throws InvalidValueException
    {
        if (baseType.equalsIgnoreCase("string"))
        {
            validateString(minLength, maxLength, elementValue);
        }
        else if (baseType.equalsIgnoreCase("anySimpleType"))
        {
            validateEnumValue(enumeration, elementValue);
        }
        else if (baseType.equalsIgnoreCase("dateTime"))
        {
            validateDateTimeValue(elementValue);
        }
        else if (baseType.equalsIgnoreCase("decimal"))
        {
            validateDecimalValue(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("float"))
        {
            validateFloatValue(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("int"))
        {
            validateIntValue(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("Int-latitude32"))
        {
            validateIntLatitude32Value(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("Int-longitude32"))
        {
            validateIntLongitude32Value(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("short"))
        {
            validateShortValue(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("unsignedByte"))
        {
            validateUnsignedByteValue(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("unsignedInt"))
        {
            validateUnsignedIntValue(minInclusive, maxInclusive, elementValue);
        }
        else if (baseType.equalsIgnoreCase("unsignedShort"))
        {
            validateUnsignedShortValue(minInclusive, maxInclusive, elementValue);
        }
    }

    /**
     * Validate that a given value string has a valid number of characters.
     *
     * <p>Fix: the original dereferenced a null {@code elementValue} when
     * {@code minLength} was 0 or greater than -1 but the value was null,
     * raising a NullPointerException instead of a validation result. A null
     * value is now rejected only when a positive minimum length is required.</p>
     *
     * @param minLength    minimum length, "-1" (with maxLength "-1") = any
     * @param maxLength    maximum length, "-1" (with minLength "-1") = any
     * @param elementValue the value to check
     * @throws InvalidValueException if the length is outside the range
     */
    private static void validateString(String minLength, String maxLength, String elementValue) throws InvalidValueException
    {
        int min = Integer.parseInt(minLength);
        int max = Integer.parseInt(maxLength);
        if (elementValue == null)
        {
            // Null is only an error when at least one character is required.
            if (min > 0)
            {
                throw new InvalidValueException("Element Value was null.");
            }
            return;
        }
        // If minLength and maxLength = -1, then any non-null value is allowed.
        if (min == -1 && max == -1)
        {
            return;
        }
        if (elementValue.length() < min || elementValue.length() > max)
        {
            throw new InvalidValueException("The length of " + elementValue + " (" + elementValue.length() + ") is outside of the defined range - min (" + minLength + ") to max (" + maxLength + ").");
        }
    }

    /**
     * Validate that a given value string is within the specified enumeration.
     *
     * <p>The enumeration string is expected to contain entries of the form
     * {@code (code) label;} — either the code or the label may match.</p>
     *
     * <p>Fix: the original decided failure from {@code Matcher.hitEnd()} and
     * {@code matches()}, which can misreport a successful match found at the
     * very end of the input. An explicit found-flag is used instead.</p>
     *
     * @param enumeration  the enumeration definition string
     * @param elementValue the value to look up
     * @throws InvalidValueException if the value is not in the enumeration
     */
    public static void validateEnumValue(String enumeration, String elementValue) throws InvalidValueException
    {
        if (elementValue == null)
        {
            throw new InvalidValueException("The Element definition was null.");
        }
        if (enumeration == null)
        {
            throw new InvalidValueException("The Enumeration definition was null.");
        }
        // Each entry looks like "(group1) group2" terminated by ';' or '}'.
        Pattern regPass = Pattern.compile("\\((.*?)\\)\\s(.*?)[;}]");
        Matcher matcher = regPass.matcher(enumeration);
        boolean found = false;
        while (matcher.find())
        {
            if (matcher.group(1).equals(elementValue) || matcher.group(2).equals(elementValue))
            {
                found = true;
                break;
            }
        }
        if (!found)
        {
            throw new InvalidValueException("The element value of " + elementValue + " was not defined for the enumeration type.");
        }
    }

    /**
     * Validate that a given value string is a valid XML Schema dateTime.
     *
     * <p>Fix: the original used {@code Matcher.find()}, which accepts any
     * string merely <em>containing</em> a valid dateTime (e.g. garbage around
     * it). {@code matches()} anchors the pattern to the whole value.</p>
     *
     * @param elementValue the value to check
     * @throws InvalidValueException if the value is not a valid dateTime
     */
    public static void validateDateTimeValue(String elementValue) throws InvalidValueException
    {
        if (elementValue == null)
        {
            throw new InvalidValueException("The DateTime value was null.");
        }
        Pattern regPass = Pattern.compile("-?([1-9][0-9]{3,}|0[0-9]{3})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])T(([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\\.[0-9]+)?|(24:00:00(\\.0+)?))(Z|(\\+|-)((0[0-9]|1[0-3]):[0-5][0-9]|14:00))?");
        if (!regPass.matcher(elementValue).matches())
        {
            throw new InvalidValueException("The DateTime value was not correct for the DateTime type.");
        }
    }

    /**
     * Validate that a given decimal value string is within
     * [minInclusive, maxInclusive].
     *
     * <p>Fix: unparseable input now raises {@link InvalidValueException}
     * (consistent with the integer validators) instead of leaking an
     * unchecked {@link NumberFormatException}.</p>
     *
     * @param minInclusive lower bound (decimal string)
     * @param maxInclusive upper bound (decimal string)
     * @param elementValue the value to check
     * @throws InvalidValueException if null, unparseable, or out of range
     */
    public static void validateDecimalValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        if (elementValue == null)
        {
            throw new InvalidValueException("Element Value was null.");
        }
        try
        {
            double value = Double.parseDouble(elementValue);
            if (value < Double.parseDouble(minInclusive) || value > Double.parseDouble(maxInclusive))
            {
                throw new InvalidValueException("The value of " + elementValue + " is outside of the defined range - min (" + minInclusive + ") to max (" + maxInclusive + ").");
            }
        }
        catch (NumberFormatException nex)
        {
            throw new InvalidValueException("The input string for minInclusive, maxInclusive, or elementValue is too small or too large for the datatype to process. " + nex.getMessage());
        }
    }

    /**
     * Validate that a given float value string is within
     * [minInclusive, maxInclusive]; "-1"/"-1" means the full float range.
     *
     * <p>Fix: the original compared the configured bounds against
     * {@code -Float.MIN_VALUE} (the smallest-magnitude negative float) when it
     * plainly intended {@code -Float.MAX_VALUE}, and it did not catch
     * {@link NumberFormatException}. Non-finite configured bounds are now
     * rejected, and parse failures raise {@link InvalidValueException}.</p>
     *
     * @param minInclusive lower bound, or "-1" (with maxInclusive "-1") for default
     * @param maxInclusive upper bound, or "-1" (with minInclusive "-1") for default
     * @param elementValue the value to check
     * @throws InvalidValueException if null, unparseable, or out of range
     */
    public static void validateFloatValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        if (elementValue == null)
        {
            throw new InvalidValueException("Element Value was null.");
        }
        try
        {
            float min;
            float max;
            if (minInclusive.equals("-1") && maxInclusive.equals("-1"))
            {
                // Defaults: the full finite float range.
                min = -Float.MAX_VALUE;
                max = Float.MAX_VALUE;
            }
            else
            {
                min = Float.parseFloat(minInclusive);
                max = Float.parseFloat(maxInclusive);
                if (Float.isNaN(min) || Float.isNaN(max) || Float.isInfinite(min) || Float.isInfinite(max))
                {
                    throw new InvalidValueException("The value of minInclusive or maxInclusive did not meet the specifications of an Float.");
                }
            }
            float value = Float.parseFloat(elementValue);
            if (value < min || value > max)
            {
                throw new InvalidValueException("The value of " + elementValue + " is outside of the defined range - min (" + minInclusive + ") to max (" + maxInclusive + ").");
            }
        }
        catch (NumberFormatException nex)
        {
            throw new InvalidValueException("The input string for minInclusive, maxInclusive, or elementValue is too small or too large for the datatype to process. " + nex.getMessage());
        }
    }

    /**
     * Validate that a given int value string is within
     * [minInclusive, maxInclusive]; "-1"/"-1" means the full int range.
     *
     * <p>Fix: the original compared against {@code -Integer.MIN_VALUE}, which
     * overflows back to {@code Integer.MIN_VALUE}, making the guard
     * permanently false (dead code); and its "-1" default substituted the
     * same overflowed value. Delegates to the shared range validator.</p>
     */
    public static void validateIntValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("Integer", Integer.MIN_VALUE, Integer.MAX_VALUE, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Validate a 32-bit latitude (micro-degrees, -90000000..90000000);
     * "-1"/"-1" means the full latitude range.
     *
     * <p>Fix: the original's bound guard was vacuous due to the
     * {@code -Integer.MIN_VALUE} overflow; configured bounds are now actually
     * checked against the latitude range.</p>
     */
    public static void validateIntLatitude32Value(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("IntLatitude32", -90000000L, 90000000L, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Validate a 32-bit longitude (micro-degrees, -180000000..180000000);
     * "-1"/"-1" means the full longitude range.
     *
     * <p>Fix: same vacuous-guard repair as {@link #validateIntLatitude32Value}.</p>
     */
    public static void validateIntLongitude32Value(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("IntLongitude32", -180000000L, 180000000L, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Validate that a given short value string is within
     * [minInclusive, maxInclusive]; "-1"/"-1" means the full short range.
     *
     * <p>Fix: the original parsed the bounds with {@code Short.parseShort}
     * and then compared against {@code Short.MIN_VALUE}/{@code MAX_VALUE} —
     * conditions that could never be true (out-of-range input throws in the
     * parse itself). The shared long-based validator reports such bounds as
     * invalid instead.</p>
     */
    public static void validateShortValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("Short", Short.MIN_VALUE, Short.MAX_VALUE, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Validate an unsignedByte (0..255); "-1"/"-1" means the full range.
     *
     * <p>Fix: the original threw "did not meet the specifications" even after
     * substituting the "-1" defaults (the substitution branch fell straight
     * through to the throw). Defaults are now honored.</p>
     */
    public static void validateUnsignedByteValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("Unsigned-Byte", 0L, 255L, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Validate an unsignedInt (0..4294967295); "-1"/"-1" means the full range.
     *
     * <p>Fix: the original's {@code -Long.MIN_VALUE}/{@code Long.MAX_VALUE}
     * guard was vacuous (overflow / tautology); real bound checking is done
     * by the shared validator.</p>
     */
    public static void validateUnsignedIntValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("Unsigned-Int", 0L, 4294967295L, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Validate an unsignedShort (0..65535); "-1"/"-1" means the full range.
     *
     * <p>Fix: as with unsignedByte, the original threw even after
     * substituting the "-1" defaults; defaults are now honored.</p>
     */
    public static void validateUnsignedShortValue(String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        validateLongRange("Unsigned-Short", 0L, 65535L, minInclusive, maxInclusive, elementValue);
    }

    /**
     * Shared integral range validator used by all int-like types.
     *
     * <p>Rules, in order:</p>
     * <ol>
     *   <li>a null value is rejected;</li>
     *   <li>"-1"/"-1" bounds mean [typeMin, typeMax];</li>
     *   <li>explicit bounds must lie inside [typeMin, typeMax];</li>
     *   <li>the value must lie inside [typeMin, typeMax] (covers the old
     *       "unsigned value &lt; 0" checks);</li>
     *   <li>the value must lie inside [min, max].</li>
     * </ol>
     *
     * @param typeName     display name used in error messages
     * @param typeMin      smallest legal value of the datatype
     * @param typeMax      largest legal value of the datatype
     * @param minInclusive configured lower bound, or "-1" for default
     * @param maxInclusive configured upper bound, or "-1" for default
     * @param elementValue the value to check
     * @throws InvalidValueException on any rule violation or parse failure
     */
    private static void validateLongRange(String typeName, long typeMin, long typeMax, String minInclusive, String maxInclusive, String elementValue) throws InvalidValueException
    {
        if (elementValue == null)
        {
            throw new InvalidValueException("Element Value was null.");
        }
        try
        {
            long min;
            long max;
            if ("-1".equals(minInclusive) && "-1".equals(maxInclusive))
            {
                // Default bounds: the datatype's own range.
                min = typeMin;
                max = typeMax;
            }
            else
            {
                min = Long.parseLong(minInclusive);
                max = Long.parseLong(maxInclusive);
                if (min < typeMin || max > typeMax)
                {
                    throw new InvalidValueException("The value of minInclusive or maxInclusive did not meet the specifications of an " + typeName + ".");
                }
            }
            long value = Long.parseLong(elementValue);
            if (value < typeMin || value > typeMax)
            {
                throw new InvalidValueException(" The value of " + elementValue + " did not meet the specifications of an " + typeName + ".");
            }
            if (value < min || value > max)
            {
                throw new InvalidValueException("The value of " + elementValue + " is outside of the defined range - min (" + min + ") to max (" + max + ").");
            }
        }
        catch (NumberFormatException nex)
        {
            throw new InvalidValueException("The input string for minInclusive, maxInclusive, or elementValue is too small or too large for the datatype to process. " + nex.getMessage());
        }
    }
}
|
#!/bin/bash
# Strict mode: -e exit on error, -u error on unset variables, -o pipefail
# propagate failures through pipes (spelled -eou; equivalent to -euo pipefail).
set -eou pipefail
#
# Script will be run from: /var/lib/cloud/instances/<instance-id>/
# Output for debugging goes to: /var/log/cloud-init-output.log
#
#
# Attach EBS Volume
# See: https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-using-volumes.html
#
# WARNING: mkfs is unconditional and destructive — any data already on
# /dev/nvme1n1 is erased. Assumes the EBS volume always appears under this
# NVMe device name on the chosen instance type.
mkfs -t xfs /dev/nvme1n1
mkdir /load-test
mount /dev/nvme1n1 /load-test
# World-writable so the load-test processes (run as other users) can write here.
chmod 777 /load-test
#
# Install Prometheus
#
wget --quiet -O /load-test/prometheus.tar.gz https://github.com/prometheus/prometheus/releases/download/v2.21.0/prometheus-2.21.0.linux-amd64.tar.gz
tar xzf /load-test/prometheus.tar.gz --directory=/load-test
rm /load-test/prometheus.tar.gz
#
# Configure Prometheus
#
# NOTE(review): `${pg-target-ip}` is presumably substituted by a templating
# step (e.g. Terraform templatefile) before bash runs this file; in plain
# bash it would expand parameter `pg` with default word "target-ip" — confirm.
cat <<EOF >/load-test/prometheus.yml
global:
  scrape_interval: 500ms
scrape_configs:
  - job_name: db_subsetter
    static_configs:
      - targets: ['${pg-target-ip}:9092']
EOF
#
# Start prometheus
# https://prometheus.io/docs/introduction/first_steps/
#
# Backgrounded; logs go to cloud-init output.
/load-test/prometheus-2.21.0.linux-amd64/prometheus --config.file=/load-test/prometheus.yml &
#
# Install Grafana
#
wget --quiet -O /load-test/grafana.tar.gz https://dl.grafana.com/oss/release/grafana-7.2.1.linux-amd64.tar.gz
tar xzf /load-test/grafana.tar.gz --directory=/load-test
rm /load-test/grafana.tar.gz
wget --quiet -O /load-test/grafana-dashboard.json https://raw.githubusercontent.com/bluerogue251/DBSubsetter/master/grafana-dashboard.json
#
# Configure Grafana
#
# Anonymous admin access — acceptable only for a throwaway load-test box.
cat <<EOF >/load-test/grafana.ini
[auth.anonymous]
enabled = true
org_role = Admin
EOF
#
# Start Grafana
#
/load-test/grafana-7.2.1/bin/grafana-server --homepath /load-test/grafana-7.2.1/ --config=/load-test/grafana.ini &
# Give Grafana a moment to open its HTTP port before configuring it.
sleep 5
#
# Connect Grafana to Prometheus
#
# Uses Grafana's default admin:admin credentials (fine for an ephemeral host).
curl \
  -X POST \
  -H 'Content-Type: application/json' \
  --data '{"name": "prometheus", "type": "prometheus", "url": "http://localhost:9090", "access": "proxy", "jsonData": { "timeInterval": "500ms" } }' \
  admin:admin@localhost:3000/api/datasources
#
# Create Grafana Dashboard
#
curl \
  -X POST \
  -H 'Content-Type: application/json' \
  --data @/load-test/grafana-dashboard.json \
  admin:admin@localhost:3000/api/dashboards/db
#
# Set Dashboard as Grafana Homepage
#
# NOTE(review): assumes the dashboard just created received id 1 — confirm.
curl \
  -X PUT \
  -H 'Content-Type: application/json' \
  --data '{ "homeDashboardId": 1 }' \
  admin:admin@localhost:3000/api/org/preferences
|
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
// Mirror of CPython's `Py_uhash_t`: an unsigned, pointer-sized hash type.
pub type Py_uhash_t = ::libc::size_t;
/// Computes a hash for `py_object` using Rust's `DefaultHasher` and
/// truncates the 64-bit result to `Py_uhash_t`.
/// NOTE(review): assumes `PyObject` (defined elsewhere) implements `Hash`;
/// the hash is process-stable only — `DefaultHasher` output may differ
/// across runs/compiler versions, so do not persist these values.
fn custom_hash(py_object: &PyObject) -> Py_uhash_t {
    let mut hasher = DefaultHasher::new();
    py_object.hash(&mut hasher);
    hasher.finish() as Py_uhash_t
}
<filename>src/main/java/org/sayantan/assesment/DiceRoll/SampleRestApplication.java
/**
* Copyright 2005-2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.sayantan.assesment.DiceRoll;
import java.util.Arrays;
import org.apache.cxf.Bus;
import org.apache.cxf.endpoint.Server;
import org.apache.cxf.jaxrs.JAXRSServerFactoryBean;
import org.apache.cxf.jaxrs.swagger.Swagger2Feature;
import org.apache.cxf.jaxrs.validation.JAXRSBeanValidationInInterceptor;
import org.apache.cxf.jaxrs.validation.JAXRSBeanValidationOutInterceptor;
import org.apache.cxf.jaxrs.validation.ValidationExceptionMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.orm.jpa.vendor.HibernateJpaSessionFactoryBean;
import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
@SpringBootApplication
public class SampleRestApplication {

    // CXF bus for JAX-RS endpoint wiring.
    // NOTE(review): only referenced by the commented-out rsServer() bean
    // below — unused while that bean stays disabled.
    @Autowired
    private Bus bus;

    /** Boots the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(SampleRestApplication.class, args);
    }

    // Disabled manual CXF-RS server setup; kept for reference. With it off,
    // endpoint registration presumably happens elsewhere (e.g. CXF Spring
    // Boot auto-configuration) — confirm before deleting.
    // @Bean
    // public Server rsServer() {
    //     // setup CXF-RS
    //     JAXRSServerFactoryBean endpoint = new JAXRSServerFactoryBean();
    //     endpoint.setBus(bus);
    //     endpoint.setServiceBeans(Arrays.<Object>asList(new DiceServiceImpl()));
    //     endpoint.setAddress("/diceservice");
    //     endpoint.setFeatures(Arrays.asList(new Swagger2Feature()));
    //     return endpoint.create();
    // }

    /** The dice-rolling REST service implementation bean. */
    @Bean
    public DiceServiceImpl diceServiceImpl() {
        return new DiceServiceImpl();
    }

    /** Jackson provider so JAX-RS resources can (de)serialize JSON. */
    @Bean
    public JacksonJsonProvider jacksonJsonProvider() {
        return new JacksonJsonProvider();
    }

    /**
     * Exposes a Hibernate SessionFactory derived from the JPA EntityManagerFactory.
     * NOTE(review): HibernateJpaSessionFactoryBean is deprecated in newer
     * Spring versions — confirm against the project's Spring ORM version.
     */
    @Bean
    public HibernateJpaSessionFactoryBean sessionFactory() {
        return new HibernateJpaSessionFactoryBean();
    }

    /** Maps application-specific dice-roll errors to HTTP responses. */
    @Bean
    public DiceRollExceptionMapper diceRollExceptionMapper() {
        return new DiceRollExceptionMapper();
    }

    /**
     * Maps Bean Validation failures to HTTP responses, including the
     * validation message in the response body.
     */
    @Bean
    public ValidationExceptionMapper validationExceptionMapper() {
        ValidationExceptionMapper exceptionMapper = new ValidationExceptionMapper();
        exceptionMapper.setAddMessageToResponse(true);
        return exceptionMapper;
    }

    /** Validates incoming request beans before resource methods run. */
    @Bean
    public JAXRSBeanValidationInInterceptor jaxRSBeanValidationInInterceptor() {
        JAXRSBeanValidationInInterceptor inInterceptor = new JAXRSBeanValidationInInterceptor();
        return inInterceptor;
    }

    /** Validates outgoing response beans before they are serialized. */
    @Bean
    public JAXRSBeanValidationOutInterceptor jaxRSBeanValidationOutInterceptor() {
        return new JAXRSBeanValidationOutInterceptor ();
    }
}
|
package com.limpoxe.fairy.core.android;
import android.content.Context;
import android.content.res.Resources;
import android.os.Build;
import com.limpoxe.fairy.util.RefInvoker;
/**
* Created by cailiming on 16/10/30.
*/
/**
 * Reflection-based accessor for a hidden {@code android.view.ContextThemeWrapper}
 * instance, used to rewire a plugin's resources and theme onto a host context.
 * All access goes through {@link RefInvoker}; the string constants below must
 * exactly match the framework's private member names.
 */
public class HackContextThemeWrapper extends HackContextWrapper {
    // Fully-qualified framework class whose private members are accessed.
    private static final String ClassName = "android.view.ContextThemeWrapper";
    // Private field holding the wrapper's Resources (present on API > 16).
    private static final String Field_mResources = "mResources";
    // Private field holding the wrapper's resolved Theme.
    private static final String Field_mTheme = "mTheme";
    // Protected method that attaches the base Context.
    private static final String Method_attachBaseContext = "attachBaseContext";

    /**
     * @param instance the actual ContextThemeWrapper object to manipulate
     */
    public HackContextThemeWrapper(Object instance) {
        super(instance);
    }

    /**
     * Invokes the protected {@code attachBaseContext(Context)} on the wrapped
     * instance via reflection.
     *
     * @param paramValues the base Context to attach
     */
    public final void attachBaseContext(Object paramValues) {
        RefInvoker.invokeMethod(instance, ClassName, Method_attachBaseContext, new Class[]{Context.class}, new Object[]{paramValues});
    }

    /**
     * Overwrites the private {@code mResources} field.
     * Skipped on API <= 16, where the field does not exist / is not used.
     */
    public final void setResources(Resources resources) {
        if (Build.VERSION.SDK_INT > 16) {
            RefInvoker.setField(instance, ClassName, Field_mResources, resources);
        }
    }

    /** Overwrites the private {@code mTheme} field with the given theme. */
    public final void setTheme(Resources.Theme theme) {
        RefInvoker.setField(instance, ClassName, Field_mTheme, theme);
    }
}
|
package nuwaplt
// Module2Host maps a module name to the host it is registered on.
var Module2Host = map[string]string{}

// Host2Module is the inverse mapping, host -> module name.
var Host2Module = map[string]string{}

// ModuleNames lists the known modules in registration order.
var ModuleNames = make([]Name, 0)

// ModuleNamesUniq tracks seen module names (used for de-duplication).
var ModuleNamesUniq = map[string]int{}

// Name wraps a module name for JSON serialization.
type Name struct {
	Name string `json:"name"`
}
// Reload refreshes module data from the platform.
func Reload() {
	// Update defaults based on the platform registration info.
	Update()
	// NOTE(review): rebuilding Module2Host/Host2Module from GetModules() is
	// intentionally disabled below; those maps are presumably maintained
	// elsewhere — confirm before re-enabling.
	//for _, module := range GetModules() {
	//	Module2Host[module.Name] = module.KVs[KContext]
	//}
	//
	//for m, h := range Module2Host {
	//	Host2Module[h] = m
	//}
}
// ResetModuleInfo 清空原有数据,否则模块信息变更需要重启sharingan,影响用户体验
func ResetModuleInfo() {
ModuleNames = make([]Name, 0)
ModuleNamesUniq = map[string]int{}
}
|
package lifya.generic.rule;
/**
 * A parsing rule for a rule enclosed in quotes (i.e. a fixed left and
 * right delimiter surrounding an inner rule).
 */
public class RuleInQuotes extends JoinRule{
/**
 * <p>Creates a syntactic rule for parsing a rule enclosed in quotes. Consider a typical tuple rule:</p>
 * <p> &lt;TUPLE&gt; :- (&lt;ARGS&gt;) </p>
 * <p>Can be defined using a constructor call like this:</p>
 * <p><i>new RuleInQuotes("TUPLE", "ARGS", Symbol.TAG, "(", ")")</i></p>
 * @param type Type of the rule
 * @param quoted_rule Quoted rule
 * @param quotes_type Type of the quotes
 * @param left Left quotation
 * @param right Right quotation
 */
public RuleInQuotes(String type, String quoted_rule, String quotes_type, String left, String right) {
super(type, new String[] {quotes_type, quoted_rule, quotes_type}, new String[] {left, null, right});
}
}
|
<filename>client/javascripts/components/Alert.tsx
import * as React from 'react'
import { Alert } from 'react-bootstrap'
/**
 * Dismissible Bootstrap alert. Renders a bold "Success: " or "Error: "
 * prefix (chosen by alertType) followed by the supplied message.
 */
export default ({
  alertType,
  message,
  onDismiss,
}: {
  alertType: 'success' | 'danger'
  message: string
  onDismiss: () => void
}) => {
  const prefix = alertType === 'success' ? 'Success: ' : 'Error: '
  return (
    <Alert bsStyle={alertType} onDismiss={onDismiss}>
      <strong>{prefix}</strong>
      {message}
    </Alert>
  )
}
|
<gh_stars>1-10
"use strict";

// Compiled (CommonJS) icon-data module: an unchecked radio button drawn
// as two concentric circles on a 16x16 viewBox. The object shape
// (viewBox + children path list) feeds an SVG icon renderer —
// presumably generated output; edit the source icon set, not this file.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.radioUnchecked = void 0;
var radioUnchecked = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M8 0c-4.418 0-8 3.582-8 8s3.582 8 8 8 8-3.582 8-8-3.582-8-8-8zM8 14c-3.314 0-6-2.686-6-6s2.686-6 6-6c3.314 0 6 2.686 6 6s-2.686 6-6 6z"
}
}]
};
exports.radioUnchecked = radioUnchecked;
<reponame>werliefertwas/dashing_resque<filename>spec/lib/dashing_resque/version_spec.rb
require 'spec_helper'

# Smoke test: the gem must expose a non-nil VERSION constant.
describe DashingResque do
  it 'has a version number' do
    expect(DashingResque::VERSION).to_not be_nil
  end
end
|
package lit.litfx.controls.output;
import javafx.animation.Animation;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import javafx.util.Duration;
/**
 * A JavaFX Text node that reveals its content through an animation.
 * Currently only the TYPED (typewriter) style is implemented.
 *
 * @author phillsm1
 */
public class AnimatedText extends Text {

    public static String DEFAULT_FONT = "Consolas Bold";
    public static double DEFAULT_FONT_SIZE = 16.0;
    public static Color DEFAULT_COLOR = Color.GREEN;

    /** Available reveal animations. */
    public static enum ANIMATION_STYLE {TYPED} //exploding, particle, fadein

    private ANIMATION_STYLE animationStyle;
    private String textString;
    // Delay between successive revealed characters, in milliseconds.
    private double animationTimeMS = 30;

    /** Creates an empty node with the default font, fill and TYPED style. */
    public AnimatedText() {
        this("", new Font(DEFAULT_FONT, DEFAULT_FONT_SIZE), DEFAULT_COLOR, ANIMATION_STYLE.TYPED);
    }

    /**
     * Fully configured constructor.
     *
     * @param textString     initial text content
     * @param font           font applied to the node
     * @param color          fill color applied to the node
     * @param animationStyle reveal animation to use
     */
    public AnimatedText(String textString, Font font, Color color, ANIMATION_STYLE animationStyle) {
        super(textString);
        this.textString = textString;
        this.animationStyle = animationStyle;
        getStyleClass().add("litlog-text");
        setFont(font);
        setFill(color);
    }

    /**
     * Text-only constructor; uses the TYPED style. Note: unlike the no-arg
     * constructor, this does NOT apply the class's default font/fill.
     */
    public AnimatedText(String textString) {
        super(textString);
        this.textString = textString;
        this.animationStyle = ANIMATION_STYLE.TYPED;
        getStyleClass().add("litlog-text");
    }

    /** Replaces the displayed text, revealing {@code newText} via animation. */
    public void animate(String newText) {
        animateTyped(newText);
    }

    /** Replays the configured animation over the node's current text. */
    public void animate() {
        switch (getAnimationStyle()) {
            case TYPED:
                animateTyped(getText());
                break; // added: guards against fall-through once new styles exist
            default:
                break;
        }
    }

    /**
     * Typewriter effect: clears the node, then shows one more character of
     * {@code str} every {@link #getAnimationTimeMS()} ms until the whole
     * string is visible, at which point the timeline stops itself.
     */
    private void animateTyped(String str) {
        final IntegerProperty i = new SimpleIntegerProperty(0);
        Timeline timeline = new Timeline();
        setText("");
        String animatedString = str;
        KeyFrame keyFrame1 = new KeyFrame(Duration.millis(getAnimationTimeMS()), event -> {
            if (i.get() > animatedString.length()) {
                timeline.stop();
            } else {
                setText(animatedString.substring(0, i.get()));
                i.set(i.get() + 1);
            }
        });
        timeline.getKeyFrames().addAll(keyFrame1);
        timeline.setCycleCount(Animation.INDEFINITE);
        timeline.play();
    }

    /**
     * @return the animationStyle
     */
    public ANIMATION_STYLE getAnimationStyle() {
        return animationStyle;
    }

    /**
     * Conventionally named setter for the animation style (pairs with
     * {@link #getAnimationStyle()}).
     *
     * @param animationStyle the style to set
     */
    public void setAnimationStyle(ANIMATION_STYLE animationStyle) {
        this.animationStyle = animationStyle;
    }

    /**
     * @param animationStyle the style to set
     * @deprecated this accidentally overloads {@code javafx.scene.Node#setStyle(String)}
     *             (the CSS style setter), which is highly confusing at call
     *             sites; kept for backward compatibility — prefer
     *             {@link #setAnimationStyle(ANIMATION_STYLE)}.
     */
    @Deprecated
    public void setStyle(ANIMATION_STYLE animationStyle) {
        setAnimationStyle(animationStyle);
    }

    /**
     * @return the textString
     */
    public String getTextString() {
        return textString;
    }

    /**
     * @param textString the textString to set
     */
    public void setTextString(String textString) {
        this.textString = textString;
    }

    /**
     * @return the animationTimeMS
     */
    public double getAnimationTimeMS() {
        return animationTimeMS;
    }

    /**
     * @param animationTimeMS the animationTimeMS to set
     */
    public void setAnimationTimeMS(double animationTimeMS) {
        this.animationTimeMS = animationTimeMS;
    }
}
|
// Jasmine specs for Zest.Telephony.Views.AgentsView — a Backbone-style view
// listing the agents a call can be transferred to.
describe("Zest.Telephony.Views.AgentsView", function() {
describe("#initialize", function() {
var view;
beforeEach(function() {
// Mock XHR so no real request leaves the test process.
jasmine.Ajax.useMock();
setFixtures("<div class='wrapper'> </div>");
});
describe("when agents are not provided", function() {
it("request a list of transferable agents for an agent", function() {
view = new Zest.Telephony.Views.AgentsView({
el: $('.wrapper'),
currentAgentId: 123
});
// The view fetches its own collection, scoped by csr_id.
var request = mostRecentAjaxRequest();
expect(request.url).toBe('/zestphone/agents?csr_id=123');
});
});
});
describe("filter", function() {
var agents;
var view;
beforeEach(function() {
setFixtures("<div class='wrapper'> </div>");
// Pre-seed the collection so no fetch is needed.
agents = new Zest.Telephony.Collections.Agents([
{
id: 123,
name: 'abc',
csr_type: 'A'
},
{
id: 456,
name: 'xyz',
csr_type: 'B'
}
]);
view = new Zest.Telephony.Views.AgentsView({
el: $('.wrapper'),
agents: agents
});
});
describe("when the query is empty", function() {
it("shows all agents", function() {
// filter('') must reset the collection to the full agent list.
spyOn(view.agents, "reset");
view.filter("");
expect(view.agents.reset).toHaveBeenCalledWith(agents.toJSON());
});
});
describe("when query is provided", function() {
it("shows matched agents only", function() {
spyOn(view.agents, "reset");
view.filter("xyz");
expect(view.agents.reset).toHaveBeenCalledWith([agents.at(1).toJSON()]);
});
});
});
describe("#render", function(){
var agents, view;
beforeEach(function() {
setFixtures("<div class='wrapper'/>");
agents = new Zest.Telephony.Collections.Agents([
{
id: 123,
name: 'abc',
csr_id: 1000,
csr_type: 'A'
},
{
id: 456,
name: 'xyz',
csr_id: 1001,
csr_type: 'B'
}
]);
view = new Zest.Telephony.Views.AgentsView({
el: $('.wrapper'),
currentAgentId: 1000,
agents: agents
});
});
it ("excludes the current agent", function() {
// The agent matching currentAgentId (csr_id 1000) is filtered out,
// leaving a single list item.
view.render();
expect($('li', view.el).length).toBe(1);
});
});
});
|
def weighted_prob(word, category):
    """Return the weighted probability of ``word`` given ``category``.

    Blends the basic conditional probability (from ``calc_prob``) with a
    0.5 prior, weighted by how often the word appears across *all*
    categories. A word never seen in any category scores 0.
    """
    basic_prob = calc_prob(word, category)

    # Total appearances of the word across every category.
    counts = feature_set.get(word)
    tot = sum(counts.values()) if counts else 0

    if tot == 0:
        return 0  # word does not appear in any category

    return (0.5 + tot * basic_prob) / (1.0 + tot)
<gh_stars>0
// Bootstrap the root Angular module with its single core dependency.
(() => {
  angular.module('app', ['app.core']);
})();
|
package org.geektimes.projects.user.web.controller;
import java.util.Set;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import org.apache.commons.lang.StringUtils;
import org.geektimes.projects.user.domain.User;
import org.geektimes.projects.user.service.UserService;
import org.geektimes.web.mvc.controller.PageController;
/**
 * User registration controller.
 *
 * @author: liuawei
 * @date: 2021-03-01 16:39
 */
@Path("/register")
public class AuthController implements PageController {
@Resource(name = "bean/UserService")
private UserService userService;
@Resource(name = "bean/Validator")
private Validator validator;
/**
 * Example requests:
 *
 * Success redirects to the success page: 127.0.0.1:8080/register?name=evan&password=<PASSWORD>
 * Failure redirects to the failure page: 127.0.0.1:8080/register?name=evan&password=
 *
 * @param request
 * HTTP request
 * @param response
 * HTTP response
 * @return name of the JSP view to render
 * @throws Throwable
 */
@POST
@Override
public String execute(HttpServletRequest request, HttpServletResponse response) throws Throwable {
String phoneNumber = request.getParameter("phoneNumber");
String password = request.getParameter("password");
// GET just renders the form; only POST performs registration.
if (request.getMethod().equalsIgnoreCase("GET")){
return "login-form.jsp";
}else {
// Bean Validation of the submitted parameters.
User user = new User();
// Fixed id to satisfy the assignment's ID-validation constraint.
user.setId(1L);
user.setPhoneNumber(phoneNumber);
user.setPassword(password);
Set<ConstraintViolation<User>> validators = validator.validate(user);
// First violation with a non-blank message aborts with the failure page.
for (ConstraintViolation<User> c : validators) {
if (StringUtils.isNotBlank(c.getMessage())){
request.setAttribute("msg",c.getMessage());
return "registerFailed.jsp";
}
}
if (userService.register(user)) {
request.getServletContext().log("注册成功");
return "registerSuccess.jsp";
}
return "registerFailed.jsp";
}
}
}
import re


def extract_module_names(import_statements):
    """Extract unique top-level module names from import statements.

    Handles:
      * ``import a``                 -> ['a']
      * ``import a, b``              -> ['a', 'b']
      * ``import a as x``            -> ['a']
      * ``from pkg.mod import name`` -> ['pkg.mod']

    Fix: the original regex group ``[\\w.]+`` cannot match a comma, so its
    ``split(',')`` was dead code and ``"import a, b"`` only yielded ``a``.

    Returns a list of unique names (order unspecified, as before).
    """
    module_names = set()
    for statement in import_statements:
        # 'from X import ...' contributes only X, never the imported names.
        from_match = re.search(r'from\s+([\w.]+)', statement)
        if from_match:
            module_names.add(from_match.group(1))
            continue
        import_match = re.search(r'import\s+([\w.\s,]+)', statement)
        if import_match:
            for clause in import_match.group(1).split(','):
                tokens = clause.split()
                if tokens:
                    # First token is the module; drop any 'as alias' part.
                    module_names.add(tokens[0])
    return list(module_names)
<reponame>spjuanjoc/solid_principles_cpp
//
// Created by juan.castellanos on 10/01/20.
//
#ifndef ILOGGER_H
#define ILOGGER_H
#include <string>
/// Abstract logging interface: implementations receive preformatted
/// message strings via Log().
struct ILogger
{
virtual ~ILogger() = default;
/// Write one log message.
virtual void Log(const std::string& s) = 0;
protected:
// Constructors/assignment are protected so the type can only serve as a
// base class and cannot be instantiated or sliced directly.
ILogger() = default;
ILogger(const ILogger&) = default;
ILogger(ILogger&&) = default;
ILogger& operator=(const ILogger&) = default;
ILogger& operator=(ILogger&&) = default;
};
#endif //ILOGGER_H
|
/// <reference path="../event/EventDispatcher.ts"/>
/// <reference path="Assert.ts"/>
module WOZLLA.utils {
/**
 * Named-state machine with pluggable transitions. Dispatches
 * StateMachine.INIT from init() and StateMachine.CHANGE once a
 * transition into a new state completes.
 */
export class StateMachine extends WOZLLA.event.EventDispatcher {

    public static INIT:string = 'state.init';
    public static CHANGE:string = 'state.change';

    _defaultState:string;
    _currentState:string;
    _currentTransition:ITransition;
    // Map: stateName -> { data: {...}, [toState]: ITransition }
    _stateConfig:any = {};

    /** Registers a state; at most one state should be marked default. */
    defineState(name:string, isDefault:boolean=false) {
        Assert.isUndefined(this._stateConfig[name], 'state "' + name + '" has been defined');
        this._stateConfig[name] = {
            data: {}
        };
        if(isDefault) {
            this._defaultState = name;
        }
    }

    getStateData(state:string, key:string) {
        Assert.isNotUndefined(this._stateConfig[state], 'state "' + state + '" not defined');
        return this._stateConfig[state].data[key];
    }

    setStateData(state:string, key:string, data:any) {
        Assert.isNotUndefined(this._stateConfig[state], 'state "' + state + '" not defined');
        this._stateConfig[state].data[key] = data;
    }

    /** Registers the transition used when moving fromState -> toState. */
    defineTransition(fromState:string, toState:string, transition:ITransition) {
        Assert.isNotUndefined(this._stateConfig[fromState], 'state "' + fromState + '" not defined');
        Assert.isNotUndefined(this._stateConfig[toState], 'state "' + toState + '" not defined');
        this._stateConfig[fromState][toState] = transition;
    }

    /** Enters the default state and fires INIT. */
    init() {
        this._currentState = this._defaultState;
        this.dispatchEvent(new WOZLLA.event.Event(StateMachine.INIT, false, new StateEventData(this._currentState)));
    }

    getCurrentState():string {
        return this._currentState;
    }

    /**
     * Starts the (possibly asynchronous) transition into `state`,
     * canceling any transition already in flight.
     */
    changeState(state:string) {
        var from, to, transition;
        Assert.isNotUndefined(this._stateConfig[state]);
        from = this._currentState;
        to = state;
        // BUGFIX: transitions are registered under _stateConfig[from][to]
        // (see defineTransition), but the lookup previously used
        // _stateConfig[state][to] — i.e. [to][to] — so registered
        // transitions were never found unless from === to.
        transition = (from && this._stateConfig[from] && this._stateConfig[from][to]) || EmptyTransition.getInstance();
        if(this._currentTransition) {
            this._currentTransition.cancel();
        }
        transition.reset();
        transition.execute(from, to, () => {
            this._currentTransition = null;
            this._currentState = to;
            this.dispatchEvent(new WOZLLA.event.Event(StateMachine.CHANGE, false, new StateEventData(this._currentState)));
        });
        this._currentTransition = transition;
    }
}
/** Event payload carrying the state name for INIT/CHANGE events. */
export class StateEventData {
    constructor(public state:string) {
    }
}
/**
 * Contract for a (possibly asynchronous) state transition.
 */
export interface ITransition {
/** Return the transition to its initial, not-canceled condition. */
reset();
/** Abort a running transition; implementations should then skip onComplete (see EmptyTransition). */
cancel();
/** Run the transition and invoke onComplete when (and if) it finishes. */
execute(fromState:string, toState:string, onComplete:Function);
}
/**
 * No-op transition used when no transition was registered for a state
 * pair: it completes immediately unless canceled. Shared as a singleton.
 */
export class EmptyTransition implements ITransition {

    private static instance:EmptyTransition;

    public static getInstance():EmptyTransition {
        if(EmptyTransition.instance == null) {
            EmptyTransition.instance = new EmptyTransition();
        }
        return EmptyTransition.instance;
    }

    _canceled:boolean = false;

    reset() {
        this._canceled = false;
    }

    cancel() {
        this._canceled = true;
    }

    execute(fromState:string, toState:string, onComplete:Function) {
        if(!this._canceled) {
            onComplete();
        }
    }
}
} |
#!/bin/bash
# Launch one picoquicdemo client per CPU core (cores 0..a inclusive), each
# pinned with taskset, and bracket the run in Throughput.txt with +++/--- tags.
#
# $1 (a): highest core index to use
# $2 (b): free-form tag (packet loss rate, etc.) written to Throughput.txt
a=$1
b=$2

ifconfig ens3f1 192.168.0.4
echo "+++${b}" >>Throughput.txt

for i in $(seq 0 "${a}")
do
	#perf record --call-graph lbr -- taskset 64 ./picoquicdemo -l n -D 192.168.0.1 4433
	mask=$((1 << i))
	taskset $mask ./picoquicdemo -l n -D 192.168.0.1 4433 &
done

sleep 15
echo "---${b}" >>Throughput.txt
|
package info.javaspec.jupiter.syntax.fixture;
import info.javaspec.jupiter.syntax.fixture.Minimax.GameState;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.DynamicNode;
import org.junit.jupiter.api.TestFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import static org.junit.jupiter.api.Assertions.assertEquals;
@DisplayName("Fixture syntax: Minimax")
class MinimaxSpecs {
// Demonstrates the JavaSpec fixture syntax by specifying minimax scoring
// over a hand-built game tree (GameWithKnownStates below).
@TestFactory
DynamicNode specs() {
JavaSpec<Minimax> spec = new JavaSpec<>();
return spec.describe(Minimax.class, () -> {
spec.describe("score", () -> {
String max = "Max";
String min = "Min";
spec.subject(() -> new Minimax(max, min));
spec.context("given a game that is already over", () -> {
spec.it("scores a game ending in a draw as 0", () -> {
GameWithKnownStates game = new GameWithKnownStates(true);
assertEquals(0, spec.subject().score(game, max));
});
spec.it("scores a game won by the maximizer as +1", () -> {
GameWithKnownStates game = new GameWithKnownStates(true, max);
assertEquals(+1, spec.subject().score(game, max));
});
spec.it("scores a game won by the minimizer as -1", () -> {
GameWithKnownStates game = new GameWithKnownStates(true, min);
assertEquals(-1, spec.subject().score(game, max));
});
});
spec.context("given a game with 1 move left", () -> {
spec.it("the maximizer picks the move with the highest score", () -> {
GameWithKnownStates game = new GameWithKnownStates(false);
game.addKnownState("ThenDraw", new GameWithKnownStates(true));
game.addKnownState("ThenMaxWins", new GameWithKnownStates(true, max));
assertEquals(+1, spec.subject().score(game, max));
});
spec.it("the minimizer picks the move with the lowest score", () -> {
GameWithKnownStates game = new GameWithKnownStates(false);
game.addKnownState("ThenDraw", new GameWithKnownStates(true));
game.addKnownState("ThenMaxLoses", new GameWithKnownStates(true, min));
assertEquals(-1, spec.subject().score(game, min));
});
});
spec.context("given a game that has 2 or more moves left", () -> {
// AtomicReference works around the effectively-final requirement for
// state shared between beforeEach and the examples.
AtomicReference<GameWithKnownStates> game = new AtomicReference<>();
spec.beforeEach(() -> {
// Two-ply tree: Left ends in draw/max-win, Right in draw/max-loss,
// so optimal play from the root is a draw (score 0) for either side.
GameWithKnownStates theGame = new GameWithKnownStates(false);
GameWithKnownStates leftTree = new GameWithKnownStates(false);
theGame.addKnownState("Left", leftTree);
leftTree.addKnownState("ThenDraw", new GameWithKnownStates(true));
leftTree.addKnownState("ThenMaxWins", new GameWithKnownStates(true, max));
GameWithKnownStates rightTree = new GameWithKnownStates(false);
theGame.addKnownState("Right", rightTree);
rightTree.addKnownState("ThenDraw", new GameWithKnownStates(true));
rightTree.addKnownState("ThenMaxLoses", new GameWithKnownStates(true, min));
game.set(theGame);
});
spec.it("the maximizer assumes that the minimizer will pick the lowest score", () -> {
assertEquals(0, spec.subject().score(game.get(), max));
});
spec.it("the minimizer assumes that the maximizer will pick the highest score", () -> {
assertEquals(0, spec.subject().score(game.get(), min));
});
});
});
});
}
// Hand-scriptable GameState: terminal flag, optional winner, and a map of
// move-name -> successor game used to build small fixed game trees.
private static final class GameWithKnownStates implements GameState {
private final boolean isOver;
private final String winner;
private final Map<String, GameWithKnownStates> nextGames = new LinkedHashMap<>();
public GameWithKnownStates(boolean isOver) {
this.isOver = isOver;
this.winner = null;
}
public GameWithKnownStates(boolean isOver, String winner) {
this.isOver = isOver;
this.winner = winner;
}
public void addKnownState(String nextMove, GameWithKnownStates nextGame) {
this.nextGames.put(nextMove, nextGame);
}
@Override
public Collection<String> availableMoves() {
return new ArrayList<>(this.nextGames.keySet());
}
@Override
public String findWinner() {
return this.winner;
}
@Override
public boolean isOver() {
return this.isOver;
}
@Override
public GameState move(String move) {
return this.nextGames.get(move);
}
}
}
|
<filename>guns-api/src/main/java/com/stylefeng/guns/api/user/UserApi.java
package com.stylefeng.guns.api.user;
import com.stylefeng.guns.api.user.vo.UserInfoModel;
import com.stylefeng.guns.api.user.vo.UserModel;
/**
* Created by xianpeng.xia
* on 2019-09-06 01:14
*/
public interface UserApi {
// Authenticates by username/password; the meaning of the returned int
// (user id vs. status code) is defined by the implementation — confirm.
int login(String username, String password);
// Creates a new account; true on success.
boolean register(UserModel userModel);
// Checks the username — whether true means "available" or "exists"
// depends on the implementation; confirm at call sites.
boolean checkUsername(String username);
// Fetches profile details for the given user id.
UserInfoModel getUserInfo(int uuid);
// Persists changes and returns the updated profile.
UserInfoModel updateUserInfoModel(UserInfoModel userInfoModel);
}
|
<reponame>iamareebjamal/roboclub-amu
package amu.roboclub.models;
// Data model for a financial contribution record. Public fields and the
// no-arg constructor are presumably required by the serialization layer
// (e.g. Firebase-style reflection mapping) — confirm against usage.
public class Contribution {
public String contributor, purpose, remark, amount;
public Contribution() {
}
}
|
/**
* PROJECT HEADER
* @file solution.h
* @author: <NAME>
* Contact: <EMAIL>
* @date: 02/05/2021
* Subject: Diseño y Análisis de Algoritmos
* Practice: Numberº8
* Purpose: Maximum Diversity Problem
*/
#ifndef SOLUTION_H
#define SOLUTION_H
#include <iostream>
#include <vector>
#include "./element.h"
#include "./formulas.h"
#pragma once
// Candidate solution for the Maximum Diversity Problem: the selected
// elements plus a cached objective value (z_).
class Solution {
public:
Solution() {};
// dimensionK: number of elements to select; numberOfElementsN: pool size.
Solution(int dimensionK, int numberOfElementsN);
~Solution() {};
// Copy of the currently selected elements.
std::vector<Element> getSolution() const;
void addElementToSolution(Element newElement);
// Recomputes the diversity objective over the selected elements.
double calculateObjectiveFunction();
// Cached objective value — presumably set by calculateObjectiveFunction; confirm in the .cc file.
double getZ() const;
int size();
void resize(int size);
// Formats the solution for file output / console output respectively.
std::string printFile();
void print();
bool operator ==(const Solution& solution);
bool operator!=(const Solution& solution);
Element& operator[](int index);
private:
std::vector<Element> solution_;
int dimensionK_;
int numberOfElementsN_;
double z_;
};
#endif // !SOLUTION_H |
#!/bin/sh
# Control script for the bundled Apache httpd (start|stop|status|cleanpid).
# Exit codes: 0 ok, 3 start failed, 4 stop failed, 8 bad config.
HTTPD_PIDFILE=/home/dan/lampstack-7.1.20-0/apache2/logs/httpd.pid
HTTPD="/home/dan/lampstack-7.1.20-0/apache2/bin/httpd -f /home/dan/lampstack-7.1.20-0/apache2/conf/httpd.conf"
STATUSURL="http://localhost/server-status"
HTTPD_STATUS=""
HTTPD_PID=""
ERROR=0
SERVER=both
. /home/dan/lampstack-7.1.20-0/apache2/bin/envvars
# Read a PID from the given pidfile into PID (empty if the file is missing).
get_pid() {
PID=""
PIDFILE=$1
# check for pidfile
if [ -f "$PIDFILE" ] ; then
PID=`cat $PIDFILE`
fi
}
# Set HTTPD_PID from the httpd pidfile, if it holds a positive number.
get_apache_pid() {
get_pid $HTTPD_PIDFILE
if [ ! "$PID" ]; then
return
fi
if [ "$PID" -gt 0 ]; then
HTTPD_PID=$PID
fi
}
# Return 1 if the given PID is alive, 0 otherwise (note: inverted vs. usual
# shell convention — callers read $? as a boolean "running" flag).
is_service_running() {
PID=$1
if [ "x$PID" != "x" ] && kill -0 $PID 2>/dev/null ; then
RUNNING=1
else
RUNNING=0
fi
return $RUNNING
}
# Set HTTPD_STATUS and return 1 when httpd is running, 0 when not.
is_apache_running() {
get_apache_pid
is_service_running $HTTPD_PID
RUNNING=$?
if [ $RUNNING -eq 0 ]; then
HTTPD_STATUS="apache not running"
else
HTTPD_STATUS="apache already running"
fi
return $RUNNING
}
# Abort the whole script (exit 8) if the httpd configuration is invalid.
test_apache_config() {
if $HTTPD -t; then
ERROR=0
else
ERROR=8
echo "apache config test fails, aborting"
exit $ERROR
fi
}
# Validate config, then start httpd unless it is already running.
start_apache() {
test_apache_config
is_apache_running
RUNNING=$?
if [ $RUNNING -eq 1 ]; then
echo "$0 $ARG: httpd (pid $HTTPD_PID) already running"
else
cleanpid
if $HTTPD ; then
echo "$0 $ARG: httpd started at port 8080"
else
echo "$0 $ARG: httpd could not be started"
ERROR=3
fi
fi
}
# Stop httpd and wait (up to ~80s) for it to exit. Pass "no_exit" to
# return instead of exiting when httpd was not running.
stop_apache() {
NO_EXIT_ON_ERROR=$1
test_apache_config
is_apache_running
RUNNING=$?
if [ $RUNNING -eq 0 ]; then
echo "$0 $ARG: $HTTPD_STATUS"
if [ "x$NO_EXIT_ON_ERROR" != "xno_exit" ]; then
exit
else
return
fi
fi
get_apache_pid
kill $HTTPD_PID
COUNTER=40
while [ $RUNNING -eq 1 ] && [ $COUNTER -ne 0 ]; do
COUNTER=`expr $COUNTER - 1`
sleep 2
is_apache_running
RUNNING=$?
done
is_apache_running
RUNNING=$?
if [ $RUNNING -eq 0 ]; then
echo "$0 $ARG: httpd stopped"
else
echo "$0 $ARG: httpd could not be stopped"
ERROR=4
fi
}
# Remove a stale pidfile.
cleanpid() {
rm -f $HTTPD_PIDFILE
}
# Command dispatch.
if [ "x$1" = "xstart" ]; then
start_apache
elif [ "x$1" = "xstop" ]; then
stop_apache
elif [ "x$1" = "xstatus" ]; then
is_apache_running
echo "$HTTPD_STATUS"
elif [ "x$1" = "xcleanpid" ]; then
cleanpid
fi
exit $ERROR
|
def print_multiplication_table(rows, columns):
    """Print a rows x columns multiplication table with 1-based headers.

    Output is tab-separated: a header line of column indices, then one
    line per row starting with the row index followed by the products.
    """
    header = ''.join(f'\t{col}' for col in range(1, columns + 1))
    print(header)
    for row in range(1, rows + 1):
        cells = ''.join(f'{row * col}\t' for col in range(1, columns + 1))
        print(f'{row}\t{cells}')
input_list = [1, 2, 3]
# Retained for backward compatibility; the function no longer reads it.
num_elements = len(input_list)


def create_symmetric_matrix(input_list):
    """Build an n x n matrix from ``input_list`` (n = len(input_list)).

    Fix: the original used the module-level ``num_elements`` (frozen at 3),
    so any list of a different length raised IndexError or produced a
    wrong-sized matrix. The size is now derived from the argument.

    Note: since input_list[i] == input_list[j] on the diagonal, the
    original's ``i == j`` branch was redundant — every row is simply a
    copy of input_list. That exact output is preserved.
    """
    n = len(input_list)
    return [[input_list[j] for j in range(n)] for _ in range(n)]


res = create_symmetric_matrix(input_list)
print(res)
<gh_stars>0
/**
 * Build a random code of `lon` characters drawn (with replacement) from
 * the string `chars`.
 *
 * Fixes: the original leaked `code`, the loop index and `rand` as
 * implicit globals, which breaks under strict mode and lets concurrent
 * callers trample each other's state.
 *
 * Note: Math.random() is not cryptographically secure; do not use the
 * result as a secret token.
 *
 * @param {string} chars - pool of candidate characters
 * @param {number} lon - desired code length
 * @returns {string} the generated code
 */
function rand_code(chars, lon) {
  let code = '';
  for (let i = 0; i < lon; i++) {
    const pick = Math.floor(Math.random() * chars.length);
    code += chars.charAt(pick);
  }
  return code;
}
// Character pool and length for generated discount codes.
// Fix: these were implicit globals; `var` keeps them page-global in a
// classic browser script while being strict-mode safe.
var caracteres = "0123456789ABCD";
var longitud = 4;
//alert(rand_code(caracteres, longitud));
// returns a random code string (the old comment claimed 20 characters,
// but `longitud` is 4)
// Opens the discount-code page in a fixed 400x400 popup.
// Browser-only: relies on the global `window`.
function openWin() {
window.open("codigodescuento.html","_blank","toolbar=yes, location=yes, directories=no, status=no, menubar=yes, scrollbars=yes, resizable=no, copyhistory=yes, width=400, height=400");
}
#!/bin/bash
# One-time SolrCloud bootstrap: create the /solr chroot in ZooKeeper and
# upload solr.xml plus the nexustiles config set. Idempotent — skips
# everything when /solr already exists.
ZK="zk1:2181,zk2:2181,zk3:2181"
if solr zk ls /solr -z $ZK ; then
echo "/solr root exists in ZooKeeper. Skip creation of /solr root."
else
echo "Create /solr root path in ZooKeeper"
if solr zk mkroot /solr -z $ZK ; then
echo "Upload solr.xml to zookeeper"
solr zk cp file:/opt/solr/server/solr/solr.xml zk:/solr.xml -z $ZK/solr
echo "Clone nexus repo"
git clone https://github.com/dataplumber/nexus.git /opt/solr/nexus
echo "Upload nexustiles config to zookeeper"
solr zk upconfig -n nexustiles -d /opt/solr/nexus/data-access/config/schemas/solr/nexustiles/conf -z $ZK/solr
fi
fi
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.dmr.stream;
import java.io.Flushable;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import org.jboss.dmr.ModelType;
/**
 * DMR writer. Instances of this interface are not thread safe.
 * Tokens must be written in an order that forms a valid DMR document;
 * violations surface as {@link ModelException}.
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @see ModelStreamFactory
 */
public interface ModelWriter extends Flushable, AutoCloseable {
/**
 * Writes DMR <code>object start</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeObjectStart() throws IOException, ModelException;
/**
 * Writes DMR <code>object end</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeObjectEnd() throws IOException, ModelException;
/**
 * Writes DMR <code>property start</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writePropertyStart() throws IOException, ModelException;
/**
 * Writes DMR <code>property end</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writePropertyEnd() throws IOException, ModelException;
/**
 * Writes DMR <code>list start</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeListStart() throws IOException, ModelException;
/**
 * Writes DMR <code>list end</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeListEnd() throws IOException, ModelException;
/**
 * Writes DMR <code>undefined</code> token.
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeUndefined() throws IOException, ModelException;
/**
 * Writes DMR <code>string</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeString( String data ) throws IOException, ModelException;
/**
 * Writes DMR <code>expression</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeExpression( String data ) throws IOException, ModelException;
/**
 * Writes DMR <code>bytes</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeBytes( byte[] data ) throws IOException, ModelException;
/**
 * Writes DMR <code>true</code> or <code>false</code> token.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeBoolean( boolean data ) throws IOException, ModelException;
/**
 * Writes DMR <code>number</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeInt( int data ) throws IOException, ModelException;
/**
 * Writes DMR <code>number</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeLong( long data ) throws IOException, ModelException;
/**
 * Writes DMR <code>number</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeBigInteger( BigInteger data ) throws IOException, ModelException;
/**
 * Writes DMR <code>number</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeBigDecimal( BigDecimal data ) throws IOException, ModelException;
/**
 * Writes DMR <code>number</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeDouble( double data ) throws IOException, ModelException;
/**
 * Writes DMR <code>type</code>.
 * @param data to encode
 * @return this writer instance
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
ModelWriter writeType( ModelType data ) throws IOException, ModelException;
/**
 * Writes all cached data.
 * @throws IOException if some I/O error occurs
 */
@Override
void flush() throws IOException;
/**
 * Free resources associated with this writer. Never closes the underlying output stream or writer.
 * @throws IOException if some I/O error occurs
 * @throws ModelException if invalid DMR write attempt is detected
 */
@Override
void close() throws IOException, ModelException;
}
|
package com.github.sriki77.apiproxy.instrument.model;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamAsAttribute;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import org.w3c.dom.Node;
// One named conditional flow of an API proxy, holding its request and
// response pipelines. XStream maps it from/to the proxy's <Flow> XML.
@XStreamAlias("Flow")
public class Flow implements NodeHolder, LocationProvider {
@XStreamAlias("Request")
protected RequestFlow requestFlow;
@XStreamAlias("Response")
protected ResponseFlow responseFlow;
@XStreamAlias("Condition")
protected String condition;
@XStreamAlias("name")
@XStreamAsAttribute
private String name;
// Not serialized; set via setParent after deserialization.
@XStreamOmitField
protected LocationProvider parent;
// Re-attaches the DOM nodes for the request/response sub-flows by
// locating them under this flow's name in the document.
@Override
public void holdNode(Node node) {
NodeHolder.holdNode(requestFlow, NodeHolder.findMyselfUsingXpath(node, getReqNodeXPath()));
NodeHolder.holdNode(responseFlow, NodeHolder.findMyselfUsingXpath(node, getResNodeXPath()));
}
protected String getReqNodeXPath() {
return String.format("//Flow[@name='%s']/Request", name);
}
protected String getResNodeXPath() {
return String.format("//Flow[@name='%s']/Response", name);
}
// NOTE: returns a fresh empty RequestFlow on every call when the field is
// null — callers should not expect the returned instance to be cached.
public RequestFlow getRequestFlow() {
return requestFlow == null ? new RequestFlow() : requestFlow;
}
// Same null-object behavior as getRequestFlow().
public ResponseFlow getResponseFlow() {
return responseFlow == null ? new ResponseFlow() : responseFlow;
}
// Wires this flow into the location chain and propagates itself as the
// parent of its request/response pipelines.
@Override
public void setParent(LocationProvider parent) {
this.parent = parent;
LocationProvider.setParent(requestFlow, this);
LocationProvider.setParent(responseFlow, this);
}
@Override
public String location() {
return LocationProvider.append(parent, name);
}
public String getName() {
return name;
}
}
|
package com.exception;
import org.junit.Test;
public class ExceptionTest {
// Both tests expect an NPE regardless of whether the callee declares it:
// unchecked exceptions propagate identically either way.
@Test(expected = NullPointerException.class)
public void testRuntimeExceptionForThrowException() {
PersonThrowException personThrowException = new PersonThrowException();
personThrowException.throwNullPointException();
}
@Test(expected = NullPointerException.class)
public void testRuntimeExceptionForNoThrowException() {
PersonThrowException personThrowException = new PersonThrowException();
personThrowException.noThrowNullPointException();
}
// Scratch computation (compound-growth sum), not an assertion-based test.
@Test
public void test2(){
// NOTE(review): 'total' is an int, so each 8.28 * subtotal term is
// truncated before accumulation — likely unintended; confirm whether
// a double accumulator was meant.
int total = 0;
for(int i=0; i<10;i++){
total += 8.28 * getSubTotal(i+1);
System.out.println(8.28 * getSubTotal(i+1));
}
System.out.println(total);
}
// Returns (1.15/1.09)^i, recomputed iteratively.
public double getSubTotal(int i){
double subTotal = 1;
for(int j =0; j< i; j++){
subTotal = subTotal * (1.15/1.09);
}
return subTotal;
}
}
|
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
import os.path as osp
import platform
import shutil
import sys
import tempfile
import warnings
from importlib import import_module
from mmcv.utils import Config, ConfigDict
from mmcv.utils.config import BASE_KEY, DEPRECATION_KEY
from mmcv.utils.misc import import_modules_from_strings
from mmcv.utils.path import check_file_exist
from mpa.utils.logger import get_logger
logger = get_logger()
class MPAConfig(Config):
    """``mmcv.Config`` variant whose ``_base_`` merge tolerates duplicate keys.

    Upstream ``Config._file2dict`` raises ``KeyError`` when two base files
    define the same key; this subclass logs a warning and merges the dicts
    instead (see the loop over ``cfg_dict_list`` below).
    """

    @staticmethod
    def _file2dict(filename, use_predefined_variables=True):
        """Parse a py/json/yaml config file into ``(cfg_dict, cfg_text)``.

        Largely mirrors ``mmcv.utils.Config._file2dict``; only the handling
        of duplicate keys across ``_base_`` files differs.
        """
        filename = osp.abspath(osp.expanduser(filename))
        check_file_exist(filename)
        fileExtname = osp.splitext(filename)[1]
        if fileExtname not in ['.py', '.json', '.yaml', '.yml']:
            raise IOError('Only py/yml/yaml/json type are supported now!')

        with tempfile.TemporaryDirectory() as temp_config_dir:
            temp_config_file = tempfile.NamedTemporaryFile(
                dir=temp_config_dir, suffix=fileExtname)
            if platform.system() == 'Windows':
                # Windows cannot reopen a NamedTemporaryFile while the first
                # handle is still open, so close it before writing below.
                temp_config_file.close()
            temp_config_name = osp.basename(temp_config_file.name)
            # Substitute predefined variables (e.g. {{fileDirname}}) into a
            # temp copy so the original file stays untouched.
            if use_predefined_variables:
                Config._substitute_predefined_vars(filename,
                                                   temp_config_file.name)
            else:
                shutil.copyfile(filename, temp_config_file.name)
            # Substitute base variables from placeholders to strings
            base_var_dict = Config._pre_substitute_base_vars(
                temp_config_file.name, temp_config_file.name)

            if filename.endswith('.py'):
                # Import the temp copy as a module; its public attributes
                # become the config entries.
                temp_module_name = osp.splitext(temp_config_name)[0]
                sys.path.insert(0, temp_config_dir)
                Config._validate_py_syntax(filename)
                mod = import_module(temp_module_name)
                sys.path.pop(0)
                cfg_dict = {
                    name: value
                    for name, value in mod.__dict__.items()
                    if not name.startswith('__')
                }
                # delete imported module so a later load re-executes the file
                del sys.modules[temp_module_name]
            elif filename.endswith(('.yml', '.yaml', '.json')):
                import mmcv
                cfg_dict = mmcv.load(temp_config_file.name)
            # close temp file
            temp_config_file.close()

        # check deprecation information
        if DEPRECATION_KEY in cfg_dict:
            deprecation_info = cfg_dict.pop(DEPRECATION_KEY)
            warning_msg = f'The config file (unknown) will be deprecated ' \
                'in the future.'
            if 'expected' in deprecation_info:
                warning_msg += f' Please use {deprecation_info["expected"]} ' \
                    'instead.'
            if 'reference' in deprecation_info:
                warning_msg += ' More information can be found at ' \
                    f'{deprecation_info["reference"]}'
            warnings.warn(warning_msg)

        cfg_text = filename + '\n'
        with open(filename, 'r', encoding='utf-8') as f:
            # Setting encoding explicitly to resolve coding issue on windows
            cfg_text += f.read()

        if BASE_KEY in cfg_dict:
            # Recursively load every file listed under the _base_ key.
            cfg_dir = osp.dirname(filename)
            base_filename = cfg_dict.pop(BASE_KEY)
            base_filename = base_filename if isinstance(
                base_filename, list) else [base_filename]

            cfg_dict_list = list()
            cfg_text_list = list()
            for f in base_filename:
                _cfg_dict, _cfg_text = MPAConfig._file2dict(osp.join(cfg_dir, f))
                cfg_dict_list.append(_cfg_dict)
                cfg_text_list.append(_cfg_text)

            base_cfg_dict = dict()
            # Unlike upstream mmcv (which raises KeyError), duplicate keys
            # among bases are merged, with a warning; later bases win on
            # conflicting leaves via Config._merge_a_into_b.
            for c in cfg_dict_list:
                if len(base_cfg_dict.keys() & c.keys()) > 0:
                    logger.warning(f'Duplicate key is detected among bases [{base_cfg_dict.keys() & c.keys()}]')
                    logger.debug(f'base = {base_cfg_dict}, cfg = {c}')
                    base_cfg_dict = Config._merge_a_into_b(base_cfg_dict, c)
                    logger.debug(f'merged dict = {base_cfg_dict}')
                else:
                    base_cfg_dict.update(c)

            # Substitute base variables from strings to their actual values
            cfg_dict = Config._substitute_base_vars(cfg_dict, base_var_dict,
                                                    base_cfg_dict)

            # The current file's own entries override the merged bases.
            base_cfg_dict = Config._merge_a_into_b(cfg_dict, base_cfg_dict)
            cfg_dict = base_cfg_dict

            # merge cfg_text
            cfg_text_list.append(cfg_text)
            cfg_text = '\n'.join(cfg_text_list)

        return cfg_dict, cfg_text

    @staticmethod
    def fromfile(filename,
                 use_predefined_variables=True,
                 import_custom_modules=True):
        """Load a config file and return a plain ``mmcv.Config``.

        If the config declares ``custom_imports``, those modules are imported
        first for their registration side effects.
        """
        cfg_dict, cfg_text = MPAConfig._file2dict(filename,
                                                  use_predefined_variables)
        if import_custom_modules and cfg_dict.get('custom_imports', None):
            import_modules_from_strings(**cfg_dict['custom_imports'])
        return Config(cfg_dict, cfg_text=cfg_text, filename=filename)
def copy_config(cfg):
    """Return a deep copy of an mmcv ``Config`` via a pickle round-trip.

    A pickle round-trip is used instead of ``copy.deepcopy`` so the
    ``Config`` internals (``_cfg_dict``, ``filename``) are copied as one
    consistent state.

    :param cfg: the ``Config`` instance to copy
    :raises ValueError: if ``cfg`` is not a ``Config``
    :return: an independent copy of ``cfg``
    """
    if not isinstance(cfg, Config):
        # BUG FIX: the original constructed the ValueError but never raised
        # it, silently continuing with an invalid argument.
        raise ValueError(f'cannot copy this instance {type(cfg)}')
    import pickle
    data = pickle.dumps(cfg)
    return pickle.loads(data)
def update_or_add_custom_hook(cfg: Config, hook_cfg: ConfigDict):
    """Merge ``hook_cfg`` into the existing hook of the same ``type`` in
    ``cfg['custom_hooks']``, or append it when no such hook exists yet."""
    hooks = cfg.get('custom_hooks', [])
    # First hook whose type matches, or None when absent.
    existing = next(
        (hook for hook in hooks if hook['type'] == hook_cfg['type']), None)
    if existing is None:
        hooks.append(hook_cfg)
    else:
        existing.update(hook_cfg)
    cfg['custom_hooks'] = hooks
|
import React from "react"
// Default context value used when no UsersProvider is mounted above the
// consumer. NOTE(review): it carries only `users`, not `setUsers` — consumers
// rendered outside a provider would see setUsers as undefined; confirm.
const defaultState = {
  users: []
}

const UsersContext = React.createContext(defaultState)
class UsersProvider extends React.Component {
state = {
users: []
}
setUsers = (newUsers) => {
this.setState({ users: newUsers })
}
render() {
const { children } = this.props
const { users } = this.state
return (
<UsersContext.Provider
value={{
users,
setUsers: this.setUsers
}}
>
{children}
</UsersContext.Provider>
)
}
}
export default UsersContext
export { UsersProvider }
|
import java.util.Random;
public class RandomNumber {

    /** Single shared generator. Creating a new {@link Random} on every call
     *  (as the original did) wastes work and, being time-seeded, can yield
     *  correlated values when called in quick succession. */
    private static final Random RAND = new Random();

    /**
     * Returns a uniformly distributed random int in {0, 1}.
     *
     * @return 0 or 1
     */
    public static int getRand() {
        return RAND.nextInt(2);
    }
}
# platform = multi_platform_rhel
# Hardening remediation: remove the group- and other-write permission bits
# from every file under the standard system binary directories.
# -perm /022 matches files that have ANY of g+w or o+w set.
DIRS="/bin /usr/bin /usr/local/bin /sbin /usr/sbin /usr/local/sbin /usr/libexec"
for dirPath in $DIRS; do
	find "$dirPath" -perm /022 -exec chmod go-w '{}' \;
done
|
#!/bin/bash
# SLURM job: refit distributed multipole charges (MDCM/FDCM) for one frame of
# a scan, starting from the fit of the previous frame, then analyse the result.
#SBATCH --job-name=/data/unibas/boittier/test-neighbours2
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition=short
#SBATCH --output=/data/unibas/boittier/test-neighbours2_%A-%a.out

hostname

# Path to scripts and executables
cubefit=/home/unibas/boittier/fdcm_project/mdcm_bin/cubefit.x
fdcm=/home/unibas/boittier/fdcm_project/fdcm.x
ars=/home/unibas/boittier/fdcm_project/ARS.py

# Variables for the job
n_steps=10
n_charges=24
scan_name=SCAN_amide1.pdb-
suffix=.xyz.chk
cubes_dir=/data/unibas/boittier/fdcm/amide/scan-large
output_dir=/data/unibas/boittier/test-neighbours2
frames=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/frames.txt
initial_fit=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/24_charges_refined.xyz
initial_fit_cube=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/amide1.pdb.chk
prev_frame=69
start_frame=70
next_frame=89
acd=/home/unibas/boittier/fdcm_project/0_fit.xyz.acd

start=$start_frame
next=$next_frame
dir='frame_'$next
output_name=$output_dir/$dir/$dir'-'$start'-'$next'.xyz'
# NOTE: overrides the model1 default above — the starting charges come from
# the previous frame's refined fit.
initial_fit=$output_dir/"frame_"$start/"frame_"$start'-'$prev_frame'-'$start'.xyz'

# Go to the output directory
mkdir -p $output_dir
cd $output_dir
mkdir -p $dir
cd $dir

# Do Initial Fit
# ESP/density cubes for the start (reference) and next (target) frames.
esp1=$cubes_dir/$scan_name$start$suffix'.p.cube'
dens1=$cubes_dir/$scan_name$start$suffix'.d.cube'
esp=$cubes_dir/$scan_name$next$suffix'.p.cube'
dens=$cubes_dir/$scan_name$next$suffix'.d.cube'

# adjust reference frame (maps the previous fit onto the new geometry)
python $ars -charges $initial_fit -pcube $dens1 -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS.log

# do gradient descent fit
$fdcm -xyz $output_name.global -dens $dens -esp $esp -stepsize 0.2 -n_steps $n_steps -learning_rate 0.5 -output $output_name > $output_name.GD.log

# adjust reference frame
# NOTE(review): both -pcube and -pcube2 are $esp here, while the first ARS
# call used the density cubes — confirm this is intentional.
python $ars -charges $output_name -pcube $esp -pcube2 $esp -frames $frames -output $output_name -acd $acd > $output_name.ARS-2.log

# make a cube file for the fit
$cubefit -v -generate -esp $esp -dens $dens -xyz refined.xyz > $output_name.cubemaking.log

# do analysis (compares reference ESP against the model-generated ESP cube)
$cubefit -v -analysis -esp $esp -esp2 $n_charges'charges.cube' -dens $dens > $output_name.analysis.log

echo $PWD
|
/*
* Copyright The OpenTelemetry Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opentelemetry.instrumentation.auto.redisson;
import static io.opentelemetry.instrumentation.auto.redisson.RedissonClientTracer.TRACER;
import static java.util.Collections.singletonMap;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;
import static net.bytebuddy.matcher.ElementMatchers.named;
import com.google.auto.service.AutoService;
import io.opentelemetry.context.Scope;
import io.opentelemetry.javaagent.tooling.Instrumenter;
import io.opentelemetry.trace.Span;
import java.util.Map;
import net.bytebuddy.asm.Advice;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
import org.redisson.client.RedisConnection;
/**
 * Byte Buddy instrumentation that traces Redisson commands by wrapping
 * {@code org.redisson.client.RedisConnection#send(...)} in a span.
 */
@AutoService(Instrumenter.class)
public final class RedissonInstrumentation extends Instrumenter.Default {

  public RedissonInstrumentation() {
    // Instrumentation names used to enable/disable this module via config.
    super("redisson", "redis");
  }

  /** Instrument only the concrete Redisson connection class. */
  @Override
  public ElementMatcher<TypeDescription> typeMatcher() {
    return named("org.redisson.client.RedisConnection");
  }

  /** Helper classes to inject into the application class loader. */
  @Override
  public String[] helperClassNames() {
    return new String[] {packageName + ".RedissonClientTracer"};
  }

  /** Attaches RedissonAdvice to every method named "send". */
  @Override
  public Map<? extends ElementMatcher<? super MethodDescription>, String> transformers() {
    return singletonMap(
        isMethod().and(named("send")), RedissonInstrumentation.class.getName() + "$RedissonAdvice");
  }

  public static class RedissonAdvice {

    // Starts span + scope before send(); the @Advice.Local variables are
    // handed over to the exit advice below.
    @Advice.OnMethodEnter(suppress = Throwable.class)
    public static void onEnter(
        @Advice.This RedisConnection connection,
        @Advice.Argument(0) Object arg,
        @Advice.Local("otelSpan") Span span,
        @Advice.Local("otelScope") Scope scope) {
      span = TRACER.startSpan(connection, arg);
      scope = TRACER.startScope(span);
    }

    // Closes the scope and ends the span, recording a thrown exception if any.
    @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
    public static void stopSpan(
        @Advice.Thrown Throwable throwable,
        @Advice.Local("otelSpan") Span span,
        @Advice.Local("otelScope") Scope scope) {
      scope.close();
      if (throwable != null) {
        TRACER.endExceptionally(span, throwable);
      } else {
        TRACER.end(span);
      }
    }
  }
}
|
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button button = findViewById(R.id.button);
button.setOnClickListener(view ->
Toast.makeText(MainActivity.this, getRandomFact(), Toast.LENGTH_SHORT).show());
}
private String getRandomFact() {
// implement logic to get a random fact
}
} |
<gh_stars>0
import { Map2D, Map2DNode } from "./util/map2D";
import { p, readLines } from "./util/util";
// Advent of Code 2021 day 9: parse the height map, one digit per cell,
// addressed by (x, y) with y = input line index.
const lines = readLines("input/a09.txt");
const map = new Map2D<number>();
for (let y = 0; y < lines.length; y++) {
  const line = lines[y];
  for (let x = 0; x < line.length; x++) {
    map.set(x, y, parseInt(line[x]));
  }
}
/**
 * Flood-fills outward from a low point and returns the size of its basin.
 *
 * A neighbor joins the basin when its height is below 9 and not lower than
 * the cell it was reached from (i.e. it drains towards the seed).
 */
function getBasinSize(node: Map2DNode<number>) {
  const queue: Map2DNode<number>[] = [node];
  const seen = new Set<string>();
  // PERF: index-based scan instead of Array.shift(); shift() is O(n) per
  // dequeue, which made the original BFS quadratic in the basin size.
  for (let head = 0; head < queue.length; head++) {
    const current = queue[head];
    if (seen.has(current.getNodeKey())) {
      continue;
    }
    seen.add(current.getNodeKey());
    current.get4Neighbors().forEach((neighbor) => {
      if (neighbor.value !== undefined && neighbor.value < 9 && neighbor.value >= current.value!) {
        queue.push(neighbor);
      }
    });
  }
  return seen.size;
}
// Part 1: sum of (height + 1) over all low points.
// Part 2: product of the three largest basin sizes.
let lowPointSum = 0;
const basinSizes: number[] = [];
map.forEachNode((node) => {
  const height = node.value;
  if (height !== undefined) {
    // A low point has no 4-neighbor with height <= its own.
    const lowerNeighbors = node
      .get4Neighbors()
      .filter((neighborNode) => neighborNode.value !== undefined && neighborNode.value <= height);
    if (lowerNeighbors.length === 0) {
      lowPointSum += height + 1;
      basinSizes.push(getBasinSize(node));
    }
  }
});
p(lowPointSum);
p(
  basinSizes
    .sort((a, b) => b - a)
    .slice(0, 3)
    .reduce((prev, current) => prev * current, 1)
);
/*!
* @file
* @brief Implementation of the QuadMat class and the invert function
* @author <NAME>
* <EMAIL>
*
*/
#ifndef _TL_QUADMAT_
#define _TL_QUADMAT_
#include <iostream>
#include "exceptions.h"
namespace spectral{
/*! @brief POD container for quadratic fixed size matrices
*
* @ingroup containers
* The QuadMat stores its values inside the object.
* i.e. a QuadMat<double, 2> stores four double variables continously
* in memory. Therefore it is well suited for the use in the Matrix
* class (because memcpy and memset correctly work on this type)
* \note T and n should be of small size to reduce object size.
 * \note QuadMat declares constructors and is therefore not an aggregate; for
 * c++11 initializer-list construction see the commented-out constructor below.
* @tparam T tested with double and std::complex<double>
* @tparam n size of the Matrix, assumed to be small
*/
template <class T, size_t n>
class QuadMat
{
  public:
    /*! @brief No values are assigned*/
    QuadMat() = default;
    /*! @brief Initialize elements to a value
     *
     * @param value The initial value
     */
    QuadMat( const T& value)
    {
        for( unsigned i=0; i<n*n; i++)
            data[i] = value;
    }
    /*! @brief set memory to 0
     */
    void zero()
    {
        for( size_t i = 0; i < n*n; i++)
            data[i] = 0;
    }
    /*! @brief access operator
     *
     * Performs a range check if TL_DEBUG is defined
     * @param i row index
     * @param j column index
     * @return reference to value at that location
     */
    T& operator()(const size_t i, const size_t j){
#ifdef TL_DEBUG
        if( i >= n || j >= n)
            throw BadIndex( i, n, j, n, _ping_);
#endif
        return data[ i*n+j];
    }
    /*! @brief const access operator
     *
     * Performs a range check if TL_DEBUG is defined
     * @param i row index
     * @param j column index
     * @return const value at that location
     */
    const T& operator()(const size_t i, const size_t j) const {
#ifdef TL_DEBUG
        if( i >= n || j >= n)
            throw BadIndex( i, n, j, n, _ping_);
#endif
        return data[ i*n+j];
    }
    /*! @brief two Matrices are considered equal if elements are equal
     *
     * IDIOM FIX: returns plain bool — a const-qualified by-value return
     * (the original "const bool") has no effect and trips compiler warnings.
     * @param rhs Matrix to be compared to this
     * @return true if rhs does not equal this
     */
    bool operator!=( const QuadMat& rhs) const{
        for( size_t i = 0; i < n*n; i++)
            if( data[i] != rhs.data[i])
                return true;
        return false;
    }
    /*! @brief two Matrices are considered equal if elements are equal
     *
     * @param rhs Matrix to be compared to this
     * @return true if rhs equals this
     */
    bool operator==( const QuadMat& rhs) const {return !((*this != rhs));}
    /*! @brief puts a matrix linewise in output stream
     *
     * @param os the outstream
     * @param mat the matrix to output
     * @return the outstream
     */
    friend std::ostream& operator<<(std::ostream& os, const QuadMat<T,n>& mat)
    {
        for( size_t i=0; i < n ; i++)
        {
            for( size_t j = 0;j < n; j++)
                os << mat(i,j) << " ";
            os << "\n";
        }
        return os;
    }
    /*! @brief Read values into a Matrix from given istream
     *
     * The values are filled linewise into the matrix. Values are seperated by
     * whitespace charakters. (i.e. newline, blank, etc)
     * @param is The istream
     * @param mat The Matrix into which the values are written
     * @return The istream
     */
    friend std::istream& operator>> ( std::istream& is, QuadMat<T,n>& mat){
        // CLEANUP: removed a stray doubled brace block the original had here.
        for( size_t i=0; i<n; i++)
            for( size_t j=0; j<n; j++)
                is >> mat(i, j);
        return is;
    }
  private:
    T data[n*n];  // row-major storage, element (i,j) at data[i*n+j]
};
/*! @brief Return the One QuadMat
* @return Matrix containing ones on the diagonal and zeroes elsewhere
* @ingroup containers
*/
template< size_t n>
QuadMat<double, n> One()
{
    // Start from an all-zero matrix and set the diagonal to unity.
    QuadMat<double, n> identity(0);
    for( unsigned row=0; row<n; row++)
        identity(row,row) = 1;
    return identity;
}
/*! @brief Return the Zero QuadMat
* @return QuadMat containing only zeroes
* @ingroup containers
*/
template< size_t n>
QuadMat<double, n> Zero()
{
    // Every element is set to zero by the fill constructor.
    return QuadMat<double, n>(0);
}
/*! @brief inverts a 2x2 matrix of given type
*
* \note throws a Message if Determinant is zero.
* @tparam T The type must support basic algorithmic functionality (i.e. +, -, * and /)
* @param in The input matrix
* @param out The output matrix contains the invert of in on output.
* Inversion is inplace if in and out dereference the same object.
*/
template<class T>
void invert(const QuadMat<T, 2>& in, QuadMat< T,2>& out);

template<class T>
void invert(const QuadMat<T, 2>& m, QuadMat<T,2>& m1)
{
    // inverse of [a b; c d] = 1/det * [d -b; -c a], det = ad - bc
    T det, temp;
    det = m(0,0)*m(1,1) - m(0,1)*m(1,0);
    if( det== (T)0) throw Message("Determinant is Zero\n", _ping_);
    // Save m(0,0) first so the in-place case (m1 aliasing m) stays correct.
    temp = m(0,0);
    m1(0,0) = m(1,1)/det;
    // BUG FIX: the original used "m1(0,1) /= -det" / "m1(1,0) /= -det",
    // dividing whatever happened to be in the OUTPUT matrix — correct only
    // when m1 aliases m, and garbage for a distinct output. Assign from the
    // input instead; this is right in both the aliased and non-aliased case.
    m1(0,1) = -m(0,1)/det;
    m1(1,0) = -m(1,0)/det;
    m1(1,1) = temp/det;
}
/*! @brief inverts a 3x3 matrix of given type
*
* (overloads the 2x2 version)
* \note throws a Message if Determinant is zero.
* @tparam The type must support basic algorithmic functionality (i.e. +, -, * and /)
* @param in The input matrix
* @param out The output matrix contains the invert of in on output.
* Inversion is inplace if in and out dereference the same object.
*/
template< typename T>
void invert( const QuadMat< T, 3>& in, QuadMat<T,3>& out );

template< typename T>
void invert( const QuadMat< T, 3>& m, QuadMat<T,3>& m1 )
{
    // Cramer's rule: m1 = adj(m) / det(m).  Entries of m that are read after
    // the corresponding m1 entry may have been written (aliased, in-place
    // case) are saved into temp* first — do not reorder these statements.
    T det, temp00, temp01, temp02, temp10, temp11, temp20;
    // det via cofactor expansion along the first row
    det = m(0,0)*(m(1,1)*m(2,2)-m(2,1)*m(1,2))+m(0,1)*(m(1,2)*m(2,0)-m(1,0)*m(2,2))+m(0,2)*(m(1,0)*m(2,1)-m(2,0)*m(1,1));
    if( det== (T)0) throw Message("Determinant is Zero\n", _ping_);
    temp00 = m(0,0);
    temp01 = m(0,1);
    temp02 = m(0,2);
    m1(0,0) = (m(1,1)*m(2,2) - m(1,2)*m(2,1))/det;
    m1(0,1) = (m(0,2)*m(2,1) - m(0,1)*m(2,2))/det;
    m1(0,2) = (temp01*m(1,2) - m(0,2)*m(1,1))/det;
    temp10 = m(1,0);
    temp11 = m(1,1);
    m1(1,0) = (m(1,2)*m(2,0) - m(1,0)*m(2,2))/det;
    m1(1,1) = (temp00*m(2,2) - temp02*m(2,0))/det;
    m1(1,2) = (temp02*temp10 - temp00*m(1,2))/det;
    temp20 = m(2,0);
    m1(2,0) = (temp10*m(2,1) - temp11*m(2,0))/det;
    m1(2,1) = (temp01*temp20 - temp00*m(2,1))/det;
    m1(2,2) = (temp00*temp11 - temp10*temp01)/det;
}
} //namespace spectral
#endif //_TL_QUADMAT_
|
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import("funhtml.*");
import("jsutils.cmp");
import("jsutils.eachProperty");
import("exceptionutils");
import("execution");
import("stringutils.trim");
import("etherpad.sessions.getSession");
import("etherpad.utils.*");
/**
 * Splits a shell command into [imports, body]: the leading run of lines that
 * are blank or start with "import" forms the first part, everything from the
 * first other line onward forms the second. Both parts are newline-joined.
 */
function _splitCommand(cmd) {
  var importLines = [];
  var bodyLines = [];
  var inImports = true;
  cmd.split("\n").forEach(function(line) {
    var stripped = trim(line);
    if (stripped.length > 0 && stripped.indexOf("import") != 0) {
      inImports = false;
    }
    if (inImports) {
      importLines.push(line);
    } else {
      bodyLines.push(line);
    }
  });
  return [importLines.join("\n"), bodyLines.join("\n")];
}
/**
 * Evaluates an admin-shell command (imports evaluated separately from the
 * body) and returns the result rendered as a string; exceptions become a
 * plain-text stack trace instead of propagating.
 */
function getResult(cmd) {
  // BUG FIX: "result" was assigned without var (an implicit global, racy
  // across concurrent requests) and "resultString" was declared twice; the
  // wrapping IIFE served no purpose and is removed.
  var result;
  try {
    var parts = _splitCommand(cmd);
    result = execution.fancyAssEval(parts[0], parts[1]);
  } catch (e) {
    if (appjet.config.devMode) {
      (e.javaException || e.rhinoException || e).printStackTrace();
    }
    result = exceptionutils.getStackTracePlain(e);
  }
  var resultString;
  try {
    resultString = ((result && result.toString) ? result.toString() : String(result));
  } catch (ex) {
    resultString = "Error converting result to string: "+ex.toString();
  }
  return resultString;
}
/**
 * Renders the admin command shell. On POST (request.params.cmd set) the
 * command is executed, its result stashed in the session, and the browser is
 * redirected (POST-redirect-GET); on GET the last command/result from the
 * session are rendered side by side.
 */
function _renderCommandShell() {
  // run command if necessary
  if (request.params.cmd) {
    var cmd = request.params.cmd;
    var resultString = getResult(cmd);
    // Stash in the session so the post-redirect GET can render them.
    getSession().shellCommand = cmd;
    getSession().shellResult = resultString;
    response.redirect(request.path+(request.query?'?'+request.query:''));
  }

  var div = DIV({style: "padding: 4px; margin: 4px; background: #eee; "
                 + "border: 1px solid #338"});

  // command div: a textarea pre-filled with the last submitted command
  var oldCmd = getSession().shellCommand || "";
  var commandDiv = DIV({style: "width: 100%; margin: 4px 0;"});
  commandDiv.push(FORM({style: "width: 100%;",
                        method: "POST", action: request.path + (request.query?'?'+request.query:'')},
                       TEXTAREA({name: "cmd",
                                 style: "border: 1px solid #555;"
                                 + "width: 100%; height: 160px; font-family: monospace;"},
                                html(oldCmd)),
                       INPUT({type: "submit"})));

  // result div: last result is shown once, then cleared from the session
  var resultDiv = DIV({style: ""});
  var isResult = getSession().shellResult != null;
  if (isResult) {
    resultDiv.push(DIV(
      PRE({style: 'border: 1px solid #555; font-family: monospace; margin: 4px 0; padding: 4px;'},
          getSession().shellResult)));
    delete getSession().shellResult;
    resultDiv.push(DIV({style: "text-align: right;"},
                       A({href: qpath({})}, "clear")));
  } else {
    resultDiv.push(P("result will go here"));
  }

  // two-column layout: command on the left, result on the right
  var t = TABLE({border: 0, cellspacing: 0, cellpadding: 0, width: "100%",
                 style: "width: 100%;"});
  t.push(TR(TH({width: "49%", align: "left"}, " Command:"),
            TH({width: "49%", align: "left"}, " "+(isResult ? "Result:" : ""))),
         TR(TD({valign: "top", style: 'padding: 4px;'}, commandDiv),
            TD({valign: "top", style: 'padding: 4px;'}, resultDiv)));
  div.push(t);
  return div;
}
function handleRequest() {
var body = BODY();
body.push(A({href: '/ep/admin/'}, html("« Admin")));
body.push(BR(), BR());
body.push(_renderCommandShell());
response.write(HTML(body));
}
|
import React, { Component } from 'react';
import { BekreftetKorrektInformasjon, scrollTo, SoknadOppsummering, sykepengesoknadstatuser, Utvidbar, VaerKlarOverAt } from '@navikt/digisyfo-npm';
import Soknadstatuspanel from '../../statuspanel/Soknadstatuspanel';
import { sykepengesoknad as sykepengesoknadPt } from '../../../propTypes/index';
import RelaterteSoknaderContainer from '../../relaterte-soknader/RelaterteSoknaderContainer';
import KorrigertAvContainer from './KorrigertAvContainer';
import SykepengesoknadHeader from '../../../components/soknad-felles/SykepengesoknadHeader';
import SykmeldingUtdragContainer from '../../SykmeldingUtdragContainer';
const { KORRIGERT, SENDT, TIL_SENDING } = sykepengesoknadstatuser;
class SoknadSendt extends Component {
scrollTilTopp() {
scrollTo(this.sendtSoknad, 300);
}
render() {
const { sykepengesoknad } = this.props;
const oppsummeringsoknad = sykepengesoknad.oppsummering;
return (<div ref={(c) => {
this.sendtSoknad = c;
}}>
<SykepengesoknadHeader sykepengesoknad={sykepengesoknad} />
{
sykepengesoknad.status === KORRIGERT
&& <KorrigertAvContainer sykepengesoknad={sykepengesoknad} />
}
<Soknadstatuspanel sykepengesoknad={sykepengesoknad} />
<SykmeldingUtdragContainer sykepengesoknad={sykepengesoknad} />
<Utvidbar tittel="Oppsummering" className="blokk">
<div className="blokk--s">
<SoknadOppsummering oppsummeringsoknad={sykepengesoknad.oppsummering} />
</div>
<BekreftetKorrektInformasjon oppsummeringsoknad={sykepengesoknad.oppsummering} />
</Utvidbar>
<div className="redaksjonelt-innhold oppsummering__vaerKlarOverAt panel blokk">
<VaerKlarOverAt oppsummeringsoknad={oppsummeringsoknad} />
</div>
{(sykepengesoknad.status === SENDT || sykepengesoknad.status === TIL_SENDING) &&
<RelaterteSoknaderContainer sykepengesoknadId={sykepengesoknad.id} />}
</div>);
}
}
// Runtime prop validation: the component expects a sykepengesoknad object.
SoknadSendt.propTypes = {
  sykepengesoknad: sykepengesoknadPt,
};
export default SoknadSendt;
|
<filename>src/main/scala/de/htwg/se/durak/model/roundComponent/roundBaseImpl/Round.scala
package de.htwg.se.durak.model.roundComponent.roundBaseImpl
import de.htwg.se.durak.model.gameElementsComponent.{CardDeckInterface, FieldInterface}
import de.htwg.se.durak.model.playerComponent.Player
import de.htwg.se.durak.model.roundComponent.{GameDataInterface, RoundDataInterface, RoundInterface, TurnDataInterface}
/**
* Handler for creating round elements
*/
/**
 * Factory for the round-related data objects (round data, turn data, game data).
 */
class Round extends RoundInterface {

  /** Builds a [[RoundData]] with site id, accepted inputs, an optional
   *  input validator and optional extra string parameters. */
  def createRoundData(siteID: Int,
                      validateInputList: List[String],
                      validateInput: Option[String => Boolean],
                      param: Option[List[String]]): RoundDataInterface =
    RoundData(siteID, validateInputList, validateInput, param)

  /** Builds a [[RoundData]] from the minimal set of arguments. */
  def createRoundData(siteID: Int,
                      validateInputList: List[String]): RoundDataInterface =
    new RoundData(siteID, validateInputList)

  /** Builds a [[TurnData]] bundling the full game state for one turn:
   *  players and their decks, attacker/defender, field, main and out decks,
   *  trump and turn type. */
  //noinspection ScalaStyle
  def createTurnData(players: List[Player],
                     playerDecks: List[CardDeckInterface],
                     currentPlayer: Int,
                     defendPlayer: Int,
                     field: FieldInterface,
                     mainDeck: CardDeckInterface,
                     outDeck: CardDeckInterface,
                     trump: Int,
                     turnType: Int): TurnDataInterface =
    TurnData(players, playerDecks, currentPlayer, defendPlayer, field, mainDeck, outDeck, trump, turnType)

  /** Builds a [[GameData]] from round data plus optional turn data. */
  def createGameData(roundData: RoundDataInterface, turnData: Option[TurnDataInterface]): GameDataInterface =
    GameData(roundData, turnData)
}
|
module Bocu
  # Parses a JSON response body and partitions its top-level keys into
  # payload data (channels/coubs) and everything else as metadata.
  class JsonParser
    # Top-level keys treated as payload rather than metadata.
    DATA_KEYS = %i[channels coubs].freeze
    DEFAULT_METADATA = {}.freeze

    def initialize(body)
      @json = MultiJson.load(body, symbolize_keys: true)
    end

    # NOTE(review): `metadata` mutates @json via slice!; because :data holds a
    # reference to the same hash, the returned :data is the reduced hash.
    # Assumes ActiveSupport's Hash#slice! (keeps the given keys, RETURNS the
    # removed pairs) — confirm ActiveSupport is loaded.
    def call
      { data: json, metadata: metadata }
    end

    private

    attr_reader :json

    def metadata
      return DEFAULT_METADATA if no_data_keys?
      json.slice!(*DATA_KEYS)
    end

    # True when the parsed hash shares no keys with DATA_KEYS.
    def no_data_keys?
      (json.keys & DATA_KEYS).empty?
    end
  end
end
|
<gh_stars>0
"use strict";

// Auto-generated icon descriptor (Material "charging station"): an SVG
// viewBox plus a nested name/attribs/children element tree. Do not edit the
// path data by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_charging_station = void 0;
var ic_charging_station = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M14.5,11l-3,6v-4h-2l3-6v4H14.5z M7,1h10c1.1,0,2,0.9,2,2v18c0,1.1-0.9,2-2,2H7c-1.1,0-2-0.9-2-2V3C5,1.9,5.9,1,7,1z M7,6 v12h10V6H7z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M14.5,11l-3,6v-4h-2l3-6v4H14.5z M7,1h10c1.1,0,2,0.9,2,2v18c0,1.1-0.9,2-2,2H7c-1.1,0-2-0.9-2-2V3C5,1.9,5.9,1,7,1z M7,6 v12h10V6H7z"
        },
        "children": []
      }]
    }]
  }]
};
exports.ic_charging_station = ic_charging_station;
<reponame>palerdot/BlingFire<filename>blingfireclient.library/inc/FASetImageA.h<gh_stars>1000+
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_SETIMAGEA_H_
#define _FA_SETIMAGEA_H_
#include "FAConfig.h"
namespace BlingFire
{
///
/// Image pointer set up interface for objects represented as memory dumps.
///
class FASetImageA {
public:
    /// Points the object at a read-only memory-dump image. The caller keeps
    /// ownership of the buffer and must keep it alive while the object uses it.
    /// NOTE(review): no virtual destructor is declared, so deleting an
    /// implementation through a FASetImageA* would be undefined behavior —
    /// confirm instances are never owned via this interface.
    virtual void SetImage (const unsigned char * pImage) = 0;
};
}
#endif
|
#!/bin/bash -ev
# Build-and-test driver (-e: abort on first failure, -v: echo commands):
# install the host libsodium, build the JNI layer, run the Maven build,
# then execute the single smoke test.
. ./setenv.sh
# Make the freshly built host libsodium visible to the dynamic linker.
sudo cp ./libsodium/libsodium-host/lib/libsodium.so /usr/local/lib
pushd jni
./compile.sh
popd
mvn -q clean install
./singleTest.sh
|
#!/bin/bash
# Wrapper: run the Pixiv script with a clean terminal before and after.
clear
python3 Pixiv.py
clear
|
import { BasicType } from '@/constants';
import { CreateInstance } from '@/typings';
import { merge } from 'lodash';
import { IText } from '.';
/**
 * Builds an IText block instance, deep-merging the caller's overrides into
 * the defaults below.
 * NOTE(review): assumes lodash `merge` semantics (deep, mutates the first
 * argument); `defaultData` is recreated per call, so the mutation is safe.
 */
export const createInstance: CreateInstance<IText> = (payload) => {
  const defaultData: IText = {
    type: BasicType.TEXT,
    data: {
      value: {
        content: 'Make it easy for everyone to compose emails!',
      },
    },
    attributes: {
      'font-size': '13px',
      padding: '10px 25px 10px 25px',
      'line-height': 1,
      align: 'left',
    },
    children: [],
  };
  return merge(defaultData, payload);
};
|
#!/bin/sh
#
# Copyright (C) 2010, 2012-2014 Internet Systems Consortium, Inc. ("ISC")
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id: setup.sh,v 1.3 2010/06/08 23:50:24 tbox Exp $
SYSTEMTESTTOP=..
. $SYSTEMTESTTOP/conf.sh

zone=example
infile=ns1/example.db.in
zonefile=ns1/example.db

# Generate two 1024-bit ZSKs and one 2048-bit KSK on the PKCS#11 token.
$PK11GEN -b 1024 -l robie-zsk1 -i 01
$PK11GEN -b 1024 -l robie-zsk2 -i 02
$PK11GEN -b 2048 -l robie-ksk

# Derive DNSKEY files from the token labels.
zsk1=`$KEYFRLAB -a RSASHA1 -l robie-zsk1 example`
zsk2=`$KEYFRLAB -a RSASHA1 -l robie-zsk2 example`
ksk=`$KEYFRLAB -a RSASHA1 -f ksk -l robie-ksk example`

# Sign the zone with zsk1 + ksk; zsk2 is staged for a later rollover.
cat $infile $zsk1.key $ksk.key > $zonefile
$SIGNER -a -P -g -r $RANDFILE -o $zone $zonefile > /dev/null 2> signer.err || cat signer.err
rm -f signer.err
cp $zsk2.key ns1/key
mv Kexample* ns1
|
#!/usr/bin/env bash
# Test harness: start a throwaway local MinIO server, give it a few seconds
# to come up, then run the Go test suite against it.
export MINIO_ACCESS_KEY=FJDSJ
export MINIO_SECRET_KEY=DSG643HGDS
mkdir -p /tmp/minio
minio server /tmp/minio &>/dev/null &
# NOTE(review): fixed sleep is a startup race — a readiness poll would be
# more robust; confirm 5s suffices on CI.
sleep 5
go test ./... -cover -ginkgo.noisySkippings=false
|
import { BigNumber, BigNumberish, ethers } from "ethers";
import { addresses } from "../constants";
import { abi as ierc20Abi } from "../abi/IERC20.json";
import { abi as sPHMABI } from "../abi/sPHM.json";
import { abi as AuctionAbi } from "../abi/auction.json";
import { bnToNum, setAll } from "../helpers";
import { createAsyncThunk, createSelector, createSlice } from "@reduxjs/toolkit";
import { RootState } from "src/store";
import { IBaseAddressAsyncThunk, ICalcUserBondDetailsAsyncThunk } from "./interfaces";
import { FuseProxy, IERC20, SOhmv2, WsOHM } from "src/typechain";
import { getOrLoadTreasuryAddress } from "./AppSlice";
import { SPHM } from "src/typechain/SPHM";
// Shape of the wallet balances stored in the account slice.
// NOTE(review): getBalances below actually stores plain numbers for every
// field it returns (frax, PHM, sPHM, gPHM, fPHM), yet most fields here are
// typed string, and several (fsphm, wsphm, wsphmAsSphm, pool) are never
// populated in this file — confirm and align the types with the thunk.
interface IUserBalances {
  balances: {
    frax: number;
    PHM: string;
    sPHM: string;
    fPHM: string;
    fsphm: string;
    gPHM: string;
    wsphm: string;
    wsphmAsSphm: string;
    pool: string;
  };
}
export const getBalances = createAsyncThunk(
"account/getBalances",
async ({ address, networkID, provider }: IBaseAddressAsyncThunk) => {
const fraxContract = new ethers.Contract(addresses[networkID].frax as string, ierc20Abi, provider) as IERC20;
const sPHMContract = new ethers.Contract(addresses[networkID].sPHM as string, sPHMABI, provider) as SPHM;
const gPHMContract = new ethers.Contract(addresses[networkID].gPHM as string, ierc20Abi, provider) as IERC20;
const fPHMContract = new ethers.Contract(addresses[networkID].fPHM as string, ierc20Abi, provider) as IERC20;
const PHMContract = new ethers.Contract(addresses[networkID].PHM as string, ierc20Abi, provider) as IERC20;
//TODO: refactor to multicall for less rpc bandwidth consumption
const [fraxBalance, sPHMBalance, fPHMBalance, gPHMBalance, PHMBalance] = await Promise.all([
fraxContract.balanceOf(address),
sPHMContract.balanceOf(address),
fPHMContract.balanceOf(address),
gPHMContract.balanceOf(address),
PHMContract.balanceOf(address),
]);
return {
balances: {
frax: Number(fraxBalance.toString()) / Math.pow(10, 18),
PHM: Number(PHMBalance.toString()) / Math.pow(10, 18),
sPHM: Number(sPHMBalance.toString()) / Math.pow(10, 18),
gPHM: Number(gPHMBalance.toString()) / Math.pow(10, 18),
fPHM: Number(fPHMBalance.toString()) / Math.pow(10, 18),
},
};
},
);
// Per-user allowance / reward details loaded by `loadAccountDetails`.
// NOTE(review): the thunk also returns `staking.stakedBalance`, which is not
// declared here — confirm whether the interface should include it.
interface IUserAccountDetails {
  staking: {
    phmStakeAllowance: number;
    phmUnstakeAllowance: number;
    nextRewardAmount: number;
  };
  auction: {
    fraxAllowance: number;
    tokensClaimable: number;
  };
  wrapping: {
    wrapAllowance: number;
    unwrapAllowance: number;
  };
}
/**
 * Async thunk that loads the connected wallet's staking/wrapping allowances,
 * auction allowance/claimables, and the estimated next staking reward.
 *
 * Fixes vs. original: the treasury address was fetched twice and
 * `getBalances` was dispatched twice — both redundant round-trips removed.
 */
export const loadAccountDetails = createAsyncThunk(
  "account/loadAccountDetails",
  async ({ networkID, provider, address }: IBaseAddressAsyncThunk, { dispatch, getState }) => {
    const fraxContract = new ethers.Contract(addresses[networkID].frax, ierc20Abi, provider);
    const phmContract = new ethers.Contract(addresses[networkID].PHM, ierc20Abi, provider);
    const sphmContract = new ethers.Contract(addresses[networkID].sPHM, sPHMABI, provider) as SPHM;

    // The treasury is the spender for both staking and wrapping allowances;
    // fetch it once and reuse it below.
    const treasuryAddress = await getOrLoadTreasuryAddress({ networkID, provider }, { dispatch, getState });
    const phmStakeAllowance = await phmContract.allowance(address, treasuryAddress);
    const phmUnstakeAllowance = await sphmContract.allowance(address, treasuryAddress);

    // Refresh balances so the staking math below reads current store values.
    await dispatch(getBalances({ address, networkID, provider }));

    /*
      Next Reward Amount (in accounts)
      sPHM.rewardYield() * (
      sPHM.balanceOf(user) +
      gPHM.balanceOf(user) * sPHM.scalingFactor() +
      fPHM.balanceOf(user) * sPHM.scalingFactor()
      )
    */
    const rewardYield = await sphmContract.rewardYield();
    const scalingFactor = await sphmContract.scalingFactor();

    const { account }: any = getState();
    const { balances } = account;
    // getBalances stores balances already divided by 1e18; the extra / 1e18
    // here assumes rewardYield and scalingFactor are raw 18-decimal values —
    // NOTE(review): confirm against the sPHM contract.
    const sPHMBalance = Number(balances.sPHM.toString());
    const gPHMAsSPHM = (Number(balances.gPHM.toString()) / 1e18) * Number(scalingFactor.toString());
    const fPHMAsSPHM = (Number(balances.fPHM.toString()) / 1e18) * Number(scalingFactor.toString());
    const stakedBalance = sPHMBalance + gPHMAsSPHM + fPHMAsSPHM;
    const nextRewardAmount = (rewardYield / 1e18) * stakedBalance;

    const auctionContract = new ethers.Contract(addresses[networkID].PhantomAuction as string, AuctionAbi, provider);
    const sPHM = new ethers.Contract(addresses[networkID].sPHM as string, ierc20Abi, provider);
    const gPHM = new ethers.Contract(addresses[networkID].gPHM as string, ierc20Abi, provider);

    const [fraxAllowance, tokensClaimable, wrapAllowance, unwrapAllowance] = await Promise.all([
      fraxContract.allowance(address, addresses[networkID].PhantomAuction),
      auctionContract.tokensClaimable(address),
      sPHM.allowance(address, treasuryAddress),
      gPHM.allowance(address, treasuryAddress),
    ]);

    return {
      auction: {
        fraxAllowance: bnToNum(fraxAllowance) / Math.pow(10, 18),
        tokensClaimable: bnToNum(tokensClaimable) / Math.pow(10, 18),
      },
      staking: {
        phmStakeAllowance: bnToNum(phmStakeAllowance) / Math.pow(10, 18),
        phmUnstakeAllowance: bnToNum(phmUnstakeAllowance) / Math.pow(10, 18),
        nextRewardAmount,
        stakedBalance,
      },
      wrapping: {
        wrapAllowance: +wrapAllowance.toString() / 1e18,
        unwrapAllowance: +unwrapAllowance.toString() / 1e18,
      },
    };
  },
);
// Per-bond details for the connected wallet, keyed by bond name in the slice.
export interface IUserBondDetails {
  allowance: number;
  interestDue: number;
  bondMaturationBlock: number;
  pendingPayout: string; //Payout formatted in gwei.
}
/**
 * Async thunk that computes a wallet's position in a single bond:
 * reserve-token allowance/balance, accrued interest, maturation block,
 * and the currently pending payout.
 */
export const calculateUserBondDetails = createAsyncThunk(
  "account/calculateUserBondDetails",
  async ({ address, bond, networkID, provider }: ICalcUserBondDetailsAsyncThunk) => {
    // No connected wallet: return an empty sentinel record.
    if (!address) {
      return {
        bond: "",
        displayName: "",
        bondIconSvg: "",
        isLP: false,
        allowance: 0,
        balance: "0",
        interestDue: 0,
        bondMaturationBlock: 0,
        pendingPayout: "",
      };
    }

    // Calculate bond details.
    const bondContract = bond.getContractForBond(networkID, provider);
    const reserveContract = bond.getContractForReserve(networkID, provider);

    const bondDetails = await bondContract.bondInfo(address);
    const interestDue: BigNumberish = Number(bondDetails.payout.toString()) / Math.pow(10, 9);
    const bondMaturationBlock = +bondDetails.vesting + +bondDetails.lastBlock;
    const pendingPayout = await bondContract.pendingPayoutFor(address);

    const allowance = await reserveContract.allowance(address, bond.getAddressForBond(networkID));
    const balance = await reserveContract.balanceOf(address);
    // formatEther takes BigNumber => String; keep the string form —
    // converting to a number would lose decimal precision.
    const balanceVal = ethers.utils.formatEther(balance);

    return {
      bond: bond.name,
      displayName: bond.displayName,
      bondIconSvg: bond.bondIconSvg,
      isLP: bond.isLP,
      allowance: Number(allowance.toString()),
      balance: balanceVal,
      interestDue,
      bondMaturationBlock,
      pendingPayout: ethers.utils.formatUnits(pendingPayout, "ether"),
    };
  },
);
// Full shape of the account slice: balances + allowances + per-bond details.
interface IAccountSlice extends IUserAccountDetails, IUserBalances {
  bonds: { [key: string]: IUserBondDetails };
  loading: boolean;
}
// Initial slice state. Fix: the original used lowercase balance keys
// (phm, sphm, fphm, gphm) that do not exist on the declared IUserBalances
// interface (PHM, sPHM, fPHM, gPHM), so the object never satisfied its type.
const initialState: IAccountSlice = {
  loading: false,
  bonds: {},
  balances: { frax: 0, PHM: "", sPHM: "", fPHM: "", fsphm: "", gPHM: "", wsphm: "", wsphmAsSphm: "", pool: "" },
  staking: { phmStakeAllowance: 0, phmUnstakeAllowance: 0, nextRewardAmount: 0 },
  auction: { fraxAllowance: 0, tokensClaimable: 0 },
  wrapping: { wrapAllowance: 0, unwrapAllowance: 0 },
};
const accountSlice = createSlice({
name: "account",
initialState,
reducers: {
fetchAccountSuccess(state, action) {
setAll(state, action.payload);
},
},
extraReducers: builder => {
builder
.addCase(loadAccountDetails.pending, state => {
state.loading = true;
})
.addCase(loadAccountDetails.fulfilled, (state, action) => {
setAll(state, action.payload);
state.loading = false;
})
.addCase(loadAccountDetails.rejected, (state, { error }) => {
state.loading = false;
console.log(error);
})
.addCase(getBalances.pending, state => {
state.loading = true;
})
.addCase(getBalances.fulfilled, (state, action) => {
setAll(state, action.payload);
state.loading = false;
})
.addCase(getBalances.rejected, (state, { error }) => {
state.loading = false;
console.log(error);
})
.addCase(calculateUserBondDetails.pending, state => {
state.loading = true;
})
.addCase(calculateUserBondDetails.fulfilled, (state, action) => {
if (!action.payload) return;
const bond = action.payload.bond;
state.bonds[bond] = action.payload;
state.loading = false;
})
.addCase(calculateUserBondDetails.rejected, (state, { error }) => {
state.loading = false;
console.log(error);
});
},
});
export default accountSlice.reducer;
export const { fetchAccountSuccess } = accountSlice.actions;
// Base selector over this slice.
const baseInfo = (state: RootState) => state.account;
// Identity selector; memoization adds nothing here, but keeps the call-site
// API consistent with other slices in the app.
export const getAccountState = createSelector(baseInfo, account => account);
|
# Source: GoogleCloudPlatform/datacatalog-tag-engine
# Copyright 2020 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time, pytz
import json
from datetime import datetime
from datetime import timedelta
from google.cloud import firestore
from google.cloud import tasks_v2
from google.protobuf import timestamp_pb2
# Note: @firestore.transactional requires the function to be at the module level
@firestore.transactional
def update_doc_in_transaction(transaction, doc, update_dict):
    """Apply ``update_dict`` to ``doc`` only if it has not changed since it was read.

    Optimistic-concurrency check: the document is re-read inside the
    transaction and its ``update_time`` compared to the snapshot we hold.

    Returns:
        True if the update was applied, False if another writer modified
        the document in the meantime (caller should treat this as a retry
        or skip signal).
    """
    print('*** enter update_doc_in_transaction ***')
    # apply update if document is unchanged
    doc_ref = doc.reference
    # Read inside the transaction so the comparison participates in it.
    snapshot = doc_ref.get(transaction=transaction)
    if doc.update_time == snapshot.update_time:
        transaction.update(doc_ref, update_dict)
        #log
        return True
    else:
        #log
        return False
class TagScheduler:
    """Class for managing scheduled tasks to update tags.

    queue_id = "projects/{project}/locations/{region}/queues/{queue_name}"
    app_engine_uri = task handler uri set inside the
    app engine project hosting the cloud task queue
    stale_time = age of PENDING tasks that gets reset to READY (in minutes)
    """

    def __init__(self, queue_id, app_engine_uri, stale_time=10):
        self.queue_id = queue_id
        self.app_engine_uri = app_engine_uri
        self.stale_time = stale_time
        self.db = firestore.Client()

    ##################### API METHODS ############

    def scan_for_update_jobs(self):
        """Find AUTO/READY/ACTIVE configs that are due and enqueue refresh tasks."""
        print('*** enter scan_for_update_jobs ***')
        # Fix: reuse the client created in __init__ instead of constructing
        # a second firestore.Client() here.
        tag_ref = self.db.collection('tag_config')
        tag_ref = tag_ref.where("refresh_mode", "==", "AUTO")
        tag_ref = tag_ref.where("scheduling_status", "==", "READY")
        tag_ref = tag_ref.where("config_status", "==", "ACTIVE")
        tag_ref = tag_ref.where("next_run", "<=", datetime.utcnow())
        ready_configs = list(tag_ref.stream())
        print('ready_configs: ' + str(ready_configs))

        #TODO: consider running transactions async
        for config in ready_configs:
            print('found tag config to refresh')
            transaction = self.db.transaction()
            # Atomically flip the config to PENDING; returns None if the doc
            # was modified underneath us (another scheduler won the race).
            payload = self._set_status_pending(transaction, config)
            if payload:
                doc_id, version = payload
                print('doc_id: ' + doc_id)
                print('version: ' + str(version))
                response = self._send_cloud_task(doc_id, version)
                print('send_cloud_task response: ' + str(response))
                #log success
            else:
                print('invalid payload')
                #log fail
        return True

    def reset_stale_jobs(self):
        """Reset PENDING configs older than ``stale_time`` minutes back to READY."""
        print('*** enter reset_stale_jobs ***')
        tag_ref = self.db.collection("tag_config")
        tag_ref = tag_ref.where("scheduling_status", "==", "PENDING")
        tag_ref = tag_ref.where("config_status", "==", "ACTIVE")
        pending_configs = list(tag_ref.stream())
        for config in pending_configs:
            udt = config.update_time.replace(tzinfo=pytz.UTC)
            ts = datetime.utcnow().replace(tzinfo=pytz.UTC)
            if (udt + timedelta(minutes=self.stale_time)) < ts:
                print('found a stale config')
                self._set_status_ready(config)
        return True

    def schedule_job(self, doc_id):
        """Mark the given tag config READY so the next scan picks it up."""
        print('*** enter schedule_job ***')
        collection = self.db.collection("tag_config")
        tag_config = collection.document(doc_id).get()
        response = self._set_status_ready(tag_config)
        print('response: ' + str(response))
        #Log

    def get_config_and_template(self, doc_id):
        """Return the (tag_config, tag_template) snapshot pair for ``doc_id``."""
        print('*** enter get_config_and_template ***')
        tag_config = self.db.collection('tag_config').document(doc_id).get()
        template_id = tag_config.get('template_uuid')
        template_config = self.db\
            .collection('tag_template').document(template_id).get()
        return tag_config, template_config
    #End get_doc_snapshot

    ################ INTERNAL PROCESSING METHODS #################

    def _set_status_ready(self, doc):
        """Transactionally set scheduling_status back to READY."""
        print('*** enter _set_status_ready ***')
        # Fix: removed a dead doc_ref.get() whose result was never used.
        transaction = self.db.transaction()
        task = {
            'scheduling_status': 'READY',
        }
        return update_doc_in_transaction(transaction, doc, task)

    def _set_status_pending(self, transaction, doc):
        """Transactionally mark a config PENDING, bump its version and next_run.

        Returns (doc_id, version) on success, or None if the document changed
        concurrently and the update was not applied.
        """
        print('*** enter _set_status_pending ***')
        data = doc.to_dict()
        version = data.get('version', 0) + 1
        delta = data.get('refresh_frequency', 24)
        unit = data.get('refresh_unit', 'hours')

        # Fix: the original left next_run unbound (UnboundLocalError) when
        # refresh_unit was neither 'hours' nor 'days'; default to hours.
        if unit == 'days':
            next_run = datetime.utcnow() + timedelta(days=delta)
        else:
            next_run = datetime.utcnow() + timedelta(hours=delta)

        print('version: ' + str(version))
        print('delta: ' + str(delta))
        print('next_run: ' + str(next_run))

        task = {
            'version': version,
            'scheduling_status': 'PENDING',
            'next_run': next_run
        }
        if update_doc_in_transaction(transaction, doc, task):
            return doc.id, version
        else:
            return None

    def _send_cloud_task(self, doc_id, version):
        """Enqueue an App Engine Cloud Task that refreshes the given config."""
        print('*** enter _send_cloud_task ***')
        client = tasks_v2.CloudTasksClient()
        task = {
            'app_engine_http_request': {
                'http_method': 'POST',
                'relative_uri': self.app_engine_uri
            }
        }
        payload = {'doc_id': doc_id, 'version': version}
        print('payload: ' + str(payload))
        payload_utf8 = json.dumps(payload).encode()
        task['app_engine_http_request']['body'] = payload_utf8
        response = client.create_task(parent=self.queue_id, task=task)
        #print('response: ' + str(response))
        return response
if __name__ == '__main__':
    # Ad-hoc smoke run: reset stale PENDING configs, then enqueue any due jobs.
    # NOTE(review): the queue id is hard-coded to a specific project — make it
    # configurable (env var / CLI argument) before reuse.
    TASK_QUEUE = 'projects/tag-engine-283315/locations/us-east1/queues/tag-engine'
    ts = TagScheduler(TASK_QUEUE, "/dynamic_auto_update")
    ts.reset_stale_jobs()
    ts.scan_for_update_jobs()
    print("done")
|
#!/bin/bash
set -euo pipefail

# Mandatory variables for ANF resources
# Change variables according to your environment
SUBSCRIPTION_ID=""
LOCATION="WestUS"
RESOURCEGROUP_NAME="My-rg"
VNET_NAME="netapp-vnet"
SUBNET_NAME="netapp-subnet"
NETAPP_ACCOUNT_NAME="netapptestaccount"
NETAPP_POOL_NAME="netapptestpool"
NETAPP_POOL_SIZE_TIB=4
NETAPP_VOLUME_NAME="netapptestvolume"
SERVICE_LEVEL="Standard"
NETAPP_VOLUME_SIZE_GIB=100
PROTOCOL_TYPE="CIFS"

#AD variables
# NOTE(review): DOMAIN_JOIN_PASSWORD is a placeholder — supply the real value
# via an environment variable or secret store; never commit credentials.
DOMAIN_JOIN_USERNAME="pmcadmin"
DOMAIN_JOIN_PASSWORD="Password"
SMB_SERVER_NAME="pmcsmb"
DNS_LIST="10.0.2.4,10.0.2.5"
AD_FQDN="testdomain.local"

#Cleanup Variable
SHOULD_CLEANUP="false"

# Exit error code
# NOTE(review): defined but never referenced later in this script.
ERR_ACCOUNT_NOT_FOUND=100
# Utils Functions

# Print the banner shown at script start-up.
display_bash_header()
{
    printf '%s\n' \
        "-----------------------------------------------------------------------------------------------------------" \
        "Azure NetApp Files CLI NFS Sample - Sample Bash script that creates Azure NetApp Files uses SMB protocol" \
        "-----------------------------------------------------------------------------------------------------------"
}

# Print the banner shown before resource cleanup.
display_cleanup_header()
{
    printf '%s\n' \
        "----------------------------------------" \
        "Cleaning up Azure NetApp Files Resources" \
        "----------------------------------------"
}
# Print a message prefixed with the current time (HH:MM:SS).
# Fix: quote "$message" — the unquoted echo word-split the text and would
# glob-expand any message containing * or ?.
display_message()
{
    time=$(date +"%T")
    message="$time : $1"
    echo "$message"
}
# ANF create functions

# Create Azure NetApp Files Account
# Creates the ANF account, then attaches the Active Directory connection
# required for SMB. The account resource id is "returned" by assigning it to
# the variable NAMED by $1 (bash indirect-return idiom via eval), or echoed
# when no variable name is given.
create_or_update_netapp_account()
{
    local __resultvar=$1
    local _NEW_ACCOUNT_ID=""
    _NEW_ACCOUNT_ID=$(az netappfiles account create --resource-group $RESOURCEGROUP_NAME \
        --name $NETAPP_ACCOUNT_NAME \
        --location $LOCATION | jq ".id")
    # Register the AD connection used for SMB authentication.
    az netappfiles account ad add --resource-group $RESOURCEGROUP_NAME \
        --name $NETAPP_ACCOUNT_NAME \
        --username $DOMAIN_JOIN_USERNAME \
        --password $DOMAIN_JOIN_PASSWORD \
        --smb-server-name $SMB_SERVER_NAME \
        --dns $DNS_LIST \
        --domain $AD_FQDN
    if [[ "$__resultvar" ]]; then
        eval $__resultvar="'${_NEW_ACCOUNT_ID}'"
    else
        echo "${_NEW_ACCOUNT_ID}"
    fi
}
# Create Azure NetApp Files Capacity Pool
# Size and service level come from the globals at the top of the script.
# Result id is bound to the variable named by $1 (or echoed if none).
create_or_update_netapp_pool()
{
    local __resultvar=$1
    local _NEW_POOL_ID=""
    _NEW_POOL_ID=$(az netappfiles pool create --resource-group $RESOURCEGROUP_NAME \
        --account-name $NETAPP_ACCOUNT_NAME \
        --name $NETAPP_POOL_NAME \
        --location $LOCATION \
        --size $NETAPP_POOL_SIZE_TIB \
        --service-level $SERVICE_LEVEL | jq ".id")
    if [[ "$__resultvar" ]]; then
        eval $__resultvar="'${_NEW_POOL_ID}'"
    else
        echo "${_NEW_POOL_ID}"
    fi
}
# Create Azure NetApp Files Volume
# Creates the volume inside the pool above, exported over $PROTOCOL_TYPE
# (CIFS here) on the configured vnet/subnet.
# Result id is bound to the variable named by $1 (or echoed if none).
create_or_update_netapp_volume()
{
    local __resultvar=$1
    local _NEW_VOLUME_ID=""
    _NEW_VOLUME_ID=$(az netappfiles volume create --resource-group $RESOURCEGROUP_NAME \
        --account-name $NETAPP_ACCOUNT_NAME \
        --file-path $NETAPP_VOLUME_NAME \
        --pool-name $NETAPP_POOL_NAME \
        --name $NETAPP_VOLUME_NAME \
        --location $LOCATION \
        --service-level $SERVICE_LEVEL \
        --usage-threshold $NETAPP_VOLUME_SIZE_GIB \
        --vnet $VNET_NAME \
        --subnet $SUBNET_NAME \
        --protocol-types $PROTOCOL_TYPE | jq ".id")
    if [[ "$__resultvar" ]]; then
        eval $__resultvar="'${_NEW_VOLUME_ID}'"
    else
        echo "${_NEW_VOLUME_ID}"
    fi
}
# ANF cleanup functions

# Delete Azure NetApp Files Account
# Must run last: the account cannot be removed while pools/volumes exist.
delete_netapp_account()
{
    az netappfiles account delete --resource-group $RESOURCEGROUP_NAME \
        --name $NETAPP_ACCOUNT_NAME
}

# Delete Azure NetApp Files Capacity Pool
delete_netapp_pool()
{
    az netappfiles pool delete --resource-group $RESOURCEGROUP_NAME \
        --account-name $NETAPP_ACCOUNT_NAME \
        --name $NETAPP_POOL_NAME
    # Give the deletion time to propagate before deleting the parent account.
    sleep 10
}

# Delete Azure NetApp Files Volume
delete_netapp_volume()
{
    az netappfiles volume delete --resource-group $RESOURCEGROUP_NAME \
        --account-name $NETAPP_ACCOUNT_NAME \
        --pool-name $NETAPP_POOL_NAME \
        --name $NETAPP_VOLUME_NAME
    # Give the deletion time to propagate before deleting the parent pool.
    sleep 10
}
#Script Start
#Display Header
display_bash_header

# Login and Authenticate to Azure (interactive browser/device login)
display_message "Authenticating into Azure"
az login

# Set the target subscription
display_message "setting up the target subscription"
az account set --subscription $SUBSCRIPTION_ID

# Create account -> pool -> volume in dependency order; each step aborts the
# script on failure via the { ... } || { ...; exit 1; } pattern.
display_message "Creating Azure NetApp Files Account ..."
{
    NEW_ACCOUNT_ID="";create_or_update_netapp_account NEW_ACCOUNT_ID
    display_message "Azure NetApp Files Account was created successfully: $NEW_ACCOUNT_ID"
} || {
    display_message "Failed to create Azure NetApp Files Account"
    exit 1
}

display_message "Creating Azure NetApp Files Pool ..."
{
    NEW_POOL_ID="";create_or_update_netapp_pool NEW_POOL_ID
    display_message "Azure NetApp Files pool was created successfully: $NEW_POOL_ID"
} || {
    display_message "Failed to create Azure NetApp Files pool"
    exit 1
}

display_message "Creating Azure NetApp Files Volume..."
{
    NEW_VOLUME_ID="";create_or_update_netapp_volume NEW_VOLUME_ID
    display_message "Azure NetApp Files volume was created successfully: $NEW_VOLUME_ID"
} || {
    display_message "Failed to create Azure NetApp Files volume"
    exit 1
}

# Clean up resources (opt-in via SHOULD_CLEANUP), tearing down in the reverse
# order of creation: volume -> pool -> account.
if [[ "$SHOULD_CLEANUP" == true ]]; then
    #Display cleanup header
    display_cleanup_header

    # Delete Volume
    display_message "Deleting Azure NetApp Files Volume..."
    {
        delete_netapp_volume
        display_message "Azure NetApp Files volume was deleted successfully"
    } || {
        display_message "Failed to delete Azure NetApp Files volume"
        exit 1
    }

    #Delete Capacity Pool
    display_message "Deleting Azure NetApp Files Pool ..."
    {
        delete_netapp_pool
        display_message "Azure NetApp Files pool was deleted successfully"
    } || {
        display_message "Failed to delete Azure NetApp Files pool"
        exit 1
    }

    #Delete Account
    display_message "Deleting Azure NetApp Files Account ..."
    {
        delete_netapp_account
        display_message "Azure NetApp Files Account was deleted successfully"
    } || {
        display_message "Failed to delete Azure NetApp Files Account"
        exit 1
    }
fi
def remove_pair(d, key):
    """Remove ``key`` from dict ``d`` in place (if present) and return ``d``."""
    # pop with a default never raises, matching the original membership check.
    d.pop(key, None)
    return d


d = {'a': 10, 'b': 20, 'c': 30}
remove_pair(d, 'c')
#include "WindowManager.h" // Include necessary header file for WindowManager
#include "SkillCheck.h" // Include necessary header file for SkillCheck
// Thin wrapper that owns a single SkillCheck and forwards rendering calls to it.
class SkillCheckManager {
public:
// Render the skill-check UI into the given window by delegating to the
// owned SkillCheck's drawBars().
void draw(WindowManager& window) {
m_sc.drawBars(window); // delegate rendering to the owned SkillCheck
}
private:
SkillCheck m_sc; // the skill-check instance managed by this object
};
package com.codefinity.microcontinuum.common.notification;
import java.util.List;
/**
 * Repository abstraction for persisting how far notification publishing has
 * progressed, so publishers can resume from the most recent notification.
 */
public interface PublishedNotificationTrackerStore {
    /** Returns the tracker for this store's default type name. */
    public PublishedNotificationTracker publishedNotificationTracker();
    /** Returns the tracker for the given notification type name. */
    public PublishedNotificationTracker publishedNotificationTracker(String aTypeName);
    /**
     * Records the id of the most recent notification in {@code aNotifications}
     * on the given tracker and persists it.
     */
    public void trackMostRecentPublishedNotification(
            PublishedNotificationTracker aPublishedNotificationTracker,
            List<Notification> aNotifications);
    /** Returns the notification type name this store tracks. */
    public String typeName();
}
|
// src/components/Sphere/fragment.js
// GLSL fragment shader source for the Sphere particle system: blends a dot
// sprite with a glow sprite per point. The template literal below is the
// runtime shader string and is left byte-for-byte unchanged.
// NOTE(review): `difftime`, `d`, `l`, and the `blend` uniform are computed or
// declared but unused by the active code path — confirm whether the
// commented-out fade-in variants are still wanted before removing them.
export const fragment = `
precision highp float;
varying vec3 vPosition;
varying float vDelay;
uniform sampler2D dot;
uniform sampler2D glow;
uniform float time;
uniform float blend;
uniform vec2 resolution;
void main() {
float difftime = time - vDelay;
float d = clamp(0., 1., difftime);
float l = 1. - length(vPosition) / (resolution.x / 2.);
vec4 texture = texture2D(dot, gl_PointCoord);
vec4 glow = texture2D( glow, gl_PointCoord );
// clamp(0., 0.5, blend)
// if (vDelay != 0.) {
gl_FragColor = texture * vec4(1., 1., 1.5, 0.1) + glow * vec4(0.5, 0.5, 0.5, 0.8);
// } else {
// gl_FragColor = vec4(0, 0., 0., 0.);
// }
// gl_FragColor = vec4(1., 1., 1., clamp(0., 0.5, blend)) * texture;
// gl_FragColor = vec4(l, 0., 0., 1.);
}
`;
|
#!/bin/bash
echo "Starting ZAP..."
# Launch OWASP ZAP headless in Docker with its API exposed on :8765
# (API key disabled and all client addresses allowed — local use only).
container="$(docker run -u zap -p 8765:8765 -d owasp/zap2docker-weekly zap.sh -daemon -host 0.0.0.0 -port 8765 -config api.disablekey=true -config api.addrs.addr.name=.* -config api.addrs.addr.regex=true)"
# Poll until the ZAP API responds before issuing commands.
while ! curl --silent --output /dev/null http://127.0.0.1:8765/
do
sleep 2
done
# Host IP on eth0, so the container can reach the app under test on :8001.
hostIp=$(/sbin/ip -o -4 addr list eth0 | awk '{print $4}' | cut -d/ -f1)
docker exec $container zap-cli -p 8765 status
docker exec $container zap-cli -p 8765 session new
# Spider + active scan against the target, reporting Medium+ severity alerts.
docker exec $container zap-cli -v -p 8765 quick-scan -s all --spider -r -l Medium http://$hostIp:8001/
docker stop $container > /dev/null
docker rm $container > /dev/null
|
#!/bin/bash
# Download a large file from Google Drive, working around the "can't scan for
# viruses" confirmation page: first request captures cookies + confirm token,
# second request performs the actual download.
# NOTE(review): this scrapes the confirm token from Drive's HTML and will
# break if Google changes the page format — verify before relying on it.
fileid="1ynsueTrKkX0Lnhji2HzzcrzvoA10VRUF"
html=`curl -c ./cookie -s -L "https://drive.google.com/uc?export=download&id=${fileid}"`
curl -Lb ./cookie "https://drive.google.com/uc?export=download&`echo ${html}|grep -Po '(confirm=[a-zA-Z0-9\-_]+)'`&id=${fileid}" -o resources.tar.gz
tar -zxvf resources.tar.gz
rm resources.tar.gz
echo Download finished.
|
module Awsm
  module CLI
    # Thor subcommand for spinning EC2 instances up and down from presets.
    class Spin < Clibase
      class_option :tables, :type => :boolean, :lazy_default => true, :default => true,
                   :desc => "Whether or not to draw ASCII tables."

      desc 'up [AMI_ID]',
           "Spin up an instance of the specified AMI"
      option :image_id
      option :key_name
      # Launch an instance from a raw ami-id or a named preset.
      # Explicit CLI options override preset values (with a warning).
      def up( preset )
        if /^ami-.+$/.match( preset )
          # A literal AMI id was given: start from the default preset config.
          c = Awsm::spin_config('default')
          c.image_id( preset )
        else
          c = Awsm::spin_config( preset )
        end
        unless options[:image_id].nil?
          unless c.image_id.nil?
            override_alert( 'image_id', c.image_id, options[:image_id] )
          end
          c.image_id( options[:image_id] )
        end
        unless options[:key_name].nil?
          unless c.key_name.nil?
            override_alert( 'key_name', c.key_name, options[:key_name] )
          end
          c.key_name( options[:key_name] )
        end
        spin_up( c )
      end

      desc 'down [INSTANCE_ID]',
           "Spin down the specified instance"
      # Terminate an instance, but only if it carries the caller's owner tag.
      def down( instance_id )
        response = ec2.describe_instances(
          filters: [
            { name: 'instance-id', values: [ instance_id ] },
            { name: 'tag:awsm:owner', values: [ whoami ] }
          ]
        )
        if response.reservations.length == 0
          say "Instance #{instance_id} not spinning."
          return
        end
        say "Spinning down (terminating) #{instance_id}...", :red
        ec2.terminate_instances(
          instance_ids: [ instance_id ]
        )
      end

      desc 'list',
           "List all spinning instances"
      option :simple, :type => :boolean, :default => false, :aliases => '-s',
             :desc => "Display list without prettiness - good for sedding"
      # Print a table of all instances owned by the caller.
      def list
        instances = filter_instances( [
          { name: 'tag:awsm:owner', values: [ whoami ] }
        ] )
        Table::Instance.new( instances, :pretty ).print
      end

      no_commands do
        # "user@fqdn" identity used for the awsm:owner tag.
        def whoami
          me_host = `hostname -f`.strip
          me_user = `whoami`.strip
          "#{me_user}@#{me_host}"
        end

        # Warn that a CLI option is replacing a preset value.
        def override_alert( field, from, to )
          say "Overriding #{field} from #{from} to #{to}!", :bold
        end

        # True once describe_instances reports the instance.
        # Fix: the original dereferenced reservations.first without a guard
        # and crashed (NoMethodError on nil) when the reservations list was
        # empty while the freshly-launched instance was still propagating.
        def instance_extant?( instance_id )
          description = ec2.describe_instances(
            instance_ids: [ instance_id ]
          )
          return false if description.reservations.empty?
          description.reservations.first.instances.length > 0
        end

        # Launch one instance, poll until it is visible, then tag it with the
        # owner identity plus any preset-defined tags.
        def spin_up( c )
          response = ec2.run_instances(
            image_id: c.image_id,
            key_name: c.key_name,
            instance_type: c.instance_type,
            security_group_ids: c.security_groups,
            subnet_id: c.subnet,
            min_count: 1,
            max_count: 1
          )
          say "Spinning up #{c.image_id}..."
          instance_id = response.instances.first.instance_id
          say "Instance #{instance_id} is spinning up...", :green
          while instance_extant?( instance_id ) == false
            say '.', :green, false
            sleep(3)
          end
          tags = [
            { key: 'Name', value: "Temporary instance of #{c.image_id} for #{whoami}" },
            { key: 'awsm:owner', value: whoami }
          ]
          c.tags.each do |k, v|
            tags << { key: k, value: v }
          end
          ec2.create_tags(
            resources: [ instance_id ],
            tags: tags
          )
          say "Tagged #{instance_id}:"
          tags.each do |tag|
            say " #{tag[:key]} ", :cyan
            say '=> '
            say "#{tag[:value]}", :yellow
          end
        end
      end #no_commands
    end #class
  end #module
end #module
|
# Join model linking a user's attendance at a network event to a cohort.
class AttendanceCohortAssignment < ApplicationRecord
  belongs_to :network_event
  belongs_to :cohort
  belongs_to :user
end
|
// jelly/patternfly-react
import React from 'react';
import { Table, TableHeader, TableBody, classNames, cellWidth, Visibility, TableProps } from '@patternfly/react-table';
// One row of demo data; each field maps 1:1 onto a table column below.
interface Repository {
  name: string;
  branches: string;
  prs: string;
  workspaces: string;
  lastCommit: string;
}
/**
 * Demo of the legacy Table's cell-width and responsive-visibility modifiers:
 * fixed-width columns via cellWidth() and per-breakpoint show/hide via
 * classNames(Visibility.*).
 */
export const LegacyTableCellWidth: React.FunctionComponent = () => {
  // In real usage, this data would come from some external source like an API via props.
  const repositories: Repository[] = [
    {
      name: 'one - 1',
      branches: 'two - 1 (visible only on md)',
      prs: 'three - 1 (hidden only on md)',
      workspaces: 'four - 1 (hidden on xs)',
      lastCommit: 'five - 1'
    },
    {
      name: 'one - 2',
      branches: 'two - 2 (visible only on md)',
      prs: 'three - 2 (hidden only on md)',
      workspaces: 'four - 2 (hidden on xs)',
      lastCommit: 'five - 2'
    }
  ];

  const columns: TableProps['cells'] = [
    { title: 'Header cell', transforms: [cellWidth(10)] },
    {
      title: 'Branches (visible only on md and 2Xl)',
      columnTransforms: [
        classNames(Visibility.hidden, Visibility.visibleOnMd, Visibility.hiddenOnLg, Visibility.visibleOn2Xl)
      ]
    },
    {
      title: 'Pull requests (hidden only on md)',
      columnTransforms: [classNames(Visibility.hiddenOnMd, Visibility.visibleOnLg)]
    },
    {
      title: 'Workspaces (hidden on xs)',
      columnTransforms: [classNames(Visibility.hidden, Visibility.visibleOnSm)]
    },
    {
      title: 'Last commit',
      transforms: [cellWidth(30)]
    }
  ];

  // Flatten each record into the cell order declared by `columns`.
  const rows: TableProps['rows'] = repositories.map(
    ({ name, branches, prs, workspaces, lastCommit }) => [name, branches, prs, workspaces, lastCommit]
  );

  return (
    <Table aria-label="Table with width and breakpoint visibility modifiers" cells={columns} rows={rows}>
      <TableHeader />
      <TableBody />
    </Table>
  );
};
|
mosquitto_pub -d -h "localhost" -p 1883 -t "v1/devices/me/attributes" -u "$WIND_TURBINE_3_ACCESS_TOKEN" -m "{"deviceType":"WeatherStation", "geoZone":"Zone B"}" |
#!/bin/sh
# Container entrypoint: optionally configures an HTTP(S) proxy, provisions
# Kafka mutual-TLS material (generated or retrieved), then execs the real
# command passed as arguments.
set -e

# If a proxy is requested, set it up
if [ "${INTERNET_PROXY}" ]; then
export http_proxy="http://${INTERNET_PROXY}:3128"
export HTTP_PROXY="http://${INTERNET_PROXY}:3128"
export https_proxy="http://${INTERNET_PROXY}:3128"
export HTTPS_PROXY="http://${INTERNET_PROXY}:3128"
# Bypass the proxy for the metadata endpoint and regional AWS services.
export no_proxy=169.254.169.254,.s3.eu-west-2.amazonaws.com,s3.eu-west-2.amazonaws.com,secretsmanager.eu-west-2.amazonaws.com
export NO_PROXY=169.254.169.254,.s3.eu-west-2.amazonaws.com,s3.eu-west-2.amazonaws.com,secretsmanager.eu-west-2.amazonaws.com
echo "Using proxy ${INTERNET_PROXY}"
fi

# Generate a cert for Kafka mutual auth
HOSTNAME=$(hostname)
if [ "${K2HB_KAFKA_INSECURE}" != "true" ]
then
# Throwaway passwords: the stores only live for this container's lifetime.
SSL_DIR="$(mktemp -d)"
export K2HB_PRIVATE_KEY_PASSWORD="$(uuidgen)"
export K2HB_KEYSTORE_PATH="${SSL_DIR}/k2hb.keystore"
export K2HB_KEYSTORE_PASSWORD="$(uuidgen)"
export K2HB_TRUSTSTORE_PATH="${SSL_DIR}/k2hb.truststore"
export K2HB_TRUSTSTORE_PASSWORD="$(uuidgen)"
# CERTGEN: mint a fresh cert from ACM-PCA; RETRIEVE: download an existing one.
if [ "${K2HB_KAFKA_CERT_MODE}" = "CERTGEN" ]; then
echo "Generating cert for host ${HOSTNAME}"
acm-pca-cert-generator \
--subject-cn "${HOSTNAME}" \
--keystore-path "${K2HB_KEYSTORE_PATH}" \
--keystore-password "${K2HB_KEYSTORE_PASSWORD}" \
--private-key-password "${K2HB_PRIVATE_KEY_PASSWORD}" \
--truststore-path "${K2HB_TRUSTSTORE_PATH}" \
--truststore-password "${K2HB_TRUSTSTORE_PASSWORD}"
echo "Cert generation result is $? for ${HOSTNAME}"
elif [ "${K2HB_KAFKA_CERT_MODE}" = "RETRIEVE" ]; then
echo "Retrieving cert from ${RETRIEVER_ACM_CERT_ARN}"
export RETRIEVER_ACM_KEY_PASSPHRASE="$(uuidgen)"
acm-cert-retriever \
--acm-key-passphrase "${RETRIEVER_ACM_KEY_PASSPHRASE}" \
--keystore-path "${K2HB_KEYSTORE_PATH}" \
--keystore-password "${K2HB_KEYSTORE_PASSWORD}" \
--private-key-password "${K2HB_PRIVATE_KEY_PASSWORD}" \
--truststore-path "${K2HB_TRUSTSTORE_PATH}" \
--truststore-password "${K2HB_TRUSTSTORE_PASSWORD}"
echo "Cert retrieve result is $? for ${RETRIEVER_ACM_CERT_ARN}"
else
echo "K2HB_KAFKA_CERT_MODE must be one of 'CERTGEN,RETRIEVE' but was ${K2HB_KAFKA_CERT_MODE}"
exit 1
fi
else
echo "Skipping cert generation for host ${HOSTNAME}"
fi

# Replace this shell with the container's real command.
exec "${@}"
|
package academy.devonline.java.home_section001_classes.dyna_array;
public class HomeMinusArray {
    // Fixed-size backing array for results.
    // NOTE(review): 6 is a magic number — confirm the intended capacity and
    // consider extracting it as a named constant.
    int[] result = new int[6];
    // NOTE(review): presumably the number of populated entries in `result` —
    // confirm against the callers that mutate these package-private fields.
    int count;
}
|
// smagill/opensphere-desktop
package io.opensphere.wps.ui.detail;
import jidefx.scene.control.validation.ValidationGroup;
/**
* An interface defining the methods needed to perform validation on a given form.
*/
public interface ValidatableForm
{
    /**
     * Gets the validation group associated with the form.
     *
     * @return the validation group associated with the form.
     */
    ValidationGroup getValidationGroup();

    /**
     * Forces the form to validate all registered inputs. Results are reported
     * through the {@link ValidationGroup} returned by
     * {@link #getValidationGroup()}; nothing is returned directly.
     */
    void performValidation();
}
|
# app/controllers/inspect/checks_controller.rb
# Diagnostic pages for inspecting scenario consistency (share-group sums,
# expected demand, gquery results).
class Inspect::ChecksController < Inspect::BaseController
  layout 'application'

  # Empty actions render their same-named templates.
  def loops
  end

  def expected_demand
  end

  # Shows the sum of the inputs in each share group.
  def index
  end

  def gquery_results
    @gqueries = Gquery.all.sort
  end

  #######
  private
  #######

  # A helper class for presenting and calculating share group information.
  class ShareGroup < Struct.new(:key, :gql)
    # @return [Array<Input>]
    #   Returns all of the inputs which belong to the group.
    def inputs
      @inputs ||= Input.in_share_group(key).reject do |input|
        input.disabled_in_current_area?(gql)
      end
    end

    # @return [true, false]
    #   Returns if the group sums up to -- or very close to -- 100.
    #   The +/- 0.0001 band absorbs floating-point rounding in the inputs.
    def ok?
      sum >= 99.9999 && sum <= 100.0001
    end

    # @return [BigDecimal]
    #   Returns the sum of all the input start values.
    def sum
      @sum ||= inputs.map { |input| input.start_value_for(gql) }.compact.sum
    end
  end

  # @return [Array<Inspect::ShareGroupsController::ShareGroup>]
  #   Returns a ShareGroup for each one defined in ETsource.
  def share_groups_for_area(area)
    gql = Scenario.new(area_code: area, end_year: 2050).gql
    groups = Input.all.map(&:share_group)
    groups.reject!(&:blank?)
    groups.uniq!
    groups.sort!
    groups.map { |group| ShareGroup.new(group, gql) }
  end
  helper_method :share_groups_for_area
end
|
#!/bin/bash
##################################################################
## Constants ##
##################################################################
# Input problem instance and where to write the solution dump.
INSTANCE_PATH="data/original/HMO_2020-21_project_instance-1.txt"
DUMP_PATH="data/dumps/HMO_2020-21_project_dump_instance-1.json"
# Runtime budgets (minutes) passed as separate arguments to --max_runtime.
MAX_RUNTIME=(1 5 25)
##################################################################
## Functionality ##
##################################################################
# Fix: replaced the useless `$(echo ${MAX_RUNTIME[@]})` (an extra subshell
# that re-splits the values) with the direct quoted array expansion, and
# quoted the path variables against word splitting.
python3 src/main.py --instance_path "$INSTANCE_PATH" \
    --dump_path "$DUMP_PATH" \
    --max_runtime "${MAX_RUNTIME[@]}"
|
package com.yin.springboot.mybatis.server.service;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import com.yin.springboot.mybatis.domain.UmsAdminPermissionRelation;
import java.util.List;
import com.yin.springboot.mybatis.mapper.UmsAdminPermissionRelationMapper;
import com.yin.springboot.mybatis.server.UmsAdminPermissionRelationService;
/**
 * Service implementation for admin/permission relations.
 * Pure delegation layer: every method forwards directly to the MyBatis
 * mapper with no additional business logic or transaction handling.
 */
@Service
public class UmsAdminPermissionRelationServiceImpl implements UmsAdminPermissionRelationService{

    @Resource
    private UmsAdminPermissionRelationMapper umsAdminPermissionRelationMapper;

    @Override
    public int deleteByPrimaryKey(Long id) {
        return umsAdminPermissionRelationMapper.deleteByPrimaryKey(id);
    }

    @Override
    public int insert(UmsAdminPermissionRelation record) {
        return umsAdminPermissionRelationMapper.insert(record);
    }

    @Override
    public int insertOrUpdate(UmsAdminPermissionRelation record) {
        return umsAdminPermissionRelationMapper.insertOrUpdate(record);
    }

    @Override
    public int insertOrUpdateSelective(UmsAdminPermissionRelation record) {
        return umsAdminPermissionRelationMapper.insertOrUpdateSelective(record);
    }

    @Override
    public int insertSelective(UmsAdminPermissionRelation record) {
        return umsAdminPermissionRelationMapper.insertSelective(record);
    }

    @Override
    public UmsAdminPermissionRelation selectByPrimaryKey(Long id) {
        return umsAdminPermissionRelationMapper.selectByPrimaryKey(id);
    }

    @Override
    public int updateByPrimaryKeySelective(UmsAdminPermissionRelation record) {
        return umsAdminPermissionRelationMapper.updateByPrimaryKeySelective(record);
    }

    @Override
    public int updateByPrimaryKey(UmsAdminPermissionRelation record) {
        return umsAdminPermissionRelationMapper.updateByPrimaryKey(record);
    }

    @Override
    public int updateBatch(List<UmsAdminPermissionRelation> list) {
        return umsAdminPermissionRelationMapper.updateBatch(list);
    }

    @Override
    public int batchInsert(List<UmsAdminPermissionRelation> list) {
        return umsAdminPermissionRelationMapper.batchInsert(list);
    }
}
|
// Backbone view wrapping the Twilio Client browser softphone.
// Fetches a capability token over AJAX, configures Twilio.Device with it,
// and mirrors the device lifecycle into a Zest.Telephony.Models.Device
// model whose `state` attribute drives re-rendering of the template.
Zest.Telephony.Views.TwilioClientView = Backbone.View.extend({
  className: 'twilio-client-wrapper',
  events: {
    'click button.answer': 'deviceAnswer',
    'click button.hangup': 'deviceHangup'
  },
  template: JST["templates/telephony/twilio_client_view"],
  initialize: function(options) {
    this.agent = options.agent;
    this.device = new Zest.Telephony.Models.Device();
    // Re-render whenever the softphone's state changes.
    this.device.bind("change:state", $.proxy(this.render, this));
    // Document-level telephony events keep this view in sync with the
    // call lifecycle driven elsewhere in the app.
    $(document)
      .bind("telephony:Answer", $.proxy(this.onAnswer, this))
      .bind("telephony:Start", $.proxy(this.onAnswer, this))
      .bind("telephony:Conference", $.proxy(this.onAnswer, this))
      .bind("telephony:CompleteOneStepTransfer", $.proxy(this.onAnswer, this))
      .bind("telephony:CompleteTwoStepTransfer", $.proxy(this.onAnswer, this))
      .bind("telephony:Busy telephony:NoAnswer telephony:CallFail telephony:Terminate",
        $.proxy(this.onCallEnded, this));
    // Request a capability token for this CSR, then set up the client.
    this.tokenPath = Zest.Telephony.Config.TWILIO_CLIENT_TOKEN_PATH;
    var data = { csr_id: options.csrId };
    $.ajax(this.tokenPath, { type: 'GET', data: data })
      .done($.proxy(this.loadToken, this))
      .fail($.proxy(this.logFail, this));
  },
  // Token endpoint responded: hand the token to Twilio.Device.
  loadToken: function(data) {
    var token = data.token;
    this.setupTwilioClient(token);
  },
  logFail: function(xhr, textStatus, errorThrown) {
    if (typeof console === "object" && typeof console.log === "function") {
      console.log('Failed to load capability token');
    }
  },
  // Register all Twilio.Device lifecycle callbacks.
  setupTwilioClient: function(token) {
    Twilio.Device.setup(token, {debug: false});
    Twilio.Device.ready($.proxy(this.deviceReady, this));
    Twilio.Device.error($.proxy(this.deviceError, this));
    Twilio.Device.incoming($.proxy(this.deviceIncoming, this));
    Twilio.Device.connect($.proxy(this.deviceConnect, this));
    Twilio.Device.disconnect($.proxy(this.deviceDisconnect, this));
  },
  deviceReady: function(dev) {
    this.device.set({ state: 'ready' });
  },
  deviceError: function(err) {
    this.device.set({ state: 'error' });
  },
  // Keep the pending connection so the "answer" button can accept it.
  deviceIncoming: function(conn) {
    this.connection = conn;
    this.device.set({ state: 'incoming' });
  },
  deviceConnect: function(conn) {
    this.device.set({ state: 'connect' });
  },
  deviceDisconnect: function(conn) {
    this.device.set({ state: 'disconnect' });
  },
  // "Answer" button: accept the stored incoming connection.
  deviceAnswer: function() {
    this.connection.accept();
    this.disallowBrowserReload();
    this.device.set({ state: 'answering' });
  },
  // "Hangup" button: tear down all active connections.
  deviceHangup: function() {
    Twilio.Device.disconnectAll();
    this.allowBrowserReload();
    this.device.set({ state: 'ready' });
  },
  onAnswer: function(event, data) {
    this.device.set({ state: 'connect' });
    this.render();
  },
  onCallEnded: function(event, data) {
    this.device.set({ state: 'ready' });
    this.render();
  },
  // Warn before navigating away mid-call.
  // NOTE(review): the pre-existing window.onbeforeunload is saved here and
  // restored by allowBrowserReload(), but unbind('beforeunload') below
  // removes ALL jQuery beforeunload handlers — confirm no other component
  // binds one.
  disallowBrowserReload: function() {
    this.currentBeforeUnload = window.onbeforeunload;
    var that = this;
    $(window).bind('beforeunload', function() {
      if (that.agent.onACall()) {
        return 'You are ON A CALL. If you leave this page your call will be terminated.';
      }
    });
  },
  allowBrowserReload: function() {
    $(window).unbind('beforeunload');
    window.onbeforeunload = this.currentBeforeUnload;
  },
  render: function() {
    $(this.el).html(this.template({ device: this.device }));
    return this;
  }
});
|
import { writeFileSync } from "node:fs";
import { mockBinary } from "./helpers/mock-bin";
import { mockConsoleMethods } from "./helpers/mock-console";
import { runInTempDir } from "./helpers/run-in-tmp";
const { getPackageManager, getPackageManagerName } =
jest.requireActual("../package-manager");
/**
 * One package-manager-detection scenario: which binaries exist on PATH,
 * which lock files exist in the project directory, and which package
 * manager is expected to be chosen.
 */
interface TestCase {
	npm: boolean;
	yarn: boolean;
	npmLockFile: boolean;
	yarnLockFile: boolean;
	expectedPackageManager: string;
}
// Exhaustive matrix of binary/lock-file combinations. Lock files only act
// as a tie-breaker when both binaries are available (see the last group).
const testCases: TestCase[] = [
	// npm binary - no yarn binary
	{
		npm: true,
		yarn: false,
		npmLockFile: false,
		yarnLockFile: false,
		expectedPackageManager: "npm",
	},
	{
		npm: true,
		yarn: false,
		npmLockFile: true,
		yarnLockFile: false,
		expectedPackageManager: "npm",
	},
	{
		npm: true,
		yarn: false,
		npmLockFile: false,
		yarnLockFile: true,
		expectedPackageManager: "npm",
	},
	{
		npm: true,
		yarn: false,
		npmLockFile: true,
		yarnLockFile: true,
		expectedPackageManager: "npm",
	},
	// yarn binary - no npm binary
	{
		npm: false,
		yarn: true,
		npmLockFile: false,
		yarnLockFile: false,
		expectedPackageManager: "yarn",
	},
	{
		npm: false,
		yarn: true,
		npmLockFile: true,
		yarnLockFile: false,
		expectedPackageManager: "yarn",
	},
	{
		npm: false,
		yarn: true,
		npmLockFile: false,
		yarnLockFile: true,
		expectedPackageManager: "yarn",
	},
	{
		npm: false,
		yarn: true,
		npmLockFile: true,
		yarnLockFile: true,
		expectedPackageManager: "yarn",
	},
	// npm and yarn binaries
	{
		npm: true,
		yarn: true,
		npmLockFile: false,
		yarnLockFile: false,
		expectedPackageManager: "npm",
	},
	{
		npm: true,
		yarn: true,
		npmLockFile: true,
		yarnLockFile: false,
		expectedPackageManager: "npm",
	},
	{
		npm: true,
		yarn: true,
		npmLockFile: false,
		yarnLockFile: true,
		expectedPackageManager: "yarn",
	},
	{
		npm: true,
		yarn: true,
		npmLockFile: true,
		yarnLockFile: true,
		expectedPackageManager: "npm",
	},
];
describe("getPackageManager()", () => {
	// Each test runs in a fresh temp dir so lock files cannot leak between cases.
	runInTempDir();
	mockConsoleMethods();
	describe("no supported package manager", () => {
		// Neither binary works: detection must fail loudly.
		mockYarn(false);
		mockNpm(false);
		it("should throw an error", async () => {
			await expect(() =>
				getPackageManager(process.cwd())
			).rejects.toThrowErrorMatchingInlineSnapshot(
				`"Unable to find a package manager. Supported managers are: npm and yarn."`
			);
		});
	});
	// Data-driven cases: one describe() per binary/lock-file combination.
	for (const {
		npm,
		yarn,
		npmLockFile,
		yarnLockFile,
		expectedPackageManager,
	} of testCases) {
		describe(
			getTestCaseDescription(npm, yarn, npmLockFile, yarnLockFile),
			() => {
				mockYarn(yarn);
				mockNpm(npm);
				mockLockFiles(npmLockFile, yarnLockFile);
				it(`should return the ${expectedPackageManager} package manager`, async () => {
					const actualPackageManager = await getPackageManager(process.cwd());
					expect(getPackageManagerName(actualPackageManager)).toEqual(
						expectedPackageManager
					);
				});
			}
		);
	}
});
/**
* Create a fake yarn binary
*/
function mockYarn(succeed: boolean): void {
	// Exit status the fake binary reports: 0 = present/working, 1 = broken.
	const exitCode = succeed ? 0 : 1;
	let unMock: () => void;
	beforeEach(async () => {
		unMock = await mockBinary("yarn", `process.exit(${exitCode})`);
	});
	afterEach(() => unMock());
}
/**
* Create a fake npm binary
*/
function mockNpm(succeed: boolean): void {
	// Exit status the fake binary reports: 0 = present/working, 1 = broken.
	const exitCode = succeed ? 0 : 1;
	let unMock: () => void;
	beforeEach(async () => {
		unMock = await mockBinary("npm", `process.exit(${exitCode})`);
	});
	afterEach(() => unMock());
}
/**
* Create a fake lock files.
*/
function mockLockFiles(npmLockFile: boolean, yarnLockFile: boolean) {
	beforeEach(() => {
		// Write an empty lock file for each package manager the case asks for.
		const wanted: Array<[boolean, string]> = [
			[npmLockFile, "package-lock.json"],
			[yarnLockFile, "yarn.lock"],
		];
		for (const [enabled, fileName] of wanted) {
			if (enabled) {
				writeFileSync(fileName, "");
			}
		}
	});
}
/**
 * Human-readable describe() title for one detection scenario, listing the
 * binaries and lock files present, e.g. "using npm; yarn.lock".
 */
function getTestCaseDescription(
	npm: boolean,
	yarn: boolean,
	npmLockFile: boolean,
	yarnLockFile: boolean
): string {
	// Order matters: npm, package-lock.json, yarn, yarn.lock.
	const flags: Array<[boolean, string]> = [
		[npm, "npm"],
		[npmLockFile, "package-lock.json"],
		[yarn, "yarn"],
		[yarnLockFile, "yarn.lock"],
	];
	const criteria = flags
		.filter(([enabled]) => enabled)
		.map(([, label]) => label);
	return "using " + criteria.join("; ");
}
|
<reponame>jvccorsi/projeto_final_react_origamid
import React from "react";
import { CanvasContext } from "./hooks/use-canvas-context";
import PropTypes from "prop-types";
import { PropTypes as CustomPropTypes } from "victory-core";
// Victory canvas container: exposes a shared 2D-canvas drawing surface to
// child components via CanvasContext, embedded in the SVG chart through a
// <foreignObject> element.
var CanvasGroup = function (props) {
  var canvasRef = React.useRef();
  var children = props.children,
      width = props.width,
      height = props.height,
      clipWidth = props.clipWidth,
      padding = props.padding;
  // Wipe the whole canvas before a redraw.
  var clear = React.useCallback(function (ctx) {
    return ctx.clearRect(0, 0, width, height);
  }, [width, height]); // This needs to be called in the child component to ensure it is called after the
  // shape is drawn
  // Erase everything to the right of clipWidth inside the padded plot area,
  // emulating an SVG clip path (used e.g. for enter animations).
  var clip = React.useCallback(function (ctx) {
    var maxClipWidth = width - padding.right - padding.left;
    ctx.clearRect(width - padding.right, 0, (maxClipWidth - clipWidth) * -1, height);
  }, [width, height, padding, clipWidth]);
  return React.createElement(CanvasContext.Provider, {
    value: {
      canvasRef: canvasRef,
      clear: clear,
      clip: clip
    }
  }, React.createElement("foreignObject", {
    width: width,
    height: height,
    x: 0,
    y: 0
  }, React.createElement("canvas", {
    width: width,
    height: height,
    ref: canvasRef
  })), children);
};
CanvasGroup.propTypes = {
  "aria-label": PropTypes.string,
  children: PropTypes.oneOfType([PropTypes.arrayOf(PropTypes.node), PropTypes.node]),
  className: PropTypes.string,
  clipWidth: CustomPropTypes.nonNegative,
  height: PropTypes.number,
  padding: PropTypes.oneOfType([PropTypes.number, PropTypes.shape({
    top: PropTypes.number,
    bottom: PropTypes.number,
    left: PropTypes.number,
    right: PropTypes.number
  })]),
  width: PropTypes.number
};
CanvasGroup.role = "container";
CanvasGroup.displayName = "CanvasGroup";
export default CanvasGroup; |
<filename>java/dagger/hilt/android/internal/lifecycle/HiltViewModelFactory.java
/*
* Copyright (C) 2020 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.hilt.android.internal.lifecycle;
import androidx.lifecycle.AbstractSavedStateViewModelFactory;
import androidx.lifecycle.SavedStateHandle;
import androidx.lifecycle.ViewModel;
import androidx.lifecycle.ViewModelProvider;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.savedstate.SavedStateRegistryOwner;
import dagger.Module;
import dagger.hilt.EntryPoint;
import dagger.hilt.EntryPoints;
import dagger.hilt.InstallIn;
import dagger.hilt.android.components.ViewModelComponent;
import dagger.hilt.android.internal.builders.ViewModelComponentBuilder;
import dagger.multibindings.Multibinds;
import java.util.Map;
import java.util.Set;
import javax.inject.Provider;
/**
* View Model Provider Factory for the Hilt Extension.
*
* <p>A provider for this factory will be installed in the {@link
* dagger.hilt.android.components.ActivityComponent} and {@link
* dagger.hilt.android.components.FragmentComponent}. An instance of this factory will also be the
* default factory by activities and fragments annotated with {@link
* dagger.hilt.android.AndroidEntryPoint}.
*/
public final class HiltViewModelFactory implements ViewModelProvider.Factory {
  /** Hilt entry point for getting the multi-binding map of ViewModels. */
  @EntryPoint
  @InstallIn(ViewModelComponent.class)
  public interface ViewModelFactoriesEntryPoint {
    @HiltViewModelMap
    Map<String, Provider<ViewModel>> getHiltViewModelMap();
  }
  /** Hilt module for providing the empty multi-binding map of ViewModels. */
  @Module
  @InstallIn(ViewModelComponent.class)
  interface ViewModelModule {
    @Multibinds
    @HiltViewModelMap
    Map<String, ViewModel> hiltViewModelMap();
  }
  // Class names of the ViewModels present in the @HiltViewModelMap multibinding.
  private final Set<String> hiltViewModelKeys;
  // Fallback factory for ViewModels Hilt does not know about.
  private final ViewModelProvider.Factory delegateFactory;
  private final AbstractSavedStateViewModelFactory hiltViewModelFactory;
  public HiltViewModelFactory(
      @NonNull SavedStateRegistryOwner owner,
      @Nullable Bundle defaultArgs,
      @NonNull Set<String> hiltViewModelKeys,
      @NonNull ViewModelProvider.Factory delegateFactory,
      @NonNull ViewModelComponentBuilder viewModelComponentBuilder) {
    this.hiltViewModelKeys = hiltViewModelKeys;
    this.delegateFactory = delegateFactory;
    this.hiltViewModelFactory =
        new AbstractSavedStateViewModelFactory(owner, defaultArgs) {
          @NonNull
          @Override
          @SuppressWarnings("unchecked")
          protected <T extends ViewModel> T create(
              @NonNull String key, @NonNull Class<T> modelClass, @NonNull SavedStateHandle handle) {
            // A fresh ViewModelComponent is built per ViewModel so each
            // instance receives its own SavedStateHandle.
            ViewModelComponent component =
                viewModelComponentBuilder.savedStateHandle(handle).build();
            Provider<? extends ViewModel> provider =
                EntryPoints.get(component, ViewModelFactoriesEntryPoint.class)
                    .getHiltViewModelMap()
                    .get(modelClass.getName());
            if (provider == null) {
              throw new IllegalStateException(
                  "Expected the @HiltViewModel-annotated class '"
                      + modelClass.getName()
                      + "' to be available in the multi-binding of "
                      + "@HiltViewModelMap but none was found.");
            }
            return (T) provider.get();
          }
        };
  }
  @NonNull
  @Override
  public <T extends ViewModel> T create(@NonNull Class<T> modelClass) {
    // Route to the Hilt factory only for classes bound in the multibinding
    // map; anything else falls through to the delegate factory.
    if (hiltViewModelKeys.contains(modelClass.getName())) {
      return hiltViewModelFactory.create(modelClass);
    } else {
      return delegateFactory.create(modelClass);
    }
  }
}
|
/**
 * Diff between two token arrays using Heckel's diff algorithm.
 */
export default class HeckelDiff {
    left: string[];
    right: string[];
    /** Diff two token arrays and return annotated copies of both sides. */
    static executeDiff(oldTextArray: string[], newTextArray: string[]): {
        oldText: (string | TextNode)[];
        newText: (string | TextNode)[];
    };
    /** Compute the raw change ranges between `left` and `right`. */
    static diff(left: string[], right: string[]): ChangeRange[];
    constructor(left: string[], right: string[]);
    performDiff(): ChangeRange[];
    getDifferences(changeData: ChangeData, uniquePositions: UniquePositions): ChangeData;
    findNextChange(leftStartPos?: number, rightStartPos?: number): number[];
    findPrevChange(leftLo: number, rightLo: number, leftHi: number, rightHi: number): number[];
    mismatchOffset(lArr: string[], rArr: string[]): number;
    identifyUniquePositions(): Array<UniquePositions>;
    findUnique(array: string[]): Map<string, number>;
    appendChangeRange(changesRanges: ChangeRange[], leftLo: number, leftHi: number, rightLo: number, rightHi: number): ChangeRange[];
}
/** (left index, right index) of a token that is unique on both sides. */
export declare type UniquePositions = [number, number];
/** A token plus the index (`low`) it maps to on the opposite side. */
export declare class TextNode {
    text: string;
    low: number;
    constructor(text: string, low: number);
}
/** Kind of edit a ChangeRange describes. */
export declare enum Action {
    change = "change",
    add = "add",
    remove = "remove",
}
/** One edit: lo/hi index spans on the left and right sequences. */
export declare class ChangeRange {
    action: Action;
    leftLo: number;
    leftHi: number;
    rightLo: number;
    rightHi: number;
    constructor(action: Action, leftLo: number, leftHi: number, rightLo: number, rightHi: number);
}
/** Cursor state threaded through getDifferences(). */
export declare class ChangeData {
    leftChangePos: number;
    rightChangePos: number;
    changeRanges: ChangeRange[];
    constructor(leftChangePos: number, rightChangePos: number, changeRanges: ChangeRange[]);
}
|
def sum_odd_numbers(lst):
    """Return the sum of the odd integers in ``lst``.

    Oddness is tested with ``num % 2 == 1``, which also classifies
    negative odd numbers correctly under Python's modulo semantics.
    """
    return sum(num for num in lst if num % 2 == 1)
result = sum_odd_numbers([2,4,6,7,11])
print(result) |
echo -e "1234567812345678\x6e\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff" | nc 127.0.0.1 9001 |
//
// ViewController.h
// TextMutationTests
//
// Created by <NAME> on 24/07/2015.
// Copyright (c) 2015 Department for Education and Child Development. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Root view controller hosting the text view exercised by the
/// text-mutation tests.
@interface ViewController : UIViewController
/// Storyboard-wired text view under test.
@property (weak, nonatomic) IBOutlet UITextView *textView;
@end
|
'use strict';
// Load environment variables (PORT, DATABASE_URL, IMGFLIP_API_* credentials).
require('dotenv').config();
const cors = require('cors');
const express = require('express');
const superagent = require('superagent');
const PORT = process.env.PORT || 3000;
const pg = require('pg');
const methodOverride = require('method-override');
const app = express();
const client = new pg.Client(process.env.DATABASE_URL);
app.use(cors());
//brings in EJS
app.set('view engine', 'ejs');
app.use(express.urlencoded({ extended: true }));
// Lets HTML forms issue PUT/DELETE via the `_method` query parameter.
app.use(methodOverride('_method'));
app.use(express.static('./public'));
// Routes — handlers are function declarations below, so hoisting applies.
app.get('/', handleIndexPage);
app.get('/searches', resultsFromAPI);
app.post('/save', saveThisMeme);
app.post('/caption', captionMeme);
app.get('/fav', handleFav);
app.delete('/delete/:id', deleteMeme);
app.get('/aboutus', aboutUs);
/**
 * GET /fav — list every saved meme from the database.
 */
function handleFav(request, response) {
  const SQL = `SELECT * FROM memes`;
  client.query(SQL)
    .then(results => {
      response.status(200).render('pages/save', { memes: results.rows });
    })
    .catch(error => {
      // BUG FIX: the query promise previously had no rejection handler, so
      // a DB failure became an unhandled promise rejection instead of an
      // error page (Express 4 does not catch async errors on its own).
      console.error(error.message);
      response.status(500).render('pages/error', { err: error });
    });
}
/**
 * GET / — render the landing page.
 */
function handleIndexPage(request, response) {
  const statusCode = 200;
  response.status(statusCode).render('pages/index');
}
/**
 * GET /aboutus — render the "about us" page.
 */
function aboutUs(request, response) {
  const statusCode = 200;
  response.status(statusCode).render('pages/aboutus');
}
/**
 * GET /searches — fetch the imgflip template list and show the templates
 * whose names match the user's query (case-insensitive regex match).
 */
function resultsFromAPI(request, response) {
  const url = 'http://api.imgflip.com/get_memes';
  superagent.get(url)
    .then(results => {
      const input = request.query.name;
      const memes = results.body.data.memes;
      // BUG FIX: the original used `new RegExp(input, 'ig')`; with the 'g'
      // flag RegExp.prototype.test is stateful (lastIndex advances between
      // calls), so alternating templates were silently skipped while
      // filtering. The 'i' flag alone gives the intended behaviour. The
      // dead `r === false` check (a RegExp object is never === false) is
      // removed.
      const pattern = new RegExp(input, 'i');
      const filtered = memes.filter(m => pattern.test(m.name));
      const selection = filtered.map(m => new Memes(m));
      response.status(200).render('pages/searches/show', { meme: selection });
    })
    .catch(error => {
      // Invalid regex input or an imgflip API failure lands here; the
      // original had no rejection handler at all.
      console.error(error.message);
      response.status(404).render('pages/error');
    });
}
/**
 * POST /caption — ask the imgflip API to render the submitted caption text
 * onto the chosen template, then display the resulting image URL.
 */
function captionMeme(request, response) {
  const queryStringParams = {
    username: process.env.IMGFLIP_API_USERNAME,
    password: process.env.IMGFLIP_API_PASSWORD,
    template_id: request.body.id,
    boxes: [
      {
        "text": request.body.text0,
      },
      {
        "text": request.body.text1,
      },
      {
        "text": request.body.text2,
      },
      {
        "text": request.body.text3,
      },
      {
        "text": request.body.text4,
      },
    ],
    format: 'json',
    limit: 1,
  };
  superagent.post('https://api.imgflip.com/caption_image')
    .type('form')
    .send(queryStringParams)
    .then(results => {
      let data = results.body.data.url;
      response.status(200).render('pages/onememe', { data });
    })
    .catch(error => {
      // BUG FIX: the previous catch referenced an undefined variable
      // `filt`, which threw a ReferenceError inside the handler and masked
      // the real API error. Log the failure and show the error page.
      console.error(error.message);
      response.status(404).render('pages/error');
    });
}
/**
 * POST /save — persist a captioned meme, then redirect to the favourites
 * page.
 */
function saveThisMeme(request, response) {
  const SQL = `
  INSERT INTO memes (name, url, text0, text1)
  VALUES($1, $2, $3, $4)
  `;
  const { name, data, text0, text1 } = request.body;
  client.query(SQL, [name, data, text0, text1])
    .then(() => response.status(200).redirect('/fav'))
    .catch(error => console.error(error.message));
}
/**
 * View-model for one imgflip meme template record.
 * NOTE(review): `data.arial` does not appear in imgflip's documented
 * payload, so `font` is presumably always undefined — confirm against the
 * API response.
 */
function Memes(data) {
  const { name, id, url, text0, text1, box_count } = data;
  this.name = name;
  this.template_id = id;
  this.url = url;
  this.text0 = text0;
  this.text1 = text1;
  this.font = data.arial;
  this.box_count = box_count;
}
/**
 * DELETE /delete/:id — remove one saved meme, then return to favourites.
 */
function deleteMeme(request, response) {
  const SQL = `DELETE FROM memes WHERE id = $1`;
  client.query(SQL, [request.params.id])
    .then(() => response.status(200).redirect('/fav'));
}
// This route deliberately throws so the error middleware can be exercised.
app.get('/badthing', (request, response) => {
  throw new Error('bad request???');
});
// 404 Handler — catches every route not matched above.
app.use('*', (request, response) => {
  response.status(404).render('pages/error');
});
// Error Handler — synchronous route errors end up here.
app.use((err, request, response, next) => {
  console.error(err);
  response.status(500).render('pages/error', { err });
});
// Startup
function startServer() {
  // NOTE(review): PORT is read from module scope; the argument the caller
  // passes below is ignored.
  app.listen(PORT, () => console.log(`Server running on ${PORT}`));
}
// Connect the pg client to the database, then start the HTTP server.
client.connect()
  .then(() => {
    startServer(PORT);
  })
  .catch(err => console.error(err));
// superagent.post(urlGoesHere).send( {} ) … and that object is an object where you’d have the user/pass props
// In your server file you’d do that
|
<filename>lib/cesiumWorkerBootstrapper.js
/*global require,importScripts,self*/
"use strict";
// Load the bundled Cesium worker modules; this defines `require` in the
// worker's global scope.
importScripts('Cesium-WebWorkers.js');
// Bootstrap handler: the FIRST message names the worker module to load
// (event.data.workerModule); that module then replaces this handler and
// processes all subsequent messages itself.
self.onmessage = function(event) {
  var data = event.data;
  var worker = require(data.workerModule);
  self.onmessage = worker;
};
|
import Template from "./../class/Template";
/**
* Class for the main template
*
* Any information requested by the template will be provided by this class
* as well as it's behaviour.
*/
export default class main extends Template {
	// Meta-tag information exposed to the base Template when rendering.
	protected _title: String = "Aegis Framework";
	protected _version: String = "NYMPH";
	// Wire up which page body and which template shell render this view.
	constructor () {
		super ();
		this.setPage("home.html");
		this.setTemplate("main.html");
	}
} |
#!/usr/bin/env bash
###############################################################################
# Ruby
###############################################################################
# Prefer Homebrew's keg-only Ruby over the system Ruby when it is installed,
# and expose its build flags so native gem extensions compile against it.
if [ -e "/usr/local/opt/ruby" ]; then
  PATH=/usr/local/opt/ruby/bin:$PATH
  LDFLAGS="-L/usr/local/opt/ruby/lib $LDFLAGS"
  CPPFLAGS="-I/usr/local/opt/ruby/include $CPPFLAGS"
  PKG_CONFIG_PATH="/usr/local/opt/ruby/lib/pkgconfig:$PKG_CONFIG_PATH"
fi
export PATH
# Install gems from Gemfile
gem install bundler -f |
/**
 * Returns a newly allocated copy of the input string with its characters
 * in reverse order.
 * @method reverse
 * @param text [input string]
 * @return [reversed copy of the input string]
 */
char* reverse(const char* text);
/**
 * Encrypts text using the Vigenère cipher.
 * @method vigenere_encrypt
 * @param {key} [@1]
 * @param {text} [string to encrypt]
 * @return [@2]
 *
 * @1: string representing the key used for both encryption and decryption.
 The key is a single word and may consist only of alphabetic characters;
 letter case does not matter
 * @2: address of a copy of the string encrypted with the Vigenère cipher,
 or NULL if encryption was not successful
 *
 */
char* vigenere_encrypt(const char* key, const char* text);
/**
 * Decrypts text using the Vigenère cipher.
 * @method vigenere_decrypt
 * @param key [@1]
 * @param text [string to decrypt]
 * @return [@2]
 *
 * @1: string representing the key used for both encryption and decryption.
 The key is a single word and may consist only of alphabetic characters;
 letter case does not matter
 * @2: returns the address of a copy of the string decrypted with the
 Vigenère cipher, or NULL if decryption was not successful
 *
 */
char* vigenere_decrypt(const char* key, const char* text);
/**
 * Bitwise encryption following this procedure:
 * The character to be encrypted is split in half (4 bits + 4 bits).
 * The bits of the first half are then grouped into pairs and the two
 values in each pair are swapped. The four bits produced this way are
 * XORed with the remaining 4 bits.
 *
 * @method bit_encrypt
 * @param text [string to encrypt]
 * @return [pointer to a newly created string containing the encrypted input text]
 */
char* bit_encrypt(const char* text);
/**
 * Bitwise decryption — the inverse of the preceding encryption function (bit_encrypt).
 * @method bit_decrypt
 * @param text [string to decrypt]
 * @return [pointer to a newly created string containing the decrypted text]
 */
char* bit_decrypt(const char* text);
/**
 * Encodes text with the BMP cipher.
 * BMP encryption procedure:
 1. the input string is first encrypted with reverse()
 2. the resulting string is then encrypted with vigenere_encrypt()
 3. finally bit_encrypt() is applied to that result
 * @method bmp_encrypt
 * @param key [@1]
 * @param text [string to encrypt or decrypt]
 * @return [@2]
 *
 * @1: string representing the key used for both encryption and decryption.
 The key may consist only of letters; letter case does not matter
 * @2: the function returns the address of a copy of the string encrypted
 with the BMP cipher, or NULL if encryption was not successful
 *
 */
char* bmp_encrypt(const char* key, const char* text);
/**
 * Decodes text encrypted with the BMP cipher.
 * BMP decryption proceeds in the opposite order of encryption (bmp_encrypt).
 * @method bmp_decrypt
 * @param key [@1]
 * @param text [string to decrypt]
 * @return [@2]
 *
 * @1: string representing the key used for both encryption and decryption.
 The key may consist only of letters; letter case does not matter
 * @2: the function returns the address of a copy of the string decrypted
 with the BMP cipher, or NULL if decryption was not successful
 *
 */
char* bmp_decrypt(const char* key, const char* text);
|
#!/usr/bin/env bash
# Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
# Build the documentation site into $BUILD_DIR/gen and serve it with
# sphinx-autobuild (live reload). Optional flags: --pdf, --gen.
SCRIPT_DIR=$(dirname "$0")
cd $SCRIPT_DIR
BUILD_DIR=$(cd ..; pwd)/build
# On signals 1/2/3/6 (HUP/INT/QUIT/ABRT) remove build output and the
# generated files copied into ../source.
trap cleanup 1 2 3 6
cleanup()
{
  echo "Caught Signal ... cleaning up."
  rm -rf $BUILD_DIR
  cd $SCRIPT_DIR
  rm -f ../source/daml/reference/base.rst
  rm -f ../source/app-dev/grpc/proto-docs.rst
  rm -f ../source/LICENSE
  rm -f ../source/NOTICES
  echo "Done cleanup ... quitting."
  exit 1
}
# Start from a clean build tree; link sources and configs into it.
rm -rf $BUILD_DIR
mkdir -p $BUILD_DIR/gen
ln -s ../source $BUILD_DIR
ln -s ../configs $BUILD_DIR
mkdir $BUILD_DIR/theme
bazel build //docs:theme
tar -zxf ../../bazel-bin/docs/da_theme.tar.gz -C $BUILD_DIR/theme
# License and Notices
cp ../../LICENSE ../source
cp ../../NOTICES ../source
# Optional artifacts, selected by command-line flags.
for arg in "$@"
do
  if [ "$arg" = "--pdf" ]; then
    bazel build //docs:pdf-docs
    mkdir -p $BUILD_DIR/gen/_downloads
    cp -L ../../bazel-bin/docs/DigitalAssetSDK.pdf $BUILD_DIR/gen/_downloads
  fi
  if [ "$arg" = "--gen" ]; then
    # Hoogle
    bazel build //compiler/damlc:daml-base-hoogle-docs
    mkdir -p $BUILD_DIR/gen/hoogle_db
    cp -L ../../bazel-bin/compiler/damlc/daml-base-hoogle.txt $BUILD_DIR/gen/hoogle_db/base.txt
    # Javadoc
    bazel build //language-support/java:javadoc
    mkdir -p $BUILD_DIR/gen/app-dev/bindings-java
    unzip ../../bazel-bin/language-support/java/javadoc.jar -d $BUILD_DIR/gen/app-dev/bindings-java/javadocs/
    # Proto-docs
    bazel build //ledger-api/grpc-definitions:docs
    cp -L ../../bazel-bin/ledger-api/grpc-definitions/proto-docs.rst ../source/app-dev/grpc/
    #StdLib
    bazel build //compiler/damlc:daml-base-rst-docs
    cp -L ../../bazel-bin/compiler/damlc/daml-base.rst ../source/daml/reference/base.rst
  fi
done
DATE=$(date +"%Y-%m-%d")
echo { \"$DATE\" : \"$DATE\" } > $BUILD_DIR/gen/versions.json
pipenv install
pipenv run sphinx-autobuild -c $BUILD_DIR/configs/html $BUILD_DIR/source $BUILD_DIR/gen
|
#!/bin/bash
# BUG FIX: the shebang was "#!/binsh" (missing the directory separator and
# the shell name), so the script could not be executed directly.
# Load the toolchain provided by the cluster's environment modules.
module load R
module load plink
module load plink2
# PRSet runs against five base GWAS summary-statistic sets, all targeting
# the same 1000G SAS GRCh37 cohort with the MSigDB C7 gene sets.
Rscript PRSice.R --dir . --prsice PRSice_linux --base MAGIC1000G_FG_SAS.nodup.QC.txt --target PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.final --no-default --chr chromosome --A1 effect_allele --A2 other_allele --snp variant --pvalue p_value --bp base_pair_location --beta --cov PAPG21-3_capstone1000G_merged.QC.COV.final --pheno PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.pheno.final --pheno-col T2D --ignore-fid --missing CENTER --thread 2 --stat beta --gtf misigD_genesets/Homo_sapiens.GRCh37.87.gtf --binary-target T --msigdb misigD_genesets/c7.all.v7.4.symbols.gmt --multi-plot 10 --out MAGIC_FG_PRSet_PAPG21-3_1000gSAS_GRCh37.c7ALLv74
Rscript PRSice.R --dir . --prsice PRSice_linux --base METAANALYSIS_DIAGRAM_SE1.BMI.MOD.nodup.QC.txt --extract DIAGRAM2017_META_PRSice_avg_PAPG21-3_1000gSAS_GRCh37.valid --target PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.final --no-default --chr CHR --A1 Allele1 --A2 Allele2 --snp SNP --chr-id c:l --pvalue P-value --bp BP --beta --cov PAPG21-3_capstone1000G_merged.QC.COV.final --pheno PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.pheno.final --pheno-col T2D --ignore-fid --missing CENTER --thread 2 --stat Effect --gtf misigD_genesets/Homo_sapiens.GRCh37.87.gtf --binary-target T --msigdb misigD_genesets/c7.all.v7.4.symbols.gmt --multi-plot 10 --out DIAGRAM_SE1_PRSet_PAPG21-3_1000gSAS_GRCh37.c7ALLv74
Rscript PRSice.R --dir . --prsice PRSice_linux --base T2D_TranEthnic.BMIadjusted.nodup.QC.final.txt --extract T2D_TE_PRSice_avg_PAPG21-3_1000gSAS_GRCh37.valid --target PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.final --no-default --chr CHR --A1 A1 --A2 A2 --snp SNP --chr-id c:l --pvalue P --bp BP --beta --cov PAPG21-3_capstone1000G_merged.QC.COV.final --pheno PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.pheno.final --pheno-col T2D --ignore-fid --missing CENTER --thread 2 --stat BETA --gtf misigD_genesets/Homo_sapiens.GRCh37.87.gtf --binary-target T --msigdb misigD_genesets/c7.all.v7.4.symbols.gmt --multi-plot 10 --out T2D_TranEthnic_PRSet_PAPG21-3_1000gSAS_GRCh37.c7ALLv74
Rscript PRSice.R --dir . --prsice PRSice_linux --base UKBB.GWAS1KG.EXOME.CAD.SOFT.META.PublicRelease.300517.nodup.QC.txt --target PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.final --no-default --chr chr --A1 effect_allele --A2 noneffect_allele --snp snptestid --pvalue p-value_gc --bp bp_hg19 --or --cov PAPG21-3_capstone1000G_merged.QC.COV.final --pheno PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.pheno.final --pheno-col T2D --ignore-fid --missing CENTER --thread 2 --stat logOR --gtf misigD_genesets/Homo_sapiens.GRCh37.87.gtf --binary-target T --msigdb misigD_genesets/c7.all.v7.4.symbols.gmt --multi-plot 10 --out UKBB_CAD_PRSet_PAPG21-3_1000gSAS_GRCh37.c7ALLv74
Rscript PRSice.R --dir . --prsice PRSice_linux --base UACR_formatted_20170020_DM-All-nstud_18-SumMac_400.tbl.ALLrsid.nodup.QC.txt --target PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.final --no-default --chr Chr --A1 Allele1 --A2 Allele2 --snp RSID --pvalue P-value --bp Pos_b37 --beta --cov PAPG21-3_capstone1000G_merged.QC.COV.final --pheno PAPG21-3_capstone_1000g_allphase3SAS_GRCh37.QC-merged_excludemissnp.pheno.final --pheno-col T2D --ignore-fid --missing CENTER --thread 2 --stat Effect --gtf misigD_genesets/Homo_sapiens.GRCh37.87.gtf --binary-target T --msigdb misigD_genesets/c7.all.v7.4.symbols.gmt --multi-plot 10 --out UACR_DM_PRSet_PAPG21-3_1000gSAS_GRCh37.c7ALLv74
|
#!/bin/bash
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
export NGINX_VERSION=1.15.8
export NDK_VERSION=0.3.1rc1
export SETMISC_VERSION=0.32
export MORE_HEADERS_VERSION=0.33
export NGINX_DIGEST_AUTH=274490cec649e7300fea97fed13d84e596bbc0ce
export NGINX_SUBSTITUTIONS=bc58cb11844bc42735bbaef7085ea86ace46d05b
export NGINX_OPENTRACING_VERSION=ea9994d7135be5ad2e3009d0f270e063b1fb3b21
export OPENTRACING_CPP_VERSION=1.5.0
export ZIPKIN_CPP_VERSION=0.5.2
export JAEGER_VERSION=ba0fa3fa6dbb01995d996f988a897e272100bf95
export MODSECURITY_VERSION=fc061a57a8b0abda79b17cbe103d78db803fa575
export LUA_NGX_VERSION=1c72f57ce87d4355d546a97c2bd8f5123a70db5c
export LUA_STREAM_NGX_VERSION=0.0.6rc2
export LUA_UPSTREAM_VERSION=0.07
export NGINX_INFLUXDB_VERSION=0e2cb6cbf850a29c81e44be9e33d9a15d45c50e8
export GEOIP2_VERSION=3.2
export NGINX_AJP_VERSION=bf6cd93f2098b59260de8d494f0f4b1f11a84627
export LUAJIT_VERSION=520d53a87dd44c637dddb6de313204211c2b212b
export BUILD_PATH=/tmp/build
ARCH=$(uname -m)
# get_src <sha256> <url>
# Download a tarball, verify its SHA-256 checksum (aborting the whole build
# with exit status 10 on mismatch), extract it into the current directory,
# and remove the downloaded archive.
get_src()
{
  hash="$1"
  url="$2"
  f=$(basename "$url")
  curl -sSL "$url" -o "$f"
  echo "$hash $f" | sha256sum -c - || exit 10
  tar xzf "$f"
  rm -rf "$f"
}
apt-get update && apt-get dist-upgrade -y
# install required packages to build
clean-install \
bash \
build-essential \
curl ca-certificates \
libgeoip1 \
libgeoip-dev \
patch \
libpcre3 \
libpcre3-dev \
libssl-dev \
zlib1g \
zlib1g-dev \
libaio1 \
libaio-dev \
openssl \
libperl-dev \
cmake \
util-linux \
lua5.1 liblua5.1-0 liblua5.1-dev \
lmdb-utils \
wget \
libcurl4-openssl-dev \
libprotobuf-dev protobuf-compiler \
libz-dev \
procps \
git g++ pkgconf flex bison doxygen libyajl-dev liblmdb-dev libtool dh-autoreconf libxml2 libpcre++-dev libxml2-dev \
lua-cjson \
python \
luarocks \
libmaxminddb-dev \
authbind \
dumb-init \
gdb \
valgrind \
bc \
|| exit 1
if [[ ${ARCH} == "x86_64" ]]; then
ln -s /usr/lib/x86_64-linux-gnu/liblua5.1.so /usr/lib/liblua.so
ln -s /usr/lib/x86_64-linux-gnu /usr/lib/lua-platform-path
fi
if [[ ${ARCH} == "aarch64" ]]; then
ln -s /usr/lib/aarch64-linux-gnu/liblua5.1.so /usr/lib/liblua.so
ln -s /usr/lib/aarch64-linux-gnu /usr/lib/lua-platform-path
fi
mkdir -p /etc/nginx
# Get the GeoIP data
GEOIP_FOLDER=/etc/nginx/geoip
mkdir -p $GEOIP_FOLDER
# Fetch a GeoIP2 database tarball from $2 and install the contained
# database as $GEOIP_FOLDER/$1.mmdb; exits 1 if the download fails.
function geoip2_get {
# local + quoting: database names/URLs stay intact even with odd characters
local db="$1"
local url="$2"
wget -O "$GEOIP_FOLDER/$db.tar.gz" "$url" || { echo "Could not download $db, exiting." ; exit 1; }
mkdir "$GEOIP_FOLDER/$db" \
&& tar xf "$GEOIP_FOLDER/$db.tar.gz" -C "$GEOIP_FOLDER/$db" --strip-components 1 \
&& mv "$GEOIP_FOLDER/$db/$db.mmdb" "$GEOIP_FOLDER/$db.mmdb" \
&& rm -rf "$GEOIP_FOLDER/$db" \
&& rm -rf "$GEOIP_FOLDER/$db.tar.gz"
}
# Fetch the MaxMind GeoLite2 databases consumed by the geoip2 nginx module.
geoip2_get "GeoLite2-City" "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz"
geoip2_get "GeoLite2-ASN" "http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz"
mkdir --verbose -p "$BUILD_PATH"
cd "$BUILD_PATH"
# download, verify and extract the source files
# (each call pins the exact tarball contents via its sha256 hash)
get_src a8bdafbca87eb99813ae4fcac1ad0875bf725ce19eb265d28268c309b2b40787 \
"https://nginx.org/download/nginx-$NGINX_VERSION.tar.gz"
get_src 49f50d4cd62b166bc1aaf712febec5e028d9f187cedbc27a610dfd01bdde2d36 \
"https://github.com/simpl/ngx_devel_kit/archive/v$NDK_VERSION.tar.gz"
get_src f1ad2459c4ee6a61771aa84f77871f4bfe42943a4aa4c30c62ba3f981f52c201 \
"https://github.com/openresty/set-misc-nginx-module/archive/v$SETMISC_VERSION.tar.gz"
get_src a3dcbab117a9c103bc1ea5200fc00a7b7d2af97ff7fd525f16f8ac2632e30fbf \
"https://github.com/openresty/headers-more-nginx-module/archive/v$MORE_HEADERS_VERSION.tar.gz"
get_src ede0ad490cb9dd69da348bdea2a60a4c45284c9777b2f13fa48394b6b8e7671c \
"https://github.com/atomx/nginx-http-auth-digest/archive/$NGINX_DIGEST_AUTH.tar.gz"
get_src 618551948ab14cac51d6e4ad00452312c7b09938f59ebff4f93875013be31f2d \
"https://github.com/yaoweibin/ngx_http_substitutions_filter_module/archive/$NGINX_SUBSTITUTIONS.tar.gz"
get_src 343b4293ca0d4afa55bf1ab54c866766043b2585b6ce81467d3d3e25987fc186 \
"https://github.com/opentracing-contrib/nginx-opentracing/archive/$NGINX_OPENTRACING_VERSION.tar.gz"
get_src 4455ca507936bc4b658ded10a90d8ebbbd61c58f06207be565a4ffdc885687b5 \
"https://github.com/opentracing/opentracing-cpp/archive/v$OPENTRACING_CPP_VERSION.tar.gz"
get_src 30affaf0f3a84193f7127cc0135da91773ce45d902414082273dae78914f73df \
"https://github.com/rnburn/zipkin-cpp-opentracing/archive/v$ZIPKIN_CPP_VERSION.tar.gz"
get_src 073deba39f74eff81da917907465e1343c89b335244349d3d3b4ae9331de86f2 \
"https://github.com/SpiderLabs/ModSecurity-nginx/archive/$MODSECURITY_VERSION.tar.gz"
get_src b68286966f292fb552511b71bd8bc11af8f12c8aa760372d1437ac8760cb2f25 \
"https://github.com/jaegertracing/jaeger-client-cpp/archive/$JAEGER_VERSION.tar.gz"
get_src 6c8a2792222f6bfad927840bf64cb890466fcca703a0133cbde0e5b808461279 \
"https://github.com/openresty/lua-nginx-module/archive/$LUA_NGX_VERSION.tar.gz"
get_src 5420dbf59bac52cef8021658d7eae1667a2bd14dda23602c985cae2604de77dd \
"https://github.com/openresty/stream-lua-nginx-module/archive/v$LUA_STREAM_NGX_VERSION.tar.gz"
get_src 2a69815e4ae01aa8b170941a8e1a10b6f6a9aab699dee485d58f021dd933829a \
"https://github.com/openresty/lua-upstream-nginx-module/archive/v$LUA_UPSTREAM_VERSION.tar.gz"
# pure-lua resty helper libraries installed later in this script
get_src 2349dd0b7ee37680306ee76bc4b6bf5c7509a4a4be16d246d9bbff44f564e4a0 \
"https://github.com/openresty/lua-resty-lrucache/archive/v0.08.tar.gz"
get_src bc9a00f4dd6dd3928c6e878dc84fa7a1073d5a65900cd77a5c1c7ce2d863b22a \
"https://github.com/openresty/lua-resty-core/archive/v0.1.16rc3.tar.gz"
get_src eaf84f58b43289c1c3e0442ada9ed40406357f203adc96e2091638080cb8d361 \
"https://github.com/openresty/lua-resty-lock/archive/v0.07.tar.gz"
get_src 3917d506e2d692088f7b4035c589cc32634de4ea66e40fc51259fbae43c9258d \
"https://github.com/hamishforbes/lua-resty-iputils/archive/v0.3.0.tar.gz"
get_src 5d16e623d17d4f42cc64ea9cfb69ca960d313e12f5d828f785dd227cc483fcbd \
"https://github.com/openresty/lua-resty-upload/archive/v0.10.tar.gz"
get_src 4aca34f324d543754968359672dcf5f856234574ee4da360ce02c778d244572a \
"https://github.com/openresty/lua-resty-dns/archive/v0.21.tar.gz"
get_src 095615fe94e64615c4a27f4f4475b91c047cf8d10bc2dbde8d5ba6aa625fc5ab \
"https://github.com/openresty/lua-resty-string/archive/v0.11.tar.gz"
get_src a77bf0d7cf6a9ba017d0dc973b1a58f13e48242dd3849c5e99c07d250667c44c \
"https://github.com/openresty/lua-resty-balancer/archive/v0.02rc4.tar.gz"
get_src d81b33129c6fb5203b571fa4d8394823bf473d8872c0357a1d0f14420b1483bd \
"https://github.com/cloudflare/lua-resty-cookie/archive/v0.1.0.tar.gz"
get_src d04df883adb86c96a8e0fe6c404851b9c776840dbb524419c06ae3fac42c4e64 \
"https://github.com/openresty/luajit2/archive/$LUAJIT_VERSION.tar.gz"
get_src c673fcee37c1c4794f921b6710b09e8a0e1e58117aa788f798507d033f737192 \
"https://github.com/influxdata/nginx-influxdb-module/archive/$NGINX_INFLUXDB_VERSION.tar.gz"
get_src 15bd1005228cf2c869a6f09e8c41a6aaa6846e4936c473106786ae8ac860fab7 \
"https://github.com/leev/ngx_http_geoip2_module/archive/$GEOIP2_VERSION.tar.gz"
get_src 5f629a50ba22347c441421091da70fdc2ac14586619934534e5a0f8a1390a950 \
"https://github.com/yaoweibin/nginx_ajp_module/archive/$NGINX_AJP_VERSION.tar.gz"
# improve compilation times: parallelize make/ctest/hunter across all cores
# (grep -c already yields a plain integer; the old "$((... - 0))" arithmetic
# wrapper was a no-op and has been dropped)
CORES=$(grep -c ^processor /proc/cpuinfo)
export MAKEFLAGS=-j${CORES}
export CTEST_BUILD_FLAGS=${MAKEFLAGS}
export HUNTER_JOBS_NUMBER=${CORES}
export HUNTER_KEEP_PACKAGE_SOURCES=false
export HUNTER_USE_CACHE_SERVERS=true
# Install luajit from openresty fork
export LUAJIT_LIB=/usr/local/lib
export LUA_LIB_DIR="$LUAJIT_LIB/lua"
cd "$BUILD_PATH/luajit2-$LUAJIT_VERSION"
# CCDEBUG=-g keeps debug symbols for the gdb utils installed below
make CCDEBUG=-g
make install
export LUAJIT_INC=/usr/local/include/luajit-2.1
# Installing luarocks packages
if [[ ${ARCH} == "x86_64" ]]; then
export PCRE_DIR=/usr/lib/x86_64-linux-gnu
fi
if [[ ${ARCH} == "aarch64" ]]; then
export PCRE_DIR=/usr/lib/aarch64-linux-gnu
fi
cd "$BUILD_PATH"
luarocks install lrexlib-pcre 2.7.2-1 PCRE_LIBDIR=${PCRE_DIR}
# install the pure-lua resty libraries unpacked by get_src above
cd "$BUILD_PATH/lua-resty-core-0.1.16rc3"
make install
cd "$BUILD_PATH/lua-resty-lrucache-0.08"
make install
cd "$BUILD_PATH/lua-resty-lock-0.07"
make install
cd "$BUILD_PATH/lua-resty-iputils-0.3.0"
make install
cd "$BUILD_PATH/lua-resty-upload-0.10"
make install
cd "$BUILD_PATH/lua-resty-dns-0.21"
make install
cd "$BUILD_PATH/lua-resty-string-0.11"
make install
cd "$BUILD_PATH/lua-resty-balancer-0.02rc4"
make all
make install
cd "$BUILD_PATH/lua-resty-cookie-0.1.0"
make install
# build and install lua-resty-waf with dependencies
# (helper script baked into the image — TODO confirm it exists at /)
/install_lua_resty_waf.sh
# install openresty-gdb-utils
cd /
git clone --depth=1 https://github.com/openresty/openresty-gdb-utils.git
cat > ~/.gdbinit << EOF
directory /openresty-gdb-utils
py import sys
py sys.path.append("/openresty-gdb-utils")
source luajit20.gdb
source ngx-lua.gdb
source luajit21.py
source ngx-raw-req.py
set python print-stack full
EOF
# build opentracing lib
cd "$BUILD_PATH/opentracing-cpp-$OPENTRACING_CPP_VERSION"
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_CXX_FLAGS="-fPIC" \
-DBUILD_TESTING=OFF \
-DBUILD_MOCKTRACER=OFF \
..
make
make install
# build jaeger lib
cd "$BUILD_PATH/jaeger-client-cpp-$JAEGER_VERSION"
# -Werror breaks the build on gcc's -Wpsabi ABI notes (arm64); relax it
sed -i 's/-Werror/-Wno-psabi/' CMakeLists.txt
# linker version script: export only the tracer factory entry point
cat <<EOF > export.map
{
global:
OpenTracingMakeTracerFactory;
local: *;
};
EOF
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DJAEGERTRACING_BUILD_EXAMPLES=OFF \
-DJAEGERTRACING_BUILD_CROSSDOCK=OFF \
-DJAEGERTRACING_COVERAGE=OFF \
-DJAEGERTRACING_PLUGIN=ON \
-DHUNTER_CONFIGURATION_TYPES=Release \
-DJAEGERTRACING_WITH_YAML_CPP=ON ..
make
make install
# BUGFIX: the export line used to end in a trailing backslash, which spliced
# the mv command into the export statement ("export VAR=... mv file dest"),
# so the plugin .so was never moved. They must be two separate commands.
export HUNTER_INSTALL_DIR=$(cat _3rdParty/Hunter/install-root-dir)
mv libjaegertracing_plugin.so /usr/local/lib/libjaegertracing_plugin.so
# build zipkin lib
cd "$BUILD_PATH/zipkin-cpp-opentracing-$ZIPKIN_CPP_VERSION"
# linker version script: export only the tracer factory entry point
cat <<EOF > export.map
{
global:
OpenTracingMakeTracerFactory;
local: *;
};
EOF
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=ON \
-DBUILD_PLUGIN=ON \
-DBUILD_TESTING=OFF ..
make
make install
# Get Brotli source and deps
cd "$BUILD_PATH"
git clone --depth=1 https://github.com/google/ngx_brotli.git
cd ngx_brotli
git submodule init
git submodule update
# build modsecurity library
cd "$BUILD_PATH"
git clone -b v3/master --single-branch https://github.com/SpiderLabs/ModSecurity
cd ModSecurity/
# pin to a known-good commit on the v3/master branch
git checkout 9ada0a28c8100f905014c128b0e6d11dd75ec7e5
git submodule init
git submodule update
sh build.sh
./configure --disable-doxygen-doc --disable-examples --disable-dependency-tracking
make
make install
mkdir -p /etc/nginx/modsecurity
cp modsecurity.conf-recommended /etc/nginx/modsecurity/modsecurity.conf
cp unicode.mapping /etc/nginx/modsecurity/unicode.mapping
# Download owasp modsecurity crs
cd /etc/nginx/
git clone -b v3.0/master --single-branch https://github.com/SpiderLabs/owasp-modsecurity-crs
cd owasp-modsecurity-crs
# pin the CRS rule set to a fixed commit for reproducible images
git checkout a216353c97dd6ef767a6db4dbf9b724627811c9b
mv crs-setup.conf.example crs-setup.conf
mv rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf.example rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
mv rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
cd ..
# OWASP CRS v3 rules
echo "
Include /etc/nginx/owasp-modsecurity-crs/crs-setup.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-901-INITIALIZATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-903.9001-DRUPAL-EXCLUSION-RULES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-903.9002-WORDPRESS-EXCLUSION-RULES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-905-COMMON-EXCEPTIONS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-910-IP-REPUTATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-911-METHOD-ENFORCEMENT.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-912-DOS-PROTECTION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-913-SCANNER-DETECTION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-920-PROTOCOL-ENFORCEMENT.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-921-PROTOCOL-ATTACK.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-930-APPLICATION-ATTACK-LFI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-931-APPLICATION-ATTACK-RFI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-932-APPLICATION-ATTACK-RCE.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-933-APPLICATION-ATTACK-PHP.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-941-APPLICATION-ATTACK-XSS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-942-APPLICATION-ATTACK-SQLI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-949-BLOCKING-EVALUATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-950-DATA-LEAKAGES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-951-DATA-LEAKAGES-SQL.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-952-DATA-LEAKAGES-JAVA.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-953-DATA-LEAKAGES-PHP.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-954-DATA-LEAKAGES-IIS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-959-BLOCKING-EVALUATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-980-CORRELATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
" > /etc/nginx/owasp-modsecurity-crs/nginx-modsecurity.conf
# build nginx
cd "$BUILD_PATH/nginx-$NGINX_VERSION"
# apply Nginx patches
patch -p1 < /patches/openresty-ssl_cert_cb_yield.patch
# core feature flags shared by all architectures
WITH_FLAGS="--with-debug \
--with-compat \
--with-pcre-jit \
--with-http_ssl_module \
--with-http_stub_status_module \
--with-http_realip_module \
--with-http_auth_request_module \
--with-http_addition_module \
--with-http_dav_module \
--with-http_geoip_module \
--with-http_gzip_static_module \
--with-http_sub_module \
--with-http_v2_module \
--with-stream \
--with-stream_ssl_module \
--with-stream_ssl_preread_module \
--with-threads \
--with-http_secure_link_module \
--with-http_gunzip_module"
# file-aio is only enabled off-arm64 here
if [[ ${ARCH} != "aarch64" ]]; then
WITH_FLAGS+=" --with-file-aio"
fi
# "Combining -flto with -g is currently experimental and expected to produce unexpected results."
# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
CC_OPT="-g -Og -fPIE -fstack-protector-strong \
-Wformat \
-Werror=format-security \
-Wno-deprecated-declarations \
-fno-strict-aliasing \
-D_FORTIFY_SOURCE=2 \
--param=ssp-buffer-size=4 \
-DTCP_FASTOPEN=23 \
-fPIC \
-I$HUNTER_INSTALL_DIR/include \
-Wno-cast-function-type"
# hardened linking (full RELRO + PIE) plus the hunter-built libs
LD_OPT="-fPIE -fPIC -pie -Wl,-z,relro -Wl,-z,now -L$HUNTER_INSTALL_DIR/lib"
if [[ ${ARCH} == "x86_64" ]]; then
CC_OPT+=' -m64 -mtune=native'
fi
# statically linked modules plus dynamic modules loadable via load_module
WITH_MODULES="--add-module=$BUILD_PATH/ngx_devel_kit-$NDK_VERSION \
--add-module=$BUILD_PATH/set-misc-nginx-module-$SETMISC_VERSION \
--add-module=$BUILD_PATH/headers-more-nginx-module-$MORE_HEADERS_VERSION \
--add-module=$BUILD_PATH/nginx-http-auth-digest-$NGINX_DIGEST_AUTH \
--add-module=$BUILD_PATH/ngx_http_substitutions_filter_module-$NGINX_SUBSTITUTIONS \
--add-module=$BUILD_PATH/lua-nginx-module-$LUA_NGX_VERSION \
--add-module=$BUILD_PATH/stream-lua-nginx-module-$LUA_STREAM_NGX_VERSION \
--add-module=$BUILD_PATH/lua-upstream-nginx-module-$LUA_UPSTREAM_VERSION \
--add-module=$BUILD_PATH/nginx-influxdb-module-$NGINX_INFLUXDB_VERSION \
--add-dynamic-module=$BUILD_PATH/nginx-opentracing-$NGINX_OPENTRACING_VERSION/opentracing \
--add-dynamic-module=$BUILD_PATH/ModSecurity-nginx-$MODSECURITY_VERSION \
--add-dynamic-module=$BUILD_PATH/ngx_http_geoip2_module-${GEOIP2_VERSION} \
--add-module=$BUILD_PATH/nginx_ajp_module-${NGINX_AJP_VERSION} \
--add-module=$BUILD_PATH/ngx_brotli"
./configure \
--prefix=/usr/share/nginx \
--conf-path=/etc/nginx/nginx.conf \
--modules-path=/etc/nginx/modules \
--http-log-path=/var/log/nginx/access.log \
--error-log-path=/var/log/nginx/error.log \
--lock-path=/var/lock/nginx.lock \
--pid-path=/run/nginx.pid \
--http-client-body-temp-path=/var/lib/nginx/body \
--http-fastcgi-temp-path=/var/lib/nginx/fastcgi \
--http-proxy-temp-path=/var/lib/nginx/proxy \
--http-scgi-temp-path=/var/lib/nginx/scgi \
--http-uwsgi-temp-path=/var/lib/nginx/uwsgi \
${WITH_FLAGS} \
--without-mail_pop3_module \
--without-mail_smtp_module \
--without-mail_imap_module \
--without-http_uwsgi_module \
--without-http_scgi_module \
--with-cc-opt="${CC_OPT}" \
--with-ld-opt="${LD_OPT}" \
--user=www-data \
--group=www-data \
${WITH_MODULES}
make || exit 1
make install || exit 1
echo "Cleaning..."
cd /
mv /usr/share/nginx/sbin/nginx /usr/sbin
# keep runtime libraries that would otherwise be removed as build-deps
apt-mark unmarkauto \
bash \
curl ca-certificates \
libgeoip1 \
libpcre3 \
zlib1g \
libaio1 \
gdb \
geoip-bin \
libyajl2 liblmdb0 libxml2 libpcre++ \
gzip \
openssl
# drop compilers, headers and other build-only packages
apt-get remove -y --purge \
build-essential \
libgeoip-dev \
libpcre3-dev \
libssl-dev \
zlib1g-dev \
libaio-dev \
linux-libc-dev \
cmake \
wget \
patch \
protobuf-compiler \
python \
xz-utils \
bc \
git g++ pkgconf flex bison doxygen libyajl-dev liblmdb-dev libgeoip-dev libtool dh-autoreconf libpcre++-dev libxml2-dev
apt-get autoremove -y
# shrink the final image: build tree, docs, caches and static libs
rm -rf "$BUILD_PATH"
rm -Rf /usr/share/man /usr/share/doc
rm -rf /tmp/* /var/tmp/*
rm -rf /var/lib/apt/lists/*
rm -rf /var/cache/apt/archives/*
rm -rf /usr/local/modsecurity/bin
rm -rf /usr/local/modsecurity/include
rm -rf /usr/local/modsecurity/lib/libmodsecurity.a
rm -rf /root/.cache
rm -rf /etc/nginx/owasp-modsecurity-crs/.git
rm -rf /etc/nginx/owasp-modsecurity-crs/util/regression-tests
rm -rf $HOME/.hunter
# move geoip directory
mv /geoip/* /etc/nginx/geoip
rm -rf /geoip
# update image permissions: nginx runs as www-data, which must own its
# config, cache and modsecurity log/upload/audit directories
writeDirs=( \
/etc/nginx \
/var/lib/nginx \
/var/log/nginx \
/opt/modsecurity/var/log \
/opt/modsecurity/var/upload \
/opt/modsecurity/var/audit \
);
for dir in "${writeDirs[@]}"; do
mkdir -p "${dir}";
# use the documented "user:group" separator; the historic "user.group"
# form is a deprecated GNU extension removed from newer coreutils
chown -R www-data:www-data "${dir}";
done
# let the unprivileged www-data user bind low ports (<1024) via authbind
for value in {1..1023};do
touch /etc/authbind/byport/$value
chown www-data /etc/authbind/byport/$value
chmod 755 /etc/authbind/byport/$value
done
|
package com.lzh.router.replugin.update;
import android.content.Context;
import android.content.Intent;
import android.widget.Toast;
import com.qihoo360.replugin.RePlugin;
import com.qihoo360.replugin.model.PluginInfo;
import org.lzh.framework.updatepluginlib.base.InstallStrategy;
import org.lzh.framework.updatepluginlib.model.Update;
/**
 * Custom install step run after a successful download: installs the plugin
 * apk and, when its alias matches the expected plugin name, (re)starts the
 * plugin via the supplied intent.
 */
class RePluginInstall implements InstallStrategy {
    /** Expected plugin alias; mismatching installs are rolled back. */
    private String pluginName;
    /** Context used to (re)start the plugin activity. */
    private Context context;
    /** Intent launched once the plugin is installed successfully. */
    private Intent intent;

    RePluginInstall(String pluginName, Context context, Intent intent) {
        this.pluginName = pluginName;
        this.context = context;
        this.intent = intent;
    }

    @Override
    public void install(Context context, String filename, Update update) {
        PluginInfo info = RePlugin.install(filename);
        // RePlugin.install returns null when installation fails (e.g. an
        // invalid apk) — guard before dereferencing to avoid an NPE.
        if (info == null) {
            Toast.makeText(context, "install plugin failed: " + filename, Toast.LENGTH_SHORT).show();
            return;
        }
        if (!info.getAlias().equals(pluginName)) {
            // Alias does not match the expected plugin: report and uninstall it.
            Toast.makeText(context, String.format("install plugin failed: need alias for %s but is %s", pluginName, info.getAlias()), Toast.LENGTH_SHORT).show();
            RePlugin.uninstall(info.getAlias());
        } else {
            RePlugin.startActivity(this.context, intent);
        }
    }
}
|
/* ***** BEGIN LICENSE BLOCK *****
* Distributed under the BSD license:
*
* Copyright (c) 2010, Ajax.org B.V.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Ajax.org B.V. nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */
define(function(require, exports, module) {
"use strict";
var oop = require("./lib/oop");
var lang = require("./lib/lang");
var config = require("./config");
var EventEmitter = require("./lib/event_emitter").EventEmitter;
var Selection = require("./selection").Selection;
var TextMode = require("./mode/text").Mode;
var Range = require("./range").Range;
var Document = require("./document").Document;
var BackgroundTokenizer = require("./background_tokenizer").BackgroundTokenizer;
var SearchHighlight = require("./search_highlight").SearchHighlight;
/**
* Stores all the data about [[Editor `Editor`]] state providing easy way to change editors state.
*
* `EditSession` can be attached to only one [[Document `Document`]]. Same `Document` can be attached to several `EditSession`s.
* @class EditSession
**/
//{ events
/**
*
* Emitted when the document changes.
* @event change
* @param {Object} e An object containing a `delta` of information about the change.
**/
/**
* Emitted when the tab size changes, via [[EditSession.setTabSize]].
*
* @event changeTabSize
**/
/**
* Emitted when the ability to overwrite text changes, via [[EditSession.setOverwrite]].
*
* @event changeOverwrite
**/
/**
* Emitted when the gutter changes, either by setting or removing breakpoints, or when the gutter decorations change.
*
* @event changeBreakpoint
**/
/**
* Emitted when a front marker changes.
*
* @event changeFrontMarker
**/
/**
* Emitted when a back marker changes.
*
* @event changeBackMarker
**/
/**
* Emitted when an annotation changes, like through [[EditSession.setAnnotations]].
*
* @event changeAnnotation
**/
/**
* Emitted when a background tokenizer asynchronously processes new rows.
* @event tokenizerUpdate
*
* @param {Object} e An object containing one property, `"data"`, that contains information about the changing rows
*
**/
/**
* Emitted when the current mode changes.
*
* @event changeMode
*
**/
/**
* Emitted when the wrap mode changes.
*
* @event changeWrapMode
*
**/
/**
* Emitted when the wrapping limit changes.
*
* @event changeWrapLimit
*
**/
/**
* Emitted when a code fold is added or removed.
*
* @event changeFold
*
**/
/**
* Emitted when the scroll top changes.
* @event changeScrollTop
*
* @param {Number} scrollTop The new scroll top value
**/
/**
* Emitted when the scroll left changes.
* @event changeScrollLeft
*
* @param {Number} scrollLeft The new scroll left value
**/
//}
/**
*
* Sets up a new `EditSession` and associates it with the given `Document` and `TextMode`.
* @param {Document | String} text [If `text` is a `Document`, it associates the `EditSession` with it. Otherwise, a new `Document` is created, with the initial text]{: #textParam}
* @param {TextMode} mode [The initial language mode to use for the document]{: #modeParam}
*
* @constructor
**/
var EditSession = function(text, mode) {
// per-session state containers
this.$breakpoints = [];
this.$decorations = [];
this.$frontMarkers = {};
this.$backMarkers = {};
this.$markerId = 1;
this.$undoSelect = true;
// fold bookkeeping; custom toString aids debugging by joining fold entries
this.$foldData = [];
this.$foldData.toString = function() {
return this.join("\n");
}
this.on("changeFold", this.onChangeFold.bind(this));
// bound once so setDocument can later remove the same listener reference
this.$onChange = this.onChange.bind(this);
// accept either a Document or a raw string (duck-typed via getLine)
if (typeof text != "object" || !text.getLine)
text = new Document(text);
// order matters: attach document, then selection, options, mode
this.setDocument(text);
this.selection = new Selection(this);
config.resetOptions(this);
this.setMode(mode);
config._emit("session", this);
};
(function() {
oop.implement(this, EventEmitter);
/**
 * Attaches this session to a new `Document`, detaching any previously
 * attached one. The background tokenizer (when present) is repointed
 * at the new document and all derived caches are reset.
 *
 * @param {Document} doc The new `Document` to use
 **/
this.setDocument = function(doc) {
    var previous = this.doc;
    if (previous)
        previous.removeListener("change", this.$onChange);

    this.doc = doc;
    doc.on("change", this.$onChange);

    if (this.bgTokenizer)
        this.bgTokenizer.setDocument(this.getDocument());

    this.resetCaches();
};

/**
 * Returns the `Document` this session is attached to.
 * @return {Document}
 **/
this.getDocument = function() {
    return this.doc;
};
/**
 * Drops cached doc-row/screen-row mapping entries at and beyond `docRow`.
 * A falsy argument (e.g. 0) empties both caches entirely.
 * @param {Number} docRow The first document row whose cache entry is stale
 **/
this.$resetRowCache = function(docRow) {
    if (!docRow) {
        this.$docRowCache = [];
        this.$screenRowCache = [];
        return;
    }

    var size = this.$docRowCache.length;
    var from = this.$getRowCacheIndex(this.$docRowCache, docRow) + 1;
    if (size > from) {
        this.$docRowCache.splice(from, size);
        this.$screenRowCache.splice(from, size);
    }
};
/**
 * Binary search over a sorted cache array: returns the index of `val`,
 * or the index of the largest element below `val` (-1 when every
 * element is larger).
 **/
this.$getRowCacheIndex = function(cacheArray, val) {
    var lo = 0;
    var hiEnd = cacheArray.length - 1;

    while (lo <= hiEnd) {
        var mid = (lo + hiEnd) >> 1;
        var entry = cacheArray[mid];

        if (val > entry)
            lo = mid + 1;
        else if (val < entry)
            hiEnd = mid - 1;
        else
            return mid;
    }

    return lo - 1;
};
/**
 * Invalidates every derived cache (wrap data, row lengths, row mapping)
 * and restarts the background tokenizer from the first row.
 **/
this.resetCaches = function() {
    this.$modified = true;
    this.$wrapData = [];
    this.$rowLengthCache = [];
    this.$resetRowCache(0);
    if (this.bgTokenizer)
        this.bgTokenizer.start(0);
};

/**
 * Fold change handler: row mapping below the fold start is now stale.
 **/
this.onChangeFold = function(e) {
    this.$resetRowCache(e.data.start.row);
};
// Document change handler: invalidates caches, records undo deltas and
// forwards the change to the tokenizer and listeners.
this.onChange = function(e) {
var delta = e.data;
this.$modified = true;
// everything from the first changed row onward is stale
this.$resetRowCache(delta.range.start.row);
// folds that the edit destroyed (if any) must be undoable too
var removedFolds = this.$updateInternalDataOnChange(e);
// record deltas for undo unless this change IS an undo/redo replay
if (!this.$fromUndo && this.$undoManager && !delta.ignore) {
this.$deltasDoc.push(delta);
if (removedFolds && removedFolds.length != 0) {
this.$deltasFold.push({
action: "removeFolds",
folds: removedFolds
});
}
// delayed call batches rapid edits into a single undo group
this.$informUndoManager.schedule();
}
this.bgTokenizer.$updateOnChange(delta);
this._emit("change", e);
};
/**
 * Replaces the entire session text, moves the cursor to the document
 * start, clears the selection and wipes the undo history.
 * @param {String} text The new text to place
 **/
this.setValue = function(text) {
    this.doc.setValue(text);

    // collapse the selection at the origin
    this.selection.moveCursorTo(0, 0);
    this.selection.clearSelection();

    // fresh content: discard caches and all pending/recorded undo deltas
    this.$resetRowCache(0);
    this.$deltas = [];
    this.$deltasDoc = [];
    this.$deltasFold = [];
    this.getUndoManager().reset();
};
/**
 * Returns the full text of the attached [[Document `Document`]].
 * `toString` is an alias of `getValue`.
 * @method getValue
 * @returns {String}
 **/
this.getValue =
this.toString = function() {
    return this.doc.getValue();
};

/**
 * Returns the selection object bound to this session.
 **/
this.getSelection = function() {
    return this.selection;
};
/**
 * {:BackgroundTokenizer.getState}
 * @param {Number} row The row to start at
 * @related BackgroundTokenizer.getState
 **/
this.getState = function(row) {
    return this.bgTokenizer.getState(row);
};

/**
 * Returns the token list the background tokenizer produced for `row`
 * (tokenizing up to that row on demand).
 * @param {Number} row The row to start at
 **/
this.getTokens = function(row) {
    return this.bgTokenizer.getTokens(row);
};
/**
 * Returns the token covering `row`/`column`, annotated with its `index`
 * within the row and its `start` column; `null` when there is no token
 * there. When `column` is omitted, the row's last token is returned.
 * @param {Number} row The row number to retrieve from
 * @param {Number} column The column number to retrieve from
 **/
this.getTokenAt = function(row, column) {
    var tokens = this.bgTokenizer.getTokens(row);
    var token;
    var c = 0;
    // declared up front: the old code assigned `i` before its `var`
    // declaration inside the else-branch and only worked via hoisting
    var i;
    if (column == null) {
        i = tokens.length - 1;
        c = this.getLine(row).length;
    } else {
        for (i = 0; i < tokens.length; i++) {
            c += tokens[i].value.length;
            if (c >= column)
                break;
        }
    }
    token = tokens[i];
    if (!token)
        return null;
    token.index = i;
    token.start = c - token.value.length;
    return token;
};
/**
 * Sets the undo manager and resets all pending delta buffers.
 * @param {UndoManager} undoManager The new undo manager
 **/
this.setUndoManager = function(undoManager) {
this.$undoManager = undoManager;
this.$deltas = [];
this.$deltasDoc = [];
this.$deltasFold = [];
// drop any delayed flush scheduled for the previous manager
if (this.$informUndoManager)
this.$informUndoManager.cancel();
if (undoManager) {
var self = this;
// flushes buffered fold/doc deltas into one grouped undo entry
this.$syncInformUndoManager = function() {
self.$informUndoManager.cancel();
if (self.$deltasFold.length) {
self.$deltas.push({
group: "fold",
deltas: self.$deltasFold
});
self.$deltasFold = [];
}
if (self.$deltasDoc.length) {
self.$deltas.push({
group: "doc",
deltas: self.$deltasDoc
});
self.$deltasDoc = [];
}
if (self.$deltas.length > 0) {
undoManager.execute({
action: "aceupdate",
args: [self.$deltas, self]
});
}
self.$deltas = [];
}
// delayedCall coalesces bursts of changes into a single flush
this.$informUndoManager = lang.delayedCall(this.$syncInformUndoManager);
}
};
/**
 * Starts a new group in undo history by flushing any buffered deltas
 * immediately instead of waiting for the delayed call.
 **/
this.markUndoGroup = function() {
    if (this.$syncInformUndoManager)
        this.$syncInformUndoManager();
};

// No-op stand-in used while no real undo manager is attached.
this.$defaultUndoManager = {
    undo: function() {},
    redo: function() {},
    reset: function() {}
};

/**
 * Returns the attached undo manager, falling back to the no-op default.
 **/
this.getUndoManager = function() {
    return this.$undoManager || this.$defaultUndoManager;
};
/**
 * Returns the text inserted for one tab: a run of spaces (length from
 * [[EditSession.getTabSize `getTabSize()`]]) when soft tabs are on,
 * otherwise the tab character "\t".
 **/
this.getTabString = function() {
    return this.getUseSoftTabs()
        ? lang.stringRepeat(" ", this.getTabSize())
        : "\t";
};
/**
 * Pass `true` to insert spaces instead of the tab character ("\t").
 * @param {Boolean} useSoftTabs Value indicating whether or not to use soft tabs
 **/
this.setUseSoftTabs = function(val) {
    this.setOption("useSoftTabs", val);
};

/**
 * Returns `true` when soft tabs are enabled, `false` otherwise.
 * @returns {Boolean}
 **/
this.getUseSoftTabs = function() {
    return this.$useSoftTabs;
};

/**
 * Sets how many spaces one soft tab expands to; emits the
 * `changeTabSize` event.
 * @param {Number} tabSize The new tab size
 **/
this.setTabSize = function(tabSize) {
    this.setOption("tabSize", tabSize);
};

/**
 * Returns the current tab size.
 **/
this.getTabSize = function() {
    return this.$tabSize;
};

/**
 * Returns `true` when `position` sits exactly on a soft tab stop.
 * @param {Object} position The position to check
 **/
this.isTabStop = function(position) {
    return this.$useSoftTabs && (position.column % this.$tabSize == 0);
};
// Overwrite (typed text replaces following text) mode flag; off by default.
this.$overwrite = false;
/**
 * Enables or disables overwrite mode. While enabled, entered text types
 * over the text after it. Changing the value emits `changeOverwrite`.
 * @param {Boolean} overwrite Defines whether or not to set overwrites
 **/
this.setOverwrite = function(overwrite) {
    this.setOption("overwrite", overwrite);
};

/**
 * Returns `true` while overwrite mode is enabled; `false` otherwise.
 **/
this.getOverwrite = function() {
    return this.$overwrite;
};

/**
 * Flips overwrite mode to the opposite of its current value.
 **/
this.toggleOverwrite = function() {
    this.setOverwrite(!this.$overwrite);
};
/**
 * Appends `className` to the CSS classes rendered on `row`'s gutter
 * cell and emits `changeBreakpoint`.
 * @param {Number} row The row number
 * @param {String} className The class to add
 **/
this.addGutterDecoration = function(row, className) {
    var current = this.$decorations[row] || "";
    this.$decorations[row] = current + " " + className;
    this._emit("changeBreakpoint", {});
};

/**
 * Strips the first occurrence of `className` from `row`'s gutter
 * classes and emits `changeBreakpoint`.
 * @param {Number} row The row number
 * @param {String} className The class to remove
 **/
this.removeGutterDecoration = function(row, className) {
    var current = this.$decorations[row] || "";
    this.$decorations[row] = current.replace(" " + className, "");
    this._emit("changeBreakpoint", {});
};
/**
 * Returns the sparse array mapping row index to breakpoint CSS class.
 * @returns {[Number]}
 **/
this.getBreakpoints = function() {
    return this.$breakpoints;
};

/**
 * Replaces all breakpoints with one ("ace_breakpoint") on each row in
 * `rows`; emits the `'changeBreakpoint'` event.
 * @param {Array} rows An array of row indices
 **/
this.setBreakpoints = function(rows) {
    this.$breakpoints = [];
    for (var i = 0; i < rows.length; i++)
        this.$breakpoints[rows[i]] = "ace_breakpoint";
    this._emit("changeBreakpoint", {});
};

/**
 * Removes every breakpoint; emits the `'changeBreakpoint'` event.
 **/
this.clearBreakpoints = function() {
    this.$breakpoints = [];
    this._emit("changeBreakpoint", {});
};
/**
 * Sets a breakpoint on `row` (or removes it when the class is falsy);
 * emits the `'changeBreakpoint'` event. The class defaults to
 * "ace_breakpoint".
 * @param {Number} row A row index
 * @param {String} className Class of the breakpoint
 **/
this.setBreakpoint = function(row, className) {
    var clazz = className === undefined ? "ace_breakpoint" : className;
    if (clazz)
        this.$breakpoints[row] = clazz;
    else
        delete this.$breakpoints[row];
    this._emit("changeBreakpoint", {});
};

/**
 * Removes the breakpoint on `row`, if any; emits the
 * `'changeBreakpoint'` event.
 * @param {Number} row A row index
 **/
this.clearBreakpoint = function(row) {
    delete this.$breakpoints[row];
    this._emit("changeBreakpoint", {});
};
/**
 * Registers a marker covering `range` and returns its numeric id.
 * Front markers emit `'changeFrontMarker'`; back markers emit
 * `'changeBackMarker'`.
 * @param {Range} range Define the range of the marker
 * @param {String} clazz Set the CSS class for the marker
 * @param {Function | String} type Identify the type of the marker
 * @param {Boolean} inFront Set to `true` to establish a front marker
 * @return {Number} The new marker id
 **/
this.addMarker = function(range, clazz, type, inFront) {
    var id = this.$markerId++;
    var marker = {
        range: range,
        type: type || "line",
        renderer: typeof type == "function" ? type : null,
        clazz: clazz,
        inFront: !!inFront,
        id: id
    };

    var pool = inFront ? this.$frontMarkers : this.$backMarkers;
    pool[id] = marker;
    this._emit(inFront ? "changeFrontMarker" : "changeBackMarker");

    return id;
};
/**
* Adds a dynamic marker to the session.
* @param {Object} marker object with update method
* @param {Boolean} inFront Set to `true` to establish a front marker
*
*
* @return {Object} The added marker
**/
this.addDynamicMarker = function(marker, inFront) {
if (!marker.update)
return;
var id = this.$markerId++;
marker.id = id;
marker.inFront = !!inFront;
if (inFront) {
this.$frontMarkers[id] = marker;
this._emit("changeFrontMarker")
} else {
this.$backMarkers[id] = marker;
this._emit("changeBackMarker")
}
return marker;
};
/**
* Removes the marker with the specified ID. If this marker was in front, the `'changeFrontMarker'` event is emitted. If the marker was in the back, the `'changeBackMarker'` event is emitted.
* @param {Number} markerId A number representing a marker
*
*
*
**/
this.removeMarker = function(markerId) {
var marker = this.$frontMarkers[markerId] || this.$backMarkers[markerId];
if (!marker)
return;
var markers = marker.inFront ? this.$frontMarkers : this.$backMarkers;
if (marker) {
delete (markers[markerId]);
this._emit(marker.inFront ? "changeFrontMarker" : "changeBackMarker");
}
};
/**
* Returns an array containing the IDs of all the markers, either front or back.
* @param {Boolean} inFront If `true`, indicates you only want front markers; `false` indicates only back markers
*
* @returns {Array}
**/
this.getMarkers = function(inFront) {
return inFront ? this.$frontMarkers : this.$backMarkers;
};
this.highlight = function(re) {
if (!this.$searchHighlight) {
var highlight = new SearchHighlight(null, "ace_selected-word", "text");
this.$searchHighlight = this.addDynamicMarker(highlight);
}
this.$searchHighlight.setRegexp(re);
}
// experimental
this.highlightLines = function(startRow, endRow, clazz, inFront) {
if (typeof endRow != "number") {
clazz = endRow;
endRow = startRow;
}
if (!clazz)
clazz = "ace_step";
var range = new Range(startRow, 0, endRow, Infinity);
range.id = this.addMarker(range, clazz, "fullLine", inFront);
return range;
};
/*
* Error:
* {
* row: 12,
* column: 2, //can be undefined
* text: "Missing argument",
* type: "error" // or "warning" or "info"
* }
*/
/**
* Sets annotations for the `EditSession`. This functions emits the `'changeAnnotation'` event.
* @param {Array} annotations A list of annotations
*
**/
this.setAnnotations = function(annotations) {
this.$annotations = annotations;
this._emit("changeAnnotation", {});
};
/**
* Returns the annotations for the `EditSession`.
* @returns {Array}
**/
this.getAnnotations = function() {
return this.$annotations || [];
};
/**
* Clears all the annotations for this session. This function also triggers the `'changeAnnotation'` event.
**/
this.clearAnnotations = function() {
this.setAnnotations([]);
};
/**
* If `text` contains either the newline (`\n`) or carriage-return ('\r') characters, `$autoNewLine` stores that value.
* @param {String} text A block of text
*
*
**/
this.$detectNewLine = function(text) {
var match = text.match(/^.*?(\r?\n)/m);
if (match) {
this.$autoNewLine = match[1];
} else {
this.$autoNewLine = "\n";
}
};
/**
* Given a starting row and column, this method returns the `Range` of the first word boundary it finds.
* @param {Number} row The row to start at
* @param {Number} column The column to start at
*
* @returns {Range}
**/
this.getWordRange = function(row, column) {
var line = this.getLine(row);
var inToken = false;
if (column > 0)
inToken = !!line.charAt(column - 1).match(this.tokenRe);
if (!inToken)
inToken = !!line.charAt(column).match(this.tokenRe);
if (inToken)
var re = this.tokenRe;
else if (/^\s+$/.test(line.slice(column-1, column+1)))
var re = /\s/;
else
var re = this.nonTokenRe;
var start = column;
if (start > 0) {
do {
start--;
}
while (start >= 0 && line.charAt(start).match(re));
start++;
}
var end = column;
while (end < line.length && line.charAt(end).match(re)) {
end++;
}
return new Range(row, start, row, end);
};
/**
* Gets the range of a word, including its right whitespace.
* @param {Number} row The row number to start from
* @param {Number} column The column number to start from
*
* @return {Range}
**/
this.getAWordRange = function(row, column) {
var wordRange = this.getWordRange(row, column);
var line = this.getLine(wordRange.end.row);
while (line.charAt(wordRange.end.column).match(/[ \t]/)) {
wordRange.end.column += 1;
}
return wordRange;
};
/**
* {:Document.setNewLineMode.desc}
* @param {String} newLineMode {:Document.setNewLineMode.param}
*
*
* @related Document.setNewLineMode
**/
this.setNewLineMode = function(newLineMode) {
this.doc.setNewLineMode(newLineMode);
};
/**
*
* Returns the current new line mode.
* @returns {String}
* @related Document.getNewLineMode
**/
this.getNewLineMode = function() {
return this.doc.getNewLineMode();
};
/**
* Identifies if you want to use a worker for the `EditSession`.
* @param {Boolean} useWorker Set to `true` to use a worker
*
**/
this.setUseWorker = function(useWorker) { this.setOption("useWorker", useWorker); };
/**
* Returns `true` if workers are being used.
**/
this.getUseWorker = function() { return this.$useWorker; };
/**
* Reloads all the tokens on the current session. This function calls [[BackgroundTokenizer.start `BackgroundTokenizer.start ()`]] to all the rows; it also emits the `'tokenizerUpdate'` event.
**/
this.onReloadTokenizer = function(e) {
var rows = e.data;
this.bgTokenizer.start(rows.first);
this._emit("tokenizerUpdate", e);
};
this.$modes = {};
/**
* Sets a new text mode for the `EditSession`. This method also emits the `'changeMode'` event. If a [[BackgroundTokenizer `BackgroundTokenizer`]] is set, the `'tokenizerUpdate'` event is also emitted.
* @param {TextMode} mode Set a new text mode
*
**/
this.$mode = null;
this.$modeId = null;
this.setMode = function(mode) {
if (mode && typeof mode === "object") {
if (mode.getTokenizer)
return this.$onChangeMode(mode);
var options = mode;
var path = options.path;
} else {
path = mode || "ace/mode/text";
}
// this is needed if ace isn't on require path (e.g tests in node)
if (!this.$modes["ace/mode/text"])
this.$modes["ace/mode/text"] = new TextMode();
if (this.$modes[path] && !options)
return this.$onChangeMode(this.$modes[path]);
// load on demand
this.$modeId = path;
config.loadModule(["mode", path], function(m) {
if (this.$modeId !== path)
return;
if (this.$modes[path] && !options)
return this.$onChangeMode(this.$modes[path]);
if (m && m.Mode) {
m = new m.Mode(options);
if (!options) {
this.$modes[path] = m;
m.$id = path;
}
this.$onChangeMode(m)
}
}.bind(this));
// set mode to text until loading is finished
if (!this.$mode)
this.$onChangeMode(this.$modes["ace/mode/text"], true);
};
this.$onChangeMode = function(mode, $isPlaceholder) {
if (this.$mode === mode) return;
this.$mode = mode;
this.$stopWorker();
if (this.$useWorker)
this.$startWorker();
var tokenizer = mode.getTokenizer();
if(tokenizer.addEventListener !== undefined) {
var onReloadTokenizer = this.onReloadTokenizer.bind(this);
tokenizer.addEventListener("update", onReloadTokenizer);
}
if (!this.bgTokenizer) {
this.bgTokenizer = new BackgroundTokenizer(tokenizer);
var _self = this;
this.bgTokenizer.addEventListener("update", function(e) {
_self._emit("tokenizerUpdate", e);
});
} else {
this.bgTokenizer.setTokenizer(tokenizer);
}
this.bgTokenizer.setDocument(this.getDocument());
this.tokenRe = mode.tokenRe;
this.nonTokenRe = mode.nonTokenRe;
if (!$isPlaceholder) {
this.$modeId = mode.$id;
this.$setFolding(mode.foldingRules);
this._emit("changeMode");
this.bgTokenizer.start(0);
}
};
this.$stopWorker = function() {
if (this.$worker)
this.$worker.terminate();
this.$worker = null;
};
this.$startWorker = function() {
if (typeof Worker !== "undefined" && !require.noWorker) {
try {
this.$worker = this.$mode.createWorker(this);
} catch (e) {
console.log("Could not load worker");
console.log(e);
this.$worker = null;
}
}
else
this.$worker = null;
};
/**
* Returns the current text mode.
* @returns {TextMode} The current text mode
**/
this.getMode = function() {
return this.$mode;
};
this.$scrollTop = 0;
/**
* This function sets the scroll top value. It also emits the `'changeScrollTop'` event.
* @param {Number} scrollTop The new scroll top value
*
**/
this.setScrollTop = function(scrollTop) {
scrollTop = Math.round(Math.max(0, scrollTop));
if (this.$scrollTop === scrollTop || isNaN(scrollTop))
return;
this.$scrollTop = scrollTop;
this._signal("changeScrollTop", scrollTop);
};
/**
* [Returns the value of the distance between the top of the editor and the topmost part of the visible content.]{: #EditSession.getScrollTop}
* @returns {Number}
**/
this.getScrollTop = function() {
return this.$scrollTop;
};
this.$scrollLeft = 0;
/**
* [Sets the value of the distance between the left of the editor and the leftmost part of the visible content.]{: #EditSession.setScrollLeft}
**/
this.setScrollLeft = function(scrollLeft) {
scrollLeft = Math.round(Math.max(0, scrollLeft));
if (this.$scrollLeft === scrollLeft || isNaN(scrollLeft))
return;
this.$scrollLeft = scrollLeft;
this._signal("changeScrollLeft", scrollLeft);
};
/**
* [Returns the value of the distance between the left of the editor and the leftmost part of the visible content.]{: #EditSession.getScrollLeft}
* @returns {Number}
**/
this.getScrollLeft = function() {
return this.$scrollLeft;
};
/**
* Returns the width of the screen.
* @returns {Number}
**/
this.getScreenWidth = function() {
this.$computeWidth();
return this.screenWidth;
};
this.$computeWidth = function(force) {
if (this.$modified || force) {
this.$modified = false;
if (this.$useWrapMode)
return this.screenWidth = this.$wrapLimit;
var lines = this.doc.getAllLines();
var cache = this.$rowLengthCache;
var longestScreenLine = 0;
var foldIndex = 0;
var foldLine = this.$foldData[foldIndex];
var foldStart = foldLine ? foldLine.start.row : Infinity;
var len = lines.length;
for (var i = 0; i < len; i++) {
if (i > foldStart) {
i = foldLine.end.row + 1;
if (i >= len)
break;
foldLine = this.$foldData[foldIndex++];
foldStart = foldLine ? foldLine.start.row : Infinity;
}
if (cache[i] == null)
cache[i] = this.$getStringScreenWidth(lines[i])[0];
if (cache[i] > longestScreenLine)
longestScreenLine = cache[i];
}
this.screenWidth = longestScreenLine;
}
};
/**
* Returns a verbatim copy of the given line as it is in the document
* @param {Number} row The row to retrieve from
*
*
* @returns {String}
*
**/
this.getLine = function(row) {
return this.doc.getLine(row);
};
/**
* Returns an array of strings of the rows between `firstRow` and `lastRow`. This function is inclusive of `lastRow`.
* @param {Number} firstRow The first row index to retrieve
* @param {Number} lastRow The final row index to retrieve
*
* @returns {[String]}
*
**/
this.getLines = function(firstRow, lastRow) {
return this.doc.getLines(firstRow, lastRow);
};
/**
* Returns the number of rows in the document.
* @returns {Number}
**/
this.getLength = function() {
return this.doc.getLength();
};
/**
* {:Document.getTextRange.desc}
* @param {Range} range The range to work with
*
* @returns {String}
**/
this.getTextRange = function(range) {
return this.doc.getTextRange(range || this.selection.getRange());
};
/**
* Inserts a block of `text` and the indicated `position`.
* @param {Object} position The position {row, column} to start inserting at
* @param {String} text A chunk of text to insert
* @returns {Object} The position of the last line of `text`. If the length of `text` is 0, this function simply returns `position`.
*
*
**/
this.insert = function(position, text) {
return this.doc.insert(position, text);
};
/**
* Removes the `range` from the document.
* @param {Range} range A specified Range to remove
* @returns {Object} The new `start` property of the range, which contains `startRow` and `startColumn`. If `range` is empty, this function returns the unmodified value of `range.start`.
*
* @related Document.remove
*
**/
this.remove = function(range) {
return this.doc.remove(range);
};
/**
* Reverts previous changes to your document.
* @param {Array} deltas An array of previous changes
* @param {Boolean} dontSelect [If `true`, doesn't select the range of where the change occured]{: #dontSelect}
*
*
* @returns {Range}
**/
this.undoChanges = function(deltas, dontSelect) {
if (!deltas.length)
return;
this.$fromUndo = true;
var lastUndoRange = null;
for (var i = deltas.length - 1; i != -1; i--) {
var delta = deltas[i];
if (delta.group == "doc") {
this.doc.revertDeltas(delta.deltas);
lastUndoRange =
this.$getUndoSelection(delta.deltas, true, lastUndoRange);
} else {
delta.deltas.forEach(function(foldDelta) {
this.addFolds(foldDelta.folds);
}, this);
}
}
this.$fromUndo = false;
lastUndoRange &&
this.$undoSelect &&
!dontSelect &&
this.selection.setSelectionRange(lastUndoRange);
return lastUndoRange;
};
/**
* Re-implements a previously undone change to your document.
* @param {Array} deltas An array of previous changes
* @param {Boolean} dontSelect {:dontSelect}
*
*
* @returns {Range}
**/
this.redoChanges = function(deltas, dontSelect) {
if (!deltas.length)
return;
this.$fromUndo = true;
var lastUndoRange = null;
for (var i = 0; i < deltas.length; i++) {
var delta = deltas[i];
if (delta.group == "doc") {
this.doc.applyDeltas(delta.deltas);
lastUndoRange =
this.$getUndoSelection(delta.deltas, false, lastUndoRange);
}
}
this.$fromUndo = false;
lastUndoRange &&
this.$undoSelect &&
!dontSelect &&
this.selection.setSelectionRange(lastUndoRange);
return lastUndoRange;
};
/**
* Enables or disables highlighting of the range where an undo occured.
* @param {Boolean} enable If `true`, selects the range of the reinserted change
*
**/
this.setUndoSelect = function(enable) {
this.$undoSelect = enable;
};
this.$getUndoSelection = function(deltas, isUndo, lastUndoRange) {
function isInsert(delta) {
var insert =
delta.action === "insertText" || delta.action === "insertLines";
return isUndo ? !insert : insert;
}
var delta = deltas[0];
var range, point;
var lastDeltaIsInsert = false;
if (isInsert(delta)) {
range = delta.range.clone();
lastDeltaIsInsert = true;
} else {
range = Range.fromPoints(delta.range.start, delta.range.start);
lastDeltaIsInsert = false;
}
for (var i = 1; i < deltas.length; i++) {
delta = deltas[i];
if (isInsert(delta)) {
point = delta.range.start;
if (range.compare(point.row, point.column) == -1) {
range.setStart(delta.range.start);
}
point = delta.range.end;
if (range.compare(point.row, point.column) == 1) {
range.setEnd(delta.range.end);
}
lastDeltaIsInsert = true;
} else {
point = delta.range.start;
if (range.compare(point.row, point.column) == -1) {
range =
Range.fromPoints(delta.range.start, delta.range.start);
}
lastDeltaIsInsert = false;
}
}
// Check if this range and the last undo range has something in common.
// If true, merge the ranges.
if (lastUndoRange != null) {
var cmp = lastUndoRange.compareRange(range);
if (cmp == 1) {
range.setStart(lastUndoRange.start);
} else if (cmp == -1) {
range.setEnd(lastUndoRange.end);
}
}
return range;
};
/**
* Replaces a range in the document with the new `text`.
*
* @param {Range} range A specified Range to replace
* @param {String} text The new text to use as a replacement
* @returns {Object} An object containing the final row and column, like this:
* ```
* {row: endRow, column: 0}
* ```
* If the text and range are empty, this function returns an object containing the current `range.start` value.
* If the text is the exact same as what currently exists, this function returns an object containing the current `range.end` value.
*
*
*
* @related Document.replace
*
*
**/
this.replace = function(range, text) {
return this.doc.replace(range, text);
};
/**
* Moves a range of text from the given range to the given position. `toPosition` is an object that looks like this:
* ```json
* { row: newRowLocation, column: newColumnLocation }
* ```
* @param {Range} fromRange The range of text you want moved within the document
* @param {Object} toPosition The location (row and column) where you want to move the text to
* @returns {Range} The new range where the text was moved to.
*
*
*
**/
this.moveText = function(fromRange, toPosition, copy) {
var text = this.getTextRange(fromRange);
var folds = this.getFoldsInRange(fromRange);
var toRange = Range.fromPoints(toPosition, toPosition);
if (!copy) {
this.remove(fromRange);
var rowDiff = fromRange.start.row - fromRange.end.row;
var collDiff = rowDiff ? -fromRange.end.column : fromRange.start.column - fromRange.end.column;
if (collDiff) {
if (toRange.start.row == fromRange.end.row && toRange.start.column > fromRange.end.column)
toRange.start.column += collDiff;
if (toRange.end.row == fromRange.end.row && toRange.end.column > fromRange.end.column)
toRange.end.column += collDiff;
}
if (rowDiff && toRange.start.row >= fromRange.end.row) {
toRange.start.row += rowDiff;
toRange.end.row += rowDiff;
}
}
this.insert(toRange.start, text);
if (folds.length) {
var oldStart = fromRange.start;
var newStart = toRange.start;
var rowDiff = newStart.row - oldStart.row;
var collDiff = newStart.column - oldStart.column;
this.addFolds(folds.map(function(x) {
x = x.clone();
if (x.start.row == oldStart.row)
x.start.column += collDiff;
if (x.end.row == oldStart.row)
x.end.column += collDiff;
x.start.row += rowDiff;
x.end.row += rowDiff;
return x;
}));
}
return toRange;
};
/**
* Indents all the rows, from `startRow` to `endRow` (inclusive), by prefixing each row with the token in `indentString`.
*
* If `indentString` contains the `'\t'` character, it's replaced by whatever is defined by [[EditSession.getTabString `getTabString()`]].
* @param {Number} startRow Starting row
* @param {Number} endRow Ending row
* @param {String} indentString The indent token
*
*
**/
this.indentRows = function(startRow, endRow, indentString) {
indentString = indentString.replace(/\t/g, this.getTabString());
for (var row=startRow; row<=endRow; row++)
this.insert({row: row, column:0}, indentString);
};
/**
* Outdents all the rows defined by the `start` and `end` properties of `range`.
* @param {Range} range A range of rows
*
*
**/
this.outdentRows = function (range) {
var rowRange = range.collapseRows();
var deleteRange = new Range(0, 0, 0, 0);
var size = this.getTabSize();
for (var i = rowRange.start.row; i <= rowRange.end.row; ++i) {
var line = this.getLine(i);
deleteRange.start.row = i;
deleteRange.end.row = i;
for (var j = 0; j < size; ++j)
if (line.charAt(j) != ' ')
break;
if (j < size && line.charAt(j) == '\t') {
deleteRange.start.column = j;
deleteRange.end.column = j + 1;
} else {
deleteRange.start.column = 0;
deleteRange.end.column = j;
}
this.remove(deleteRange);
}
};
this.$moveLines = function(firstRow, lastRow, dir) {
firstRow = this.getRowFoldStart(firstRow);
lastRow = this.getRowFoldEnd(lastRow);
if (dir < 0) {
var row = this.getRowFoldStart(firstRow + dir);
if (row < 0) return 0;
var diff = row-firstRow;
} else if (dir > 0) {
var row = this.getRowFoldEnd(lastRow + dir);
if (row > this.doc.getLength()-1) return 0;
var diff = row-lastRow;
} else {
firstRow = this.$clipRowToDocument(firstRow);
lastRow = this.$clipRowToDocument(lastRow);
var diff = lastRow - firstRow + 1;
}
var range = new Range(firstRow, 0, lastRow, Number.MAX_VALUE);
var folds = this.getFoldsInRange(range).map(function(x){
x = x.clone();
x.start.row += diff;
x.end.row += diff;
return x;
});
var lines = dir == 0
? this.doc.getLines(firstRow, lastRow)
: this.doc.removeLines(firstRow, lastRow);
this.doc.insertLines(firstRow+diff, lines);
folds.length && this.addFolds(folds);
return diff;
};
/**
* Shifts all the lines in the document up one, starting from `firstRow` and ending at `lastRow`.
* @param {Number} firstRow The starting row to move up
* @param {Number} lastRow The final row to move up
* @returns {Number} If `firstRow` is less-than or equal to 0, this function returns 0. Otherwise, on success, it returns -1.
*
* @related Document.insertLines
*
**/
this.moveLinesUp = function(firstRow, lastRow) {
return this.$moveLines(firstRow, lastRow, -1);
};
/**
* Shifts all the lines in the document down one, starting from `firstRow` and ending at `lastRow`.
* @param {Number} firstRow The starting row to move down
* @param {Number} lastRow The final row to move down
* @returns {Number} If `firstRow` is less-than or equal to 0, this function returns 0. Otherwise, on success, it returns -1.
*
* @related Document.insertLines
**/
this.moveLinesDown = function(firstRow, lastRow) {
return this.$moveLines(firstRow, lastRow, 1);
};
/**
* Duplicates all the text between `firstRow` and `lastRow`.
* @param {Number} firstRow The starting row to duplicate
* @param {Number} lastRow The final row to duplicate
* @returns {Number} Returns the number of new rows added; in other words, `lastRow - firstRow + 1`.
*
*
**/
this.duplicateLines = function(firstRow, lastRow) {
return this.$moveLines(firstRow, lastRow, 0);
};
this.$clipRowToDocument = function(row) {
return Math.max(0, Math.min(row, this.doc.getLength()-1));
};
this.$clipColumnToRow = function(row, column) {
if (column < 0)
return 0;
return Math.min(this.doc.getLine(row).length, column);
};
this.$clipPositionToDocument = function(row, column) {
column = Math.max(0, column);
if (row < 0) {
row = 0;
column = 0;
} else {
var len = this.doc.getLength();
if (row >= len) {
row = len - 1;
column = this.doc.getLine(len-1).length;
} else {
column = Math.min(this.doc.getLine(row).length, column);
}
}
return {
row: row,
column: column
};
};
this.$clipRangeToDocument = function(range) {
if (range.start.row < 0) {
range.start.row = 0;
range.start.column = 0;
} else {
range.start.column = this.$clipColumnToRow(
range.start.row,
range.start.column
);
}
var len = this.doc.getLength() - 1;
if (range.end.row > len) {
range.end.row = len;
range.end.column = this.doc.getLine(len).length;
} else {
range.end.column = this.$clipColumnToRow(
range.end.row,
range.end.column
);
}
return range;
};
// WRAPMODE
this.$wrapLimit = 80;
this.$useWrapMode = false;
this.$wrapLimitRange = {
min : null,
max : null
};
/**
* Sets whether or not line wrapping is enabled. If `useWrapMode` is different than the current value, the `'changeWrapMode'` event is emitted.
* @param {Boolean} useWrapMode Enable (or disable) wrap mode
*
*
**/
this.setUseWrapMode = function(useWrapMode) {
if (useWrapMode != this.$useWrapMode) {
this.$useWrapMode = useWrapMode;
this.$modified = true;
this.$resetRowCache(0);
// If wrapMode is activaed, the wrapData array has to be initialized.
if (useWrapMode) {
var len = this.getLength();
this.$wrapData = [];
for (var i = 0; i < len; i++) {
this.$wrapData.push([]);
}
this.$updateWrapData(0, len - 1);
}
this._emit("changeWrapMode");
}
};
/**
* Returns `true` if wrap mode is being used; `false` otherwise.
* @returns {Boolean}
**/
this.getUseWrapMode = function() {
return this.$useWrapMode;
};
// Allow the wrap limit to move freely between min and max. Either
// parameter can be null to allow the wrap limit to be unconstrained
// in that direction. Or set both parameters to the same number to pin
// the limit to that value.
/**
* Sets the boundaries of wrap. Either value can be `null` to have an unconstrained wrap, or, they can be the same number to pin the limit. If the wrap limits for `min` or `max` are different, this method also emits the `'changeWrapMode'` event.
* @param {Number} min The minimum wrap value (the left side wrap)
* @param {Number} max The maximum wrap value (the right side wrap)
*
*
**/
this.setWrapLimitRange = function(min, max) {
if (this.$wrapLimitRange.min !== min || this.$wrapLimitRange.max !== max) {
this.$wrapLimitRange.min = min;
this.$wrapLimitRange.max = max;
this.$modified = true;
// This will force a recalculation of the wrap limit
this._emit("changeWrapMode");
}
};
/**
* This should generally only be called by the renderer when a resize is detected.
* @param {Number} desiredLimit The new wrap limit
* @returns {Boolean}
*
* @private
**/
this.adjustWrapLimit = function(desiredLimit, $printMargin) {
var limits = this.$wrapLimitRange
if (limits.max < 0)
limits = {min: $printMargin, max: $printMargin};
var wrapLimit = this.$constrainWrapLimit(desiredLimit, limits.min, limits.max);
if (wrapLimit != this.$wrapLimit && wrapLimit > 1) {
this.$wrapLimit = wrapLimit;
this.$modified = true;
if (this.$useWrapMode) {
this.$updateWrapData(0, this.getLength() - 1);
this.$resetRowCache(0);
this._emit("changeWrapLimit");
}
return true;
}
return false;
};
this.$constrainWrapLimit = function(wrapLimit, min, max) {
if (min)
wrapLimit = Math.max(min, wrapLimit);
if (max)
wrapLimit = Math.min(max, wrapLimit);
return wrapLimit;
};
/**
* Returns the value of wrap limit.
* @returns {Number} The wrap limit.
**/
this.getWrapLimit = function() {
return this.$wrapLimit;
};
/**
* Sets the line length for soft wrap in the editor. Lines will break
* at a minimum of the given length minus 20 chars and at a maximum
* of the given number of chars.
* @param {number} limit The maximum line length in chars, for soft wrapping lines.
*/
this.setWrapLimit = function (limit) {
this.setWrapLimitRange(limit, limit);
};
/**
* Returns an object that defines the minimum and maximum of the wrap limit; it looks something like this:
*
* { min: wrapLimitRange_min, max: wrapLimitRange_max }
*
* @returns {Object}
**/
this.getWrapLimitRange = function() {
// Avoid unexpected mutation by returning a copy
return {
min : this.$wrapLimitRange.min,
max : this.$wrapLimitRange.max
};
};
this.$updateInternalDataOnChange = function(e) {
var useWrapMode = this.$useWrapMode;
var len;
var action = e.data.action;
var firstRow = e.data.range.start.row;
var lastRow = e.data.range.end.row;
var start = e.data.range.start;
var end = e.data.range.end;
var removedFolds = null;
if (action.indexOf("Lines") != -1) {
if (action == "insertLines") {
lastRow = firstRow + (e.data.lines.length);
} else {
lastRow = firstRow;
}
len = e.data.lines ? e.data.lines.length : lastRow - firstRow;
} else {
len = lastRow - firstRow;
}
this.$updating = true;
if (len != 0) {
if (action.indexOf("remove") != -1) {
this[useWrapMode ? "$wrapData" : "$rowLengthCache"].splice(firstRow, len);
var foldLines = this.$foldData;
removedFolds = this.getFoldsInRange(e.data.range);
this.removeFolds(removedFolds);
var foldLine = this.getFoldLine(end.row);
var idx = 0;
if (foldLine) {
foldLine.addRemoveChars(end.row, end.column, start.column - end.column);
foldLine.shiftRow(-len);
var foldLineBefore = this.getFoldLine(firstRow);
if (foldLineBefore && foldLineBefore !== foldLine) {
foldLineBefore.merge(foldLine);
foldLine = foldLineBefore;
}
idx = foldLines.indexOf(foldLine) + 1;
}
for (idx; idx < foldLines.length; idx++) {
var foldLine = foldLines[idx];
if (foldLine.start.row >= end.row) {
foldLine.shiftRow(-len);
}
}
lastRow = firstRow;
} else {
var args;
if (useWrapMode) {
args = [firstRow, 0];
for (var i = 0; i < len; i++) args.push([]);
this.$wrapData.splice.apply(this.$wrapData, args);
} else {
args = Array(len);
args.unshift(firstRow, 0);
this.$rowLengthCache.splice.apply(this.$rowLengthCache, args);
}
// If some new line is added inside of a foldLine, then split
// the fold line up.
var foldLines = this.$foldData;
var foldLine = this.getFoldLine(firstRow);
var idx = 0;
if (foldLine) {
var cmp = foldLine.range.compareInside(start.row, start.column)
// Inside of the foldLine range. Need to split stuff up.
if (cmp == 0) {
foldLine = foldLine.split(start.row, start.column);
foldLine.shiftRow(len);
foldLine.addRemoveChars(
lastRow, 0, end.column - start.column);
} else
// Infront of the foldLine but same row. Need to shift column.
if (cmp == -1) {
foldLine.addRemoveChars(firstRow, 0, end.column - start.column);
foldLine.shiftRow(len);
}
// Nothing to do if the insert is after the foldLine.
idx = foldLines.indexOf(foldLine) + 1;
}
for (idx; idx < foldLines.length; idx++) {
var foldLine = foldLines[idx];
if (foldLine.start.row >= firstRow) {
foldLine.shiftRow(len);
}
}
}
} else {
// Realign folds. E.g. if you add some new chars before a fold, the
// fold should "move" to the right.
len = Math.abs(e.data.range.start.column - e.data.range.end.column);
if (action.indexOf("remove") != -1) {
// Get all the folds in the change range and remove them.
removedFolds = this.getFoldsInRange(e.data.range);
this.removeFolds(removedFolds);
len = -len;
}
var foldLine = this.getFoldLine(firstRow);
if (foldLine) {
foldLine.addRemoveChars(firstRow, start.column, len);
}
}
if (useWrapMode && this.$wrapData.length != this.doc.getLength()) {
console.error("doc.getLength() and $wrapData.length have to be the same!");
}
this.$updating = false;
if (useWrapMode)
this.$updateWrapData(firstRow, lastRow);
else
this.$updateRowLengthCache(firstRow, lastRow);
return removedFolds;
};
this.$updateRowLengthCache = function(firstRow, lastRow, b) {
this.$rowLengthCache[firstRow] = null;
this.$rowLengthCache[lastRow] = null;
};
this.$updateWrapData = function(firstRow, lastRow) {
var lines = this.doc.getAllLines();
var tabSize = this.getTabSize();
var wrapData = this.$wrapData;
var wrapLimit = this.$wrapLimit;
var tokens;
var foldLine;
var row = firstRow;
lastRow = Math.min(lastRow, lines.length - 1);
while (row <= lastRow) {
foldLine = this.getFoldLine(row, foldLine);
if (!foldLine) {
tokens = this.$getDisplayTokens(lang.stringTrimRight(lines[row]));
wrapData[row] = this.$computeWrapSplits(tokens, wrapLimit, tabSize);
row ++;
} else {
tokens = [];
foldLine.walk(function(placeholder, row, column, lastColumn) {
var walkTokens;
if (placeholder != null) {
walkTokens = this.$getDisplayTokens(
placeholder, tokens.length);
walkTokens[0] = PLACEHOLDER_START;
for (var i = 1; i < walkTokens.length; i++) {
walkTokens[i] = PLACEHOLDER_BODY;
}
} else {
walkTokens = this.$getDisplayTokens(
lines[row].substring(lastColumn, column),
tokens.length);
}
tokens = tokens.concat(walkTokens);
}.bind(this),
foldLine.end.row,
lines[foldLine.end.row].length + 1
);
// Remove spaces/tabs from the back of the token array.
while (tokens.length != 0 && tokens[tokens.length - 1] >= SPACE)
tokens.pop();
wrapData[foldLine.start.row]
= this.$computeWrapSplits(tokens, wrapLimit, tabSize);
row = foldLine.end.row + 1;
}
}
};
// "Tokens"
var CHAR = 1,
CHAR_EXT = 2,
PLACEHOLDER_START = 3,
PLACEHOLDER_BODY = 4,
PUNCTUATION = 9,
SPACE = 10,
TAB = 11,
TAB_SPACE = 12;
// Computes the wrap split points for one logical line, given the display
// tokens from $getDisplayTokens and the wrap limit (in screen columns).
// Returns an array of document-column offsets at which the line wraps.
// NOTE(review): the call site passes a third `tabSize` argument that this
// signature does not declare — confirm whether it is intentionally unused.
this.$computeWrapSplits = function(tokens, wrapLimit) {
    if (tokens.length == 0) {
        return [];
    }
    var splits = [];
    var displayLength = tokens.length;
    var lastSplit = 0, lastDocSplit = 0;
    // Record a split ending at screen position `screenPos` and advance the
    // running document-column counter.
    function addSplit(screenPos) {
        var displayed = tokens.slice(lastSplit, screenPos);
        // The document size is the current size - the extra width for tabs
        // and multipleWidth characters.
        // NOTE(review): the join/replace trick below counts digit patterns
        // ("12" and "2") in the concatenated token codes rather than the
        // tokens themselves; it relies on the callbacks only for the side
        // effect of decrementing `len`.
        var len = displayed.length;
        displayed.join("").
            // Get all the TAB_SPACEs.
            replace(/12/g, function() {
                len -= 1;
            }).
            // Get all the CHAR_EXT/multipleWidth characters.
            replace(/2/g, function() {
                len -= 1;
            });
        lastDocSplit += len;
        splits.push(lastDocSplit);
        lastSplit = screenPos;
    }
    while (displayLength - lastSplit > wrapLimit) {
        // This is, where the split should be.
        var split = lastSplit + wrapLimit;
        // If there is a space or tab at this split position, then making
        // a split is simple.
        if (tokens[split] >= SPACE) {
            // Include all following spaces + tabs in this split as well.
            while (tokens[split] >= SPACE) {
                split ++;
            }
            addSplit(split);
            continue;
        }
        // === ELSE ===
        // Check if split is inside of a placeholder. Placeholder are
        // not splitable. Therefore, seek the beginning of the placeholder
        // and try to place the split beofre the placeholder's start.
        if (tokens[split] == PLACEHOLDER_START
            || tokens[split] == PLACEHOLDER_BODY)
        {
            // Seek the start of the placeholder and do the split
            // before the placeholder. By definition there always
            // a PLACEHOLDER_START between split and lastSplit.
            for (split; split != lastSplit - 1; split--) {
                if (tokens[split] == PLACEHOLDER_START) {
                    // split++; << No incremental here as we want to
                    //  have the position before the Placeholder.
                    break;
                }
            }
            // If the PLACEHOLDER_START is not the index of the
            // last split, then we can do the split
            if (split > lastSplit) {
                addSplit(split);
                continue;
            }
            // If the PLACEHOLDER_START IS the index of the last
            // split, then we have to place the split after the
            // placeholder. So, let's seek for the end of the placeholder.
            split = lastSplit + wrapLimit;
            for (split; split < tokens.length; split++) {
                if (tokens[split] != PLACEHOLDER_BODY)
                {
                    break;
                }
            }
            // If spilt == tokens.length, then the placeholder is the last
            // thing in the line and adding a new split doesn't make sense.
            if (split == tokens.length) {
                break;  // Breaks the while-loop.
            }
            // Finally, add the split...
            addSplit(split);
            continue;
        }
        // === ELSE ===
        // Search for the first non space/tab/placeholder/punctuation token backwards.
        // Only look back up to 10 columns so pathological lines still wrap.
        var minSplit = Math.max(split - 10, lastSplit - 1);
        while (split > minSplit && tokens[split] < PLACEHOLDER_START) {
            split --;
        }
        while (split > minSplit && tokens[split] == PUNCTUATION) {
            split --;
        }
        // If we found one, then add the split.
        if (split > minSplit) {
            addSplit(++split);
            continue;
        }
        // === ELSE ===
        split = lastSplit + wrapLimit;
        // The split is inside of a CHAR or CHAR_EXT token and no space
        // around -> force a split.
        addSplit(split);
    }
    return splits;
};
/**
* Given a string, returns an array of the display characters, including tabs and spaces.
* @param {String} str The string to check
* @param {Number} offset The value to start at
*
*
**/
// Maps each character of `str` to one or more display-token codes (see the
// CHAR/SPACE/TAB constants above). A tab expands to TAB plus TAB_SPACE
// fillers; a full-width character expands to CHAR plus CHAR_EXT.
// `offset` is the screen column at which `str` starts (affects tab width).
this.$getDisplayTokens = function(str, offset) {
    var arr = [];
    var tabSize;
    offset = offset || 0;
    for (var i = 0; i < str.length; i++) {
        var c = str.charCodeAt(i);
        // Tab
        if (c == 9) {
            // Tab width depends on the current screen column.
            tabSize = this.getScreenTabSize(arr.length + offset);
            arr.push(TAB);
            for (var n = 1; n < tabSize; n++) {
                arr.push(TAB_SPACE);
            }
        }
        // Space
        else if (c == 32) {
            arr.push(SPACE);
        } else if((c > 39 && c < 48) || (c > 57 && c < 64)) {
            // ASCII ranges ()*+,-./ and :;<=>? are treated as punctuation.
            arr.push(PUNCTUATION);
        }
        // full width characters
        else if (c >= 0x1100 && isFullWidth(c)) {
            arr.push(CHAR, CHAR_EXT);
        } else {
            arr.push(CHAR);
        }
    }
    return arr;
};
/**
* Calculates the width of the string `str` on the screen while assuming that the string starts at the first column on the screen.
* @param {String} str The string to calculate the screen width of
* @param {Number} maxScreenColumn
* @param {Number} screenColumn
* @returns {[Number]} Returns an `int[]` array with two elements:<br/>
* The first position indicates the number of columns for `str` on screen.<br/>
* The second value contains the position of the document column that this function read until.
*
*
*
*
**/
/**
 * Calculates the on-screen width of `str`, counting tabs and full-width
 * characters as multiple columns.
 * @param {String} str The string to measure
 * @param {Number} maxScreenColumn Stop once this screen column is exceeded
 *   (0 means "measure nothing"; null/undefined means no limit)
 * @param {Number} screenColumn Starting screen column (defaults to 0)
 * @returns {[Number]} Pair of [final screen column, document column reached]
 **/
this.$getStringScreenWidth = function(str, maxScreenColumn, screenColumn) {
    if (maxScreenColumn == 0)
        return [0, 0];
    if (maxScreenColumn == null)
        maxScreenColumn = Infinity;
    screenColumn = screenColumn || 0;

    var charCode;
    var docColumn = 0;
    while (docColumn < str.length) {
        charCode = str.charCodeAt(docColumn);
        if (charCode == 9) {
            // Tabs advance to the next tab stop.
            screenColumn += this.getScreenTabSize(screenColumn);
        } else if (charCode >= 0x1100 && isFullWidth(charCode)) {
            // Full-width characters occupy two screen cells.
            screenColumn += 2;
        } else {
            screenColumn += 1;
        }
        if (screenColumn > maxScreenColumn)
            break;
        docColumn++;
    }
    return [screenColumn, docColumn];
};
/**
* Returns number of screenrows in a wrapped line.
* @param {Number} row The row number to check
*
* @returns {Number}
**/
/**
 * Returns the number of screen rows occupied by the given document row
 * (1 when wrapping is off or the row has no wrap data).
 * @param {Number} row The document row to check
 * @returns {Number}
 **/
this.getRowLength = function(row) {
    var splits = this.$useWrapMode ? this.$wrapData[row] : null;
    return splits ? splits.length + 1 : 1;
};
/**
* Returns the position (on screen) for the last character in the provided screen row.
* @param {Number} screenRow The screen row to check
* @returns {Number}
*
* @related EditSession.documentToScreenColumn
**/
/**
 * Returns the screen column of the last character in the given screen row.
 * @param {Number} screenRow The screen row to check
 * @returns {Number}
 **/
this.getScreenLastRowColumn = function(screenRow) {
    // Clamp to the end of the row by asking for an impossibly large column.
    var docPos = this.screenToDocumentPosition(screenRow, Number.MAX_VALUE);
    return this.documentToScreenColumn(docPos.row, docPos.column);
};
/**
* For the given document row and column, this returns the column position of the last screen row.
* @param {Number} docRow
*
* @param {Number} docColumn
**/
/**
 * For the given document position, returns the screen column at the end of
 * the (possibly wrapped) screen row containing it.
 * @param {Number} docRow
 * @param {Number} docColumn
 * @returns {Number}
 **/
this.getDocumentLastRowColumn = function(docRow, docColumn) {
    return this.getScreenLastRowColumn(this.documentToScreenRow(docRow, docColumn));
};
/**
* For the given document row and column, this returns the document position of the last row.
* @param {Number} docRow
* @param {Number} docColumn
*
*
**/
/**
 * For the given document position, returns the document position at the end
 * of the screen row containing it.
 * @param {Number} docRow
 * @param {Number} docColumn
 * @returns {Object} `{row, column}`
 **/
this.getDocumentLastRowColumnPosition = function(docRow, docColumn) {
    // MAX_VALUE / 10 is a "very large" column used to clamp to the row end.
    return this.screenToDocumentPosition(
        this.documentToScreenRow(docRow, docColumn),
        Number.MAX_VALUE / 10
    );
};
/**
* For the given row, this returns the split data.
* @returns {String}
**/
/**
 * Returns the wrap split data for the given row, or `undefined` when wrap
 * mode is off.
 * @param {Number} row
 * @returns {Array|undefined}
 **/
this.getRowSplitData = function(row) {
    return this.$useWrapMode ? this.$wrapData[row] : undefined;
};
/**
* The distance to the next tab stop at the specified screen column.
* @param {Number} screenColumn The screen column to check
*
*
* @returns {Number}
**/
/**
 * Returns the distance from `screenColumn` to the next tab stop.
 * @param {Number} screenColumn The screen column to check
 * @returns {Number}
 **/
this.getScreenTabSize = function(screenColumn) {
    var tabSize = this.$tabSize;
    return tabSize - (screenColumn % tabSize);
};
// Convenience wrapper: document row for a screen position.
this.screenToDocumentRow = function(screenRow, screenColumn) {
    var docPos = this.screenToDocumentPosition(screenRow, screenColumn);
    return docPos.row;
};

// Convenience wrapper: document column for a screen position.
this.screenToDocumentColumn = function(screenRow, screenColumn) {
    var docPos = this.screenToDocumentPosition(screenRow, screenColumn);
    return docPos.column;
};
/**
* Converts characters coordinates on the screen to characters coordinates within the document. [This takes into account code folding, word wrap, tab size, and any other visual modifications.]{: #conversionConsiderations}
* @param {Number} screenRow The screen row to check
* @param {Number} screenColumn The screen column to check
* @returns {Object} The object returned has two properties: `row` and `column`.
*
*
* @related EditSession.documentToScreenPosition
*
**/
// Converts a screen position to a document position, accounting for folds,
// wrapping and tab width. Uses (and opportunistically extends) the
// row-position caches to avoid rescanning from the top of the document.
this.screenToDocumentPosition = function(screenRow, screenColumn) {
    if (screenRow < 0)
        return {row: 0, column: 0};

    var line;
    var docRow = 0;
    var docColumn = 0;
    var column;
    var row = 0;
    var rowLength = 0;

    // Seed the search from the cached screen-row/doc-row pairs when possible.
    var rowCache = this.$screenRowCache;
    var i = this.$getRowCacheIndex(rowCache, screenRow);
    var l = rowCache.length;
    if (l && i >= 0) {
        var row = rowCache[i];
        var docRow = this.$docRowCache[i];
        // Only append to the cache when scanning past its current end.
        var doCache = screenRow > rowCache[l - 1];
    } else {
        var doCache = !l;
    }

    var maxRow = this.getLength() - 1;
    var foldLine = this.getNextFoldLine(docRow);
    var foldStart = foldLine ? foldLine.start.row : Infinity;

    // Walk forward one document row at a time (skipping folded regions)
    // until the accumulated screen rows cover `screenRow`.
    while (row <= screenRow) {
        rowLength = this.getRowLength(docRow);
        if (row + rowLength - 1 >= screenRow || docRow >= maxRow) {
            break;
        } else {
            row += rowLength;
            docRow++;
            if (docRow > foldStart) {
                // Jump over the folded range in one step.
                docRow = foldLine.end.row+1;
                foldLine = this.getNextFoldLine(docRow, foldLine);
                foldStart = foldLine ? foldLine.start.row : Infinity;
            }
        }
        if (doCache) {
            this.$docRowCache.push(docRow);
            this.$screenRowCache.push(row);
        }
    }

    if (foldLine && foldLine.start.row <= docRow) {
        // Target row is inside a fold: measure against the fold's display line.
        line = this.getFoldDisplayLine(foldLine);
        docRow = foldLine.start.row;
    } else if (row + rowLength <= screenRow || docRow > maxRow) {
        // clip at the end of the document
        return {
            row: maxRow,
            column: this.getLine(maxRow).length
        }
    } else {
        line = this.getLine(docRow);
        foldLine = null;
    }

    if (this.$useWrapMode) {
        var splits = this.$wrapData[docRow];
        if (splits) {
            // `column` is the document column where the NEXT wrap begins.
            column = splits[screenRow - row];
            if(screenRow > row && splits.length) {
                // Drop everything before the current wrapped segment.
                docColumn = splits[screenRow - row - 1] || splits[splits.length - 1];
                line = line.substring(docColumn);
            }
        }
    }

    docColumn += this.$getStringScreenWidth(line, screenColumn)[1];

    // We remove one character at the end so that the docColumn
    // position returned is not associated to the next row on the screen.
    if (this.$useWrapMode && docColumn >= column)
        docColumn = column - 1;

    if (foldLine)
        return foldLine.idxToPosition(docColumn);

    return {row: docRow, column: docColumn};
};
/**
* Converts document coordinates to screen coordinates. {:conversionConsiderations}
* @param {Number} docRow The document row to check
* @param {Number} docColumn The document column to check
* @returns {Object} The object returned by this method has two properties: `row` and `column`.
*
*
* @related EditSession.screenToDocumentPosition
*
**/
// Converts a document position to a screen position, accounting for folds,
// wrapping and tab width. Accepts either (row, column) or a single
// `{row, column}` object as the first argument.
this.documentToScreenPosition = function(docRow, docColumn) {
    // Normalize the passed in arguments.
    if (typeof docColumn === "undefined")
        var pos = this.$clipPositionToDocument(docRow.row, docRow.column);
    else
        pos = this.$clipPositionToDocument(docRow, docColumn);

    docRow = pos.row;
    docColumn = pos.column;

    var screenRow = 0;
    var foldStartRow = null;
    var fold = null;

    // Clamp the docRow position in case it's inside of a folded block.
    fold = this.getFoldAt(docRow, docColumn, 1);
    if (fold) {
        docRow = fold.start.row;
        docColumn = fold.start.column;
    }

    var rowEnd, row = 0;

    // Seed the scan from the cached doc-row/screen-row pairs when possible.
    var rowCache = this.$docRowCache;
    var i = this.$getRowCacheIndex(rowCache, docRow);
    var l = rowCache.length;
    if (l && i >= 0) {
        var row = rowCache[i];
        var screenRow = this.$screenRowCache[i];
        // Only append to the cache when scanning past its current end.
        var doCache = docRow > rowCache[l - 1];
    } else {
        var doCache = !l;
    }

    var foldLine = this.getNextFoldLine(row);
    var foldStart = foldLine ?foldLine.start.row :Infinity;

    // Accumulate screen rows for every document row before `docRow`,
    // skipping folded ranges in one step.
    while (row < docRow) {
        if (row >= foldStart) {
            rowEnd = foldLine.end.row + 1;
            if (rowEnd > docRow)
                break;
            foldLine = this.getNextFoldLine(rowEnd, foldLine);
            foldStart = foldLine ?foldLine.start.row :Infinity;
        }
        else {
            rowEnd = row + 1;
        }

        screenRow += this.getRowLength(row);
        row = rowEnd;

        if (doCache) {
            this.$docRowCache.push(row);
            this.$screenRowCache.push(screenRow);
        }
    }

    // Calculate the text line that is displayed in docRow on the screen.
    var textLine = "";
    // Check if the final row we want to reach is inside of a fold.
    if (foldLine && row >= foldStart) {
        textLine = this.getFoldDisplayLine(foldLine, docRow, docColumn);
        foldStartRow = foldLine.start.row;
    } else {
        textLine = this.getLine(docRow).substring(0, docColumn);
        foldStartRow = docRow;
    }
    // Clamp textLine if in wrapMode.
    if (this.$useWrapMode) {
        var wrapRow = this.$wrapData[foldStartRow];
        var screenRowOffset = 0;
        // Advance through wrap segments that end before our column.
        while (textLine.length >= wrapRow[screenRowOffset]) {
            screenRow ++;
            screenRowOffset++;
        }
        textLine = textLine.substring(
            wrapRow[screenRowOffset - 1] || 0, textLine.length
        );
    }

    return {
        row: screenRow,
        column: this.$getStringScreenWidth(textLine)[0]
    };
};
/**
* For the given document row and column, returns the screen column.
* @param {Number} row
* @param {Number} docColumn
* @returns {Number}
*
**/
/**
 * For the given document row and column, returns the screen column.
 * @param {Number} row
 * @param {Number} docColumn
 * @returns {Number}
 **/
this.documentToScreenColumn = function(row, docColumn) {
    var screenPos = this.documentToScreenPosition(row, docColumn);
    return screenPos.column;
};
/**
* For the given document row and column, returns the screen row.
* @param {Number} docRow
* @param {Number} docColumn
*
*
**/
/**
 * For the given document row and column, returns the screen row.
 * @param {Number} docRow
 * @param {Number} docColumn
 * @returns {Number}
 **/
this.documentToScreenRow = function(docRow, docColumn) {
    var screenPos = this.documentToScreenPosition(docRow, docColumn);
    return screenPos.row;
};
/**
* Returns the length of the screen.
* @returns {Number}
**/
// Returns the total number of screen rows in the document, subtracting
// folded rows and adding wrapped rows as appropriate.
this.getScreenLength = function() {
    var screenRows = 0;
    var fold = null;
    if (!this.$useWrapMode) {
        // Without wrapping, every document row is one screen row...
        screenRows = this.getLength();

        // Remove the folded lines again.
        var foldData = this.$foldData;
        for (var i = 0; i < foldData.length; i++) {
            fold = foldData[i];
            screenRows -= fold.end.row - fold.start.row;
        }
    } else {
        // With wrapping, count each row's wrap segments, skipping folded
        // ranges in one jump.
        var lastRow = this.$wrapData.length;
        var row = 0, i = 0;
        var fold = this.$foldData[i++];
        var foldStart = fold ? fold.start.row :Infinity;

        while (row < lastRow) {
            screenRows += this.$wrapData[row].length + 1;
            row ++;
            if (row > foldStart) {
                row = fold.end.row+1;
                fold = this.$foldData[i++];
                foldStart = fold ?fold.start.row :Infinity;
            }
        }
    }

    return screenRows;
};
// For every keystroke this gets called once per char in the whole doc!!
// Wouldn't hurt to make it a bit faster for c >= 0x1100
/**
* @private
*
*/
/**
 * Returns true when the character code `c` is a full-width (two-column)
 * character, per the East Asian Width ranges hard-coded below.
 * The `c < 0x1100` guard is the hot-path early exit; the redundant
 * `c >= 0x1100` re-check in the first range of the original was removed.
 * @param {Number} c The character code to check
 * @returns {Boolean}
 * @private
 */
function isFullWidth(c) {
    if (c < 0x1100)
        return false;
    return c <= 0x115F ||
           c >= 0x11A3 && c <= 0x11A7 ||
           c >= 0x11FA && c <= 0x11FF ||
           c >= 0x2329 && c <= 0x232A ||
           c >= 0x2E80 && c <= 0x2E99 ||
           c >= 0x2E9B && c <= 0x2EF3 ||
           c >= 0x2F00 && c <= 0x2FD5 ||
           c >= 0x2FF0 && c <= 0x2FFB ||
           c >= 0x3000 && c <= 0x303E ||
           c >= 0x3041 && c <= 0x3096 ||
           c >= 0x3099 && c <= 0x30FF ||
           c >= 0x3105 && c <= 0x312D ||
           c >= 0x3131 && c <= 0x318E ||
           c >= 0x3190 && c <= 0x31BA ||
           c >= 0x31C0 && c <= 0x31E3 ||
           c >= 0x31F0 && c <= 0x321E ||
           c >= 0x3220 && c <= 0x3247 ||
           c >= 0x3250 && c <= 0x32FE ||
           c >= 0x3300 && c <= 0x4DBF ||
           c >= 0x4E00 && c <= 0xA48C ||
           c >= 0xA490 && c <= 0xA4C6 ||
           c >= 0xA960 && c <= 0xA97C ||
           c >= 0xAC00 && c <= 0xD7A3 ||
           c >= 0xD7B0 && c <= 0xD7C6 ||
           c >= 0xD7CB && c <= 0xD7FB ||
           c >= 0xF900 && c <= 0xFAFF ||
           c >= 0xFE10 && c <= 0xFE19 ||
           c >= 0xFE30 && c <= 0xFE52 ||
           c >= 0xFE54 && c <= 0xFE66 ||
           c >= 0xFE68 && c <= 0xFE6B ||
           c >= 0xFF01 && c <= 0xFF60 ||
           c >= 0xFFE0 && c <= 0xFFE6;
}
}).call(EditSession.prototype);
require("./edit_session/folding").Folding.call(EditSession.prototype);
require("./edit_session/bracket_match").BracketMatch.call(EditSession.prototype);
// Session-level options. Each entry wires a public option name to the
// corresponding EditSession state and change events.
config.defineOptions(EditSession.prototype, "session", {
    wrap: {
        // Accepts false/"off", true/"free", "printMargin" (-1), or a
        // numeric column (as number or numeric string).
        set: function(value) {
            if (!value || value == "off")
                value = false;
            else if (value == "free")
                value = true;
            else if (value == "printMargin")
                value = -1;
            else if (typeof value == "string")
                value = parseInt(value, 10) || false;

            if (this.$wrap == value)
                return;
            if (!value) {
                this.setUseWrapMode(false);
            } else {
                var col = typeof value == "number" ? value : null;
                this.setWrapLimitRange(col, col);
                this.setUseWrapMode(true);
            }
            this.$wrap = value;
        },
        get: function() {
            // Reports "off", "free", or the fixed wrap column.
            return this.getUseWrapMode() ? this.getWrapLimitRange().min || "free" : "off";
        },
        handlesSet: true
    },
    firstLineNumber: {
        // Re-render the gutter when the numbering origin changes.
        set: function() {this._emit("changeBreakpoint");},
        initialValue: 1
    },
    useWorker: {
        // Restart the syntax-checking worker when toggled.
        set: function(useWorker) {
            this.$useWorker = useWorker;

            this.$stopWorker();
            if (useWorker)
                this.$startWorker();
        },
        initialValue: true
    },
    useSoftTabs: {initialValue: true},
    tabSize: {
        set: function(tabSize) {
            if (isNaN(tabSize) || this.$tabSize === tabSize) return;

            // Row lengths depend on tab width, so invalidate the cache.
            this.$modified = true;
            this.$rowLengthCache = [];
            this.$tabSize = tabSize;
            this._emit("changeTabSize");
        },
        initialValue: 4,
        handlesSet: true
    },
    overwrite: {
        set: function(val) {this._emit("changeOverwrite");},
        initialValue: false
    },
    newLineMode: {
        // Delegates entirely to the underlying Document.
        set: function(val) {this.doc.setNewLineMode(val)},
        get: function() {return this.doc.getNewLineMode()},
        handlesSet: true
    }
});
exports.EditSession = EditSession;
});
|
<reponame>minuk8932/Algorithm_BaekJoon
package simulation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* 백준 10157번: 자리배정
*
* @see https://www.acmicpc.net/problem/10157/
*
*/
public class Boj10157 {
	private static final String SPACE = " ";
	private static StringBuilder sb = new StringBuilder();
	private static boolean isOver = false;

	/**
	 * BOJ 10157 "seat assignment": seats in a C x R hall are numbered along an
	 * inward clockwise spiral starting at the south-west corner going north.
	 * Reads C, R and a seat number K; prints the column and row of seat K, or
	 * 0 when K exceeds the number of seats.
	 *
	 * Fix vs. original: the south branch cleared {@code north} instead of
	 * {@code south} after finishing, leaving {@code south} permanently true
	 * and relying on the sequential if-chain to mask the error.
	 */
	public static void main(String[] args) throws Exception{
		// Read input through a buffered reader.
		BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
		StringTokenizer st = new StringTokenizer(br.readLine());

		int C = Integer.parseInt(st.nextToken());
		int R = Integer.parseInt(st.nextToken());
		int K = Integer.parseInt(br.readLine());

		// 1-based grid; map[row][col] holds the seat number, 0 = unassigned.
		int[][] map = new int[R + 1][C + 1];

		if(K <= C * R) {	// Only simulate when seat K exists.
			boolean north = true, east = false, south = false, west = false;
			// Spiral bounds: nCost/sCost are the next column limits,
			// eCost/wCost the next row limits; they shrink inward.
			int val = 1, nCost = 1, eCost = R, sCost = C, wCost = 1;

			while(val <= K) {
				if(north) {	// Fill the current west column going north.
					for(int i = wCost; i < eCost + 1; i++) {
						if(map[i][nCost] == 0) {
							map[i][nCost] = val;	// Assign and advance.
							val++;
						}
					}

					nCost++;	// Column done; move the north bound inward.
					north = false;	// Close north, open east.
					east = true;
				}	// The remaining directions follow the same pattern.

				if(east) {
					for(int i = nCost; i < sCost + 1; i++) {
						if(map[eCost][i] == 0) {
							map[eCost][i] = val;
							val++;
						}
					}

					eCost--;
					east = false;
					south = true;
				}

				if(south) {
					for(int i = eCost; i > wCost - 1; i--) {
						if(map[i][sCost] == 0) {
							map[i][sCost] = val;
							val++;
						}
					}

					sCost--;
					south = false;	// FIX: was "north = false" in the original.
					west = true;
				}

				if(west) {
					for(int i = sCost; i > nCost - 1; i--) {
						if(map[wCost][i] == 0) {
							map[wCost][i] = val;
							val++;
						}
					}

					wCost++;
					west = false;
					north = true;
				}
			}

			// Locate seat K and emit "column row" (axes are swapped on output).
			sb = new StringBuilder();
			for(int i = 1; i < R + 1; i++) {
				for(int j = 1; j < C + 1; j++) {
					if(K == map[i][j]) {
						sb.append(j).append(SPACE).append(i);
						isOver = true;
						break;
					}
				}
			}
		}

		System.out.println(isOver ? sb.toString() : 0);	// 0 when K is out of range.
	}
}
|
<filename>extern/typed-geometry/src/typed-geometry/functions/objects/volume.hh<gh_stars>0
#pragma once
#include <typed-geometry/detail/operators/ops_vec.hh>
#include <typed-geometry/detail/scalar_traits.hh>
#include <typed-geometry/functions/basic/constants.hh>
#include <typed-geometry/functions/basic/scalar_math.hh>
#include <typed-geometry/functions/vector/length.hh>
#include <typed-geometry/types/objects/aabb.hh>
#include <typed-geometry/types/objects/box.hh>
#include <typed-geometry/types/objects/capsule.hh>
#include <typed-geometry/types/objects/cylinder.hh>
#include <typed-geometry/types/objects/ellipse.hh>
#include <typed-geometry/types/objects/hemisphere.hh>
#include <typed-geometry/types/objects/pyramid.hh>
#include <typed-geometry/types/objects/sphere.hh>
#include <typed-geometry/types/size.hh>
namespace tg
{
// Volume of a 3D size: width * height * depth.
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(size<3, ScalarT> const& s)
{
    return s.width * s.height * s.depth;
}

// Volume of an axis-aligned bounding box via its extents.
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(aabb<3, ScalarT> const& b)
{
    return volume_of(size<3, ScalarT>(b.max - b.min));
}

// Volume of an oriented box: product of the full extents,
// computed as 8 * |he0| * |he1| * |he2| from the half-extent vectors.
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(box<3, ScalarT> const& b)
{
    return ScalarT(8) * sqrt(length_sqr(b.half_extents[0]) * length_sqr(b.half_extents[1]) * length_sqr(b.half_extents[2]));
}

// Sphere: (4/3) * pi * r^3.
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr ScalarT volume_of(sphere<3, ScalarT, 3, TraitsT> const& s)
{
    return ScalarT(4) / ScalarT(3) * tg::pi_scalar<ScalarT> * pow3(s.radius);
}

// Hemisphere: half a sphere, (2/3) * pi * r^3.
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(hemisphere<3, ScalarT> const& h)
{
    return ScalarT(2) / ScalarT(3) * tg::pi_scalar<ScalarT> * pow3(h.radius);
}

// Ellipsoid: (4/3) * pi * a * b * c with semi-axis lengths a, b, c.
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(ellipse<3, ScalarT> const& e)
{
    return ScalarT(4) / ScalarT(3) * tg::pi_scalar<ScalarT> * length(e.semi_axes[0]) * length(e.semi_axes[1]) * length(e.semi_axes[2]);
}

// Cylinder: pi * r^2 * height (height = length of the axis segment).
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(cylinder<3, ScalarT> const& c)
{
    return tg::pi_scalar<ScalarT> * pow2(c.radius) * length(c.axis);
}

// Capsule: cylinder plus the two hemispherical caps (one full sphere).
template <class ScalarT>
[[nodiscard]] constexpr ScalarT volume_of(capsule<3, ScalarT> const& c)
{
    return tg::pi_scalar<ScalarT> * pow2(c.radius) * (ScalarT(4) / ScalarT(3) * c.radius + length(c.axis));
}

// Pyramid/cone over an arbitrary base: base area * height / 3.
template <class BaseT>
[[nodiscard]] constexpr typename BaseT::scalar_t volume_of(pyramid<BaseT> const& p)
{
    using T = typename BaseT::scalar_t;
    return area_of(p.base) * p.height * T(1) / T(3);
}

// Deprecated alias kept for source compatibility.
template <class ObjectT>
[[deprecated("use volume_of")]] [[nodiscard]] constexpr auto volume(ObjectT const& o) -> decltype(volume_of(o))
{
    return volume_of(o);
}
} // namespace tg
|
// Millisecond durations for common time units.
const SECOND = 1000
const MINUTE = 60 * SECOND
const HOUR = 60 * MINUTE
const DAY = 24 * HOUR
const WEEK = 7 * DAY

module.exports = {SECOND, MINUTE, HOUR, DAY, WEEK}
|
def longest_consecutive_subarray(array):
    """Return the length of the longest contiguous subarray whose elements
    are all distinct.

    Fix vs. original: on a repeated value the old code reset the running
    count to 0 (not counting the repeated element as the start of a new
    streak) and never forgot previously seen values, so any value appearing
    twice anywhere capped the result. This version uses the standard
    sliding-window technique.

    :param array: iterable of hashable values
    :return: length of the longest all-distinct window (0 for empty input)
    """
    longest_length = 0
    window_start = 0  # left edge of the current all-distinct window
    last_seen = {}    # value -> index of its most recent occurrence

    for index, val in enumerate(array):
        if val in last_seen and last_seen[val] >= window_start:
            # val already occurs inside the window: shrink the window so it
            # starts just past the previous occurrence.
            window_start = last_seen[val] + 1
        last_seen[val] = index
        longest_length = max(longest_length, index - window_start + 1)

    return longest_length
#!/bin/bash
# Docker entrypoint: when invoked with "run" as the second argument, fix
# ownership of the mounted volumes and start TeamCity as the unprivileged
# "teamcity" user; otherwise exec the given command unchanged.
# Fix vs. original: variable expansions are now quoted so paths containing
# spaces do not word-split.
set -e

if [ "$2" = 'run' ]; then
    echo "TeamCity DATA is set to ${TEAMCITY_DATA_PATH}"
    echo "TeamCity HOME is set to ${TEAMCITY_HOME}"

    # we need to set the permissions here because docker mounts volumes as root
    echo "Setting up permissions..."
    chown -R teamcity:teamcity \
        "${TEAMCITY_DATA_PATH}" \
        "${TEAMCITY_HOME}"

    echo "Starting TeamCity..."
    HOME="${TEAMCITY_HOME}/bin/" exec gosu teamcity "$@"
    # Refer to this repo https://gosu-lang.github.io/
fi

exec "$@"
|
#!/bin/bash
# Mount or unmount a remote home directory on /ospsshfs via sshfs.
# Usage:
#   script              -> unmount /ospsshfs
#   script USER         -> mount USER@gruenau.informatik.hu-berlin.de:.
#   script USER HOST    -> mount USER@HOST:.
# Fix vs. original: positional parameters are quoted (word-splitting safety)
# and the archaic "x$1" comparison idiom is replaced with -z.
if [ -z "$1" ]; then
    fusermount -qu /ospsshfs
else
    if [ -z "$2" ]; then
        sshfs "$1@gruenau.informatik.hu-berlin.de:." /ospsshfs
    else
        sshfs "$1@$2:." /ospsshfs
    fi
fi
exit 0
|
#include <iostream>
using namespace std;

// Demo: fills a 2x3 matrix so that entry (i, j) encodes its 1-based row and
// column as two digits (11, 12, 13 / 21, 22, 23), then prints it row by row.
int main(){
    const int LINHAS = 2;   // rows
    const int COLUNAS = 3;  // columns
    int matriz[LINHAS][COLUNAS];

    // Value for row i, column j is (i+1)*10 + (j+1); C++ indexes from 0.
    for (int i = 0; i < LINHAS; i++){
        for (int j = 0; j < COLUNAS; j++){
            matriz[i][j] = (i + 1) * 10 + (j + 1);
        }
    }

    // Print each row on its own line, values separated by a space.
    for (int i = 0; i < LINHAS; i++){
        for (int j = 0; j < COLUNAS; j++){
            cout << matriz[i][j] << " ";
        }
        cout << endl;
    }
    return 0;
}
#!/bin/bash
# Check domain/port pairs in a CSV file for upcoming expiry, and notify contacts.
# Usage: ./ssl-expiry-check -f data-file [-s notification-hook]
# CSV format: domain,port,days-threshold,contacts
# Contacts are optional, and must be handled by a separate script.
# Script arguments: ./script contact domain state expiry-time time-remaining
# Assignments to uncomment and place into script:
# CONTACT="${1}" # Contact path
# DOMAIN="${2}" # Domain examined
# STATE="${3}" # Mode. Possible states: "expiring", "expiring SOON", or "expired"
# EXPIRY_TIME="${4}" # Expiry date
# REMAINING="${5}" # Remaining time (human-readable)
# Common message functions.
# Define colours
# Colour codes are only set when stdout is a terminal, so piped/redirected
# output stays colour-free (the variables expand to empty strings).
if [ -t 1 ]; then
    BLUE='\033[1;34m'
    GREEN='\033[1;32m'
    RED='\033[1;31m'
    YELLOW='\033[1;93m'
    PURPLE='\033[1;95m'
    BOLD='\033[1m'
    NC='\033[0m' # No Color
fi

# Each helper prints a tagged message (only when stdout is a terminal) and,
# except for notice(), bumps a global counter used for exit-status decisions.
error(){
    [ -t 1 ] && printf "${RED}"'Error'"${NC}"'['"${GREEN}"'%s'"${NC}"']: %s\n' "$(basename "${0}")" "${@}"
    __error_count=$((${__error_count:-0}+1))
}

notice(){
    [ -t 1 ] && printf "${BLUE}"'Notice'"${NC}"'['"${GREEN}"'%s'"${NC}"']: %s\n' "$(basename "${0}")" "${@}"
}

success(){
    [ -t 1 ] && printf "${GREEN}"'Success'"${NC}"'['"${GREEN}"'%s'"${NC}"']: %s\n' "$(basename "${0}")" "${@}"
    __success_count=$((${__success_count:-0}+1))
}

warning(){
    [ -t 1 ] && printf "${YELLOW}"'Warning'"${NC}"'['"${GREEN}"'%s'"${NC}"']: %s\n' "$(basename "${0}")" "${@}"
    __warning_count=$((${__warning_count:-0}+1))
}
# Time-related Functions
####
__translate_seconds(){
    # Translate a time given in seconds (e.g. the difference between two Unix timestamps) to more human-friendly units.
    # So far, I've mostly used this in hook functions to give me a display of how long the parent process has lasted.
    # Example:
    #  local __ctime=$(date +%s)
    #  local __stime=$(stat -c%X /proc/$PPID)
    #  local __time_output="$(__translate_seconds "$(($__ctime - $__stime))")"

    # The optional second argument to this function specifies the format mode.
    # Mode and format examples:
    # 0: 3 hours, 2 minutes, and 1 second (DEFAULT)
    # 1: 3 hours, 2 minutes, 1 second
    # 2: 3h 2m 1s

    local __num=$1
    local __c=0
    local __i=0

    if [ "${2:-0}" -eq 2 ]; then
        # Each "module" should be the unit and the number of that unit until the next phrasing.
        local __modules=(s:60 m:60 h:24 d:7 w:52 y:100 c:100)
    else
        # Each "module" should be a pairing of a name (in plural form),
        #  the number of that unit until the next phrasing,
        #  and (optionally) the phrasing of a single unit (in case lopping an 's' off of the end won't cut it)
        local __modules=(seconds:60 minutes:60 hours:24 days:7 weeks:52 years:100 centuries:100:century)
    fi

    local __modules_count="$(wc -w <<< "${__modules[*]}")"
    # Phase 1: repeatedly divide by each unit's size, recording the
    # remainder per unit in __times; stop once nothing is left.
    while [ "$__i" -lt "$__modules_count" ]; do
        # Cycling through to get values for each unit.
        local __value="$(cut -d':' -f2 <<< "${__modules[$__i]}")"

        local __mod_value="$(($__num % $__value))"
        local __num="$((__num / $__value))"

        local __times[$__i]="$__mod_value"
        local __c=$(($__c+1))
        local __i=$(($__i+1))

        if (( ! $__num )); then
            break
        fi
    done
    unset __module

    # Phase 2: print the collected units from largest to smallest.
    local __i=$(($__c-1))

    while [ "$__i" -ge "0" ]; do
        # Splitting logic for compressed version (mode 2) and
        #  other phrasings requires much less tangled code.
        if [ "${2:-0}" -eq 2 ]; then
            # Short, compressed, and space-efficient version.

            printf "${__times[$__i]}$(cut -d':' -f1 <<< "${__modules[$__i]}")"

            if (( $__i )); then
                printf " "
            fi
        else
            # Long version

            # Cycling through used units in reverse.
            if [ "${2:-0}" -eq 0 ] && (( ! $__i )) && [ "$__c" -gt 1 ]; then
                printf "and "
            fi

            # Handle plural
            if [ "${__times[$__i]}" -eq 1 ]; then
                # Attempt special singluar unit.
                local __s="$(cut -d':' -f3 <<< "${__modules[$__i]}")"
                if [ -n "$__s" ]; then
                    # Singular unit had content.
                    printf "${__times[$__i]} $__s"
                else
                    # Lop the 's' off of unit plural for singular.
                    printf "${__times[$__i]} $(cut -d':' -f1 <<< "${__modules[$__i]}" | sed 's/s$//')"
                fi
            else
                # Standard plural.
                printf "${__times[$__i]} $(cut -d':' -f1 <<< "${__modules[$__i]}")"
            fi

            if (( $__i )); then
                if [ "$__c" -gt 2 ]; then
                    # Prepare for the next unit.
                    # If you aren't a fan of the Oxford comma, then you have some adjusting to do.
                    printf ", "
                else
                    printf " "
                fi
            fi
        fi

        local __i=$(($__i-1))
    done
}
# Script Functions
# True when the argument names a regular file.
is_file(){
    test -f "${1}"
}

# True when the argument names a regular file that is executable.
is_script(){
    test -f "${1}" && test -x "${1}"
}
# Handle Arguments
# Fixes vs. original: (1) an expired certificate could never reach the
# "expired" branch because the warning-threshold check is also true for
# expired certs and was evaluated first; (2) the absolute value no longer
# shells out to Python 2 ("print abs(...)"); (3) read uses -r; (4) "$@" is
# quoted for getopts; (5) readlink is not run when no script was given.
while getopts "f:s:" OPT "$@"; do
    case "${OPT}" in
        "f")
            DATA_FILE="${OPTARG}"
            ;;
        "s")
            SCRIPT="${OPTARG}"
            ;;
    esac
done

if [ -n "${SCRIPT}" ] && ! is_script "${SCRIPT}"; then
    error "$(printf "Not an executable script: ${GREEN}%s${NC}" "${SCRIPT}")"
elif [ -n "${SCRIPT}" ]; then
    # Resolve to an absolute path so later invocations are cwd-independent.
    SCRIPT="$(readlink -f "${SCRIPT}")"
fi

if [ -z "${DATA_FILE}" ]; then
    error "No data file provided."
elif [ -n "${DATA_FILE}" ] && ! is_file "${DATA_FILE}"; then
    error "$(printf "Not a valid data file: ${GREEN}%s${NC}" "${DATA_FILE}")"
fi

(( "${__error_count:-0}" )) && exit 1

TIME_NOW="$(date +%s)"

# One CSV record per line: domain,port,days-threshold,contacts...
while read -r data; do
    [ -z "${data}" ] && continue
    DOMAIN="$(cut -d',' -f1 <<< "${data}")"
    PORT="$(cut -d',' -f2 <<< "${data}")"
    DAYS_WARNING="$(cut -d',' -f3 <<< "${data}")"
    # If invalid day count (empty or bad formatting), skip line
    grep -Pq "^\d+$" <<< "${DAYS_WARNING}" || continue
    DAYS_WARNING_SECONDS="$((${DAYS_WARNING} * 24 * 60 * 60))"
    # If the expiry time of a certificate is before (less) than this point, raise a warning.
    WARNING_THRESHOLD="$((${TIME_NOW}+${DAYS_WARNING_SECONDS}))"
    CONTACTS="$(cut -d',' -f4- <<< "${data}" | sed 's/,/ /g')"

    # If output is not a terminal, then there must be a script contacts to reach.
    [ ! -t 1 ] && ( [ -z "${SCRIPT}" ] || [ -z "${CONTACTS}" ] ) && continue

    # Get data from server.
    EXPIRY_DATE="$(openssl s_client -connect "${DOMAIN}:${PORT}" 2>/dev/null <<< "" | openssl x509 -noout -dates | grep notAfter | cut -d'=' -f2)"

    if [ -z "${EXPIRY_DATE}" ]; then
        error "$(printf "Unable to get expiry information for domain: ${BOLD}%s${NC}" "${DOMAIN}")"
        continue
    fi

    EXPIRY_DATE_UNIX="$(date -d "${EXPIRY_DATE}" +%s)"
    TIME_REMAINING="$((${EXPIRY_DATE_UNIX}-${TIME_NOW}))"
    # Strip a leading '-' for the absolute value (TIME_REMAINING is an integer).
    TIME_REMAINING_READABLE="$(__translate_seconds "${TIME_REMAINING#-}")"

    # Defaults describe a certificate outside the warning window.
    FUNCTION=notice
    FUNCTION_COLOUR="${GREEN}"
    FUNCTION_WORDING="expiring"
    SCRIPT_WORDING="is expiring"
    TIME_REMAINING_WORDING="to go"
    TIME_REMAINING_SCRIPT="in ${TIME_REMAINING_READABLE}"

    # Check "already expired" BEFORE "expiring soon": an expired cert also
    # satisfies the threshold comparison, so the order matters.
    if [ "${TIME_REMAINING}" -lt "0" ]; then
        FUNCTION=warning
        FUNCTION_COLOUR="${RED}"
        FUNCTION_WORDING="has expired"
        TIME_REMAINING_WORDING="ago"
        SCRIPT_WORDING="expired"
        TIME_REMAINING_SCRIPT="${TIME_REMAINING_READABLE} ago"
    elif [ "${WARNING_THRESHOLD}" -gt "${EXPIRY_DATE_UNIX}" ]; then
        FUNCTION=warning
        FUNCTION_COLOUR="${RED}"
        SCRIPT_WORDING="is expiring SOON"
    fi

    "${FUNCTION}" "$(printf "Domain ${BOLD}%s${NC} %s at ${FUNCTION_COLOUR}%s${NC}: ${BOLD}%s${NC}" "${DOMAIN}" "${FUNCTION_WORDING}" "${EXPIRY_DATE}" "${TIME_REMAINING_READABLE} ${TIME_REMAINING_WORDING}")"

    # Script arguments: ./script contact domain state expiry-time time-remaining
    if [ -n "${SCRIPT}" ]; then
        for CONTACT in ${CONTACTS}; do
            "${SCRIPT}" "${CONTACT}" "${DOMAIN}" "${SCRIPT_WORDING}" "${EXPIRY_DATE}" "${TIME_REMAINING_SCRIPT}"
        done
    fi
done < "${DATA_FILE}"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.