text
stringlengths 1
1.05M
|
|---|
/**
 * Returns a new array containing the elements of {@code arr} in reverse order.
 * The input array is not modified.
 */
public static int[] reverseArray(int[] arr) {
    int[] reversed = new int[arr.length];
    // Walk forward over the result, reading the source back-to-front.
    for (int i = 0; i < arr.length; i++) {
        reversed[i] = arr[arr.length - 1 - i];
    }
    return reversed;
}
|
def sort_list(words):
    """Return a new list with the items of ``words`` in ascending order."""
    result = list(words)
    result.sort()
    return result


sorted_list = sort_list(['Elephant', 'Cat', 'Dog'])
print(sorted_list)
|
import { join } from 'path';
// @ts-ignore
import reduceCalc from 'reduce-css-calc';
import classnames from './config/classnames';
import defaultScreens from './config/screens';
import defaultTokens from './config/tokens';
import {
IClasses,
IClassesByType,
IConfig,
IEvaluatedClassnames,
IEvaluatedConfig,
IEvaluatedThemes,
IExtractedClass,
IExtractedClasses,
IGlobalTokens,
IToken,
} from './types';
// Pseudo-class decorators (camelCase) that createClassEntry will translate
// into CSS pseudo selectors (e.g. 'firstChild' -> ':first-child').
export const allowedPseudoDecorators = [
  'hover',
  'focus',
  'active',
  'disabled',
  'visited',
  'firstChild',
  'lastChild',
  'oddChild',
  'evenChild',
  'focusWithin',
];
/**
 * Builds the lookup table of class descriptors for one classname key of the
 * evaluated config. Each token produces an entry keyed by
 * `<dashed-classname>-<token>` carrying its derived processors, optional
 * CSS-variable info and a generated short name.
 */
export const getClassesFromConfig = (
  classnameKey: string,
  config: IEvaluatedConfig,
  getShortName: (labelIndex: number) => string,
) => {
  const classname = config.classnames[classnameKey];
  // A css field that is an array lists other classnames this one derives from.
  const cssDeriver = config.classnames[classnameKey].css;
  const result = {} as IClasses;
  Object.keys(classname.tokens).forEach((tokenKey, tokenIndex) => {
    const id = `${camelToDash(classnameKey)}-${tokenKey}`;
    // A token differs from its "without variables" twin only when a theme
    // replaced its value with a var() reference.
    const usesVariable = classname.tokens[tokenKey] !== classname.tokensWithoutVariables[tokenKey];
    result[id] = {
      id,
      classname: classnameKey,
      token: tokenKey,
      derived: Array.isArray(cssDeriver) ? cssDeriver : null,
      variable: usesVariable
        ? {
            value: classname.tokens[tokenKey].value,
            originalValue: classname.tokensWithoutVariables[tokenKey].value,
          }
        : null,
      shortName: getShortName(tokenIndex),
    };
  });
  return result;
};
/**
 * Merges `b` into `a` two levels deep and returns `a` (mutated in place).
 * Second-level keys from `b` overwrite those already on `a`.
 */
export const deepAssign = (
  a: { [key: string]: { [key: string]: string } },
  b: { [key: string]: { [key: string]: string } },
) => {
  for (const key of Object.keys(b)) {
    // Ensure the first-level bucket exists before copying into it.
    const target = a[key] || (a[key] = {});
    for (const subKey of Object.keys(b[key])) {
      target[subKey] = b[key][subKey];
    }
  }
  return a;
};
/**
 * Evaluates a raw user config into the runtime config shape:
 * - normalizes tokens so every value is an IToken object,
 * - inverts the themes map into a token -> value -> theme lookup,
 * - substitutes CSS variable references for themed token values,
 * - resolves dynamic classname token factories against both token sets.
 */
export const evaluateConfig = (config: IConfig): IEvaluatedConfig => {
  // Normalize tokens: use the user's tokens for a key when provided; if the
  // user supplied a tokens object at all, missing keys become {} (defaults are
  // NOT merged in); only a fully absent config.tokens falls back to defaults.
  const originalTokens = Object.keys(defaultTokens).reduce<IGlobalTokens<IToken>>((aggr, key) => {
    const toExtract =
      config.tokens && (config.tokens as any)[key]
        ? (config.tokens as any)[key]
        : config.tokens
        ? {}
        : (defaultTokens as any)[key];
    // Plain string values are wrapped into { value } IToken objects.
    (aggr as any)[key] = Object.keys(toExtract).reduce<{ [token: string]: IToken }>((subAggr, subKey) => {
      subAggr[subKey] =
        typeof toExtract[subKey] === 'string'
          ? {
              value: toExtract[subKey],
            }
          : toExtract[subKey];
      return subAggr;
    }, {});
    return aggr;
  }, {} as IGlobalTokens<IToken>);
  // Reverse themes lookup to tokens instead: theme -> token -> value becomes
  // token -> value -> theme, so a token can ask "which themes override me?".
  const configThemes = config.themes || {};
  const themesByTokens = Object.keys(configThemes).reduce((aggr, themeKey) => {
    Object.keys(configThemes[themeKey]).forEach(tokenKey => {
      if (!aggr[tokenKey]) {
        aggr[tokenKey] = {};
      }
      Object.keys((configThemes[themeKey] as any)[tokenKey]).forEach(valueKey => {
        if (!aggr[tokenKey][valueKey]) {
          aggr[tokenKey][valueKey] = {};
        }
        aggr[tokenKey][valueKey][themeKey] = (configThemes[themeKey] as any)[tokenKey][valueKey];
      });
    });
    return aggr;
  }, {} as IEvaluatedThemes);
  // Evaluated variables where values are replaced by CSS variable references
  // (var(--key-subKey)) whenever at least one theme overrides the token.
  const tokens = Object.keys(originalTokens).reduce<IGlobalTokens<IToken>>((aggr, key) => {
    (aggr as any)[key] = Object.keys((originalTokens as any)[key]).reduce<{ [token: string]: IToken }>(
      (subAggr, subKey) => {
        subAggr[subKey] = {
          ...(originalTokens as any)[key][subKey],
          value:
            themesByTokens[key] && themesByTokens[key][subKey]
              ? `var(--${key}-${subKey})`
              : (originalTokens as any)[key][subKey].value,
        };
        return subAggr;
      },
      {},
    );
    return aggr;
  }, {} as IGlobalTokens<IToken>);
  // Call any dynamic classname tokens with both the original variables and
  // the ones who have been evaluated with CSS variables.
  const evaluatedClassnames = Object.keys(classnames).reduce((aggr, key) => {
    aggr[key] = {
      ...classnames[key],
      tokensWithoutVariables:
        typeof (classnames[key] as any).tokens === 'function'
          ? (classnames[key] as any).tokens(originalTokens, { negative })
          : (classnames[key] as any).tokens,
      tokens:
        typeof (classnames[key] as any).tokens === 'function'
          ? (classnames[key] as any).tokens(tokens, { negative })
          : (classnames[key] as any).tokens,
      description: (classnames[key] as any).description,
    } as any;
    return aggr;
  }, {} as IEvaluatedClassnames);
  return {
    tokens,
    screens: config.screens || defaultScreens,
    classnames: evaluatedClassnames,
    themes: themesByTokens,
    themeNames: Object.keys(config.themes || {}),
  };
};
/**
 * Loads `classy-ui.config.js` from the current working directory.
 * Returns an empty object when the file is absent or fails to load
 * (any require error is treated as "no user config").
 */
export const getUserConfig = () => {
  try {
    return require(join(process.cwd(), 'classy-ui.config.js'));
  } catch (error) {
    return {};
  }
};
/**
 * Concatenates the collected production CSS: common classes first, then each
 * screen's classes wrapped by its media-query factory, then :root variables,
 * then one `.themes-<name>` rule per theme with its variable overrides.
 */
export const createProductionCss = (productionClassesByType: IClassesByType, config: IEvaluatedConfig) => {
  const { common, screens, rootTokens, themeTokens } = productionClassesByType;
  let css = '';
  for (const name of Object.keys(common)) {
    css += common[name];
  }
  for (const screen of Object.keys(screens)) {
    // Only emit the media query wrapper when the screen has any classes.
    if (screens[screen].length) {
      css += config.screens[screen](screens[screen].join(''));
    }
  }
  const variableKeys = Object.keys(rootTokens);
  if (variableKeys.length) {
    css += `:root{${variableKeys.map(key => `${key}:${rootTokens[key]};`).join('')}}`;
  }
  for (const theme of Object.keys(themeTokens)) {
    const variables = Object.keys(themeTokens[theme])
      .map(variableKey => themeTokens[theme][variableKey])
      .join('');
    css += `.themes-${theme}{${variables}}`;
  }
  return css;
};
/**
 * Converts camelCase to dash-case, e.g. 'backgroundColor' -> 'background-color'.
 * A dash is inserted between a word character and the uppercase letter that
 * follows it, then the whole string is lowercased.
 */
export const camelToDash = (string: string) => {
  return string.replace(/[\w]([A-Z])/g, m => `${m[0]}-${m[1]}`).toLowerCase();
};
/**
 * Produces one CSS rule string for a class. Pseudo decorators become pseudo
 * selectors on the class itself; group decorators prepend a `.group:<state>`
 * ancestor selector. The `css` callback receives the final escaped selector.
 */
export const createClassEntry = (name: string, decorators: string[], css: (name: string) => string) => {
  // 'groupHover' etc. -> ancestor states; plain 'group' itself is excluded.
  const groupDecorators = decorators
    .filter(decorator => decorator.startsWith('group') && decorator !== 'group')
    .map(decorator => camelToDash(decorator.substr(5)));
  const pseudoDecorators = decorators
    .filter(decorator => allowedPseudoDecorators.includes(decorator))
    .map(decorator => camelToDash(decorator));
  // Escape ':' in the class name (it is a literal character in the selector).
  const escapedName = name.replace(/\:/g, '\\:');
  const pseudoSuffix = pseudoDecorators.length ? `:${pseudoDecorators.join(':')}` : '';
  const evaluatedName = `.${escapedName}${pseudoSuffix}`;
  const groupPrefix = groupDecorators.length ? `.group:${groupDecorators.join(':')} ` : '';
  return `${groupPrefix}${css(evaluatedName)}`;
};
export const flat = (array: any[]) => array.reduce((aggr, item) => aggr.concat(item), []);
/**
 * Fills `productionClassesByType` with CSS entries for every extracted class,
 * using the short (minified) class names. Entries land in `common`, or under
 * `screens[screen]` when screen decorators are present; CSS-variable-backed
 * classes additionally register :root defaults and per-theme overrides.
 * Returns the mutated `productionClassesByType`.
 */
export const injectProduction = (
  productionClassesByType: IClassesByType,
  classCollection: IExtractedClasses,
  classes: IClasses,
  config: IEvaluatedConfig,
) => {
  Object.keys(classCollection).forEach(uid => {
    const extractedClass = classCollection[uid];
    const configClass = classes[extractedClass.id as string];
    // Split decorators: screen names become media-query buckets, the rest
    // (pseudo/group) are handled by createClassEntry.
    const screenDecorators = extractedClass.decorators.filter(decorator => decorator in config.screens);
    const otherDecorators = extractedClass.decorators.filter(decorator => !(decorator in config.screens));
    let classEntry: any;
    try {
      // Decorator prefix up to and including the last ':' of the extracted name.
      const prefix = extractedClass.name.substr(0, extractedClass.name.lastIndexOf(':') + 1);
      const cssProcessor = config.classnames[configClass.classname].css;
      // The classname definition might reference other CSS processors; we extract the base classname for lookups
      const classnameKeys = Array.isArray(cssProcessor) ? cssProcessor : [configClass.classname];
      classnameKeys.forEach(classnameKey => {
        const classConfig = config.classnames[classnameKey];
        const id = `${camelToDash(classnameKey)}-${configClass.token}`;
        const name = classes[id].shortName;
        const classname = prefix + name;
        classEntry = createClassEntry(classname, otherDecorators, evaluatedName =>
          (classConfig.css as any)(evaluatedName, classConfig.tokens[configClass.token].value),
        );
        if (screenDecorators.length) {
          screenDecorators.forEach(screen => {
            productionClassesByType.screens[screen] = productionClassesByType.screens[screen] || [];
            productionClassesByType.screens[screen].push(classEntry);
          });
        } else {
          productionClassesByType.common[classname] = classEntry;
        }
        if (configClass.variable) {
          const themes = config.themes || {};
          const variableValue = configClass.variable.value;
          const originalValue = configClass.variable.originalValue;
          // Extract the custom-property names out of the var(...) references.
          const variables = (variableValue.match(/var\(.*\)/) || []).map(varString =>
            varString.replace(/var\(|\)/g, ''),
          );
          config.themeNames.forEach(theme => {
            productionClassesByType.themeTokens[theme] = productionClassesByType.themeTokens[theme] || {};
            variables.forEach(variable => {
              // '--key-sub-key' -> lookup key 'key' and value key 'sub-key'.
              const variableParts = variable.substr(2).split('-');
              const variableKey = variableParts.shift() as string;
              const variableValueKey = variableParts.join('-');
              productionClassesByType.themeTokens[theme][
                variable
              ] = `${variable}:${themes[variableKey][variableValueKey][theme]};`;
              productionClassesByType.rootTokens[variable] = originalValue;
            });
          });
        }
      });
    } catch (error) {
      // Surface which extracted class broke the injection; the original error is discarded.
      throw new Error(uid + JSON.stringify(extractedClass, null, 2));
    }
  });
  return productionClassesByType;
};
/**
 * Builds development-mode injections for every extracted class using readable
 * class names. Returns a flat array of alternating [classname, css, ...]
 * pairs; theme/variable-backed classes append :root and .themes-<name>
 * declarations to their css string.
 */
export const injectDevelopment = (classCollection: IExtractedClasses, classes: IClasses, config: IEvaluatedConfig) => {
  return Object.keys(classCollection).reduce((aggr, uid) => {
    const extractedClass = classCollection[uid];
    const mainId = extractedClass.id as string;
    // Screen decorators select media-query wrappers; the rest go to createClassEntry.
    const screenDecorators = extractedClass.decorators.filter(decorator => decorator in config.screens);
    const otherDecorators = extractedClass.decorators.filter(decorator => !(decorator in config.screens));
    // Decorator prefix up to and including the last ':' of the extracted name.
    const prefix = extractedClass.name.substr(0, extractedClass.name.lastIndexOf(':') + 1);
    const configClass = classes[mainId];
    const cssProcessor = config.classnames[configClass.classname].css;
    // The classname definition might reference other CSS processors; we extract the base classname for lookups
    const classnameKeys = Array.isArray(cssProcessor) ? cssProcessor : [configClass.classname];
    return aggr.concat(
      classnameKeys.reduce((injections, classnameKey) => {
        const classConfig = config.classnames[classnameKey];
        // Development names stay readable: '<dashed-classname>__<token>'.
        const name = `${camelToDash(classnameKey)}__${configClass.token}`;
        const classname = prefix + name;
        const classEntry = createClassEntry(classname, otherDecorators, evaluatedName =>
          (classConfig.css as any)(evaluatedName, classConfig.tokens[configClass.token].value),
        );
        let css = '';
        if (screenDecorators.length) {
          screenDecorators.forEach(screen => {
            css += config.screens[screen](classEntry);
          });
        } else {
          css = classEntry;
        }
        if (configClass.variable) {
          const themes = config.themes || {};
          const variableValue = configClass.variable.value;
          const originalValue = configClass.variable.originalValue;
          // Extract the custom-property names out of the var(...) references.
          const variables = (variableValue.match(/var\(.*\)/) || []).map(varString =>
            varString.replace(/var\(|\)/g, ''),
          );
          variables.forEach(variable => {
            // '--key-sub-key' -> lookup key 'key' and value key 'sub-key'.
            const variableParts = variable.substr(2).split('-');
            const variableKey = variableParts.shift() as string;
            const variableValueKey = variableParts.join('-');
            config.themeNames.forEach(theme => {
              css += `:root{${variable}:${originalValue};}\n.themes-${theme}{${variable}:${themes[variableKey][variableValueKey][theme]};}`;
            });
          });
        }
        return injections.concat([classname, css]);
      }, [] as string[]),
    );
  }, [] as string[]);
};
/**
 * Derives a negated token scale: every non-zero token reappears under a
 * 'NEG_' prefixed key with its value negated via negateValue. Zero-valued
 * tokens are dropped (negating '0' is meaningless).
 */
export const negative = (scale: { [key: string]: IToken }) => {
  const negativeScale: { [key: string]: IToken } = {};
  for (const key of Object.keys(scale)) {
    if (scale[key].value === '0') {
      continue;
    }
    negativeScale[`NEG_${key}`] = {
      ...scale[key],
      value: negateValue(scale[key].value),
    };
  }
  return negativeScale;
};
/**
 * Negates a CSS length/expression by reducing `calc(<value> * -1)`.
 * Falls back to the original value when reduce-css-calc cannot parse it.
 */
export const negateValue = (value: string) => {
  try {
    return reduceCalc(`calc(${value} * -1)`);
  } catch {
    // Unparseable value: best effort, return it untouched.
    return value;
  }
};
/**
 * Builds the IExtractedClass descriptor for one usage of a class.
 * `id` is the lookup key into `classes`; `uid` is the decorator-qualified
 * development identifier; `name` is the emitted class string — short names in
 * production, readable '__' names otherwise, expanded across derived
 * classnames when the class derives from others.
 */
export const createClassObject = (
  {
    baseClass,
    token,
    decorators,
  }: {
    baseClass: string;
    token: string;
    decorators: string[];
  },
  classes: IClasses,
  isProduction: boolean,
): IExtractedClass => {
  const dashedBase = camelToDash(baseClass);
  const id = `${dashedBase}-${token}`;
  // Note: sort() runs before slice(), so returnedDecorators is the sorted order.
  const uid = [decorators.sort().join(':'), `${dashedBase}__${token}`]
    .filter(Boolean)
    .filter(part => part!.length > 0)
    .join(':');
  const returnedDecorators = decorators.slice() as IExtractedClass['decorators'];

  // Expands a derived class into one space-separated name per base classname.
  const expandDerived = (toName: (key: string) => string) =>
    classes[id].derived!.map(toName).join(' ');

  let name: string;
  if (id && isProduction && classes[id].derived) {
    name = expandDerived(key => classes[`${camelToDash(key)}-${token}`].shortName);
  } else if (id && isProduction) {
    name = classes[id].shortName;
  } else if (id && classes[id].derived) {
    name = expandDerived(key => `${camelToDash(key)}__${token}`);
  } else {
    name = uid;
  }

  return {
    id,
    uid,
    name,
    decorators: returnedDecorators,
  };
};
/**
 * Converts a 1-based index to a spreadsheet-style letter label:
 * 1 -> 'A', 26 -> 'Z', 27 -> 'AA' (bijective base-26).
 * The do-while deliberately runs once even for num <= 0, matching the
 * original behavior for out-of-range input.
 */
export const generateShortName = (num: number) => {
  const baseChar = 'A'.charCodeAt(0);
  let letters = '';
  do {
    num -= 1;
    letters = String.fromCharCode(baseChar + (num % 26)) + letters;
    num = Math.trunc(num / 26);
  } while (num > 0);
  return letters;
};
/** Converts dash-case to camelCase, e.g. 'background-color' -> 'backgroundColor'. */
export const hyphenToCamelCase = (str: string) =>
  str.replace(/-([a-z])/g, (_match, letter: string) => letter.toUpperCase());
|
package com.nameless.bank.web.forms;
import java.util.Collection;
/**
* Created by Глеб on 06.04.2016.
*/
/**
 * Web form backing bean for the money-transfer frame.
 * Plain getter/setter holder bound by the web layer.
 */
public class TransactFrameForm {
    // Id of the account the money is sent from.
    private int fromId;
    // Display name of the sending account.
    private String fromName;
    // Recipients. NOTE(review): raw Collection — element type is not visible
    // here; confirm with callers before parameterizing.
    private Collection to;
    // Amount to transfer. NOTE(review): int implies whole currency units —
    // confirm there is no fractional-amount requirement.
    private int sum;
    public int getFromId() {
        return fromId;
    }
    public void setFromId(int fromId) {
        this.fromId = fromId;
    }
    public String getFromName() {
        return fromName;
    }
    public void setFromName(String fromName) {
        this.fromName = fromName;
    }
    public Collection getTo() {
        return to;
    }
    public void setTo(Collection to) {
        this.to = to;
    }
    public int getSum() {
        return sum;
    }
    public void setSum(int sum) {
        this.sum = sum;
    }
}
|
#!/bin/sh
# CocoaPods "Copy Pods Resources" script phase (generated).
# NOTE(review): the script uses bash-only features (set -o pipefail, [[ ]],
# arrays, 'function') under a /bin/sh shebang — works where sh is bash,
# confirm for strict-POSIX shells.
set -e
set -u
set -o pipefail
# Report the failing line number when any command errors out.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
  # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
  # resources to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain files to rsync at the end; truncated on every run.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Map the Xcode device-family setting onto ibtool/actool --target-device flags.
case "${TARGETED_DEVICE_FAMILY:-}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Dispatches one pod resource by extension: storyboards/xibs are compiled with
# ibtool, Core Data models with momc/mapc, frameworks rsynced, xcassets queued
# for a single actool invocation, and everything else appended to the
# $RESOURCES_TO_COPY manifest. Each action echoes its command first for the
# build log ('|| true' keeps the echo from tripping 'set -e').
install_resource()
{
  # Accept absolute paths as-is; resolve relative ones against PODS_ROOT.
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # Asset catalogs are collected and compiled together at the end.
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Queue the pod bundles for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/BHUD/BHUDImagesBundle.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/BHUD/BHUDImagesBundle.bundle"
fi
# Copy all queued plain resources into the app bundle (and the install dir on archive builds).
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # BUG FIX: the '*' must be OUTSIDE the quotes to act as a glob. Quoted as
    # "${PODS_ROOT}*" this was a literal string comparison that was effectively
    # always true, so Pods-owned catalogs were duplicated into XCASSET_FILES.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  # Compile every collected asset catalog in one actool run; pass the app-icon
  # options only when ASSETCATALOG_COMPILER_APPICON_NAME is set.
  if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  else
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
  fi
fi
|
<gh_stars>10-100
// Doxygen-generated navigation data: entries of
// [ label, landing page, reference id ] for the occupancy sensors namespace.
var namespacedroid_1_1_runtime_1_1_prototyping_1_1_sensors_1_1_occupancy =
[
    [ "Occupancy3dSensor", "classdroid_1_1_runtime_1_1_prototyping_1_1_sensors_1_1_occupancy_1_1_occupancy3d_sensor.html", "classdroid_1_1_runtime_1_1_prototyping_1_1_sensors_1_1_occupancy_1_1_occupancy3d_sensor" ]
];
|
# The source data.
nums = [2, 4, 5, 7, 8, 9, 10]
# Keep only the odd values (comprehension replaces the explicit loop).
odd_nums = [num for num in nums if num % 2 == 1]
# Show the result.
print("Odd numbers array:", odd_nums)
|
package com.bypassmobile.octo.model;
import java.util.ArrayList;
import java.util.List;
/**
 * Pairs a list of items with a load status, so UI code can distinguish
 * "not loaded", "loading" and "failed" states from an empty result.
 */
public class DataWrapper <T> {
    public enum Status {
        NONE,
        LOADING,
        ERROR
    }
    // Current load state; starts at NONE.
    private Status status;
    // Wrapped items; never null (initialized to an empty list).
    private List<T> dataList;
    public DataWrapper () {
        status = Status.NONE;
        dataList = new ArrayList<>();
    }
    public Status getStatus () {
        return status;
    }
    public void setStatus (final Status status) {
        this.status = status;
    }
    public List<T> getDataList () {
        return dataList;
    }
    // Replaces the wrapped list (stores the reference, no defensive copy).
    public void setData (final List<T> dataList) {
        this.dataList = dataList;
    }
}
|
<filename>apkanalyser/src/andreflect/gui/action/injection/DalvikMethodFieldReadAction.java
/*
* Copyright (C) 2012 Sony Mobile Communications AB
*
* This file is part of ApkAnalyser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package andreflect.gui.action.injection;
import java.awt.event.ActionEvent;
import java.util.Iterator;
import javax.swing.Icon;
import jerl.bcm.inj.Injection;
import mereflect.MEMethod;
import org.jf.dexlib.CodeItem;
import org.jf.dexlib.FieldIdItem;
import org.jf.dexlib.Code.Instruction;
import org.jf.dexlib.Code.InstructionWithReference;
import analyser.gui.MainFrame;
import analyser.gui.Selection;
import analyser.gui.TextBuilder;
import analyser.gui.actions.bytecodemod.AbstractTreeBytecodeModAction;
import analyser.logic.BytecodeModificationMediator;
import analyser.logic.RefContext;
import analyser.logic.RefField;
import analyser.logic.RefFieldAccess;
import analyser.logic.RefMethod;
import analyser.logic.Reference;
import andreflect.ApkClassContext;
import andreflect.DexField;
import andreflect.DexMethod;
import andreflect.Util;
import andreflect.injection.impl.DalvikMethodField;
/**
 * Tree action that injects "print on field read" instrumentation into dex
 * methods. Operates in two modes: all field reads of the selected scope, or
 * reads of one specific field. Exposed as two lazily-created singletons.
 */
public class DalvikMethodFieldReadAction extends AbstractTreeBytecodeModAction {
    private static final long serialVersionUID = 8065552041163293845L;
    // Singleton for the "print reading all fields" menu entry.
    protected static DalvikMethodFieldReadAction m_inst = null;
    // Singleton for the "print reading this field" menu entry.
    protected static DalvikMethodFieldReadAction m_inst_onefield = null;
    // Progress bookkeeping for traverseInside(): total methods / methods visited.
    int traverseCount, traverseIndex = 0;
    // Field currently targeted; null means "match any field read".
    FieldIdItem fieldIdItem;
    public static DalvikMethodFieldReadAction getInstance(MainFrame mainFrame)
    {
        if (m_inst == null)
        {
            m_inst = new DalvikMethodFieldReadAction("Print reading all fields", null);
            m_inst.setMainFrame(mainFrame);
        }
        return m_inst;
    }
    public static DalvikMethodFieldReadAction getInstanceOneField(MainFrame mainFrame)
    {
        if (m_inst_onefield == null)
        {
            m_inst_onefield = new DalvikMethodFieldReadAction("Print reading this field", null);
            m_inst_onefield.setMainFrame(mainFrame);
        }
        return m_inst_onefield;
    }
    protected DalvikMethodFieldReadAction(String arg0, Icon arg1)
    {
        super(arg0, arg1);
    }
    /**
     * Entry point. A selected RefField is handled inline: every read access of
     * that field gets an injection registered. Any other selection (including
     * a TextBuilder view, whose branch is intentionally empty) falls through
     * to the superclass traversal, which ends up calling modify().
     */
    @Override
    public void run(ActionEvent e) throws Throwable {
        if (Selection.getSelectedView() instanceof TextBuilder) {
        } else {
            Object ref = Selection.getSelectedObject();
            if (ref != null && ref instanceof RefField) {
                RefField rf = (RefField) ref;
                Iterator<Reference> i = rf.getChildren().iterator();
                while (i.hasNext()) {
                    RefFieldAccess access = (RefFieldAccess) i.next();
                    fieldIdItem = access.getAccess().fieldIdItem;
                    if (access.getAccess().isRead) {
                        createInjection(access.getAccess().method);
                    }
                }
                return;
            }
        }
        super.run(e);
    }
    /**
     * Called by the superclass for each selected method. With no specific
     * field recorded (Util.getFieldIdItem() == null) the single method is
     * instrumented if it reads any field. Otherwise the whole owning APK
     * context is located from the current TextBuilder line and traversed,
     * instrumenting every method that reads the targeted field.
     */
    @Override
    protected void modify(MEMethod method) throws Throwable {
        fieldIdItem = Util.getFieldIdItem();
        if (fieldIdItem == null) {
            // Abstract methods have no code item to inspect.
            if (method.isAbstract()) {
                return;
            }
            DexMethod dexMethod = (DexMethod) method;
            // null fieldIdItem -> hasReadField matches any field read.
            if (hasReadField(dexMethod, fieldIdItem)) {
                createInjection(dexMethod);
            }
        } else {
            // Resolve the APK context that owns the field under the cursor.
            Object ref = Selection.getSelectedView();
            TextBuilder tb = (TextBuilder) ref;
            Object lineRef = tb.getLineBuilder().getReference(tb.getCurrentLine());
            ApkClassContext apkContext = (ApkClassContext) (((DexField) lineRef).getDexClass().getResource().getContext());
            Iterator<Reference> i = MainFrame.getInstance().getResolver().getMidletResources().iterator();
            RefContext refContext = null;
            while (i.hasNext()) {
                Object obj = i.next();
                if (obj instanceof RefContext
                        && ((RefContext) obj).getContext() == apkContext) {
                    refContext = (RefContext) obj;
                    break;
                }
            }
            if (refContext != null) {
                // Pre-count methods so traverseInside can report progress.
                traverseCount = 0;
                traverseIndex = 0;
                getTraverseCount(refContext);
                if (traverseCount != 0) {
                    traverseInside(refContext);
                }
            }
        }
    }
    // Registers a field-read print injection for the given method and marks
    // the method as modified in the midlet tree.
    private void createInjection(DexMethod method) {
        DalvikMethodField fieldInjection = new DalvikMethodField(getMethodSignature(method),
                method.getMEClass().getName() + ":" + getMethodSignature(method),
                fieldIdItem,
                true);
        BytecodeModificationMediator.getInstance().registerModification(
                method.getMEClass().getResource().getContext(),
                method.getMEClass(),
                fieldInjection,
                method);
        ((MainFrame) getMainFrame()).getMidletTree().findAndMarkNode(method, Reference.MODIFIED);
    }
    // Recursively counts RefMethod leaves under ref (for progress reporting).
    protected void getTraverseCount(Reference ref) throws Throwable {
        if (ref instanceof RefMethod) {
            traverseCount++;
        } else {
            Iterator<Reference> i = ref.getChildren().iterator();
            while (i.hasNext()) {
                getTraverseCount(i.next());
            }
        }
    }
    // Recursively visits RefMethod leaves, instrumenting those that read the
    // targeted field and reporting progress as a percentage.
    protected void traverseInside(Reference ref) throws Throwable {
        if (ref instanceof RefMethod) {
            if (hasReadField((DexMethod) ((RefMethod) ref).getMethod(), fieldIdItem)) {
                createInjection((DexMethod) ((RefMethod) ref).getMethod());
            }
            getMainFrame().actionReportWork(this, 100 * traverseIndex++ / traverseCount);
        } else {
            Iterator<Reference> i = ref.getChildren().iterator();
            while (i.hasNext()) {
                traverseInside(i.next());
            }
        }
    }
    @Override
    protected Injection getInjection(String className, String methodSignature) {
        //not used
        return null;
    }
    /**
     * Returns true when the method contains an instance (IGET*) or static
     * (SGET*) field-read instruction. A null fieldIdItem matches any field;
     * otherwise the instruction's referenced item must be the same object.
     */
    public boolean hasReadField(DexMethod method, FieldIdItem fieldIdItem) {
        boolean ret = false;
        CodeItem codeItem = method.getEncodedMethod().codeItem;
        if (codeItem != null) {
            Instruction[] instructions = method.getEncodedMethod().codeItem.getInstructions();
            for (Instruction instruction : instructions) {
                switch (instruction.deodexedInstruction.opcode) {
                case IGET:
                case IGET_WIDE:
                case IGET_OBJECT:
                case IGET_BOOLEAN:
                case IGET_BYTE:
                case IGET_CHAR:
                case IGET_SHORT:
                case SGET:
                case SGET_WIDE:
                case SGET_OBJECT:
                case SGET_BOOLEAN:
                case SGET_BYTE:
                case SGET_CHAR:
                case SGET_SHORT:
                    if (fieldIdItem == null
                            || ((InstructionWithReference) instruction).getReferencedItem() == fieldIdItem) {
                        ret = true;
                    }
                    break;
                /*
                case IGET_QUICK:
                case IGET_WIDE_QUICK:
                case IGET_OBJECT_QUICK:
                //for gingerbread
                case IGET_VOLATILE:
                case IGET_WIDE_VOLATILE:
                case IGET_OBJECT_VOLATILE:
                case SGET_VOLATILE:
                case SGET_WIDE_VOLATILE:
                case SGET_OBJECT_VOLATILE:
                Instruction deodexedIns = method.getDeodexedInstruction(instruction);
                if (fieldIdItem == null
                ||((InstructionWithReference)deodexedIns).getReferencedItem() == fieldIdItem){
                ret = true;
                }
                break;
                */
                }
                if (ret == true) {
                    break;
                }
            }
        }
        return ret;
    }
}
|
#!/usr/bin/bash
# Builds and publishes the perfsonar/pwa-admin docker image for the version
# recorded in the 'version' file.
VERSION=$(cat version)
echo "preparing pwa-admin version $VERSION"

# Assemble a clean build context under pwa-admin/tmp.
rm -rf pwa-admin/tmp
mkdir pwa-admin/tmp
cp -r ../../api pwa-admin/tmp
cp -r ../../ui pwa-admin/tmp
cp -r ../../package.json pwa-admin/tmp

# Strip local configuration and credentials so they never reach the image.
rm -f pwa-admin/tmp/api/config.js
rm -f pwa-admin/tmp/api/auth.pub
rm -f pwa-admin/tmp/api/auth.key
rm -f pwa-admin/tmp/api/user.jwt
rm -rf pwa-admin/tmp/api/config
# BUG FIX: path was 'pwa/admin/tmp/...' (nonexistent), so the bundled
# bootstrap css was never actually removed from the context.
rm -rf pwa-admin/tmp/ui/node_modules/bootstrap/dist/css

docker build pwa-admin -t perfsonar/pwa-admin:$VERSION --no-cache --force-rm
if [ ! $? -eq 0 ]; then
    echo "failed to build"
    # Exit non-zero so callers (CI) see the failure; bare 'exit' would
    # propagate $? of the echo above, i.e. success.
    exit 1
fi
#docker tag perfsonar/pwa-admin perfsonar/pwa-admin:$VERSION
docker push perfsonar/pwa-admin:$VERSION
#docker tag perfsonar/pwa-admin perfsonar/pwa-admin:latest
#docker push perfsonar/pwa-admin:latest
|
<?php
// date('w') yields the day of week as a string: '0' = Sunday … '6' = Saturday.
$weekday = date('w');
$isWeekend = in_array($weekday, array('0', '6'), true);
if ($isWeekend) {
    echo 'It\'s the weekend, go and have fun!';
} else {
    echo 'It\'s a weekday, time to work!';
}
?>
|
import React from "react";
import _ from "lodash";
import countries from "../../Config/countries";
import ReactCountryFlag from "react-country-flag";
import { isWindows } from "../../Utils/device";
const emojiSupport = require("../../Helpers/detectEmojiSupport");
const emojiSupported = emojiSupport();
export default ({ locationDetails }) => {
if (locationDetails && emojiSupported && !isWindows()) {
let { terms } = locationDetails;
let countryName = _.last(terms);
if (countryName) {
let country = _.find(
countries,
(c) => c.name.toLowerCase() === countryName.value.toLowerCase()
);
if (country) {
return (
<ReactCountryFlag
countryCode={country.countryCode}
// svg
style={{
fontSize: "1em",
lineHeight: "1em"
}}
/>
);
}
}
}
return null;
};
|
package Chapter3_2Low;
import edu.princeton.cs.algs4.BST;
import edu.princeton.cs.algs4.StdIn;
/**
 * Exercise 3.2.10: drives the algs4 BST with keys read from stdin (insertion
 * order becomes the value), then prints the result of each ordered-symbol-table
 * operation followed by the table contents after each deletion.
 */
public class TestBST {
    //Exercise 3.2.10
    public static void main(String[] args) {
        BST<String, Integer> bst = new BST<>();
        // Each whitespace-separated token from stdin becomes a key; its value
        // is the position at which it was read.
        for (int i = 0; !StdIn.isEmpty(); i++) {
            String key = StdIn.readString();
            bst.put(key, i);
        }
        for (String s : bst.keys()) {
            System.out.println(s + " " + bst.get(s));
        }
        System.out.println("min(): " + bst.min());
        System.out.println("max(): " + bst.max());
        System.out.println("floor(\"F\"): " + bst.floor("F"));
        System.out.println("ceiling(\"C\"): " + bst.ceiling("C"));
        System.out.println("select(1): " + bst.select(1));
        System.out.println("rank(\"R\"): " + bst.rank("R"));
        System.out.println("delete(\"D\"): ");
        bst.delete("D");
        for (String s : bst.keys()) {
            System.out.println(s + " " + bst.get(s));
        }
        System.out.println("deleteMin(): ");
        bst.deleteMin();
        for (String s : bst.keys()) {
            System.out.println(s + " " + bst.get(s));
        }
        System.out.println("deleteMax(): ");
        bst.deleteMax();
        for (String s : bst.keys()) {
            System.out.println(s + " " + bst.get(s));
        }
    }
}
|
/**
 * resolve to absolute for external urls, relative for same domain
 * @param {string} path
 * @param {string} from
 * @returns {string}
 */
export function resolve(path, from) {
  // data: URIs are self-contained — return them untouched.
  if (path.match(/^['"]?data:/)) {
    return path;
  }
  const baseURL = new URL(from, window.location);
  const pathURL = new URL(path, baseURL);
  // Treat the URL as external when protocol, host or port differ between the
  // base, the resolved path, and the current document. (The original listed
  // the baseURL/pathURL port comparison twice; the duplicate is removed.)
  if (
    baseURL.protocol !== pathURL.protocol ||
    baseURL.host !== pathURL.host ||
    pathURL.host !== window.location.host ||
    baseURL.port !== pathURL.port ||
    pathURL.port !== window.location.port ||
    pathURL.protocol !== window.location.protocol
  ) {
    return pathURL.toString();
  }
  // Same-origin: return a document-relative URL.
  return pathURL.pathname + pathURL.search + pathURL.hash;
}
|
<filename>mcu_source/Libraries/utilities/chusb/inc/usbd_cp210x.h<gh_stars>1-10
/* USB device-side CP210x (virtual COM port) interface: vendor request codes,
 * response structures and the application callback table. */
#ifndef __USBD_CP210X_SERIAL_H_
#define __USBD_CP210X_SERIAL_H_
#include <stdint.h>
#include <usbd.h>
#include <usb_common.h>
/* Config request codes */
#define CP210X_IFC_ENABLE 0x00
#define CP210X_SET_BAUDDIV 0x01
#define CP210X_GET_BAUDDIV 0x02
#define CP210X_SET_LINE_CTL 0x03
#define CP210X_GET_LINE_CTL 0x04
#define CP210X_SET_BREAK 0x05
#define CP210X_IMM_CHAR 0x06
#define CP210X_SET_MHS 0x07
#define CP210X_GET_MDMSTS 0x08
#define CP210X_SET_XON 0x09
#define CP210X_SET_XOFF 0x0A
#define CP210X_SET_EVENTMASK 0x0B
#define CP210X_GET_EVENTMASK 0x0C
#define CP210X_SET_CHAR 0x0D
#define CP210X_GET_CHARS 0x0E
#define CP210X_GET_PROPS 0x0F
#define CP210X_GET_COMM_STATUS 0x10
#define CP210X_RESET 0x11
#define CP210X_PURGE 0x12
#define CP210X_SET_FLOW 0x13
#define CP210X_GET_FLOW 0x14
#define CP210X_EMBED_EVENTS 0x15
#define CP210X_GET_EVENTSTATE 0x16
#define CP210X_SET_CHARS 0x19
#define CP210X_GET_BAUDRATE 0x1D
#define CP210X_SET_BAUDRATE 0x1E
#define CP210X_VENDOR_SPECIFIC 0xFF
/* Communication properties response (CP210X_GET_PROPS).
 * NOTE(review): unlike cp210x_ssr_t below, this struct is not marked
 * __packed — confirm the compiler's default layout matches the wire
 * format before sending it verbatim. */
typedef struct
{
    uint16_t wLength;
    uint16_t bcdVersion;
    uint32_t ulServiceMask;
    uint32_t _reserved8;
    uint32_t ulMaxTxQueue;
    uint32_t ulMaxRxQueue;
    uint32_t ulMaxBaud;
    uint32_t ulProvSubType;
    uint32_t ulProvCapabilities;
    uint32_t ulSettableParams;
    uint32_t ulSettableBaud;
    uint16_t wSettableData;
    uint16_t _reserved42;
    uint32_t ulCurrentTxQueue;
    uint32_t ulCurrentRxQueue;
    uint32_t _reserved52;
    uint32_t _reserved56;
    uint16_t uniProvName[15];
}cp210x_cpr_t;
/* Serial status response (CP210X_GET_COMM_STATUS), packed to match the wire format. */
__packed typedef struct
{
    uint32_t ulErrors;
    uint32_t ulHoldReasons;
    uint32_t ulAmountInInQueue;
    uint32_t ulAmountInOutQueue;
    uint8_t bEofReceived;
    uint8_t bWaitForImmediate;
    uint8_t bReserved;
} cp210x_ssr_t;
/* Application callbacks invoked by the CP210x function driver. */
struct usbd_cp210x_callback_t
{
    uint32_t (*get_line_coding)(struct ucdc_line_coding *line_coding);
    uint32_t (*set_line_coding)(struct ucdc_line_coding *line_coding);
    uint32_t (*set_control_line_serial_state)(uint8_t val);
    uint32_t (*recv_handler)(uint8_t *buf, uint32_t len);
    uint32_t (*send_notify)(void);
};
/* Registers the CP210x vendor-specific class with the USB device stack. */
void usbd_vsc_cp210x_init(struct usbd_t *h);
#endif
|
package types
// ReviewDTO is the JSON payload for a review.
// NOTE(review): Stars is serialized as a string rather than a number —
// confirm downstream consumers expect string-typed ratings.
type ReviewDTO struct{
	Info string `json:"info"`
	Stars string `json:"stars"`
}
|
. "${BASH_LIB_DIR}/test-utils/bats-support/load.bash"
. "${BASH_LIB_DIR}/test-utils/bats-assert-1/load.bash"
. "${BASH_LIB_DIR}/init"
docker_safe_tmp(){
  # neither mktemp -d nor $BATS_TMPDIR
  # produce dirs that docker can mount from
  # in macos.
  # NOTE(review): $RANDOM yields only 0..32767, so concurrent runs can
  # collide on the same path — confirm that is acceptable here.
  local -r tmp_dir="/tmp/${RANDOM}/spgs"
  # Redirect to stderr so only the path itself reaches stdout for capture.
  (
    rm -rf "${tmp_dir}"
    mkdir -p "${tmp_dir}"
  ) 1>&2
  echo "${tmp_dir}"
}
@test "shellcheck notices compile error" {
  tmp_dir="$(docker_safe_tmp)"
  spushd "${tmp_dir}"
  # A lone quote is a shell syntax error that shellcheck must flag.
  echo "'" > bad_script
  run shellcheck_script bad_script
  assert_failure
  assert_output --partial "syntax error"
  spopd
  # The parameter-expansion guard keeps the rm anchored under /tmp.
  rm -rf "/tmp/${tmp_dir#/tmp/}"
}
@test "shellcheck passes good script" {
  tmp_dir="$(docker_safe_tmp)"
  spushd "${tmp_dir}"
  # Minimal valid bash script: shebang plus the no-op builtin.
  echo -e "#!/bin/bash\n:" > good_script
  run shellcheck_script good_script
  rm -rf "${tmp_dir}"
  assert_output --partial "Checking good_script"
  assert_success
  spopd
  # NOTE(review): tmp_dir was already removed above, so this second
  # guard-form rm is a no-op kept for symmetry with the other tests.
  rm -rf "/tmp/${tmp_dir#/tmp/}"
}
@test "find_scripts finds git tracked files containing bash shebang" {
  tmp_dir="${BATS_TMPDIR}/ffgtfwse"
  rm -rf "${tmp_dir}"
  mkdir -p "${tmp_dir}"
  # Fix: quote the expansion so a BATS_TMPDIR containing spaces cannot
  # word-split (shellcheck SC2086).
  pushd "${tmp_dir}"
  # Fresh repo with an identity so `git commit` works non-interactively.
  git init
  git config user.email "ci@ci.ci"
  git config user.name "Jenkins"
  # a,b carry bash shebangs; c,d do not. Only a and c are tracked,
  # so find_scripts must report exactly "a".
  echo '#!/bin/bash' > a
  echo '#!/bin/bash' > b
  date > c
  date > d
  git add a c
  git commit -a -m "initial"
  run find_scripts
  assert_output "a"
  assert_success
  popd
}
@test "tap2junit correctly converts test file" {
  rc=0
  fdir="${BASH_LIB_DIR}/tests-for-this-repo/fixtures/test-utils"
  # Can't use run / assert_output here
  # because assert_output uses $output
  # which is a combination of stdout and stderr
  # and we are only interested in stdout.
  stdout=$(tap2junit < "${fdir}/tap2junit.in")
  rc=${?}
  # Fix: quote the fixture path inside the command substitution so a
  # space-containing BASH_LIB_DIR cannot word-split (shellcheck SC2086).
  assert_equal "${stdout}" "$(cat "${fdir}/tap2junit.out")"
  assert_equal "${rc}" "0"
}
|
<reponame>thelegendoflinas/ImageEditor
package com.createchance.imageeditor.shaders;
import android.opengl.GLES20;
/**
* Color phase transition shader.
*
* @author createchance
* @date 2018/12/31
*/
public class ColorPhaseTransShader extends TransitionMainFragmentShader {
    // Improvement: these are compile-time constants shared by every
    // instance, so they are static final rather than per-instance fields.
    /** Fragment shader asset implementing the color-phase transition. */
    private static final String TRANS_SHADER = "colorphase.glsl";
    /** Uniform name for the per-channel transition start step (vec4). */
    private static final String U_FROM_STEP = "fromStep";
    /** Uniform name for the per-channel transition end step (vec4). */
    private static final String U_TO_STEP = "toStep";

    public ColorPhaseTransShader() {
        // Compose the shared transition base shader with this effect's source.
        initShader(new String[]{TRANSITION_FOLDER + BASE_SHADER, TRANSITION_FOLDER + TRANS_SHADER}, GLES20.GL_FRAGMENT_SHADER);
    }

    @Override
    public void initLocation(int programId) {
        super.initLocation(programId);
        addLocation(U_FROM_STEP, true);
        addLocation(U_TO_STEP, true);
        loadLocation(programId);
    }

    /** Sets the RGBA "from" step uniform. */
    public void setUFromStep(float red, float green, float blue, float alpha) {
        setUniform(U_FROM_STEP, red, green, blue, alpha);
    }

    /** Sets the RGBA "to" step uniform. */
    public void setUToStep(float red, float green, float blue, float alpha) {
        setUniform(U_TO_STEP, red, green, blue, alpha);
    }
}
|
def longest_increasing_sequence(arr):
    """Return the length of the longest strictly-increasing contiguous run.

    An empty or single-element sequence yields 1, matching the original
    implementation's initialisation.
    """
    best = 1   # longest run seen so far
    run = 1    # length of the run ending at the current element
    # Walk adjacent pairs; extend the run while values strictly increase.
    for prev, nxt in zip(arr, arr[1:]):
        if prev < nxt:
            run += 1
            if run > best:
                best = run
        else:
            # Run broken: restart counting from the current element.
            run = 1
    return best
# Test
# Bug fix: the original called undefined `longest_sequence_length` on an
# undefined `arr`, raising NameError. Call the actual function on a
# concrete input whose longest increasing run has length 3.
arr = [1, 2, 3, 1, 2]
sequence_length = longest_increasing_sequence(arr)
print(sequence_length)  # 3
|
package ethanjones.mc.inventorybook.handler;
import baubles.api.BaublesApi;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentString;
/** Page handlers exposing third-party mod inventories inside the inventory book. */
public class ModInventoryHandlers {
    /** Shows the Baubles accessory inventory via the Baubles API. */
    public static class BaublesHandler extends PageHandler.IInventoryHandler {
        @Override
        public IInventory getIInventory(EntityPlayer entityPlayer) {
            return BaublesApi.getBaubles(entityPlayer);
        }
        @Override
        public ITextComponent title(EntityPlayer obj) {
            return new TextComponentString("Baubles");
        }
    }
    /** Reads bag contents stored in a Bagginses item's "Items" NBT list. */
    public static class BagginsesHandler extends PageHandler<ItemStack> {
        @Override
        public boolean valid(ItemStack obj) {
            // Registry names look like "bagginses:...". Void and ender bags
            // are excluded; remaining bags must carry serialized contents.
            String n = Item.itemRegistry.getNameForObject(obj.getItem()).toString().toLowerCase();
            if (!n.startsWith("bagginses:") || n.equals("bagginses:void") || n.equals("bagginses:ender")) return false;
            return obj.getTagCompound() != null && obj.getTagCompound().hasKey("Items");
        }
        @Override
        public ItemStack itemStack(ItemStack obj, int i) {
            // 10 is the NBT id for compound tags.
            NBTTagList items = obj.getTagCompound().getTagList("Items", 10);
            return ItemStack.loadItemStackFromNBT(items.getCompoundTagAt(i));
        }
        @Override
        public int itemStacksLength(ItemStack obj) {
            NBTTagList items = obj.getTagCompound().getTagList("Items", 10);
            return items.tagCount();
        }
        @Override
        public ITextComponent title(ItemStack obj) {
            return getItemStackComponent(obj, null, false, false);
        }
    }
}
|
<gh_stars>0
// Demonstrates iostream formatting flags (showpos, fixed, showpoint) at
// increasing precision on a fixed sample value.
// Fix: the original used pre-standard <iostream.h>/<conio.h>/<iomanip.h>,
// which do not compile with any standard-conforming compiler. Ported to
// standard headers; conio's getch() pause is replaced by std::cin.get()
// (waits for Enter rather than any key).
#include <iostream>
#include <iomanip>
int main()
{
    const double a = 789.1234;
    std::cout.setf(std::ios::showpos);   // print a leading '+' on positives
    std::cout.setf(std::ios::fixed);     // precision = digits after the point
    std::cout << std::setprecision(1) << a << "\n";
    std::cout << std::setprecision(2) << a << "\n";
    std::cout << std::setprecision(3) << a << "\n";
    std::cout << std::setprecision(4) << a << "\n";
    std::cout.setf(std::ios::showpoint); // always show decimal point
    std::cout << std::setprecision(5) << a << "\n";
    std::cout << std::setprecision(6) << a << "\n";
    std::cout << std::setprecision(7) << a << "\n";
    std::cin.get();
    return 0;
}
|
<reponame>Nedson202/Harvard-arts
import { Request, Response, NextFunction } from 'express';
import { stackLogger } from 'info-logger';
const errorHandler = (error, req: Request, res: Response, next: NextFunction) => {
stackLogger(error);
return res.sendStatus(error.httpStatusCode).json({
error: true,
message: error.message,
});
};
export default errorHandler;
|
#!/bin/sh
# Build the OpenCL vector-add sample with debug symbols and optimization
# disabled (output defaults to ./a.out since no -o is given).
g++ -g -O0 vector_add.cpp -lOpenCL
|
# Copyright 2017 BBVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from apitest.helpers.fuzzer import build_fuzzed_method
def test_http_methods_case_unexpected(make_requests):
    """Server must reject a request whose verb differs from the recorded one."""
    # Get different method that original request.
    # Bug fix: the original condition used `and`, so the loop exited as soon
    # as `method` was non-empty — even when it equaled "{{ method }}". Loop
    # while the candidate is empty OR matches the original method.
    method = ""
    while not method or method == "{{ method }}":
        method = build_fuzzed_method()
    response, original, _ = make_requests("{{ url }}",
                                          method=method,
                                          build_fuzzed_response=False)
    assert response.status_code in (500, 405, 404)
def test_http_methods_case_invalid(make_requests):
    # A nonsense HTTP verb must be rejected with a server or method error.
    response, original, _ = make_requests("{{ url }}",
                                          method="POTATO",
                                          build_fuzzed_response=False)
    assert response.status_code in (500, 405)
def test_http_methods_case_dangerous(make_requests):
    # DELETE against a recorded endpoint must not succeed.
    response, original, _ = make_requests("{{ url }}",
                                          method="DELETE",
                                          build_fuzzed_response=False)
    assert response.status_code != 200
def test_http_methods_case_trace(make_requests):
    # TRACE should be disabled (commonly rejected to prevent XST attacks
    # — NOTE(review): rationale inferred; the test only requires non-200).
    response, original, _ = make_requests("{{ url }}",
                                          method="TRACE",
                                          build_fuzzed_response=False)
    assert response.status_code != 200
|
def get_maximum_value(lst):
    """Return the largest element of lst.

    Raises ValueError for an empty sequence, matching built-in max().
    """
    values = iter(lst)
    try:
        largest = next(values)
    except StopIteration:
        raise ValueError("max() arg is an empty sequence") from None
    for candidate in values:
        if candidate > largest:
            largest = candidate
    return largest

print(get_maximum_value([1, 5, 2, 9, -2]))
|
#!/bin/bash
# Abort on errors, unset variables, and failures anywhere in a pipeline.
set -euo pipefail
echo "NOTE: Expected first time run time is under 5 minutes,"
echo "repeat runs under a minute to just to regenerate reports."
echo
# Working directories for per-sample files and the final reports.
mkdir -p intermediate/ summary/
# Takes arguments via variable names
# Expects: GENE, MARKER, LEFT, RIGHT and DB to be set by the caller.
function import_marker {
    echo "Trimming $GENE sequences for $MARKER"
    # Reverse-complement the right primer so it can be trimmed from the
    # 3' end with cutadapt's -a option.
    export RIGHT_RC=`python -c "from Bio.Seq import reverse_complement as rc; print(rc('$RIGHT'))"`
    # Doing the left and right primer trimming separately:
    cutadapt --quiet -g $LEFT $GENE.fasta \
        | cutadapt --quiet -a $RIGHT_RC -o $MARKER.fasta /dev/stdin
    echo "Adding $GENE $MARKER to database"
    thapbi_pict import -d $DB -i $MARKER.fasta -x \
        -k $MARKER --left $LEFT --right $RIGHT
}
# Takes arguments via variable names
# Expects: LIBRARY and DB to be set by the caller.
function analyse {
    echo "Running analysis with minimum abundance threshold ten"
    # No threshold (-a 0 or -a 1) gives 450k total unique entries over samples
    # Using minimum of 2 gives 75k unique, 5 gives 22k, and 10 gives 8.8k unique.
    # Using minimum of 100 (default) gives under 800 unique over samples.
    # [Counts were over both amplicons using the actual primer pairs, 3 runs]
    mkdir -p intermediate/${LIBRARY}/
    # Run the full pipeline once per classifier method for comparison.
    for METHOD in identity onebp blast; do
        thapbi_pict pipeline -d $DB -m $METHOD \
            -i raw_data/$LIBRARY/ expected/$LIBRARY/ \
            -s intermediate/${LIBRARY}/ \
            -o summary/${LIBRARY} -a 10 \
            -t metadata_$LIBRARY.tsv -c 5,6,7,3,4,2 -x 1 -g 6
    done
    echo "$LIBRARY done"
}
# Takes arguments via variable names
# Expects: LIBRARY, MARKER and DB to be set by the caller.
function edit_graph {
    # Now run an edit-graph at a higher abundance threshold
    # (works as long as pipeline or prepare-reads was run with
    # the same or lower threshold).
    # Including relevant DB entries with -k / --marker argument
    # Do not show the classifier output using -m with "-"
    thapbi_pict edit-graph -d $DB -k $MARKER \
        -i intermediate/${LIBRARY}/${MARKER}/ -a 75 -m - \
        -o summary/${LIBRARY}.${MARKER}.edit-graph.a75.xgmml
}
# --- Single-marker database (built only if it does not exist yet) ---
DB=fungi_solo.sqlite
if [ ! -f $DB ]; then
    echo ==================================================
    echo Creating database - ITS1 - BITS/B58S3 primers only
    echo ==================================================
    GENE=ITS1
    MARKER=BITS-B58S3
    LEFT=ACCTGCGGARGGATC
    RIGHT=GAGATCCRTTGYTRAAAGTT
    import_marker # call function above
fi
echo =====================================================
echo Amplicon library one - ITS1 - BITS/B58S3 primers only
echo =====================================================
LIBRARY=AL1
analyse # call function above
MARKER=BITS-B58S3
edit_graph # call function above
echo ================================================
echo Amplicon library two - ITS1 - BITS/B58S3 primers
echo ================================================
echo Note: This is a blinkered view of this dataset,
echo really used ITS1f/ITS2 primers which amplify a
echo a larger fragment, see below.
LIBRARY=AL2
analyse # call function above
MARKER=BITS-B58S3
edit_graph # call function above
# --- Two-marker database (ITS1 and ITS2) ---
DB=fungi_duo.sqlite
if [ ! -f $DB ]; then
    echo =================================
    echo Creating database - ITS1 and ITS2
    echo =================================
    GENE=ITS1
    MARKER=ITS1f-ITS2
    LEFT=CTTGGTCATTTAGAGGAAGTAA
    RIGHT=GCTGCGTTCTTCATCGATGC
    import_marker # call function above
    GENE=ITS2
    LIBRARY=AL2
    MARKER=ITS3-KYO2-ITS4-KYO3
    LEFT=GATGAAGAACGYAGYRAA
    RIGHT=CTBTTVCCKCTTCACTCG
    import_marker # call function above
fi
echo ================================================
echo Amplicon library two, with two primers products:
echo ITS1 - ITS1f/ITS2 primers
echo ITS2 - ITS3-KYO2/ITS4-KYO3 primers
echo ================================================
LIBRARY=AL2
analyse # call function above
for MARKER in ITS1f-ITS2 ITS3-KYO2-ITS4-KYO3; do
    edit_graph # call function above
done
echo ====
echo Done
echo ====
|
<gh_stars>1-10
"use strict";
exports.id = 829;
exports.ids = [829];
exports.modules = {
/***/ 4829:
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
// ESM COMPAT FLAG
__webpack_require__.r(__webpack_exports__);
// EXPORTS
__webpack_require__.d(__webpack_exports__, {
"default": () => (/* binding */ containers_Projects)
});
// EXTERNAL MODULE: external "react"
var external_react_ = __webpack_require__(9297);
// EXTERNAL MODULE: ./portfolio.js + 1 modules
var portfolio = __webpack_require__(4644);
// EXTERNAL MODULE: external "reactstrap"
var external_reactstrap_ = __webpack_require__(6099);
// EXTERNAL MODULE: external "react-reveal"
var external_react_reveal_ = __webpack_require__(9356);
// EXTERNAL MODULE: external "react/jsx-runtime"
var jsx_runtime_ = __webpack_require__(5282);
;// CONCATENATED MODULE: ./components/ProjectsCard.jsx
// NOTE(review): this is webpack-compiled bundle output. Make changes in the
// original ./components/ProjectsCard.jsx source, not in this artifact.
// NOTE(review): the Github button uses href: data.link rather than a
// dedicated data.github URL — confirm in the source component.
const ProjectsCard = ({
  data
}) => {
  return /*#__PURE__*/jsx_runtime_.jsx(external_reactstrap_.Col, {
    lg: "6",
    children: /*#__PURE__*/jsx_runtime_.jsx(external_react_reveal_.Fade, {
      left: true,
      duration: 1000,
      distance: "40px",
      children: /*#__PURE__*/jsx_runtime_.jsx(external_reactstrap_.Card, {
        className: "shadow-lg--hover shadow mt-4",
        children: /*#__PURE__*/jsx_runtime_.jsx(external_reactstrap_.CardBody, {
          children: /*#__PURE__*/jsx_runtime_.jsx("div", {
            className: "d-flex px-3",
            children: /*#__PURE__*/(0,jsx_runtime_.jsxs)("div", {
              className: "pl-4",
              children: [/*#__PURE__*/jsx_runtime_.jsx("h3", {
                children: data.name
              }), /*#__PURE__*/jsx_runtime_.jsx("p", {
                className: "description mt-3",
                children: data.desc
              }), data.github ? /*#__PURE__*/jsx_runtime_.jsx(external_reactstrap_.Button, {
                className: "btn-icon",
                color: "github",
                href: data.link,
                target: "_blank",
                rel: "noopener",
                "aria-label": "Github",
                children: /*#__PURE__*/jsx_runtime_.jsx("span", {
                  className: "btn-inner--icon",
                  children: /*#__PURE__*/jsx_runtime_.jsx("i", {
                    className: "fa fa-github"
                  })
                })
              }) : null, data.link ? /*#__PURE__*/(0,jsx_runtime_.jsxs)(external_reactstrap_.Button, {
                className: "btn-icon",
                color: "success",
                href: data.link,
                target: "_blank",
                rel: "noopener",
                "aria-label": "Twitter",
                children: [/*#__PURE__*/jsx_runtime_.jsx("span", {
                  className: "btn-inner--icon",
                  children: /*#__PURE__*/jsx_runtime_.jsx("i", {
                    className: "fa fa-arrow-right mr-2"
                  })
                }), /*#__PURE__*/jsx_runtime_.jsx("span", {
                  className: "nav-link-inner--text ml-1",
                  children: "Demo"
                })]
              }) : null]
            })
          })
        })
      })
    })
  });
};
/* harmony default export */ const components_ProjectsCard = (ProjectsCard);
;// CONCATENATED MODULE: ./containers/Projects.jsx
// NOTE(review): webpack-generated output — edit ./containers/Projects.jsx
// in the source tree, not this bundle.
const Projects = () => {
  return /*#__PURE__*/jsx_runtime_.jsx("section", {
    className: "section section-lg",
    children: /*#__PURE__*/jsx_runtime_.jsx(external_reactstrap_.Container, {
      children: /*#__PURE__*/(0,jsx_runtime_.jsxs)(external_react_reveal_.Fade, {
        bottom: true,
        duration: 1000,
        distance: "40px",
        children: [/*#__PURE__*/(0,jsx_runtime_.jsxs)("div", {
          className: "d-flex p-4",
          children: [/*#__PURE__*/jsx_runtime_.jsx("div", {
            children: /*#__PURE__*/jsx_runtime_.jsx("div", {
              className: "icon icon-lg icon-shape bg-gradient-white shadow rounded-circle text-info",
              children: /*#__PURE__*/jsx_runtime_.jsx("i", {
                className: "ni ni-laptop text-info"
              })
            })
          }), /*#__PURE__*/jsx_runtime_.jsx("div", {
            className: "pl-4",
            children: /*#__PURE__*/jsx_runtime_.jsx("h4", {
              className: "display-3 text-info",
              children: "Projects"
            })
          })]
        }), /*#__PURE__*/jsx_runtime_.jsx(external_reactstrap_.Row, {
          className: "row-grid align-items-center",
          children: portfolio/* projects.map */.q.map((data, i) => {
            return /*#__PURE__*/jsx_runtime_.jsx(components_ProjectsCard, {
              data: data
            }, i);
          })
        })]
      })
    })
  });
};
/* harmony default export */ const containers_Projects = (Projects);
/***/ })
};
;
|
<filename>lib/ayeaye/pinnate.py
import json
class Pinnate:
    """
    Dictionary or attribute access to variables loaded either from a JSON
    string or supplied as a dictionary.
    >>> a = Pinnate({'my_string':'abcdef'})
    >>> a.my_string
    'abcdef'
    >>> a['my_string']
    'abcdef'
    >>> a.as_dict()
    {'my_string': 'abcdef'}
    objects within lists-
    >>> from ayeaye.pinnate import Pinnate
    >>> d={'my_things' : [1,2,{'three':3}]}
    >>> a = Pinnate(d)
    >>> a.my_things
    [1, 2, <ayeaye.pinnate.Pinnate object at 0x108526e10>]
    >>> a.my_things[2].three
    3
    """
    def __init__(self, data=None):
        """
        :param data: dictionary or dictionary encoded in json or instance of Pinnate
        """
        self._attr = {}
        if isinstance(data, self.__class__):
            # NOTE(review): this shares the other instance's _attr dict (no
            # copy), so mutations are visible in both objects — confirm
            # that aliasing is intended.
            self._attr = data._attr
        elif data:
            self.load(data, merge=False)
    def __unicode__(self):
        # Python 2 relic kept for compatibility; __str__ delegates here.
        d = ', '.join([u"{}:{}".format(k, v) for k, v in self._attr.items()])
        return '<Pinnate %s>' % d
    def __str__(self):
        return self.__unicode__().encode("ascii", "replace").decode()
    def keys(self):
        # Dict-protocol support so Pinnate can be used with ** unpacking etc.
        return self._attr.keys()
    def values(self):
        return self._attr.values()
    def items(self):
        return self._attr.items()
    def __contains__(self, key):
        return key in self._attr
    def as_dict(self, select_fields=None):
        """
        @param select_fields: (list of str) to only include some fields from model.
        @return: (dict) with mixed values
        """
        if select_fields is not None:
            r = {}
            for k in select_fields:
                # Recursively flatten nested Pinnate values back to dicts.
                if isinstance(self._attr[k], self.__class__):
                    v = self._attr[k].as_dict()
                else:
                    v = self._attr[k]
                r[k] = v
            return r
        else:
            return {k: v.as_dict() if isinstance(v, self.__class__) else v \
                    for k, v in self._attr.items()}
    def as_json(self, *args, **kwargs):
        """
        @see :method:`as_dict` for params.
        @returns (str) JSON representation
        """
        # default=str makes non-JSON-serialisable values (dates etc.) degrade
        # to their string form instead of raising.
        return json.dumps(self.as_dict(*args, **kwargs), default=str)
    def __getattr__(self, attr):
        if attr not in self._attr:
            raise AttributeError("{} instance has no attribute '{}'".format(self.__class__.__name__, attr))
        if isinstance(self._attr[attr], list):
            # Lists are rebuilt on each access with nested dicts wrapped as
            # Pinnate; the returned list is a fresh copy, so mutating it does
            # not write back into _attr.
            def list_recurse(item):
                r = []
                for s in item:
                    if isinstance(s, dict):
                        r.append(self.__class__(s))
                    elif isinstance(s, list):
                        r.append(list_recurse(s))
                    else:
                        r.append(s)
                return r
            return list_recurse(self._attr[attr])
        elif isinstance(self._attr[attr], dict):
            # Plain dicts (e.g. set via __setitem__) are wrapped lazily.
            return self.__class__(self._attr[attr])
        else:
            return self._attr[attr]
    def __setattr__(self, attr, val):
        # Mirror every attribute except the backing store itself into _attr
        # so attribute writes are visible through dict-style access too.
        super(Pinnate, self).__setattr__(attr, val)
        if attr != '_attr':
            self._attr[attr] = val
    def __getitem__(self, key):
        return self._attr[key]
    def __setitem__(self, key, value):
        self._attr[key] = value
    def get(self, key, default=None):
        # dict.get() equivalent.
        return self._attr.get(key, default)
    def load(self, data, merge=False):
        """
        :param data: dict or json string
        :param merge: bool see :method:`update` if False or :method:`merge` when True.
        """
        if not isinstance(data, dict):
            data = json.loads(data)
        if merge:
            self.merge(data)
        else:
            self.update(data)
    def update(self, data):
        """
        Extend the Pinnate with further settings. If a setting with an existing key is supplied,
        then the previous value is overwritten.
        :param data: dictionary or dictionary encoded in json
        """
        for k, v in data.items():
            if isinstance(v, dict):
                self._attr[k] = Pinnate(v)
            else:
                self._attr[k] = v
    def merge(self, data):
        """
        Extend the Pinnate with further settings. If a setting with an existing key is supplied,
        then the previous value is either updated (if the previous value is a dict) or overwritten
        (if the previous value is a scalar).
        Where corresponding values are not of compatible types, a ValueError is raised. Compatible
        means that an existing dict value must remain a dict (thus the dicts are merged), or a
        non-dict value must remain a non-dict type.
        :param data: dictionary or dictionary encoded in json
        """
        for k, v in data.items():
            if isinstance(v, dict):
                try:
                    # Recurse into an existing Pinnate value.
                    self._attr[k].merge(v)
                except KeyError:
                    # Key absent: adopt the sub-dict wholesale.
                    self._attr[k] = Pinnate(v)
                except AttributeError:
                    # Existing value has no .merge() (scalar/list replaced by dict).
                    raise ValueError("Invalid key '{}'".format(k))
            else:
                if k in self._attr and isinstance(self._attr[k], self.__class__):
                    msg = ("Key '{}' attempted to overwrite an existing Pinnate."
                           "Operation not permitted."
                           )
                    raise ValueError(msg.format(k))
                self._attr[k] = v
|
#!/bin/bash
#if [ "$use_service" = "knative" ];
#then
# dir=`dirname $0`
# ca_file_name=${dir}/ca.yaml
# kubectl --namespace knative-serving create secret generic customca --from-file=customca.crt=/etc/docker/certs.d/harbor.sigsus.cn:8443/ca.crt --dry-run -o yaml > $ca_file_name
#fi
|
/*********************************************************************************
*
* Copyright (c) 2016, <NAME>
* All rights reserved.
*
* This work is licensed under a Creative Commons Attribution-NonCommercial 4.0 International License.
* http://creativecommons.org/licenses/by-nc/4.0/
*
* You are free to:
*
* Share — copy and redistribute the material in any medium or format
* Adapt — remix, transform, and build upon the material
* The licensor cannot revoke these freedoms as long as you follow the license terms.
* Under the following terms:
*
* Attribution — You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
* NonCommercial — You may not use the material for commercial purposes.
* No additional restrictions — You may not apply legal terms or technological measures that legally restrict others from doing anything the license permits.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*********************************************************************************/
#include <modules/progressivephotonmapping/photondata.h>
namespace inviwo {
// Default gather radius expressed as a fraction of the scene radius.
const float PhotonData::defaultRadiusRelativeToSceneRadius{ 0.0153866f };
const float PhotonData::defaultSceneRadius{ 1.1447142425533318678080422119397f }; // 0.5 * length(vec3(2))
// NOTE(review): the value is 1/pi but the trailing comment claims 1/(2*pi)
// — confirm which is intended before relying on either.
const double PhotonData::scaleToMakeLightPowerOfOneVisibleForDirectionalLightSource{ 1./M_PI }; // 1/(2*pi)
PhotonData::~PhotonData() {
}
// Copy tuning parameters from rhs; the photon buffer itself is not copied.
void PhotonData::copyParamsFrom(const PhotonData& rhs) {
    maxPhotonInteractions_ = rhs.maxPhotonInteractions_;
    sceneRadius_ = rhs.sceneRadius_;
    worldSpaceRadius_ = rhs.worldSpaceRadius_;
    iteration_ = rhs.iteration_;
}
// Allocate photon storage.
// NOTE(review): capacity is numberOfPhotons * 2 * maxPhotonInteractions —
// the factor of 2 is not explained here; confirm its meaning in photondata.h.
void PhotonData::setSize(size_t numberOfPhotons, int maxPhotonInteractions) {
    maxPhotonInteractions_ = maxPhotonInteractions;
    if (numberOfPhotons > 0) {
        photons_.setSize(numberOfPhotons * 2 * maxPhotonInteractions);
    }
}
// Derive the world-space gather radius from a scene-relative fraction.
void PhotonData::setRadius(double radiusRelativeToSceneSize, double sceneRadius) {
    sceneRadius_ = sceneRadius;
    worldSpaceRadius_ = radiusRelativeToSceneSize*sceneRadius;
    //LogInfo("Scene radius: " << sceneRadius_ << " Wold space radius: " << worldSpaceRadius_);
}
// Shrink the radius per progressive photon mapping, then advance the counter.
void PhotonData::advanceToNextIteration(double alpha) {
    setRadius(progressiveSphereRadius(getRadius(), iteration_, alpha));
    iteration_++;
}
double PhotonData::progressiveSphereRadius(double radius, int iteration, double alpha) {
    // See: http://www.cs.jhu.edu/~misha/ReadingSeminar/Papers/Knaus11.pdf
    // eq. 20: r_(i+1) = r_i*((i+alpha)/(i+1) )^(1/3)
    // Sphere
    return radius*std::pow((static_cast<double>(iteration)+alpha) / (1.0 + static_cast<double>(iteration)), 1. / 3.);
    // Disc
    //radius = prevRadius_*std::sqrt((static_cast<float>(iteration_)+alphaProp_.get() )/(1.f+static_cast<float>(iteration_)));
}
// Volume of a sphere: (4/3) * pi * r^3.
double PhotonData::sphereVolume(double radius) {
    return std::pow(radius, 3) * (M_PI*4. / 3.);
}
// Scale factor that keeps apparent brightness comparable across different
// gather radii and photon counts (relative to the class defaults).
double PhotonData::getRelativeIrradianceScale() const {
    // Scale with lightVolumeSizeScale to get the same look for different light volume sizes.
    double referenceRadiusVolumeScale = sphereVolume(getRadiusRelativeToSceneSize()) / sphereVolume(defaultRadiusRelativeToSceneRadius);
    // Works when the light volume is normalized by the number of photons per voxel.
    // Use photon scale to get equivalent appearance when normalizing light volume
    double nPhotonsScale = static_cast<double>(getNumberOfPhotons()) / static_cast<double>(defaultNumberOfPhotons);
    return referenceRadiusVolumeScale*nPhotonsScale;
}
void PhotonData::setInvalidationReason(InvalidationReason val) {
    invalidationFlag_ = val;
}
// Compactly encode a direction vector as (theta, phi) spherical angles.
void Photon::setDirection(vec3 dir) {
    float phi = atan2(dir.y, dir.x);
    //if ( !isfinite(phi) ) {
    //if(dir.y < 0.f) phi = -0.5f*M_PI;
    //else phi = 0.5f*M_PI;
    //}
    // Important: clamp dir.z to avoid NaN
    float theta = acos(glm::clamp(dir.z, -1.f, 1.f));
    encodedDirection = vec2{ theta, phi };
}
// Decode (theta, phi) back into a unit direction vector.
vec3 Photon::getDirection() const {
    vec2 cosAngles = glm::cos(encodedDirection);
    vec2 sinAngles = glm::sin(encodedDirection);
    return vec3{ sinAngles.x*cosAngles.y,
        sinAngles.x*sinAngles.y,
        cosAngles.x };
}
} // namespace
|
class Site:
    """Minimal site record holding a display name and a URL slug."""

    def __init__(self, name, slug):
        self.name = name
        self.slug = slug


def create_and_log_site(name, slug):
    """Create a Site and report the outcome as a human-readable message.

    Returns a success string on creation, or a failure string containing
    the exception text if construction raises.
    """
    try:
        Site(name, slug)
    except Exception as exc:
        return f"Failed to create a new site: {exc}"
    return f"Successfully created a new site: {name}"


# Test the function
print(create_and_log_site("Test Site One", "test-site-one"))
print(create_and_log_site("Test Site Two", "test-site-two"))
|
// Webpack build configuration.
const path = require('path');
const webpack = require('webpack');
module.exports = {
  plugins: [new webpack.DefinePlugin({
    // Definitions...
  })
  ],
  resolve: {
    alias: {
      // '@' maps to the front-end source root.
      '@': path.resolve('resources/js'),
    },
  },
};
|
#!/bin/bash
# Launch the Spark SQL "basic" Python example in local mode inside an SGX
# enclave via pal_loader, teeing all output to a log for later inspection.
SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java \
    -cp '/ppml/trusted-big-data-ml/work/spark-2.4.3/conf/:/ppml/trusted-big-data-ml/work/spark-2.4.3/jars/*' \
    -Xmx1g org.apache.spark.deploy.SparkSubmit \
    --master 'local[4]' \
    /ppml/trusted-big-data-ml/work/spark-2.4.3/examples/src/main/python/sql/basic.py" | tee test-sql-basic-sgx.log
|
<reponame>PranavKhadpe/Detecting-usefulness-of-Yelp-Reviews
'''
Takes as input a CSV File with the bin value in the first column and the remaining features in the next column. Headers have to be removed
Improvements: Instead of feature selection techniques used here, a correlation based feature reduction technique can be used. Read about FCBF.
'''
# NOTE(review): Python 2 syntax throughout (print statements); this script
# will not run under Python 3 without conversion.
import numpy as np
import pandas as pd
from sklearn.svm import SVC
from sklearn.svm import LinearSVC
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.feature_selection import VarianceThreshold
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.feature_selection import SelectFromModel
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import normalize
# First column is the class label ("bin"); the rest are features.
data = np.loadtxt('input_para.csv', delimiter= ',')
X = data[:, 1:]
y = data[:, 0].astype(np.int)
print X[:3]
print X.shape
# Below are some standard feauture reduction/selection techniques I thought would help but didn't :'
'''
min_max_scaler = MinMaxScaler()
X = min_max_scaler.fit_transform(X)
print X[:3]
X = normalize(X)
## Dimension Reduction
sel = VarianceThreshold(threshold = (0.8*(1-0.8)))
X = sel.fit_transform(X)
print X.shape
'''
# These feature reduction techniques help a bit
clf = ExtraTreesClassifier()
clf = clf.fit(X, y)
model = SelectFromModel(clf, prefit=True)
X = model.transform(X)
print X.shape
X = SelectKBest( chi2, k = 25).fit_transform(X,y)
print X.shape
## Split into test and train data
X_train, X_test, y_train, y_test = train_test_split( X, y, test_size = 0.2, random_state= 42)
# This part of the code is to write the test data into another file so that we can check later but requires you to know the dimension after reduction so I have commented out for now.
# NOTE(review): the commented block reads 'input_param.csv' while the live
# code reads 'input_para.csv' — confirm which filename is correct.
'''
df1 = pd.read_csv('input_param.csv')
del df1['bin']
print len(X_test)
df = pd.DataFrame(X_test, index = df1[:1184].index, columns = df1.columns )
df['bin'] = pd.Series(y_test, index = df.index )
'''
## SVM One vs one
clf = SVC()
clf.fit(X_train, y_train)
training_accuracy = clf.score(X_train, y_train)
print "Training accuracy = ", training_accuracy
test_accuracy = clf.score(X_test, y_test)
print "Test accuracy = ", test_accuracy
## SVM one vs rest
clf = LinearSVC(random_state = 0)
clf.fit(X_train, y_train)
training_accuracy = clf.score(X_train, y_train)
print "Training accuracy = ", training_accuracy
test_accuracy = clf.score(X_test, y_test)
print "Test accuracy = ", test_accuracy
'''
'''
## Logistic Regression:
clf = LogisticRegression(multi_class = 'ovr')
clf.fit(X_train, y_train)
training_accuracy = clf.score(X_train, y_train)
print "Training accuracy = ", training_accuracy
test_accuracy = clf.score(X_test, y_test)
print "Test accuracy = ", test_accuracy
# df['Logistic'] = pd.Series(clf.predict(X_test), index = df.index )
## Random Forest Classifier
clf = RandomForestClassifier(max_depth = 5)
clf.fit(X_train, y_train)
training_accuracy = clf.score(X_train, y_train)
print "Training accuracy = ", training_accuracy
test_accuracy = clf.score(X_test, y_test)
print "Test accuracy = ", test_accuracy
# df['Random Forest'] = pd.Series(clf.predict(X_test), index = df.index )
#df.to_csv('predicted.csv')
## K-nearest neighbours (default hyper-parameters)
clf = KNeighborsClassifier()
clf.fit(X_train, y_train)
training_accuracy = clf.score(X_train, y_train)
print "Training accuracy = ", training_accuracy
test_accuracy = clf.score(X_test, y_test)
print "Test accuracy = ", test_accuracy
|
module MurmurHash
  # the 64-bit version of MurmurHash2, supposedly. I have doubts about either this
  # java implementation or the digest-murmurhash gem's implementation. I don't
  # get equivalent results when passing in a String into them so I'm wondering
  # which one is most similar to the original C++ implementation.
  #
  # Either way, for JD Power's purposes, we just need this java version to be
  # thinly wrapped in ruby b/c it's the version we're already using and we need
  # all of our keys to stay the same for now. Therefore, I have no need to figure
  # out which version (this one or digest-murmurhash's) is more correct.
  class MurmurHash64a
    # @example rawdigest('some-string-to-digest')
    # @param [String] str
    # @return the value of MurmurHashJava::MurmurHash.hash64 over the
    #   string's bytes (JRuby interop via String#to_java_bytes).
    def rawdigest(str)
      byte_arr = str.to_java_bytes
      ::MurmurHashJava::MurmurHash.hash64(byte_arr, byte_arr.length)
    end
  end
end
|
<gh_stars>0
var relativeHRefHook = {
missedFilePaths: [],
badImageDataFound: false,
    onDomSerialization: function (dom) {
        // On save: rewrite absolute file:// references (xlink:href and Bitmap
        // imageData) to paths relative to the document, so it is relocatable.
        var thiz = this;
        Dom.workOn("//@xlink:href", dom, function (href) {
            var hrefValue = href.nodeValue;
            if (!hrefValue.match(/^file:\/\/.*$/)) return;
            var relativePath = thiz.uriToRelative(hrefValue);
            if (relativePath) {
                href.nodeValue = relativePath;
            }
        });
        var xpath = "//svg:g[@p:type='Shape' and @p:def='Evolus.Common:Bitmap']//p:property[@name='imageData']";
        Dom.workOn(xpath, dom, function (property) {
            var imageData = ImageData.fromString(property.textContent);
            if (!imageData.data.match(/^file:\/\/.*$/)) return;
            var relativePath = thiz.uriToRelative(imageData.data);
            if (relativePath) {
                imageData.data = relativePath;
                // Replace the property's payload with the rewritten image data.
                Dom.empty(property);
                property.appendChild(property.ownerDocument.createCDATASection(imageData.toString()));
            }
        });
    },
    onPageLoad: function (page) {
        // On load: the inverse of onDomSerialization — resolve stored relative
        // paths back to absolute URIs against the document's location.
        var thiz = this;
        Dom.workOn("//@xlink:href", page.contentNode, function (href) {
            var hrefValue = href.nodeValue;
            // Skip values that already carry a scheme (file://, http://, ...).
            if (hrefValue.match(/^[a-z]+:\/\/.*$/)) return;
            var uri = thiz.relativeToURI(hrefValue);
            if (uri) {
                href.nodeValue = uri;
            }
        });
        var xpath = "//svg:g[@p:type='Shape' and @p:def='Evolus.Common:Bitmap']//p:property[@name='imageData']";
        Dom.workOn(xpath, page.contentNode, function (property) {
            var imageData = ImageData.fromString(property.textContent);
            if (!imageData.data) {
                // Flag corrupt entries so onLoad can warn the user once.
                thiz.badImageDataFound = true;
                return;
            }
            if (imageData.data.match(/^[a-z]+:.*$/)) return;
            // Resolve and log missing files (second arg enables logging).
            imageData.data = thiz.relativeToURI(imageData.data, true);
            Dom.empty(property);
            property.appendChild(property.ownerDocument.createCDATASection(imageData.toString()));
        });
    },
uriToRelative: function (absoluteFileURI) {
var file = XMLDocumentPersister.currentFile.parent;
try {
var hrefFile = fileHandler.getFileFromURLSpec(absoluteFileURI).QueryInterface(Components.interfaces.nsILocalFile);
return hrefFile.getRelativeDescriptor(file);
} catch (e) {
return null;
}
},
relativeToURI: function (relativeFileURI, logError) {
var file = XMLDocumentPersister.currentFile.parent;
var hrefFile = Components.classes["@mozilla.org/file/local;1"].createInstance(Components.interfaces.nsILocalFile);
hrefFile.setRelativeDescriptor(file, relativeFileURI);
if (!hrefFile.exists()) {
if (logError) {
this.missedFilePaths.push(relativeFileURI);
} else {
return null;
}
}
return ImageData.ios.newFileURI(hrefFile).spec;
},
onLoad: function (doc) {
if (this.missedFilePaths.length > 0) {
//Util.warn(Util.getMessage("warning.title"), Util.getMessage("document.missing.external.resources", this.missedFilePaths.join("\n\t● ")), Util.getMessage("button.cancel.close"));
alert(Util.getMessage("document.missing.external.resources", this.missedFilePaths.join("\n\t● ")));
this.missedFilePaths = [];
}
if (this.badImageDataFound) {
//Util.error(Util.getMessage("error.title"), Util.getMessage("error.bad.image.data.was.found.in.the.document"), Util.getMessage("button.cancel.close"));
alert(Util.getMessage("error.bad.image.data.was.found.in.the.document"));
this.badImageDataFound = false;
}
}
}
XMLDocumentPersister.hooks.push(relativeHRefHook);
|
// Simple in-memory shopping list with add/remove/print helpers.
let list = ['Apples', 'Oranges', 'Grapes'];

// Append an item and announce the addition.
function add(item) {
  list = list.concat(item);
  console.log(`${item} has been added to the list!`);
}

// Remove the first occurrence of an item, or complain if it is absent.
function remove(item) {
  const position = list.indexOf(item);
  if (position === -1) {
    console.log(`${item} is not on the list!`);
    return;
  }
  list.splice(position, 1);
  console.log(`${item} has been removed from the list!`);
}

// Print the current list contents.
function showList() {
  console.log(list);
}

add('Bananas');
remove('Grapes');
showList();
// Output:
// Bananas has been added to the list!
// Grapes has been removed from the list!
// ['Apples', 'Oranges', 'Bananas']
|
#!/usr/bin/env bash
# Manual helpers for producing test records to the oltp.dbo.tablea topic.
# First: plain (schemaless) console producer, run inside the broker container.
docker exec broker bash -c "kafka-console-producer --broker-list broker:9092 --topic oltp.dbo.tablea"
# Then: Avro producer with key/value schemas.  The exec below opens an
# interactive shell in the schema-registry container; the producer command
# that follows is meant to be typed inside that shell.
docker exec -it schema-registry /bin/bash
kafka-avro-console-producer --topic oltp.dbo.tablea \
--bootstrap-server broker:9092 \
--property key.schema="$(< /opt/app/schema/key_detail.avsc)" \
--property value.schema="$(< /opt/app/schema/order_detail.avsc)" \
--property parse.key=true \
--property key.separator="#"
##
# Sample input records for the Avro producer, in key#value form:
# {"number":"122345"}#{"number": 2343434, "date": 1596490462, "shipping_address": "456 Everett St, Palo Alto, 94301 CA, USA", "subtotal": 99.0, "shipping_cost": 0.0, "tax": 8.91, "grand_total": 107.91}
# {"number":"256743"}#{"number": 2343435, "date": 1596491687, "shipping_address": "518 Castro St, Mountain View, 94305 CA, USA", "subtotal": 25.0, "shipping_cost": 0.0, "tax": 2.91, "grand_total": 27.91}
##
|
// Doxygen-style generated search-index fragment mapping the lowercase
// term 'hash' to the Hash class page; presumably machine-generated —
// avoid editing by hand.
var searchData=
[
  ['hash',['Hash',['../classHash.html',1,'']]]
];
|
import requests
import lxml.html as lh
# URL of the page whose HTML table lists the cities.
url = 'http://example.com/cities'
# Accumulates one (name, country, population, area) tuple per row.
cities = []
# Fetch the page.  No timeout or status check: a bad response surfaces as
# an exception (or an empty table) further down.
response = requests.get(url)
# Parse the HTML and collect every table row on the page.
doc = lh.fromstring(response.content)
tr_elements = doc.xpath('//tr')
# Skip the header row, then read the first four cells of each data row.
# NOTE(review): assumes every row has at least 4 <td> text cells in the
# order name, country, population, area — confirm against the live page.
for tr in tr_elements[1:]:
    td = tr.xpath('td/text()')
    name = td[0].strip()
    country = td[1].strip()
    population = td[2].strip()
    area = td[3].strip()
    # Add the row as a tuple to the result list.
    cities.append((name,country,population,area))
# Print the list of cities
print(cities)
|
# ---------------------------------------------------------------------------
# Host inspection: gather OS, CPU, memory and disk figures for the
# pre-install sanity checks below.  OS_NAME, DISK_MIN, TEMP_DIR, BOOST_ROOT
# etc. are defined earlier in this script.
# ---------------------------------------------------------------------------
OS_VER=$( grep VERSION_ID /etc/os-release | cut -d'=' -f2 | sed 's/[^0-9\.]//gI' )
OS_MAJ=$(echo "${OS_VER}" | cut -d'.' -f1)
OS_MIN=$(echo "${OS_VER}" | cut -d'.' -f2)
# NOTE(review): the "|| cut ..." fallbacks below read from empty stdin if the
# first cut fails; they look vestigial but are kept for compatibility.
MEM_MEG=$( free -m | sed -n 2p | tr -s ' ' | cut -d\ -f2 || cut -d' ' -f2 )
CPU_SPEED=$( lscpu | grep -m1 "MHz" | tr -s ' ' | cut -d\ -f3 || cut -d' ' -f3 | cut -d'.' -f1 )
CPU_CORE=$( lscpu -pCPU | grep -v "#" | wc -l )
MEM_GIG=$(( ((MEM_MEG / 1000) / 2) ))
# Parallel build jobs: half the RAM in GB, capped at the core count.
JOBS=$(( MEM_GIG > CPU_CORE ? CPU_CORE : MEM_GIG ))
DISK_INSTALL=$(df -h . | tail -1 | tr -s ' ' | cut -d\ -f1 || cut -d' ' -f1)
DISK_TOTAL_KB=$(df . | tail -1 | awk '{print $2}')
DISK_AVAIL_KB=$(df . | tail -1 | awk '{print $4}')
DISK_TOTAL=$(( DISK_TOTAL_KB / 1048576 ))
DISK_AVAIL=$(( DISK_AVAIL_KB / 1048576 ))
printf "\\n\\tOS name: %s\\n" "${OS_NAME}"
printf "\\tOS Version: %s\\n" "${OS_VER}"
printf "\\tCPU speed: %sMhz\\n" "${CPU_SPEED}"
printf "\\tCPU cores: %s\\n" "${CPU_CORE}"
printf "\\tPhysical Memory: %s Mgb\\n" "${MEM_MEG}"
printf "\\tDisk install: %s\\n" "${DISK_INSTALL}"
printf "\\tDisk space total: %sG\\n" "${DISK_TOTAL%.*}"
printf "\\tDisk space available: %sG\\n" "${DISK_AVAIL%.*}"
# NOTE(review): the message says 7 GB but the guard only rejects < 1 MB of
# reported memory — confirm the intended threshold.
if [ "${MEM_MEG}" -lt 1 ]; then
printf "\\tYour system must have 7 or more Gigabytes of physical memory installed.\\n"
printf "\\tExiting now.\\n"
exit 1
fi
# Minimum supported distro versions.
case "${OS_NAME}" in
"Linux Mint")
if [ "${OS_MAJ}" -lt 18 ]; then
printf "\\tYou must be running Linux Mint 18.x or higher to install EOSIO.\\n"
printf "\\tExiting now.\\n"
exit 1
fi
;;
"Ubuntu")
if [ "${OS_MAJ}" -lt 16 ]; then
printf "\\tYou must be running Ubuntu 16.04.x or higher to install EOSIO.\\n"
printf "\\tExiting now.\\n"
exit 1
fi
;;
"Debian")
# Fix: quote OS_MAJ so an empty value cannot break the test expression.
if [ "${OS_MAJ}" -lt 10 ]; then
printf "\tYou must be running Debian 10 to install EOSIO, and resolve missing dependencies from unstable (sid).\n"
printf "\tExiting now.\n"
exit 1
fi
;;
esac
if [ "${DISK_AVAIL%.*}" -lt 1 ]; then
printf "\\tYou must have at least %sGB of available storage to install EOSIO.\\n" "${DISK_MIN}"
printf "\\tExiting now.\\n"
exit 1
fi
# Apt packages required to build EOSIO; lcov is appended when code
# coverage is enabled.
DEP_ARRAY=(clang-4.0 lldb-4.0 libclang-4.0-dev cmake make automake libbz2-dev libssl-dev \
libgmp3-dev autotools-dev build-essential libicu-dev python2.7-dev python3-dev \
autoconf libtool curl zlib1g-dev doxygen graphviz)
COUNT=1
DISPLAY=""
DEP=""
if [[ "${ENABLE_CODE_COVERAGE}" == true ]]; then
DEP_ARRAY+=(lcov)
fi
printf "\\n\\tChecking for installed dependencies.\\n\\n"
# Probe each package with dpkg; collect missing ones into DEP (for apt) and
# DISPLAY (for the numbered prompt below).
for (( i=0; i<${#DEP_ARRAY[@]}; i++ ));
do
pkg=$( dpkg -s "${DEP_ARRAY[$i]}" 2>/dev/null | grep Status | tr -s ' ' | cut -d\ -f4 )
if [ -z "$pkg" ]; then
DEP=$DEP" ${DEP_ARRAY[$i]} "
DISPLAY="${DISPLAY}${COUNT}. ${DEP_ARRAY[$i]}\\n\\t"
printf "\\tPackage %s ${bldred} NOT ${txtrst} found.\\n" "${DEP_ARRAY[$i]}"
(( COUNT++ ))
else
printf "\\tPackage %s found.\\n" "${DEP_ARRAY[$i]}"
continue
fi
done
# Install anything missing after asking the user; is_noninteractive pipes a
# "1" ("Yes") into the select prompt for unattended runs.
if [ "${COUNT}" -gt 1 ]; then
printf "\\n\\tThe following dependencies are required to install EOSIO.\\n"
printf "\\n\\t${DISPLAY}\\n\\n"
printf "\\tDo you wish to install these packages?\\n"
if is_noninteractive; then exec <<< "1"; fi
select yn in "Yes" "No"; do
case $yn in
[Yy]* )
printf "\\n\\n\\tInstalling dependencies\\n\\n"
sudo apt-get update
if ! sudo apt-get -y install ${DEP}
then
printf "\\n\\tDPKG dependency failed.\\n"
printf "\\n\\tExiting now.\\n"
exit 1
else
printf "\\n\\tDPKG dependencies installed successfully.\\n"
fi
break;;
[Nn]* ) echo "User aborting installation of required dependencies, Exiting now."; exit;;
* ) echo "Please type 1 for yes or 2 for no.";;
esac
done
else
printf "\\n\\tNo required dpkg dependencies to install.\\n"
fi
# Boost 1.67: relocate a previously-unpacked copy into BOOST_ROOT if present
# (invalidating BUILD_DIR), then verify the installed version and
# (re)install from source when it differs.
if [ -d "${HOME}/opt/boost_1_67_0" ]; then
if ! mv "${HOME}/opt/boost_1_67_0" "$BOOST_ROOT"
then
printf "\\n\\tUnable to move directory %s/opt/boost_1_67_0 to %s.\\n" "${HOME}" "${BOOST_ROOT}"
printf "\\n\\tExiting now.\\n"
exit 1
fi
if [ -d "$BUILD_DIR" ]; then
if ! rm -rf "$BUILD_DIR"
then
printf "\\tUnable to remove directory %s. Please remove this directory and run this script %s again. 0\\n" "$BUILD_DIR" "${BASH_SOURCE[0]}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
fi
fi
printf "\\n\\tChecking boost library installation.\\n"
# Extract BOOST_LIB_VERSION (e.g. "1_67") from the installed version.hpp.
BVERSION=$( grep BOOST_LIB_VERSION "${BOOST_ROOT}/include/boost/version.hpp" 2>/dev/null \
| tail -1 | tr -s ' ' | cut -d\ -f3 | sed 's/[^0-9\._]//gI')
if [ "${BVERSION}" != "1_67" ]; then
printf "\\tRemoving existing boost libraries in %s/opt/boost* .\\n" "${HOME}"
if ! rm -rf "${HOME}"/opt/boost*
then
printf "\\n\\tUnable to remove deprecated boost libraries at this time.\\n"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
printf "\\tInstalling boost libraries.\\n"
if ! cd "${TEMP_DIR}"
then
printf "\\n\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
# Download, unpack, bootstrap, build and install boost; every step aborts
# the script on failure.
STATUS=$(curl -LO -w '%{http_code}' --connect-timeout 30 https://dl.bintray.com/boostorg/release/1.67.0/source/boost_1_67_0.tar.bz2)
if [ "${STATUS}" -ne 200 ]; then
printf "\\tUnable to download Boost libraries at this time.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! tar xf "${TEMP_DIR}/boost_1_67_0.tar.bz2"
then
printf "\\n\\tUnable to unarchive file %s/boost_1_67_0.tar.bz2.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! rm -f "${TEMP_DIR}/boost_1_67_0.tar.bz2"
then
printf "\\n\\tUnable to remove file %s/boost_1_67_0.tar.bz2.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}/boost_1_67_0/"
then
printf "\\n\\tUnable to enter directory %s/boost_1_67_0.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! ./bootstrap.sh "--prefix=$BOOST_ROOT"
then
printf "\\n\\tInstallation of boost libraries failed. 0\\n"
printf "\\n\\tExiting now.\\n\\n"
exit 1
fi
if ! ./b2 -j"${CPU_CORE}" install
then
printf "\\n\\tInstallation of boost libraries failed. 1\\n"
printf "\\n\\tExiting now.\\n\\n"
exit 1
fi
if ! rm -rf "${TEMP_DIR}"/boost_1_67_0
then
printf "\\n\\tUnable to remove %s/boost_1_67_0.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n\\n"
exit 1
fi
# A fresh boost invalidates any existing EOSIO build directory.
if [ -d "$BUILD_DIR" ]; then
if ! rm -rf "$BUILD_DIR"
then
printf "\\tUnable to remove directory %s. Please remove this directory and run this script %s again. 0\\n" "$BUILD_DIR" "${BASH_SOURCE[0]}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
fi
printf "\\tBoost successfully installed @ %s.\\n" "${BOOST_ROOT}"
else
printf "\\tBoost found at %s.\\n" "${BOOST_ROOT}"
fi
printf "\\n\\tChecking MongoDB installation.\\n"
# A missing mongod.conf is taken to mean MongoDB is not installed yet.
if [ ! -e "${MONGOD_CONF}" ]; then
printf "\\n\\tInstalling MongoDB 3.6.3.\\n"
if ! cd "${HOME}/opt"
then
printf "\\n\\tUnable to enter directory %s/opt.\\n" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
# Download and unpack the official tarball, then expose it through a
# ~/opt/mongodb symlink with data/ and log/ directories beside it.
STATUS=$(curl -LO -w '%{http_code}' --connect-timeout 30 https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-3.6.3.tgz)
if [ "${STATUS}" -ne 200 ]; then
printf "\\tUnable to download MongoDB at this time.\\n"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! tar xf "${HOME}/opt/mongodb-linux-x86_64-3.6.3.tgz"
then
printf "\\tUnable to unarchive file %s/opt/mongodb-linux-x86_64-3.6.3.tgz.\\n" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! rm -f "${HOME}/opt/mongodb-linux-x86_64-3.6.3.tgz"
then
printf "\\tUnable to remove file %s/opt/mongodb-linux-x86_64-3.6.3.tgz.\\n" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! ln -s "${HOME}/opt/mongodb-linux-x86_64-3.6.3/" "${HOME}/opt/mongodb"
then
printf "\\tUnable to symbolic link %s/opt/mongodb-linux-x86_64-3.6.3/ to %s/opt/mongodb.\\n" "${HOME}" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! mkdir "${HOME}/opt/mongodb/data"
then
printf "\\tUnable to create directory %s/opt/mongodb/data.\\n" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! mkdir "${HOME}/opt/mongodb/log"
then
printf "\\tUnable to create directory %s/opt/mongodb/log.\\n" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
if ! touch "${HOME}/opt/mongodb/log/mongodb.log"
then
printf "\\tUnable to create file %s/opt/mongodb/log/mongodb.log.\\n" "${HOME}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
# Write a minimal mongod.conf: file logging, localhost-only binding (v4+v6),
# dbPath under ~/opt/mongodb/data.  MONGOD_CONF is defined earlier.
if ! tee > /dev/null "${MONGOD_CONF}" <<mongodconf
systemLog:
destination: file
path: ${HOME}/opt/mongodb/log/mongodb.log
logAppend: true
logRotate: reopen
net:
bindIp: 127.0.0.1,::1
ipv6: true
storage:
dbPath: ${HOME}/opt/mongodb/data
mongodconf
then
printf "\\tUnable to write to file %s.\\n" "${MONGOD_CONF}"
printf "\\n\\tExiting now.\\n\\n"
exit 1;
fi
printf "\\n\\tMongoDB successfully installed at %s/opt/mongodb.\\n" "${HOME}"
else
printf "\\tMongoDB configuration found at %s.\\n" "${MONGOD_CONF}"
fi
printf "\\n\\tChecking MongoDB C++ driver installation.\\n"
# Decide whether the Mongo C/C++ drivers need (re)installing: required when
# the static lib is absent or the installed mongocxx version is not 3.3+.
MONGO_INSTALL=true
if [ -e "/usr/local/lib/libmongocxx-static.a" ]; then
MONGO_INSTALL=false
if ! version=$( grep "Version:" /usr/local/lib/pkgconfig/libmongocxx-static.pc | tr -s ' ' | awk '{print $2}' )
then
printf "\\tUnable to determine mongodb-cxx-driver version.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
maj=$( echo "${version}" | cut -d'.' -f1 )
min=$( echo "${version}" | cut -d'.' -f2 )
if [ "${maj}" -gt 3 ]; then
MONGO_INSTALL=true
elif [ "${maj}" -eq 3 ] && [ "${min}" -lt 3 ]; then
MONGO_INSTALL=true
fi
fi
# Fix: quote the variable so the test cannot break if it were ever unset.
if [ "${MONGO_INSTALL}" == "true" ]; then
if ! cd "${TEMP_DIR}"
then
printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
# Build and install the C driver 1.10.2 from the release tarball.
STATUS=$( curl -LO -w '%{http_code}' --connect-timeout 30 https://github.com/mongodb/mongo-c-driver/releases/download/1.10.2/mongo-c-driver-1.10.2.tar.gz )
if [ "${STATUS}" -ne 200 ]; then
if ! rm -f "${TEMP_DIR}/mongo-c-driver-1.10.2.tar.gz"
then
printf "\\tUnable to remove file %s/mongo-c-driver-1.10.2.tar.gz.\\n" "${TEMP_DIR}"
fi
printf "\\tUnable to download MongoDB C driver at this time.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! tar xf mongo-c-driver-1.10.2.tar.gz
then
printf "\\tUnable to unarchive file %s/mongo-c-driver-1.10.2.tar.gz.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! rm -f "${TEMP_DIR}/mongo-c-driver-1.10.2.tar.gz"
then
printf "\\tUnable to remove file mongo-c-driver-1.10.2.tar.gz.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}"/mongo-c-driver-1.10.2
then
printf "\\tUnable to cd into directory %s/mongo-c-driver-1.10.2.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! mkdir cmake-build
then
printf "\\tUnable to create directory %s/mongo-c-driver-1.10.2/cmake-build.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! cd cmake-build
then
printf "\\tUnable to enter directory %s/mongo-c-driver-1.10.2/cmake-build.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
# Static build, OpenSSL, manual init/cleanup (EOSIO calls mongoc_init itself).
if ! cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DENABLE_BSON=ON \
-DENABLE_SSL=OPENSSL -DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF -DENABLE_STATIC=ON ..
then
printf "\\tConfiguring MongoDB C driver has encountered the errors above.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! make -j"${CPU_CORE}"
then
printf "\\tError compiling MongoDB C driver.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! sudo make install
then
printf "\\tError installing MongoDB C driver.\\nMake sure you have sudo privileges.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}"
then
printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! rm -rf "${TEMP_DIR}/mongo-c-driver-1.10.2"
then
printf "\\tUnable to remove directory %s/mongo-c-driver-1.10.2.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
# Build and install the C++ driver from the v3.3 release branch.
if ! git clone https://github.com/mongodb/mongo-cxx-driver.git --branch releases/v3.3 --depth 1
then
printf "\\tUnable to clone MongoDB C++ driver at this time.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}/mongo-cxx-driver/build"
then
printf "\\tUnable to enter directory %s/mongo-cxx-driver/build.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! cmake -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local ..
then
printf "\\tCmake has encountered the above errors building the MongoDB C++ driver.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! sudo make -j"${CPU_CORE}"
then
printf "\\tError compiling MongoDB C++ driver.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! sudo make install
then
printf "\\tError installing MongoDB C++ driver.\\nMake sure you have sudo privileges.\\n"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}"
then
printf "\\tUnable to enter directory %s.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
if ! sudo rm -rf "${TEMP_DIR}/mongo-cxx-driver"
then
# Fix: the stray second "${TEMP_DIR}" operand made printf repeat the
# whole format string, printing the error line twice.
printf "\\tUnable to remove directory %s/mongo-cxx-driver.\\n" "${TEMP_DIR}"
printf "\\tExiting now.\\n\\n"
exit 1;
fi
printf "\\tMongo C++ driver installed at /usr/local/lib/libmongocxx-static.a.\\n"
else
printf "\\tMongo C++ driver found at /usr/local/lib/libmongocxx-static.a.\\n"
fi
printf "\\n\\tChecking for LLVM with WASM support.\\n"
# Build LLVM 4.0 + clang from source with the experimental WebAssembly
# backend, installed under ~/opt/wasm.  Skipped when the bin dir exists.
if [ ! -d "${HOME}/opt/wasm/bin" ]; then
# Build LLVM and clang with WASM support:
printf "\\tInstalling LLVM with WASM\\n"
if ! cd "${TEMP_DIR}"
then
printf "\\n\\tUnable to cd into directory %s.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! mkdir "${TEMP_DIR}/llvm-compiler" 2>/dev/null
then
printf "\\n\\tUnable to create directory %s/llvm-compiler.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}/llvm-compiler"
then
printf "\\n\\tUnable to enter directory %s/llvm-compiler.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! git clone --depth 1 --single-branch --branch release_40 https://github.com/llvm-mirror/llvm.git
then
printf "\\tUnable to clone llvm repo @ https://github.com/llvm-mirror/llvm.git.\\n"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
# clang is cloned into llvm/tools so the LLVM build picks it up.
if ! cd "${TEMP_DIR}/llvm-compiler/llvm/tools"
then
printf "\\tUnable to enter directory %s/llvm-compiler/llvm/tools.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! git clone --depth 1 --single-branch --branch release_40 https://github.com/llvm-mirror/clang.git
then
printf "\\tUnable to clone clang repo @ https://github.com/llvm-mirror/clang.git.\\n"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}/llvm-compiler/llvm"
then
printf "\\tUnable to enter directory %s/llvm-compiler/llvm.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! mkdir "${TEMP_DIR}/llvm-compiler/llvm/build"
then
printf "\\tUnable to create directory %s/llvm-compiler/llvm/build.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! cd "${TEMP_DIR}/llvm-compiler/llvm/build"
then
printf "\\tUnable to enter directory %s/llvm-compiler/llvm/build.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
# Empty target list + WebAssembly as the only experimental target.
if ! cmake -G "Unix Makefiles" -DCMAKE_INSTALL_PREFIX="${HOME}/opt/wasm" -DLLVM_TARGETS_TO_BUILD= \
-DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly -DCMAKE_BUILD_TYPE=Release ../
then
printf "\\tError compiling LLVM and clang with EXPERIMENTAL WASM support.0\\n"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! make -j"${JOBS}" install
then
printf "\\tError compiling LLVM and clang with EXPERIMENTAL WASM support.1\\n"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
if ! rm -rf "${TEMP_DIR}/llvm-compiler"
then
printf "\\tUnable to remove directory %s/llvm-compiler.\\n" "${TEMP_DIR}"
printf "\\n\\tExiting now.\\n"
exit 1;
fi
# Fix: "successffully" typo in the user-facing status message.
printf "\\n\\tWASM successfully installed @ %s/opt/wasm/bin.\\n\\n" "${HOME}"
else
printf "\\tWASM found at %s/opt/wasm/bin.\\n" "${HOME}"
fi
# Print post-install usage hints: extend PATH with the mongodb bin dir,
# start mongod with the generated config, then run the test suite from
# BUILD_DIR.  Always returns success.
function print_instructions()
{
printf '\n\texport PATH=${HOME}/opt/mongodb/bin:$PATH\n'
printf "\\t%s -f %s &\\n" "$( command -v mongod )" "${MONGOD_CONF}"
printf "\\tcd %s; make test\\n\\n" "${BUILD_DIR}"
return 0
}
|
<gh_stars>1-10
//
// ReservationInputViewController.h
// CinemaCity
//
// Created by <NAME> on 29/03/14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
// Table-based form controller for entering reservation details.
// NOTE(review): the properties below appear to be inputs set by the
// presenting screen before this controller is shown (names are the only
// evidence here) — confirm against the presenting code/storyboard.
@interface ReservationInputViewController : UITableViewController
@property (nonatomic, strong) NSString *cinemaID;     // target cinema identifier
@property (nonatomic, strong) NSString *venueTypeID;  // venue/screen type identifier
@property (nonatomic, strong) NSString *featureCode;  // feature (film) code
@property (nonatomic, strong) NSURL *detailURL;       // URL with further details
@property (nonatomic, strong) NSDate *date;           // reservation date
@end
|
package com.dimafeng.testcontainers.integration
import java.net.InetSocketAddress
import com.datastax.oss.driver.api.core.CqlSession
import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint
import com.dimafeng.testcontainers.{CassandraContainer, ForAllTestContainer}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
// Integration spec: ForAllTestContainer starts one Cassandra container for
// the whole suite; the test verifies the node answers a trivial query.
class CassandraSpec extends AnyFlatSpec with ForAllTestContainer with Matchers {
override val container: CassandraContainer = CassandraContainer()
"Cassandra container" should "be started" in {
// Connect to the container's mapped host/port; the contact point is
// created unresolved so the driver does not try to DNS-resolve it.
val session = CqlSession.builder
.addContactEndPoint(new DefaultEndPoint(InetSocketAddress
.createUnresolved(container.cassandraContainer.getContainerIpAddress,
container.cassandraContainer.getFirstMappedPort.intValue())))
.withLocalDatacenter("datacenter1").build()
val rs = session.execute("select release_version from system.local")
val row = rs.one
// A non-empty release_version proves the node processed the query.
row.getString("release_version").length should be > 0
}
}
|
# Two hard-coded ID batches kept for reference; note they are dead data:
# `images` is reset and refilled from ids.txt below, and `images2` is unused.
images=('00761_00761_00018' '00761_00761_00289' '01012_00018_01012' '01012_00841_01012'
'00761_00761_00059' '00761_00761_00314' '01012_00059_01012' '01012_01037_01012'
'00761_00761_00079' '00761_00761_00427' '01012_00421_01012' '00761_00761_00171'
'00761_00761_00736' '01012_00623_01012' '67172_00018_67172' '67172_00421_67172'
'67172_01037_67172' '67172_67172_00079'
'67172_67172_00314' '67172_00059_67172' '67172_00780_67172' '67172_67172_00018'
'67172_67172_00171' '67172_67172_00427' '67172_00380_67172' '67172_00841_67172'
'67172_67172_00059' '67172_67172_00289' '67172_67172_00736')
images2=('00018_00059_00079' '00059_00079_00131' '00079_00059_00018' '00131_00066_00001'
'00037_00079_00001' '00059_00275_00171' '00081_00079_00131' '01012_01037_00436'
'00037_00171_00289' '00066_00200_00018' '00110_00059_00171' '56006_67172_00722')
filename="ids.txt"
images=()
# Read one image ID per line.  Fix: quote the append so IDs cannot be
# word-split or glob-expanded.
while read -r line; do
name="$line"
images+=("$name")
done < "$filename"
len=${#images[@]}
echo "$len"
# Run inference for one image ID ($2) pinned to GPU $1.
train () {
export CUDA_VISIBLE_DEVICES="$1"
# Fix: the original continuation had no space before the backslash, which
# glued "inference.pth" and "--img_path" into one argument.
python exp/inference/inference_pairs.py --loadmodel data/pretrained_model/inference.pth \
--img_path "synth/$2"
}
# Process four images at a time, one per GPU, waiting for each batch.
# (On the final partial batch the out-of-range indices expand to empty.)
for (( i=0; i<len; i=i+4 )); do
train 0 "${images[$i]}" &
train 1 "${images[$((i+1))]}" &
train 2 "${images[$((i+2))]}" &
train 3 "${images[$((i+3))]}" &
wait
done
|
// Aditya's code for a recursive singly-linked-list reverse
#include<stdio.h>
#include<stdlib.h>
#include"ll.h"
/*
 * Recursively reverse a singly linked list.
 *
 * Invoked as reverse(head, NULL).  `temp` carries the already-reversed
 * prefix; each step detaches the head node and pushes it onto that prefix.
 * Returns the new head of the reversed list (NULL for an empty list).
 *
 * Fix: the original left `final` uninitialized on the recursive path (so
 * garbage was returned) and relinked nodes after the recursion unwound,
 * corrupting the list; it also crashed on a NULL head.
 */
node *reverse(node *head,node *temp){
	if(head==NULL){
		/* Nothing left to detach: the accumulated prefix is the answer. */
		return temp;
	}
	node *rest = head->next; /* save the remainder before relinking */
	head->next = temp;       /* push the current node onto the reversed prefix */
	return reverse(rest,head);
}
/* Build a 10-node list (insertAtEnd/display come from ll.h), reverse it,
 * and print the result. */
int main(void){
	node *head = NULL;
	/* Creating the list: insertAtEnd appends a node and returns the head. */
	for(int i=10;i>0;i--){
		head = insertAtEnd(head);
	}
	head = reverse(head,NULL);
	display(head);
	return 0; /* fix: main must return int, not void */
}
|
# Print every out_* filename on its own line.
# Fix: quote "$FILE" so names containing whitespace are not word-split.
for FILE in out_*; do
echo "$FILE"
done
|
require File.expand_path('../../../test_helper', __FILE__)
module Etsy
# Shoulda-style unit tests for Etsy::Shop: the class-level finders are
# exercised against mock_request fixtures, then attribute readers are
# checked against a single-shop fixture hash.
class ShopTest < Test::Unit::TestCase
context "The Shop class" do
should "be able to find a single shop" do
shops = mock_request('/shops/littletjane', {}, 'Shop', 'getShop.single.json')
Shop.find('littletjane').should == shops.first
end
should "be able to find multiple shops" do
shops = mock_request('/shops/littletjane,reagent', {}, 'Shop', 'getShop.multiple.json')
Shop.find('littletjane', 'reagent').should == shops
end
should "be able to find all shops" do
shops = mock_request('/shops', {}, 'Shop', 'findAllShop.json')
Shop.all.should == shops
end
should "return an array of shops if there is only 1 result returned" do
shops = mock_request('/shops', {}, 'Shop', 'findAllShop.single.json')
Shop.all.should == shops
end
should "allow a configurable limit when finding all shops" do
shops = mock_request('/shops', {:limit => 100, :offset => 0}, 'Shop', 'findAllShop.json')
Shop.all(:limit => 100).should == shops
end
end
context "An instance of the Shop class" do
# Attribute readers, driven by the getShop.single.json fixture.
context "with response data" do
setup do
data = read_fixture('shop/getShop.single.json')
@shop = Shop.new(data.first)
end
should "have a value for :id" do
@shop.id.should == 5500349
end
should "have a value for :user_id" do
@shop.user_id.should == 5327518
end
should "have a value for :image_url" do
@shop.image_url.should == "http://ny-image3.etsy.com/iusb_760x100.8484779.jpg"
end
should "have a value for :url" do
@shop.url.should == "http://www.etsy.com/shop/littletjane"
end
should "have a value for :favorers_count" do
@shop.favorers_count.should == 684
end
should "have a value for :active_listings_count" do
@shop.active_listings_count.should == 0
end
should "have a value for :updated_at" do
@shop.updated_at.should == Time.at(1274923984)
end
should "have a value for :created_at" do
@shop.created_at.should == Time.at(1237430331)
end
should "have a value for :name" do
@shop.name.should == "littletjane"
end
should "have a value for :title" do
@shop.title.should == "a cute and crafty mix of handmade goods."
end
should "have a value for :message" do
@shop.message.should == "thanks!"
end
should "have a value for :announcement" do
@shop.announcement.should == "announcement"
end
end
# listings should delegate to Listing.find_all_by_shop_id with the shop id.
should "have a collection of listings" do
shop = Shop.new
shop.stubs(:id).with().returns(1)
Listing.stubs(:find_all_by_shop_id).with(1, {}).returns('listings')
shop.listings.should == 'listings'
end
end
end
end
|
import { Address, BigDecimal, BigInt, log} from '@graphprotocol/graph-ts'
import { BondDiscount } from '../../generated/schema'
import { } from './Constants';
import { hourFromTimestamp } from './Dates';
import { toDecimal } from './Decimals';
import { getRIPUSDRate } from './Price';
// Load the BondDiscount entity for the hour bucket derived from `timestamp`,
// creating it with every per-bond discount zeroed (and saving it) on first
// access.
export function loadOrCreateBondDiscount(timestamp: BigInt): BondDiscount{
let hourTimestamp = hourFromTimestamp(timestamp);
let bondDiscount = BondDiscount.load(hourTimestamp)
if (bondDiscount == null) {
bondDiscount = new BondDiscount(hourTimestamp)
bondDiscount.timestamp = timestamp
bondDiscount.dai_discount = BigDecimal.fromString("0")
bondDiscount.ripdai_discount = BigDecimal.fromString("0")
bondDiscount.frax_discount = BigDecimal.fromString("0")
bondDiscount.ripfrax_discount = BigDecimal.fromString("0")
bondDiscount.eth_discount = BigDecimal.fromString("0")
bondDiscount.lusd_discount = BigDecimal.fromString("0")
bondDiscount.riplusd_discount = BigDecimal.fromString("0")
bondDiscount.save()
}
// AssemblyScript requires the explicit cast back to the non-null type.
return bondDiscount as BondDiscount
}
// Recompute bond discounts for the entity keyed by `blockNumber`.
// NOTE(review): blockNumber is passed where loadOrCreateBondDiscount
// expects a timestamp, so the hour bucket is derived from a block number —
// confirm this is intentional.
// The per-bond discount math is entirely commented out below, so currently
// this only (re)saves the entity; ripRate is fetched but unused.
export function updateBondDiscounts(blockNumber: BigInt): void{
let bd = loadOrCreateBondDiscount(blockNumber);
let ripRate = getRIPUSDRate(blockNumber);
// if(blockNumber.gt(BigInt.fromString(RIPDAISLPBOND_CONTRACT4_BLOCK))){
// let bond = RIPDAIBondV4.bind(Address.fromString(RIPDAISLPBOND_CONTRACT4))
// let price_call = bond.try_bondPriceInUSD()
// if(price_call.reverted===false && price_call.value.gt(BigInt.fromI32(0))){
// bd.ripdai_discount = ripRate.div(toDecimal(price_call.value, 18))
// bd.ripdai_discount = bd.ripdai_discount.minus(BigDecimal.fromString("1"))
// bd.ripdai_discount = bd.ripdai_discount.times(BigDecimal.fromString("100"))
// log.debug("RIPDAI Discount RIP price {} Bond Price {} Discount {}", [ripRate.toString(), price_call.value.toString(), bd.ripfrax_discount.toString()])
// }
// }
bd.save()
}
|
<filename>cspBackEnd/src/pythagorean_triples.cpp
#include "pch.h"
#include "pythagorean_triples.h"
using json = nlohmann::json;
// Build the Pythagorean-triples CSP: variables named x, y, z over the
// domain 1..n, one constraint enforcing x^2 + y^2 == z^2 and one enforcing
// strict ordering.  Variables and constraints are appended to the
// caller-owned vectors so the references held by the returned
// ConstraintProblem remain valid after this function returns.
csp::ConstraintProblem<int> constructPythagoreanTriplesProblem(int n, std::vector<csp::Variable<int>>& variables,
std::vector<csp::Constraint<int>>& constraints)
{
std::unordered_set<int> domain;
domain.reserve(n);
for (int i = 1; i < n + 1; ++i)
domain.insert(i);
const std::unordered_set<std::string> names{ "x", "y", "z" };
std::unordered_map<std::string, std::reference_wrapper<csp::Variable<int>>> nameToVarRefMap =
csp::Variable<int>::constructFromNamesToEqualDomainPutInVec(names, domain, variables);
// NOTE(review): the unordered-map iteration order fixes which position in
// assignedValues corresponds to x/y/z — confirm the csp library pairs
// assignedValues with variablesRefs in this same order.
std::vector<std::reference_wrapper<csp::Variable<int>>> variablesRefs;
for (const auto& [name, varRef] : nameToVarRefMap)
variablesRefs.push_back(varRef);
csp::Constraint<int> pythagoreanTripleConstraint{ variablesRefs,
[](const std::vector<int>& assignedValues) -> bool
{
// Partial assignments are accepted; only a full triple is tested.
if (assignedValues.size() < 3)
{
return true;
}
int xVal = assignedValues.at(0);
int yVal = assignedValues.at(1);
int zVal = assignedValues.at(2);
return xVal * xVal + yVal * yVal == zVal * zVal;
}
};
csp::Constraint<int> totalOrderConstraint{ variablesRefs,
[](const std::vector< int>& assignedValues) -> bool
{
if (assignedValues.size() < 3)
{
return true;
}
// Strict ordering rules out permutations of the same triple.
return assignedValues.at(0) < assignedValues.at(1) && assignedValues.at(1) < assignedValues.at(2);
}
};
constraints.emplace_back(pythagoreanTripleConstraint);
constraints.emplace_back(totalOrderConstraint);
std::vector<std::reference_wrapper<csp::Constraint<int>>> constraintsRefs{ constraints.begin(), constraints.end() };
csp::ConstraintProblem<int> pythagoreanTriplesProblem{ constraintsRefs, nameToVarRefMap };
return pythagoreanTriplesProblem;
}
// Solve the Pythagorean-triples CSP for n parsed from `strN` and return a
// JSON object of the form { strN: [ {"x":..,"y":..,"z":..}, ... ] }.
// Note: stoi may throw on a non-numeric strN; callers see that exception.
json get_pythagorean_triples_solutions(const std::string& strN)
{
json solutions;
solutions[strN] = { };
std::vector<csp::Variable<int>> pythTriplesVars;
std::vector<csp::Constraint<int>> pythTriplesConstrs;
csp::ConstraintProblem<int> pythagoreanTriplesProblem =
constructPythagoreanTriplesProblem(stoi(strN), pythTriplesVars, pythTriplesConstrs);
// Backtracking search: MRV + degree heuristic for variable selection,
// least-constraining-value for value ordering; collects every solution.
const std::unordered_set<csp::Assignment<int>> sols = csp::heuristicBacktrackingSolver_findAllSolutions<int>(
pythagoreanTriplesProblem,
csp::minimumRemainingValues_primarySelector<int>,
csp::degreeHeuristic_secondarySelector<int>,
csp::leastConstrainingValue<int>);
std::clog << "pythagorean triples found " << sols.size() << " solutions\n";
// Pre-size the JSON array, then fill it by replaying each assignment onto
// the problem and reading back the name -> value map.
for (size_t i = 0; i < sols.size(); ++i)
solutions[strN].push_back({ });
size_t i = 0;
for (const csp::Assignment<int>& assignment : sols)
{
pythagoreanTriplesProblem.unassignAllVariables();
pythagoreanTriplesProblem.assignFromAssignment(assignment);
std::unordered_map<std::string, int> nameToSol = pythagoreanTriplesProblem.GetNameToSolution();
json j = nameToSol;
solutions[strN][i] = j;
++i;
}
std::clog << "pythagorean triples sends: \n";
std::clog << solutions;
std::clog << '\n';
return solutions;
}
|
<gh_stars>1000+
package com.novoda.gradle.release.sample.android;
public class AndroidSample {
public static void hello() {
System.out.println("Hello world from AndroidSample");
}
}
|
import React from 'react';
import { StyleSheet, Text, View, TextInput, Button } from 'react-native';
export default class App extends React.Component {
constructor(props) {
super(props);
this.state = {
people: [{ name: 'John', age: 34, occupation: 'Developer' }, { name: 'Mary', age: 28, occupation: 'Designer' }],
newName: '',
newAge: '',
newOccupation: ''
};
}
render() {
const peopleItems = this.state.people.map(person => (
<View key={person.name}>
<Text>Name: {person.name}</Text>
<Text>Age: {person.age}</Text>
<Text>Occupation: {person.occupation}</Text>
</View>
));
return (
<View style={styles.container}>
<Text>People</Text>
{peopleItems}
<View>
<TextInput
style={styles.input}
value={this.state.newName}
onChangeText={text => this.setState({ newName: text })}
placeholder="Name"
/>
<TextInput
style={styles.input}
value={this.state.newAge}
onChangeText={text => this.setState({ newAge: text })}
placeholder="Age"
/>
<TextInput
style={styles.input}
value={this.state.newOccupation}
onChangeText={text => this.setState({ newOccupation: text })}
placeholder="Occupation"
/>
<Button
onPress={() => {
this.setState(prevState => {
const newPeople = [
...prevState.people,
{ name: prevState.newName, age: prevState.newAge, occupation: prevState.newOccupation }
];
return {
people: newPeople,
newName: '',
newAge '',
newOccupation: ''
};
});
}}
title="Add"
/>
</View>
</View>
);
}
}
// Layout styles: a centered full-screen container and a standard text input.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#fff',
    alignItems: 'center',
    justifyContent: 'center',
  },
  input: {
    width: '80%',
    padding: 10,
    borderWidth: 1,
    marginBottom: 10
  }
});
|
/*
* Copyright © 2020 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*/
import { BatchChain } from '@liskhq/lisk-db';
import { StateDiff } from '../types';
import { DB_KEY_CONSENSUS_STATE } from '../data_access/constants';
import { DataAccess } from '../data_access';
import { CONSENSUS_STATE_FINALIZED_HEIGHT_KEY } from '../constants';
// String-keyed map of raw state values; `undefined` marks an absent key.
interface KeyValuePair {
  [key: string]: Buffer | undefined;
}
/**
 * Buffered key/value view over the consensus-state namespace.
 *
 * Reads are cached in `_data`; writes are staged via `set` and only
 * persisted when `finalize` puts them into the supplied batch. A single
 * snapshot/restore pair allows rolling back staged changes.
 */
export class ConsensusStateStore {
  private readonly _name = 'ConsensusState';
  // Current (possibly dirty) in-memory view of the consensus state.
  private _data: KeyValuePair;
  // Copy of `_data` taken at `createSnapshot` time.
  private _originalData: KeyValuePair;
  // Keys written via `set` since construction.
  private _updatedKeys: Set<string>;
  private _originalUpdatedKeys: Set<string>;
  private readonly _dataAccess: DataAccess;
  // Values as first read from the DB; used in `finalize` to build the revert diff.
  private _initialValue: KeyValuePair;

  public constructor(dataAccess: DataAccess) {
    this._dataAccess = dataAccess;
    this._data = {};
    this._originalData = {};
    this._initialValue = {};
    this._updatedKeys = new Set();
    this._originalUpdatedKeys = new Set();
  }

  /** Remember the current state so `restoreSnapshot` can roll back to it. */
  public createSnapshot(): void {
    this._originalData = { ...this._data };
    this._originalUpdatedKeys = new Set(this._updatedKeys);
  }

  /** Discard all changes made since the last `createSnapshot`. */
  public restoreSnapshot(): void {
    this._data = { ...this._originalData };
    this._updatedKeys = new Set(this._originalUpdatedKeys);
  }

  /** Read a value, consulting the cache first and the DB on a miss. */
  public async get(key: string): Promise<Buffer | undefined> {
    const value = this._data[key];
    if (value) {
      return value;
    }
    const dbValue = await this._dataAccess.getConsensusState(key);
    // If it doesn't exist in the database, return undefined without caching
    if (dbValue === undefined) {
      return dbValue;
    }
    // Finalized height is not stored as part of the diff because it cannot be undone
    if (key !== CONSENSUS_STATE_FINALIZED_HEIGHT_KEY) {
      this._initialValue[key] = dbValue;
    }
    this._data[key] = dbValue;
    return this._data[key];
  }

  /** Unsupported for consensus state; always throws. */
  public getOrDefault(): void {
    throw new Error(`getOrDefault cannot be called for ${this._name}`);
  }

  /** Unsupported for consensus state; always throws. */
  public find(): void {
    // BUG FIX: message previously said "getOrDefault" (copy-paste error).
    throw new Error(`find cannot be called for ${this._name}`);
  }

  // eslint-disable-next-line @typescript-eslint/require-await
  public async set(key: string, value: Buffer): Promise<void> {
    this._data[key] = value;
    this._updatedKeys.add(key);
  }

  /**
   * Stage every updated key into `batch` and return a StateDiff describing
   * how to revert: previous value for updated keys, key only for created ones.
   */
  public finalize(batch: BatchChain): StateDiff {
    const stateDiff = { updated: [], created: [], deleted: [] } as StateDiff;
    if (this._updatedKeys.size === 0) {
      return stateDiff;
    }
    for (const key of Array.from(this._updatedKeys)) {
      const dbKey = `${DB_KEY_CONSENSUS_STATE}:${key}`;
      const updatedValue = this._data[key] as Buffer;
      batch.put(dbKey, updatedValue);
      // finalized height should never be saved to diff, since it will not be changed back
      if (key === CONSENSUS_STATE_FINALIZED_HEIGHT_KEY) {
        continue;
      }
      // Save diff of changed state
      const initialValue = this._initialValue[key];
      if (initialValue !== undefined && !initialValue.equals(updatedValue)) {
        stateDiff.updated.push({
          key: dbKey,
          value: initialValue,
        });
      } else if (initialValue === undefined) {
        stateDiff.created.push(dbKey);
      }
    }
    return stateDiff;
  }
}
|
package lister
import (
"github.com/trek10inc/awsets/context"
"github.com/trek10inc/awsets/resource"
"github.com/aws/aws-sdk-go-v2/service/elasticache"
"github.com/aws/aws-sdk-go-v2/aws"
)
// AWSElasticacheSnapshot lists Elasticache snapshot resources.
type AWSElasticacheSnapshot struct {
}
// Register this lister with the package-level registry at load time.
func init() {
	i := AWSElasticacheSnapshot{}
	listers = append(listers, i)
}
// Types reports the single resource type produced by this lister.
func (l AWSElasticacheSnapshot) Types() []resource.ResourceType {
	supported := []resource.ResourceType{resource.ElasticacheSnapshot}
	return supported
}
// List enumerates Elasticache snapshots (50 per page) and records each
// snapshot's relations to its VPC, KMS key, parameter/subnet groups,
// cluster and replication group.
func (l AWSElasticacheSnapshot) List(ctx context.AWSetsCtx) (*resource.Group, error) {
	svc := elasticache.New(ctx.AWSCfg)
	req := svc.DescribeSnapshotsRequest(&elasticache.DescribeSnapshotsInput{
		MaxRecords: aws.Int64(50),
	})
	rg := resource.NewGroup()
	paginator := elasticache.NewDescribeSnapshotsPaginator(req)
	for paginator.Next(ctx.Context) {
		page := paginator.CurrentPage()
		for _, v := range page.Snapshots {
			// The snapshot name doubles as both id and display name.
			r := resource.New(ctx, resource.ElasticacheSnapshot, v.SnapshotName, v.SnapshotName, v)
			r.AddRelation(resource.Ec2Vpc, v.VpcId, "")
			r.AddRelation(resource.KmsKey, v.KmsKeyId, "")
			r.AddRelation(resource.ElasticacheParameterGroup, v.CacheParameterGroupName, "")
			r.AddRelation(resource.ElasticacheSubnetGroup, v.CacheSubnetGroupName, "")
			r.AddRelation(resource.ElasticacheCluster, v.CacheClusterId, "")
			r.AddRelation(resource.ElasticacheReplicationGroup, v.ReplicationGroupId, "")
			rg.AddResource(r)
		}
	}
	// Err reports any failure encountered during pagination.
	err := paginator.Err()
	return rg, err
}
|
<filename>src/main/java/voot/ErrorController.java
package voot;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.web.ErrorAttributes;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.util.Assert;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
import java.util.Map;
import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
@RestController
@RequestMapping("/error")
public class ErrorController implements org.springframework.boot.autoconfigure.web.ErrorController {

    private final ErrorAttributes errorAttributes;

    @Autowired
    public ErrorController(ErrorAttributes errorAttributes) {
        Assert.notNull(errorAttributes, "ErrorAttributes must not be null");
        this.errorAttributes = errorAttributes;
    }

    @Override
    public String getErrorPath() {
        return "/error";
    }

    /**
     * Renders Spring's recorded error attributes as JSON, using the stored
     * "status" attribute as the HTTP status when present (500 otherwise).
     */
    @RequestMapping
    public ResponseEntity<Map<String, Object>> error(HttpServletRequest aRequest) {
        RequestAttributes requestAttributes = new ServletRequestAttributes(aRequest);
        Map<String, Object> result = this.errorAttributes.getErrorAttributes(requestAttributes, false);
        HttpStatus statusCode = INTERNAL_SERVER_ERROR;
        Object status = result.get("status");
        // `instanceof` is false for null, so the previous explicit null check was redundant.
        if (status instanceof Integer) {
            statusCode = HttpStatus.valueOf(((Integer) status).intValue());
        }
        return new ResponseEntity<>(result, statusCode);
    }
}
|
def count_characters(string, char):
    """Return the number of occurrences of ``char`` in ``string``."""
    return sum(1 for c in string if c == char)


print(count_characters('Hello world', 'l'))  # 3
|
import datetime
def display_current_date_time():
    """Print the current local date and time as ``YYYY-MM-DD HH:MM:SS``."""
    now = datetime.datetime.now()
    print("Current Date and Time:", now.strftime("%Y-%m-%d %H:%M:%S"))
def add_days_to_current_date(days):
    """Print the calendar date that is ``days`` days after today.

    BUG FIX: the original formatted a ``datetime.date`` with
    ``"%Y-%m-%d %H:%M:%S"``, which always printed a misleading
    "00:00:00" time for a date-only value.
    """
    current_date = datetime.date.today()
    future_date = current_date + datetime.timedelta(days=days)
    formatted_future_date = future_date.strftime("%Y-%m-%d")
    print("Date after", days, "days:", formatted_future_date)
def main():
    """Show the current timestamp, then a user-chosen number of days ahead."""
    display_current_date_time()
    # input() returns a string; int() raises ValueError on non-numeric input.
    days_to_add = int(input("Enter the number of days to add: "))
    add_days_to_current_date(days_to_add)


if __name__ == "__main__":
    main()
|
#!/bin/sh
#BHEADER**********************************************************************
# Copyright (c) 2008, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
# This file is part of HYPRE. See file COPYRIGHT for details.
#
# HYPRE is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License (as published by the Free
# Software Foundation) version 2.1 dated February 1999.
#
# $Revision: 1.15 $
#EHEADER**********************************************************************

# Test name is derived from this script's own filename.
TNAME=`basename $0 .sh`

#=============================================================================
# Cases 12 and 13 must produce identical final output (last 3 lines).
#=============================================================================

tail -3 ${TNAME}.out.12 > ${TNAME}.testdata
tail -3 ${TNAME}.out.13 > ${TNAME}.testdata.temp
diff ${TNAME}.testdata ${TNAME}.testdata.temp >&2

#=============================================================================
# compare with baseline case
#=============================================================================

FILES="\
${TNAME}.out.0\
${TNAME}.out.1\
${TNAME}.out.2\
${TNAME}.out.3\
${TNAME}.out.4\
${TNAME}.out.5\
${TNAME}.out.6\
${TNAME}.out.7\
${TNAME}.out.8\
${TNAME}.out.10\
${TNAME}.out.11\
${TNAME}.out.12\
${TNAME}.out.13\
"

for i in $FILES
do
  echo "# Output file: $i"
  tail -3 $i
done > ${TNAME}.out

# Make sure that the output files are reasonable
CHECK_LINE="Iterations"
OUT_COUNT=`grep "$CHECK_LINE" ${TNAME}.out | wc -l`
SAVED_COUNT=`grep "$CHECK_LINE" ${TNAME}.saved | wc -l`
if [ "$OUT_COUNT" != "$SAVED_COUNT" ]; then
  echo "Incorrect number of \"$CHECK_LINE\" lines in ${TNAME}.out" >&2
fi

# BUG FIX: the variable must be quoted — with the original unquoted form,
# a value containing whitespace breaks the test expression entirely.
if [ -z "$HYPRE_NO_SAVED" ]; then
  diff -U3 -bI"time" ${TNAME}.saved ${TNAME}.out >&2
fi

#=============================================================================
# remove temporary files
#=============================================================================

rm -f ${TNAME}.testdata*
|
<html>
<head>
  <title>Colors Table</title>
</head>
<body>
  <h1>Colors Table in Alphabetical Order</h1>
  <!-- BUG FIX: the heading promises alphabetical order, but Purple was
       previously listed last (after Yellow). Rows are now sorted by name. -->
  <table>
    <tr>
      <th>Color</th>
      <th>Hex Code</th>
    </tr>
    <tr>
      <td>Blue</td>
      <td>#0000FF</td>
    </tr>
    <tr>
      <td>Green</td>
      <td>#00FF00</td>
    </tr>
    <tr>
      <td>Purple</td>
      <td>#800080</td>
    </tr>
    <tr>
      <td>Red</td>
      <td>#FF0000</td>
    </tr>
    <tr>
      <td>Yellow</td>
      <td>#FFFF00</td>
    </tr>
  </table>
</body>
</html>
|
#!/usr/bin/env bash
# Detect and format/mount extra disk to /etc/lego for storing certs
# If FS is already formatted, don't reformat
# If script has already been executed, do not execute again
set -e

# Marker file: its presence means this startup script already ran successfully.
WITNESS_FILE=/usr/local/startup-script-ok
[ -e ${WITNESS_FILE} ] && exit 0

# Ask the GCE metadata server for the device name of the second attached disk.
if DISK_NAME=$(curl -H 'Metadata-Flavor: Google' http://metadata.google.internal/computeMetadata/v1/instance/disks/1/device-name); then
  eval $(blkid /dev/disk/by-id/google-${DISK_NAME} | awk ' { print $3 } ') # TYPE var contains fs type or is empty
  # Only format when the disk does not already carry an XFS filesystem.
  if [ "${TYPE}" != "xfs" ]; then
    echo "Formatting /dev/disk/by-id/google-${DISK_NAME}"
    mkfs.xfs /dev/disk/by-id/google-${DISK_NAME}
  fi
  # Persist the mount across reboots (idempotent: only append once).
  if ! grep -q ${DISK_NAME} /etc/fstab ; then
    echo "/dev/disk/by-id/google-${DISK_NAME} /etc/lego xfs defaults,noatime 0 0" >> /etc/fstab
  fi
  mkdir -p /etc/lego && mount /etc/lego
  # Restart nginx so it picks up certs from the freshly mounted directory.
  systemctl restart nginx
  touch ${WITNESS_FILE}
fi
|
// Code generated by MockGen. DO NOT EDIT.
// Source: interface.go
package httperr
import (
gomock "github.com/golang/mock/gomock"
reflect "reflect"
)
// MockError is a mock of Error interface.
// Generated by MockGen — regenerate from interface.go instead of editing.
type MockError struct {
	ctrl     *gomock.Controller
	recorder *MockErrorMockRecorder
}
// MockErrorMockRecorder is the mock recorder for MockError.
// It registers expected calls set up through EXPECT().
type MockErrorMockRecorder struct {
	mock *MockError
}
// NewMockError creates a new mock instance.
// The recorder is wired back to the mock so EXPECT() can record calls on it.
func NewMockError(ctrl *gomock.Controller) *MockError {
	mock := &MockError{ctrl: ctrl}
	mock.recorder = &MockErrorMockRecorder{mock}
	return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (_m *MockError) EXPECT() *MockErrorMockRecorder {
	return _m.recorder
}
// Error mocks base method.
// Delegates to the gomock controller, which returns the configured value.
func (_m *MockError) Error() string {
	ret := _m.ctrl.Call(_m, "Error")
	ret0, _ := ret[0].(string)
	return ret0
}
// Error indicates an expected call of Error.
func (_mr *MockErrorMockRecorder) Error() *gomock.Call {
	return _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, "Error", reflect.TypeOf((*MockError)(nil).Error))
}
// HTTPStatusCode mocks base method.
// Delegates to the gomock controller, which returns the configured value.
func (_m *MockError) HTTPStatusCode() int {
	ret := _m.ctrl.Call(_m, "HTTPStatusCode")
	ret0, _ := ret[0].(int)
	return ret0
}
// HTTPStatusCode indicates an expected call of HTTPStatusCode.
func (_mr *MockErrorMockRecorder) HTTPStatusCode() *gomock.Call {
	return _mr.mock.ctrl.RecordCallWithMethodType(_mr.mock, "HTTPStatusCode", reflect.TypeOf((*MockError)(nil).HTTPStatusCode))
}
|
<filename>alg_geometric_series.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""Geometric series: 1 + r + r^2 + ... + r^(n+1)."""
def geometric_series_recur(n, r):
    """Geometric series 1 + r + ... + r^n, computed by recursion.

    Time complexity: O(n).
    Space complexity: O(n) recursion depth.
    """
    if n == 0:
        # Only the r^0 = 1 term remains.
        return 1
    return geometric_series_recur(n - 1, r) + r ** n
def _geometric_series_memo(n, r, T):
    """Recursive worker for geometric_series_memo; T caches partial sums."""
    # Base case.
    if n == 0:
        return 1
    # NOTE(review): a truthiness check misses a legitimately-zero cached sum
    # (possible when r == -1), causing recomputation — correct but slower.
    if T[n]:
        return T[n]
    T[n] = pow(r, n) + _geometric_series_memo(n - 1, r, T)
    return T[n]
def geometric_series_memo(n, r):
    """Geometric series by recursion+memoization.

    Time complexity: O(n).
    Space complexity: O(n)
    """
    cache = [0] * (n + 1)
    return _geometric_series_memo(n, r, cache)
def geometric_series_dp(n, r):
    """Geometric series by bottom-up DP.

    Time complexity: O(n).
    Space complexity: O(n)
    """
    sums = [0] * (n + 1)
    sums[0] = 1  # the r^0 term
    for k in range(1, n + 1):
        sums[k] = sums[k - 1] + r ** k
    return sums[n]
def geometric_series_iter(n, r):
    """Geometric series by bottom-up accumulation w/ optimized space.

    Time complexity: O(n).
    Space complexity: O(1)

    BUG FIX: the original initialized the accumulator to 0 and summed
    k = 1..n, dropping the r^0 = 1 term; every sibling implementation
    (recursive, DP, closed form) includes it.
    """
    s = 1  # r^0 term
    for k in range(1, n + 1):
        s += pow(r, k)
    return s
def geometric_series(n, r):
    """Geometric series by the closed-form sum formula.

    Time complexity: O(1).
    Space complexity: O(1)

    Returns (r^(n+1) - 1) / (r - 1). For r == 1 the formula would divide
    by zero, but each of the n+1 terms is 1, so the sum is n + 1.
    """
    if r == 1:
        return n + 1
    return (pow(r, n + 1) - 1) / (r - 1)
def main():
    """Time each geometric-series implementation on n=63, r=2 and print results."""
    import time
    implementations = [
        ('By recursion', geometric_series_recur),
        ('By memo', geometric_series_memo),
        ('By DP', geometric_series_dp),
        ('By optimized DP', geometric_series_iter),
        ('By closed form', geometric_series),
    ]
    for label, fn in implementations:
        start_time = time.time()
        print('{}: {}'.format(label, fn(63, 2)))
        print('Time: {}'.format(time.time() - start_time))


if __name__ == '__main__':
    main()
|
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed for reproducibility.
# Per dieharder's CLI: -d selects the test, -g the generator, -S the RNG seed.
dieharder -d 15 -g 22 -S 3000572670
|
#!/bin/sh
# Watch an incoming DICOM series for one scan, convert each volume to NIfTI
# as soon as it arrives, and warp it to template space, logging timings.
# NOTE(review): the `bc <<< ...` here-strings below are a bashism; confirm
# /bin/sh on the target host is actually bash or change the shebang.
set -u
subjectName=$1
scanNum=$2
# Zero-pad the scan number to two digits (e.g. 7 -> 07).
longScanNum=$(seq -f "%02g" $scanNum $scanNum)
imgDir='/mnt/Data01/'`date +%Y%m%d`'.'$subjectName'.'$subjectName''
#imgDir='/mnt/Data01/20161018.1018162_phantom01.1018162_phantom02'
delta=0.0001 #seconds (polling interval while waiting for the next file)
stamp=`date +%Y%m%d%H%M%S`
echo $imgDir
regFile=${imgDir}/reg/f2mni.feat/reg/example_func2standard.mat
templateFile=/mnt/Data01/offline/strongbad01/anat4mm/canonical.nii.gz
for fileNum in $(seq -f "%04g" 1 9999)
do
	fileName=$imgDir/'001_0000'$longScanNum'_00'$fileNum.dcm
	echo awaiting $scanNum $fileNum
	# Busy-wait (with a tiny sleep) until the scanner writes the next DICOM.
	while [ ! -f $fileName ]
	do
		#echo 1
		sleep $delta
	done
	echo `date +%s.%N ` >> ./timing_logs/file_arrival_$stamp.txt
	go=$(date +%s.%N)
	echo changing mod/waiting for filewrite
	# Short sleeps allow the writer to finish before we read the file.
	sleep .02
	sudo chmod +r $fileName
	sleep .02
	bc <<< "$(date +%s.%N)-$go"
	# convert to nii
	echo dcm2niix $scanNum $fileNum
	dcm2niix -s -f %n_%s $fileName > /dev/null
	mv ${imgDir}/${subjectName}_${scanNum}.nii ${imgDir}/${longScanNum}_${fileNum}.nii
	# mv ${imgDir}/${subjectName}_18.nii ${imgDir}/${longScanNum}_${fileNum}.nii
	bc <<< "$(date +%s.%N)-$go"
	# convert to mni
	echo sub2mni $scanNum $fileNum
	fsl5.0-flirt -in ${imgDir}/${longScanNum}_${fileNum}.nii -ref $templateFile -applyxfm -init $regFile -out ${imgDir}/ppnii/w${longScanNum}_${fileNum}.nii
	bc <<< "$(date +%s.%N)-$go"
	echo `date +%s.%N ` >> ./timing_logs/post_nii_mni_$stamp.txt
done
# python ./classifier/nifti_file_watcher.py tmp ../brainiak_cloud/sb_714.npy ../brainiak_cloud/allsubs_select_seq_top2000.nii.gz ../brainiak_cloud/svm_all_2000.pkl -w 20 -t 714 &
# ./bin/copy_files.sh ../brainiak_cloud/20161219.1219161_rtstrongbad01.1219161_rtstrongbad01/ppnii/*.nii.gz tmp 0.5
# dcm2niix -s -f ../brainiak_cloud/001_000011_000139.dcm > /dev/null
# fsl5.0-flirt -in brainiak_cloud_TR1000ms_Slice44_Res2.5iso_100phase_noG_20161219102259_11.nii -ref canonical.nii.gz -applyxfm -init example_func2standard.mat -out ppnii.nii
# curl -X POST -H "Cache-Control: no-cache" -H "Postman-Token: a01cb4c1-b9f9-cde3-6ceb-c7cab07740e2" -H "Content-Type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW" -F "file=@/Users/dsuo/Downloads/001_000011_000139.dcm" "http://ec2-52-207-158-247.compute-1.amazonaws.com:5000/rtfcma-prisma/"
|
#!/bin/bash
# Add the current user to the groups needed to run docker and cybernode.
# BUG FIX: the original used an unquoted `[x]`, which the shell treats as a
# glob pattern — if a file named `x` exists in the CWD, echo prints `x`
# instead of `[x]`. All echo arguments are now quoted.
echo "[x] Adding $USER to docker group"
sudo adduser "$USER" docker
echo "Now you can command docker daemon."
echo "[x] Adding $USER to cyber group"
sudo adduser "$USER" cyber
echo "Now you can run cybernode components."
echo "Please relogin to make new powers effective."
|
#!/bin/bash
# Validate the Space Engineers server layout, rewrite its config (world path
# and plugin list), update the server via steamcmd, and hand off to the
# game's entrypoint.

# #check if /appdata/space-engineers/config/World is a folder
if [ ! -d "/appdata/space-engineers/World" ]; then
	echo "World folder does not exist, exiting"
	exit 129
fi
# #check if /appdata/space-engineers/config/World/Sandbox.sbc exists and is a file
if [ ! -f "/appdata/space-engineers/World/Sandbox.sbc" ]; then
	echo "Sandbox.sbc file does not exist, exiting."
	exit 130
fi
# #check if /appdata/space-engineers/config/SpaceEngineers-Dedicated.cfg is a file
if [ ! -f "/appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg" ]; then
	echo "SpaceEngineers-Dedicated.cfg file does not exist, exiting."
	exit 131
fi
#set <LoadWorld> to the correct value (Z:\ is the wine-mapped root)
cat /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg | sed -E '/.*LoadWorld.*/c\    <LoadWorld>Z:\\appdata\\space-engineers\\World</LoadWorld>' > /tmp/SpaceEngineers-Dedicated.cfg && cat /tmp/SpaceEngineers-Dedicated.cfg > /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg
#set game port to the correct value
#cat /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg | sed -E '/.*ServerPort.*/c\    <ServerPort>27016</ServerPort>' > /tmp/SpaceEngineers-Dedicated.cfg && cat /tmp/SpaceEngineers-Dedicated.cfg > /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg
#configure plugins section in SpaceEngineers-Dedicated.cfg
#get new plugins string: one <string> element per .dll, wrapped in <Plugins>
if [ "$(ls -1 /appdata/space-engineers/Plugins/*.dll | wc -l)" -gt "0" ]; then
	PLUGINS_STRING=$(ls -1 /appdata/space-engineers/Plugins/*.dll |\
	awk '{ print "<string>" $0 "</string>" }' |\
	tr -d '\n' |\
	awk '{ print "<Plugins>" $0 "</Plugins>" }' )
else
	PLUGINS_STRING="<Plugins />"
fi
# Escape the slashes in PLUGINS_STRING so it can be used inside a sed s///.
SED_EXPRESSION_EMPTY="s/<Plugins \/>/${PLUGINS_STRING////\\/} /g"
SED_EXPRESSION_FULL="s/<Plugins>.*<\/Plugins>/${PLUGINS_STRING////\\/} /g"
#find and replace in SpaceEngineers-Dedicated.cfg for empty "<Plugins />" element
cat /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg | sed -E "$SED_EXPRESSION_EMPTY" > /tmp/SpaceEngineers-Dedicated.cfg && cat /tmp/SpaceEngineers-Dedicated.cfg > /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg
#find and replace in SpaceEngineers-Dedicated.cfg for filled out "<Plugins>...</Plugins>" element
# sed can't handle multiple lines easily, so everything needs to be on a single line.
cat /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg | sed -E "$SED_EXPRESSION_FULL" > /tmp/SpaceEngineers-Dedicated.cfg && cat /tmp/SpaceEngineers-Dedicated.cfg > /appdata/space-engineers/SpaceEngineersDedicated/SpaceEngineers-Dedicated.cfg
# Update the dedicated server files, then launch the game entrypoint as the wine user.
runuser -l wine bash -c 'steamcmd +@sSteamCmdForcePlatformType windows +force_install_dir /appdata/space-engineers/SpaceEngineersDedicated +login anonymous +app_update 298740 +quit'
runuser -l wine bash -c '/entrypoint-space_engineers.bash'
|
import React, { useState} from 'react';
import './style.scss';
const Card = ({ children }) => {
const [flipped, setFlipped] = useState(false);
return (
<div
className="flip-card"
onClick={()=> {setFlipped(!flipped)}}
>
<div className={flipped ? "card-front card-front-rotate" : "card-front"}>
Front!
</div>
<div className={flipped ? "card-back card-back-rotate" : "card-back"}>
{children}
</div>
</div>
);
}
export default Card;
|
import React from 'react';
import { Link } from 'react-router-dom';
const DiscoverArea = () => {
return (
<div className="discover-area ptb-80">
<div className="container">
<div className="row align-items-center">
<div className="col-lg-6 col-md-12">
<div className="discover-image">
<img
src="/images/bigdata-analytics/discover-img1.png"
alt="image"
/>
<img
src="/images/bigdata-analytics/discover-img2.jpg"
alt="image"
/>
</div>
</div>
<div className="col-lg-6 col-md-12">
<div className="discover-content">
<h2>Engaging New Audiences through Smart Approach</h2>
<p>
There are many variations of passages of Lorem Ipsum available,
but the majority have suffered alteration in some form, by
injected humour, or randomised words which don't look even
slightly believable. If you are going to use a passage.
</p>
<Link to="#">
<a className="btn btn-primary">Discover More</a>
</Link>
</div>
</div>
</div>
</div>
<div className="analytics-shape1">
<img src="/images/bigdata-analytics/analytics-shape1.png" alt="image" />
</div>
</div>
);
};
export default DiscoverArea;
|
<gh_stars>10-100
# coding: utf-8

# # FVCOM horizontal slice at fixed depth
# Notebook export: loads a NECOFS FVCOM forecast over OPeNDAP, extracts a
# horizontal temperature slice at a fixed depth, and plots it on a map.

# In[1]:

get_ipython().magic('matplotlib inline')
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import cartopy.crs as ccrs
from cartopy.io import shapereader
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import pyugrid
import iris
import warnings
from ciso import zslice

# In[2]:

#url = 'http://crow.marine.usf.edu:8080/thredds/dodsC/FVCOM-Nowcast-Agg.nc'
url = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc'

# In[3]:

# Suppress iris's CF-convention warnings while loading the remote dataset.
with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    cubes = iris.load_raw(url)

# In[4]:

var = cubes.extract_strict('sea_water_potential_temperature')[-1, ...]  # Last time step.

# In[5]:

lon = var.coord(axis='X').points
lat = var.coord(axis='Y').points

# In[6]:

var

# In[7]:

# calculate the 3D z values using formula terms by specifying this derived
# vertical coordinate (the CF standard name is unwieldy but required)
z3d = var.coord('sea_surface_height_above_reference_ellipsoid').points

# In[8]:

# read the 3D chunk of data
var3d = var.data

# In[9]:

# specify depth for fixed z slice (meters; negative = below the surface)
z0 = -25
isoslice = zslice(var3d, z3d, z0)

# In[10]:

# For some reason I cannot tricontourf with NaNs.
isoslice = ma.masked_invalid(isoslice)
vmin, vmax = isoslice.min(), isoslice.max()
isoslice = isoslice.filled(fill_value=-999)

# In[11]:

def make_map(projection=ccrs.PlateCarree()):
    """Create a figure/axes pair with coastlines and labeled gridlines."""
    fig, ax = plt.subplots(figsize=(9, 13),
                           subplot_kw=dict(projection=projection))
    gl = ax.gridlines(draw_labels=True)
    gl.xlabels_top = gl.ylabels_right = False
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    ax.coastlines('50m')
    return fig, ax

# In[12]:

# use UGRID conventions to locate lon,lat and connectivity array
ugrid = pyugrid.UGrid.from_ncfile(url)
lon = ugrid.nodes[:, 0]
lat = ugrid.nodes[:, 1]
triangles = ugrid.faces[:]
triang = tri.Triangulation(lon, lat, triangles=triangles)

# In[13]:

fig, ax = make_map()
extent = [lon.min(), lon.max(),
          lat.min(), lat.max()]
ax.set_extent(extent)
levels = np.linspace(vmin, vmax, 20)
kw = dict(cmap='jet', alpha=1.0, levels=levels)
cs = ax.tricontourf(triang, isoslice, **kw)
kw = dict(shrink=0.5, orientation='vertical')
cbar = fig.colorbar(cs, **kw)
|
/*
* Copyright (c) 2013-2015 <EMAIL>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published by
* the Free Software Foundation.
*/
#include <linux/input.h>
#include <linux/i2c.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/init.h>
#include <asm/io.h>
#include <asm/uaccess.h>
#include <linux/sys_config.h>
#include <linux/clk.h>
#include <linux/gpio.h>
#include <linux/init-input.h>
#include <linux/pinctrl/consumer.h>
#include <linux/of.h>
#include <linux/of_gpio.h>
#include <linux/platform_device.h>
/*********************************CTP*******************************************/
/**
* ctp_fetch_sysconfig_para - get config info from sysconfig.fex file.
* return value:
* = 0; success;
* < 0; err
*/
static int ctp_fetch_sysconfig_para(enum input_sensor_type *ctp_type)
{
	int ret = -1;
	/* Recover the enclosing config struct from the embedded type member. */
	struct ctp_config_info *data = container_of(ctp_type,
			struct ctp_config_info, input_type);
	struct device_node *np = NULL;

	np = of_find_node_by_name(NULL, "ctp");
	if (!np) {
		pr_err("ERROR! get ctp_para failed, func:%s, line:%d\n", __FUNCTION__, __LINE__);
		goto devicetree_get_item_err;
	}
	if (!of_device_is_available(np)) {
		pr_err("%s: ctp is not used\n", __func__);
		goto devicetree_get_item_err;
	} else
		data->ctp_used = 1;
	/* twi_id is mandatory; everything below is optional (errors only logged). */
	ret = of_property_read_u32(np, "ctp_twi_id", &data->twi_id);
	if (ret) {
		pr_err("get twi_id is fail, %d\n", ret);
		goto devicetree_get_item_err;
	}
	ret = of_property_read_string(np, "ctp_name", &data->name);
	if (ret) {
		pr_err("get ctp_name is fail, %d\n", ret);
	}
	ret = of_property_read_string(np, "ctp_power_ldo", &data->ctp_power);
	if (ret) {
		pr_err("get ctp_power is fail, %d\n", ret);
	}
	ret = of_property_read_u32(np, "ctp_power_ldo_vol", &data->ctp_power_vol);
	if (ret) {
		pr_err("get ctp_power_ldo_vol is fail, %d\n", ret);
	}
	data->ctp_power_io.gpio = of_get_named_gpio_flags(np, "ctp_power_io", 0, (enum of_gpio_flags *)(&(data->ctp_power_io)));
	if (!gpio_is_valid(data->ctp_power_io.gpio))
		pr_err("%s: ctp_power_io is invalid. \n", __func__);
	data->wakeup_gpio.gpio = of_get_named_gpio_flags(np, "ctp_wakeup", 0, (enum of_gpio_flags *)(&(data->wakeup_gpio)));
	if (!gpio_is_valid(data->wakeup_gpio.gpio))
		pr_err("%s: wakeup_gpio is invalid. \n", __func__);
	ret = of_property_read_u32(np, "ctp_screen_max_x", &data->screen_max_x);
	if (ret) {
		pr_err("get ctp_screen_max_x is fail, %d\n", ret);
	}
	ret = of_property_read_u32(np, "ctp_screen_max_y", &data->screen_max_y);
	if (ret) {
		pr_err("get screen_max_y is fail, %d\n", ret);
	}
	ret = of_property_read_u32(np, "ctp_revert_x_flag", &data->revert_x_flag);
	if (ret) {
		pr_err("get revert_x_flag is fail, %d\n", ret);
	}
	ret = of_property_read_u32(np, "ctp_revert_y_flag", &data->revert_y_flag);
	if (ret) {
		pr_err("get revert_y_flag is fail, %d\n", ret);
	}
	ret = of_property_read_u32(np, "ctp_exchange_x_y_flag", &data->exchange_x_y_flag);
	if (ret) {
		pr_err("get ctp_exchange_x_y_flag is fail, %d\n", ret);
	}
	/* Interrupt GPIO doubles as the interrupt number when valid. */
	data->irq_gpio.gpio = of_get_named_gpio_flags(np, "ctp_int_port", 0, (enum of_gpio_flags *)(&(data->irq_gpio)));
	if (!gpio_is_valid(data->irq_gpio.gpio))
		pr_err("%s: irq_gpio is invalid. \n", __func__);
	else
		data->int_number = data->irq_gpio.gpio;
#ifdef TOUCH_KEY_LIGHT_SUPPORT
	data->key_light_gpio.gpio = of_get_named_gpio(np, "ctp_light", 0);
	if (!gpio_is_valid(data->key_light_gpio.gpio))
		pr_err("%s: key_light_gpio is invalid. \n", __func__);
#endif
	return 0;

devicetree_get_item_err:
	pr_notice("=========script_get_item_err============\n");
	ret = -1;
	return ret;
}
/**
* ctp_free_platform_resource - free ctp related resource
* return value:
*/
static void ctp_free_platform_resource(enum input_sensor_type *ctp_type)
{
	struct ctp_config_info *data = container_of(ctp_type,
			struct ctp_config_info, input_type);

	gpio_free(data->wakeup_gpio.gpio);
#ifdef TOUCH_KEY_LIGHT_SUPPORT
	gpio_free(data->key_light_gpio.gpio);
#endif
	/* Power came from either an LDO regulator or a GPIO; release whichever
	 * one ctp_init_platform_resource() acquired. */
	if (data->ctp_power_ldo) {
		regulator_put(data->ctp_power_ldo);
		data->ctp_power_ldo = NULL;
	} else if (0 != data->ctp_power_io.gpio) {
		gpio_free(data->ctp_power_io.gpio);
	}
	return;
}
/**
* ctp_init_platform_resource - initialize platform related resource
* return value: 0 : success
* -EIO : i/o err.
*
*/
static int ctp_init_platform_resource(enum input_sensor_type *ctp_type)
{
	int ret = -1;
	struct ctp_config_info *data = container_of(ctp_type,
			struct ctp_config_info, input_type);

	/* Power supply: prefer the named LDO regulator; fall back to a GPIO.
	 * Failures here are logged but not fatal — the panel may be powered
	 * externally. */
	if (data->ctp_power) {
		data->ctp_power_ldo = regulator_get(NULL, data->ctp_power);
		if (!data->ctp_power_ldo)
			pr_err("%s: could not get ctp ldo '%s' , check"
				"if ctp independent power supply by ldo,ignore"
				"firstly\n", __func__, data->ctp_power);
		else
			regulator_set_voltage(data->ctp_power_ldo,
				(int)(data->ctp_power_vol)*1000,
				(int)(data->ctp_power_vol)*1000);
	} else if (0 != data->ctp_power_io.gpio) {
		if (0 != gpio_request(data->ctp_power_io.gpio, NULL))
			pr_err("ctp_power_io gpio_request is failed,"
				"check if ctp independent power supply by gpio,"
				"ignore firstly\n");
		else
			gpio_direction_output(data->ctp_power_io.gpio, 1);
	}
	/* Wakeup GPIO is mandatory: request it and drive it high. */
	if (0 != gpio_request(data->wakeup_gpio.gpio, NULL)) {
		pr_err("wakeup gpio_request is failed\n");
		return ret;
	}
	/* NOTE(review): on this and later error returns the already-requested
	 * GPIOs are not released — confirm callers invoke
	 * ctp_free_platform_resource() on failure. */
	if (0 != gpio_direction_output(data->wakeup_gpio.gpio, 1)) {
		pr_err("wakeup gpio set err!");
		return ret;
	}
#ifdef TOUCH_KEY_LIGHT_SUPPORT
	if (0 != gpio_request(data->key_light_gpio.gpio, NULL)) {
		pr_err("key_light gpio_request is failed\n");
		return ret;
	}
	if (0 != gpio_direction_output(data->key_light_gpio.gpio, 1)) {
		pr_err("key_light gpio set err!");
		return ret;
	}
#endif
	ret = 0;
	return ret;
}
/*********************************CTP END***************************************/
/*********************************GSENSOR***************************************/
/**
* gsensor_free_platform_resource - free gsensor related resource
* return value:
*/
static void gsensor_free_platform_resource(enum input_sensor_type *gsensor_type)
{
	struct sensor_config_info *data = container_of(gsensor_type,
			struct sensor_config_info, input_type);

	/* Release the LDO acquired in gsensor_init_platform_resource(), if any. */
	if (data->sensor_power_ldo) {
		regulator_put(data->sensor_power_ldo);
		data->sensor_power_ldo = NULL;
	}
	return;
}
/**
* gsensor_init_platform_resource - initialize platform related resource
* return value: 0 : success
* -EIO : i/o err.
*
*/
static int gsensor_init_platform_resource(enum input_sensor_type *gsensor_type)
{
	struct sensor_config_info *data = container_of(gsensor_type,
			struct sensor_config_info, input_type);

	/* Optional LDO power supply: a missing regulator is only logged,
	 * since the sensor may be powered by an always-on rail. */
	if (data->sensor_power) {
		data->sensor_power_ldo = regulator_get(NULL, data->sensor_power);
		if (!data->sensor_power_ldo) {
			pr_err("%s: could not get ctp ldo '%s' ,check"
				"if ctp independent power supply by ldo,ignore"
				"firstly\n", __func__, data->sensor_power);
		} else {
			regulator_set_voltage(data->sensor_power_ldo,
				(int)(data->sensor_power_vol)*1000,
				(int)(data->sensor_power_vol)*1000);
		}
	}
	return 0;
}
/**
* gsensor_fetch_sysconfig_para - get config info from sysconfig.fex file.
* return value:
* = 0; success;
* < 0; err
*/
static int gsensor_fetch_sysconfig_para(enum input_sensor_type *gsensor_type)
{
	int ret = -1;
	struct sensor_config_info *data = container_of(gsensor_type,
			struct sensor_config_info, input_type);
	struct device_node *np = NULL;

	np = of_find_node_by_name(NULL, "gsensor");
	if (!np) {
		pr_err("ERROR! get gsensor_para failed, func:%s, line:%d\n", __FUNCTION__, __LINE__);
		goto devicetree_get_item_err;
	}
	if (!of_device_is_available(np)) {
		pr_err("%s: gsensor is not used\n", __func__);
		goto devicetree_get_item_err;
	} else
		data->sensor_used = 1;
	/* twi_id is mandatory; power properties are optional (errors logged). */
	ret = of_property_read_u32(np, "gsensor_twi_id", &data->twi_id);
	if (ret) {
		pr_err("get gsensor_twi_id is fail, %d\n", ret);
		goto devicetree_get_item_err;
	}
	ret = of_property_read_string(np, "gsensor_vcc_io", &data->sensor_power);
	if (ret) {
		pr_err("get gsensor_vcc_io is fail, %d\n", ret);
	}
	ret = of_property_read_u32(np, "gsensor_vcc_io_val", &data->sensor_power_vol);
	if (ret) {
		pr_err("get gsensor_vcc_io_val is fail, %d\n", ret);
	}
	/* Interrupt GPIO doubles as the interrupt number when valid. */
	data->irq_gpio.gpio = of_get_named_gpio_flags(np, "gsensor_int1", 0, (enum of_gpio_flags *)(&(data->irq_gpio)));
	if (!gpio_is_valid(data->irq_gpio.gpio))
		pr_err("%s: irq_gpio %d is invalid. \n", __func__, data->irq_gpio.gpio);
	else
		data->int_number = data->irq_gpio.gpio;
	return 0;

devicetree_get_item_err:
	pr_notice("=========gsensor script_get_err============\n");
	return ret;
}
/*********************************GSENSOR END***********************************/
/********************************** GYR ****************************************/
/**
* gyr_free_platform_resource - free gyr related resource
* return value:
*/
static void gyr_free_platform_resource(enum input_sensor_type *gyr_type)
{
/* No platform resources are allocated for the gyroscope; nothing to free. */
}
/**
* gyr_init_platform_resource - initialize platform related resource
* return value: 0 : success
* -EIO : i/o err.
*
*/
static int gyr_init_platform_resource(enum input_sensor_type *gyr_type)
{
/* No gyroscope platform resources to set up; always succeeds. */
return 0;
}
/**
* gyr_fetch_sysconfig_para - get config info from sysconfig.fex file.
* return value:
* = 0; success;
* < 0; err
*/
/* Parse the "gy" (gyroscope) device-tree node and read its TWI bus id.
 * Returns 0 on success, -1 when the node is missing/disabled or the id
 * cannot be read. */
static int gyr_fetch_sysconfig_para(enum input_sensor_type *gyr_type)
{
int ret = -1;
/* Recover the enclosing config struct from its embedded input_type member. */
struct sensor_config_info *data = container_of(gyr_type,
struct sensor_config_info, input_type);
struct device_node *np = NULL;
np = of_find_node_by_name(NULL,"gy");
if (!np) {
pr_err("ERROR! get gy_para failed, func:%s, line:%d\n",__FUNCTION__, __LINE__);
goto devicetree_get_item_err;
}
/* A present-but-disabled node is treated the same as a missing one. */
if (!of_device_is_available(np)) {
pr_err("%s: gy is not used\n", __func__);
goto devicetree_get_item_err;
}else
data->sensor_used = 1;
if(1 == data->sensor_used){
/* of_property_read_u32() returns 0 on success, so ret doubles as
 * the function's success return value. */
ret = of_property_read_u32(np, "gy_twi_id", &data->twi_id);
if (ret) {
pr_err("get gy_twi_id is fail, %d\n", ret);
goto devicetree_get_item_err;
}
}else{
pr_err("%s gy_unused \n",__func__);
}
return ret;
devicetree_get_item_err:
pr_notice("=========script_get_err============\n");
ret = -1;
return ret;
}
/********************************* GYR END *************************************/
/********************************* COMPASS *************************************/
/**
* e_compass_free_platform_resource - free e_compass related resource
* return value:
*/
static void e_compass_free_platform_resource(enum input_sensor_type *e_compass_type)
{
/* No platform resources are allocated for the e-compass; nothing to free. */
}
/**
* e_compass_init_platform_resource - initialize platform related resource
* return value: 0 : success
* -EIO : i/o err.
*
*/
static int e_compass_init_platform_resource(enum input_sensor_type *e_compass_type)
{
/* No e-compass platform resources to set up; always succeeds. */
return 0;
}
/**
* e_compass_fetch_sysconfig_para - get config info from sysconfig.fex file.
* return value:
* = 0; success;
* < 0; err
*/
static int e_compass_fetch_sysconfig_para(enum input_sensor_type *e_compass_type)
{
int ret = -1;
struct sensor_config_info *data = container_of(e_compass_type,
struct sensor_config_info, input_type);
struct device_node *np = NULL;
np = of_find_node_by_name(NULL,"compass");
if (!np) {
pr_err("ERROR! get compass_para failed, func:%s, line:%d\n",__FUNCTION__, __LINE__);
goto devicetree_get_item_err;
}
if (!of_device_is_available(np)) {
pr_err("%s: compass is not used\n", __func__);
goto devicetree_get_item_err;
}else
data->sensor_used = 1;
if(1 == data->sensor_used){
ret = of_property_read_u32(np, "compass_twi_id", &data->twi_id);
if (ret) {
pr_err("get compass_twi_id is fail, %d\n", ret);
goto devicetree_get_item_err;
}
}else{
pr_err("%s gsensor_unused \n",__func__);
}
return ret;
devicetree_get_item_err:
pr_notice("=========script_get_err============\n");
ret = -1;
return ret;
}
/******************************* COMPASS END ***********************************/
/****************************** LIGHT SENSOR ***********************************/
/**
* ls_free_platform_resource - free ls related resource
* return value:
*/
/* Disable and release the light sensor's LDO regulator, if one was
 * configured via the device tree. */
static void ls_free_platform_resource(enum input_sensor_type *ls_type)
{
struct regulator *ldo = NULL;
struct sensor_config_info *data = container_of(ls_type,
struct sensor_config_info, input_type);
/* disable ldo if it exist */
if (data->ldo) {
ldo = regulator_get(NULL, data->ldo);
/* Fixed: regulator_get() reports failure with an ERR_PTR-encoded
 * pointer, not NULL, so the old '!ldo' test never caught errors. */
if (IS_ERR_OR_NULL(ldo)) {
pr_err("%s: could not get ldo '%s' in remove, something error ???, "
"ignore it here !!!!!!!!!\n", __func__, data->ldo);
} else {
regulator_disable(ldo);
regulator_put(ldo);
}
}
}
/**
* ls_init_platform_resource - initialize platform related resource
* return value: 0 : success
* -EIO : i/o err.
*
*/
/* Power up the light sensor: acquire its LDO (if configured), program
 * 3.0 V, enable it and give the part time to settle.
 * Returns 0 always; a missing/unavailable supply is treated as
 * best-effort (matching the original "ignore firstly" intent). */
static int ls_init_platform_resource(enum input_sensor_type *ls_type)
{
struct regulator *ldo = NULL;
struct sensor_config_info *data = container_of(ls_type,
struct sensor_config_info, input_type);
/* enable ldo if it exist */
if (data->ldo) {
ldo = regulator_get(NULL, data->ldo);
/* Fixed two defects: regulator_get() fails with ERR_PTR (not NULL),
 * and the old code fell through and called regulator_set_voltage()
 * on the failed handle anyway. */
if (IS_ERR_OR_NULL(ldo)) {
pr_err("%s: could not get sensor ldo '%s' in probe, maybe config error,"
"ignore firstly !!!!!!!\n", __func__, data->ldo);
return 0;
}
regulator_set_voltage(ldo, 3000000, 3000000);
if (0 != regulator_enable(ldo))
pr_err("%s: regulator_enable error!\n", __func__);
regulator_put(ldo);
/* Allow the supply rail to stabilise before the sensor is used. */
usleep_range(10000, 15000);
}
return 0;
}
/**
* ls_fetch_sysconfig_para - get config info from sysconfig.fex file.
* return value:
* = 0; success;
* < 0; err
*/
static int ls_fetch_sysconfig_para(enum input_sensor_type *ls_type)
{
int ret = -1;
struct sensor_config_info *data = container_of(ls_type,
struct sensor_config_info, input_type);
struct device_node *np = NULL;
np = of_find_node_by_name(NULL,"ls");
if (!np) {
pr_err("ERROR! get ls_para failed, func:%s, line:%d\n",__FUNCTION__, __LINE__);
goto devicetree_get_item_err;
}
if (!of_device_is_available(np)) {
pr_err("%s: ls is not used\n", __func__);
goto devicetree_get_item_err;
}else
data->sensor_used = 1;
if(1 == data->sensor_used){
ret = of_property_read_u32(np, "ls_twi_id", &data->twi_id);
if (ret) {
pr_err("get compass_twi_id is fail, %d\n", ret);
goto devicetree_get_item_err;
}
data->irq_gpio.gpio = of_get_named_gpio(np, "ls_int", 0);
if (!gpio_is_valid(data->irq_gpio.gpio))
pr_err("%s: irq_gpio is invalid. \n",__func__ );
else
data->int_number = data->irq_gpio.gpio;
}else{
pr_err("%s gsensor_unused \n",__func__);
}
return ret;
devicetree_get_item_err:
ret = -1;
return ret;
}
/**************************** LIGHT SENSOR END *********************************/
/********************************** MOTOR *************************************/
/**
* motor_free_platform_resource - free ths related resource
* return value:
*/
static void motor_free_platform_resource(enum input_sensor_type *motor_type)
{
struct motor_config_info *data = container_of(motor_type,
struct motor_config_info, input_type);
/* Release the vibration-motor GPIO only if one was actually configured. */
if (0 != data->motor_gpio.gpio) {
gpio_free(data->motor_gpio.gpio);
}
return;
}
/**
* motor_init_platform_resource - initialize platform related resource
* return value: 0 : success
* -EIO : i/o err.
*
*/
static int motor_init_platform_resource(enum input_sensor_type *motor_type)
{
struct motor_config_info *data = container_of(motor_type,
struct motor_config_info, input_type);
if (0 != data->motor_gpio.gpio) {
/* Claim the GPIO under the label "vibe"; fail if it is already taken. */
if(0 != gpio_request(data->motor_gpio.gpio, "vibe")) {
pr_err("ERROR: vibe Gpio_request is failed\n");
goto exit;
}
/* Drive the motor to its configured "off" level by default. */
gpio_direction_output(data->motor_gpio.gpio, data->vibe_off);
}
return 0;
exit:
return -1;
}
/**
* motor_fetch_sysconfig_para - get config info from sysconfig.fex file.
* return value:
* = 0; success;
* < 0; err
*/
static int motor_fetch_sysconfig_para(enum input_sensor_type *motor_type)
{
/* Recover the enclosing config struct from its embedded input_type member. */
struct motor_config_info *data = container_of(motor_type,
struct motor_config_info, input_type);
struct device_node *np = NULL;
int ret = -1;
np = of_find_node_by_name(NULL, "motor_para");
if (!np) {
pr_err("ERROR! get motor_para failed, func:%s, line:%d\n",
__func__, __LINE__);
return -1;
}
/* A present-but-disabled node is treated the same as a missing one. */
if (!of_device_is_available(np)) {
pr_err("%s: motor_para is not used\n", __func__);
return -1;
} else {
data->motor_used = 1;
}
/* GPIO flags are written straight into motor_gpio by the lookup call. */
data->motor_gpio.gpio = of_get_named_gpio_flags(np, "motor_shake", 0,
(enum of_gpio_flags *)(&(data->motor_gpio)));
if (!gpio_is_valid(data->motor_gpio.gpio))
pr_err("%s: motor_shake is invalid\n", __func__);
/* LDO name and voltage are optional: failures are logged, not fatal. */
ret = of_property_read_string(np, "motor_ldo", &data->ldo);
if (ret)
pr_err("get motor_ldo is fail, %d\n", ret);
ret = of_property_read_u32(np, "motor_ldo_voltage", &data->ldo_voltage);
if (ret)
pr_err("get motor_ldo_voltage is fail, %d\n", ret);
return 0;
}
/******************************** MOTOR END ***********************************/
/*
 * Dispatch tables for the generic input_* wrappers below, indexed by
 * enum input_sensor_type.
 * NOTE(review): the entry order (ctp, gsensor, gyr, e_compass, ls, motor)
 * must match the enum declaration order -- confirm against the header.
 */
static int (* const fetch_sysconfig_para[])(enum input_sensor_type *input_type) = {
ctp_fetch_sysconfig_para,
gsensor_fetch_sysconfig_para,
gyr_fetch_sysconfig_para,
e_compass_fetch_sysconfig_para,
ls_fetch_sysconfig_para,
motor_fetch_sysconfig_para,
};
/* Per-type platform resource setup (GPIOs, regulators). */
static int (*init_platform_resource[])(enum input_sensor_type *input_type) = {
ctp_init_platform_resource,
gsensor_init_platform_resource,
gyr_init_platform_resource,
e_compass_init_platform_resource,
ls_init_platform_resource,
motor_init_platform_resource,
};
/* Per-type platform resource teardown; mirrors init_platform_resource. */
static void (*free_platform_resource[])(enum input_sensor_type *input_type) = {
ctp_free_platform_resource,
gsensor_free_platform_resource,
gyr_free_platform_resource,
e_compass_free_platform_resource,
ls_free_platform_resource,
motor_free_platform_resource,
};
/* input_set_power_enable - switch a sensor's power supply on or off.
 * @input_type: embedded type tag identifying which config struct wraps it.
 * @enable: 1 to power on, 0 to power off; any other value is rejected.
 * Returns 0 on success, -1 when @enable is out of range.
 * Prefers an LDO regulator when one is configured, otherwise falls back
 * to a plain power GPIO (CTP only). */
int input_set_power_enable(enum input_sensor_type *input_type, u32 enable)
{
int ret = -1;
struct regulator *ldo = NULL;
u32 power_io = 0;
void *data = NULL;
/* Pick up the regulator/GPIO handles from the type-specific config. */
switch (*input_type) {
case CTP_TYPE:
data = container_of(input_type,
struct ctp_config_info, input_type);
ldo = ((struct ctp_config_info *)data)->ctp_power_ldo;
power_io = ((struct ctp_config_info *)data)->ctp_power_io.gpio;
break;
case GSENSOR_TYPE:
data = container_of (input_type,struct sensor_config_info,input_type );
ldo = ((struct sensor_config_info*)data)->sensor_power_ldo;
break;
case LS_TYPE:
/* Light sensor power is handled in ls_init/free_platform_resource. */
break;
default:
break;
}
if ((enable != 0) && (enable != 1)) {
return ret;
}
if(ldo) {
if(enable){
if (0 != regulator_enable(ldo))
pr_err("%s: enable ldo error!\n", __func__);
} else {
/* Only disable when actually enabled to keep refcounts balanced. */
if (regulator_is_enabled(ldo))
regulator_disable(ldo);
}
} else if(power_io) {
if(enable) {
__gpio_set_value(power_io,1);
} else {
__gpio_set_value(power_io,0);
}
}
return 0;
}
EXPORT_SYMBOL(input_set_power_enable);
/**
* input_set_int_enable - input set irq enable
* Input:
* type:
* enable:
* return value: 0 : success
* -EIO : i/o err.
*/
int input_set_int_enable(enum input_sensor_type *input_type, u32 enable)
{
int ret = -1;
u32 irq_number = 0;
void *data = NULL;
/* Map the sensor type to the virq derived from its interrupt GPIO.
 * NOTE(review): GSENSOR_TYPE and unknown types leave irq_number at 0,
 * so enable_irq/disable_irq_nosync below act on irq 0 -- confirm those
 * types never reach this function. */
switch (*input_type)
{
case CTP_TYPE:
data = container_of(input_type,
struct ctp_config_info, input_type);
irq_number = gpio_to_irq(((struct ctp_config_info *)data)->int_number);
break;
case GSENSOR_TYPE:
break;
case LS_TYPE:
data = container_of(input_type,
struct sensor_config_info, input_type);
irq_number = gpio_to_irq(((struct sensor_config_info *)data)->int_number);
break;
default:
break;
}
/* Reject anything other than the two documented values. */
if ((enable != 0) && (enable != 1)) {
return ret;
}
if (1 == enable)
enable_irq(irq_number);
else
disable_irq_nosync(irq_number);
return 0;
}
EXPORT_SYMBOL(input_set_int_enable);
/**
* input_free_int - input free irq
* Input:
* type:
* return value: 0 : success
* -EIO : i/o err.
*/
int input_free_int(enum input_sensor_type *input_type, void *para)
{
int irq_number = 0;
void *data = NULL;
struct device *dev = NULL;
switch (*input_type)
{
case CTP_TYPE:
data = container_of(input_type,
struct ctp_config_info, input_type);
irq_number = gpio_to_irq(((struct ctp_config_info *)data)->int_number);
dev = ((struct ctp_config_info *)data)->dev;
break;
case GSENSOR_TYPE:
break;
case LS_TYPE:
data = container_of(input_type,
struct sensor_config_info, input_type);
irq_number = gpio_to_irq(((struct sensor_config_info *)data)->int_number);
dev = ((struct sensor_config_info *)data)->dev;
break;
default:
break;
}
devm_free_irq(dev, irq_number, para);
return 0;
}
EXPORT_SYMBOL(input_free_int);
/**
* input_request_int - input request irq
* Input:
* type:
* handle:
* trig_gype:
* para:
* return value: 0 : success
* -EIO : i/o err.
*
*/
/* input_request_int - request the sensor's interrupt as a managed irq.
 * @input_type: embedded type tag identifying which config struct wraps it.
 * @handle: interrupt handler to install.
 * @trig_type: trigger flags passed through to devm_request_irq().
 * @para: dev_id cookie, must match the later input_free_int() call.
 * Returns 0 on success, -EINVAL on mapping/request failure or when the
 * type carries no device. */
int input_request_int(enum input_sensor_type *input_type, irq_handler_t handle,
unsigned long trig_type, void *para)
{
int ret = -1;
int irq_number = 0;
void *data = NULL;
struct device *dev = NULL;
switch (*input_type)
{
case CTP_TYPE:
data = container_of(input_type,
struct ctp_config_info, input_type);
irq_number = gpio_to_irq(((struct ctp_config_info *)data)->int_number);
if (IS_ERR_VALUE(irq_number)) {
pr_warn("map gpio [%d] to virq failed, errno = %d\n",
GPIOA(3), irq_number);
return -EINVAL;
}
dev = ((struct ctp_config_info *)data)->dev;
break;
case GSENSOR_TYPE:
break;
case LS_TYPE:
data = container_of(input_type,
struct sensor_config_info, input_type);
irq_number = gpio_to_irq(((struct sensor_config_info *)data)->int_number);
if (IS_ERR_VALUE(irq_number)) {
pr_warn("map gpio [%d] to virq failed, errno = %d\n",
GPIOA(3), irq_number);
return -EINVAL;
}
dev = ((struct sensor_config_info *)data)->dev;
break;
default:
break;
}
/* Fixed: GSENSOR_TYPE/unknown types leave dev NULL; the old code
 * handed that NULL to devm_request_irq() below. */
if (!dev)
return -EINVAL;
/* request virq, set virq type to high level trigger */
ret = devm_request_irq(dev, irq_number, handle,
trig_type, "PA3_EINT", para);
if (IS_ERR_VALUE(ret)) {
pr_warn("request virq %d failed, errno = %d\n",
irq_number, ret);
return -EINVAL;
}
return 0;
}
EXPORT_SYMBOL(input_request_int);
/**
* input_free_platform_resource - free platform related resource
* Input:
* event:
* return value:
*/
void input_free_platform_resource(enum input_sensor_type *input_type)
{
/* Dispatch to the per-type teardown via the table indexed by *input_type. */
(*free_platform_resource[*input_type])(input_type);
return;
}
EXPORT_SYMBOL(input_free_platform_resource);
/**
* input_init_platform_resource - initialize platform related resource
* Input:
* type:
* return value: 0 : success
* -EIO : i/o err.
*
*/
int input_init_platform_resource(enum input_sensor_type *input_type)
{
int ret = -1;
/* Dispatch to the per-type setup via the table indexed by *input_type. */
ret = (*init_platform_resource[*input_type])(input_type);
return ret;
}
EXPORT_SYMBOL(input_init_platform_resource);
/**
* input_fetch_sysconfig_para - get config info from sysconfig.fex file.
* Input:
* type:
* return value:
* = 0; success;
* < 0; err
*/
int input_fetch_sysconfig_para(enum input_sensor_type *input_type)
{
int ret = -1;
/* Dispatch to the per-type device-tree parser indexed by *input_type. */
ret = (*fetch_sysconfig_para[*input_type])(input_type);
return ret;
}
EXPORT_SYMBOL(input_fetch_sysconfig_para);
|
<filename>app/src/main/java/me/androidbox/enershared/home/HomeActivity.java
package me.androidbox.enershared.home;
import android.content.res.Configuration;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.NavigationView;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import me.androidbox.enershared.R;
import me.androidbox.enershared.billing.BillingView;
import me.androidbox.enershared.payment.PaymentView;
import me.androidbox.enershared.trading.TradingView;
/**
 * Host activity with a navigation drawer that swaps Home/Billing/Trading/
 * Payment fragments into a single container.
 */
public class HomeActivity extends AppCompatActivity {
private DrawerLayout drawerLayout;
private Toolbar toolbar;
private ActionBarDrawerToggle actionBarDrawerToggle;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.home_container);
// Toolbar must be installed before the drawer toggle is created below.
toolbar = findViewById(R.id.tbHome);
toolbar.setTitle(R.string.home);
setSupportActionBar(toolbar);
final NavigationView navigationView = findViewById(R.id.nvHome);
setupDrawerContent(navigationView);
drawerLayout = findViewById(R.id.homeDrawerLayout);
actionBarDrawerToggle = setupDrawerToggle();
drawerLayout.addDrawerListener(actionBarDrawerToggle);
// Show the Home fragment on first launch.
commitFragmentTransaction(HomeView.newInstance(), HomeView.TAG);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch(item.getItemId()) {
// The hamburger/home button opens the drawer.
case android.R.id.home:
drawerLayout.openDrawer(GravityCompat.START);
return true;
}
return super.onOptionsItemSelected(item);
}
/**
 * Swaps in the fragment matching the tapped drawer item, updates the
 * toolbar title and closes the drawer.
 */
public void selectDrawerItem(final MenuItem menuItem) {
Fragment fragment;
String tag;
switch(menuItem.getItemId()) {
case R.id.menuBilling:
fragment = BillingView.newInstance();
tag = BillingView.TAG;
break;
case R.id.menuTrading:
fragment = TradingView.newInstance();
tag = TradingView.TAG;
break;
case R.id.menuPayment:
fragment = PaymentView.newInstance();
tag = PaymentView.TAG;
break;
default:
fragment = HomeView.newInstance();
tag = HomeView.TAG;
break;
}
commitFragmentTransaction(fragment, tag);
toolbar.setTitle(menuItem.getTitle());
drawerLayout.closeDrawers();
}
// Replaces the container only when a fragment with this tag is not
// already attached, so reselecting the current item is a no-op.
private void commitFragmentTransaction(final Fragment fragment, final String tag) {
if(getSupportFragmentManager().findFragmentByTag(tag) == null) {
final FragmentTransaction fragmentTransaction
= getSupportFragmentManager().beginTransaction();
fragmentTransaction.replace(R.id.home_view_container, fragment, tag);
fragmentTransaction.commit();
}
}
@Override
protected void onPostCreate(@Nullable Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
// Sync the hamburger icon state after restoration, per the toggle contract.
actionBarDrawerToggle.syncState();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
actionBarDrawerToggle.onConfigurationChanged(newConfig);
}
// Routes drawer item taps to selectDrawerItem.
private void setupDrawerContent(final NavigationView navigationView) {
navigationView.setNavigationItemSelectedListener(
item -> {
selectDrawerItem(item);
return true;
});
}
private ActionBarDrawerToggle setupDrawerToggle() {
return new ActionBarDrawerToggle(
HomeActivity.this,
drawerLayout,
toolbar,
R.string.drawer_open,
R.string.drawer_close);
}
}
|
<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Solids/PhysBAM_Deformables/Constitutive_Models/CONSTITUTIVE_MODEL.cpp
//#####################################################################
// Copyright 2003-2007, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Log/DEBUG_UTILITIES.h>
#include <PhysBAM_Tools/Matrices/MATRIX_2X2.h>
#include <PhysBAM_Tools/Matrices/MATRIX_3X3.h>
#include <PhysBAM_Solids/PhysBAM_Deformables/Constitutive_Models/CONSTITUTIVE_MODEL.h>
using namespace PhysBAM;
// Default-construct with definiteness enforcement off and all constant
// material parameters (lambda, mu, alpha, beta) zero.
template<class T,int d> CONSTITUTIVE_MODEL<T,d>::
CONSTITUTIVE_MODEL()
    :enforce_definiteness(false),constant_lambda(0),constant_mu(0),constant_alpha(0),constant_beta(0)
{
}
// Empty destructor, defined out of line in this translation unit.
template<class T,int d> CONSTITUTIVE_MODEL<T,d>::
~CONSTITUTIVE_MODEL()
{
}
template<class T,int d> T CONSTITUTIVE_MODEL<T,d>::
Maximum_Elastic_Stiffness(const int simplex) const // for elastic CFL computation
{
    // Use the per-simplex (lambda,mu) arrays when populated (m nonzero),
    // otherwise fall back to the constant material parameters.
    return lambda.m?lambda(simplex)+2*mu(simplex):constant_lambda+2*constant_mu;
}
template<class T,int d> T CONSTITUTIVE_MODEL<T,d>::
Maximum_Damping_Stiffness(const int simplex) const // for damping CFL computation
{
    // Same selection rule as Maximum_Elastic_Stiffness, applied to the
    // damping parameters (alpha,beta).
    return alpha.m?alpha(simplex)+2*beta(simplex):constant_alpha+2*constant_beta;
}
// Base class provides no implementation; concrete constitutive models
// must override this.
template<class T,int d> void CONSTITUTIVE_MODEL<T,d>::
Isotropic_Stress_Derivative(const DIAGONAL_MATRIX<T,d>& F,DIAGONALIZED_ISOTROPIC_STRESS_DERIVATIVE<T,d>& dPi_dF,const int simplex) const
{
    PHYSBAM_FUNCTION_IS_NOT_DEFINED();
}
// Not implemented in the base class. NOTE(review): no return statement --
// presumably PHYSBAM_FUNCTION_IS_NOT_DEFINED() is fatal/noreturn; confirm
// the macro definition.
template<class T,int d> int CONSTITUTIVE_MODEL<T,d>::
P_From_Strain_Rate_Forces_Size() const
{
    PHYSBAM_FUNCTION_IS_NOT_DEFINED();
}
// Not implemented in the base class; concrete models override.
template<class T,int d> void CONSTITUTIVE_MODEL<T,d>::
P_From_Strain_Rate_First_Half(const DIAGONAL_MATRIX<T,d>& F,ARRAY_VIEW<T> aggregate,const MATRIX<T,d>& F_dot,const T scale,const int simplex) const
{
    PHYSBAM_FUNCTION_IS_NOT_DEFINED();
}
// Not implemented in the base class. NOTE(review): non-void with no return
// -- presumably the macro is fatal/noreturn; confirm its definition.
template<class T,int d> MATRIX<T,d> CONSTITUTIVE_MODEL<T,d>::
P_From_Strain_Rate_Second_Half(const DIAGONAL_MATRIX<T,d>& F,const ARRAY_VIEW<const T> aggregate,const T scale,const int simplex) const
{
    PHYSBAM_FUNCTION_IS_NOT_DEFINED();
}
template class CONSTITUTIVE_MODEL<float,2>;
template class CONSTITUTIVE_MODEL<float,3>;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class CONSTITUTIVE_MODEL<double,2>;
template class CONSTITUTIVE_MODEL<double,3>;
#endif
|
# Returns true when n is a prime number, false otherwise.
#
# Improvements over the original: numbers <= 1 (including negatives) are
# rejected up front, even numbers > 2 are rejected immediately, and only
# odd candidate divisors up to sqrt(n) are tried (the original tested
# every integer from 2).
def is_prime?(n)
  return false if n <= 1
  return true if n <= 3
  return false if n % 2 == 0

  i = 3
  while i * i <= n
    return false if n % i == 0
    i += 2
  end
  true
end
|
import { DataType, FieldType } from "../../../Constants/Form"
import { ChartType, ChartTypeOptions } from "../../../Constants/Widgets/UtilizationPanel"
import { getValue } from "../../../Util/Util"
import { getCircleItems } from "./Circle/Edit"
import { getSparkItems } from "./Spark/Edit"
import { getTableItems } from "./Table/Edit"
// UtilizationPanel items
/**
 * Builds the editor form items for a UtilizationPanel widget.
 * Always includes the sub-type selector; appends circle/table/spark
 * specific items depending on the currently selected sub-type.
 *
 * @param {object} rootObject - widget configuration being edited
 * @returns {object[]} form item descriptors
 */
export const getUtilizationPanelItems = (rootObject) => {
  // update chart config items
  const subType = getValue(rootObject, "config.type", "")
  const items = [
    {
      label: "sub_type",
      fieldId: "config.type",
      fieldType: FieldType.SelectTypeAhead,
      dataType: DataType.String,
      value: "",
      options: ChartTypeOptions,
      isRequired: true,
      validator: { isNotEmpty: {} },
      // switching the sub-type invalidates type-specific configuration
      resetFields: {
        "config.resource.isMixedResources": false,
        "config.resource.resources": [],
        "config.table": {},
      },
    },
  ]
  // Case bodies are braced: the original declared `const` inside unbraced
  // cases, which shares one lexical scope across the whole switch
  // (no-case-declarations hazard).
  switch (subType) {
    case ChartType.CircleSize50:
    case ChartType.CircleSize75:
    case ChartType.CircleSize100: {
      items.push(...getCircleItems(rootObject))
      break
    }
    case ChartType.Table: {
      items.push(...getTableItems(rootObject))
      break
    }
    case ChartType.SparkArea:
    case ChartType.SparkLine:
    case ChartType.SparkBar: {
      items.push(...getSparkItems(rootObject))
      break
    }
    default:
    // noop
  }
  return items
}
|
<reponame>Thaslim/Splitwise-Lab2<gh_stars>0
import mongoose from 'mongoose';
// Join document linking a user to a group, with per-membership balances.
const MemberSchema = new mongoose.Schema({
// group this membership belongs to
groupID: {
type: mongoose.Schema.Types.ObjectId,
ref: 'group',
},
// the member (user) this record tracks
memberID: {
type: mongoose.Schema.Types.ObjectId,
ref: 'user',
},
// presumably: amount this member is owed / owes within the group -- confirm
// against the callers that update these balances
getBack: { type: Number, default: 0.0 },
give: { type: Number, default: 0.0 },
});
export default mongoose.model('groupMembers', MemberSchema);
|
<!DOCTYPE html>
<html>
<head>
<title>Student Registration Form</title>
<link rel="stylesheet" type="text/css" href="style.css">
</head>
<body>
<h1>Student Registration</h1>
<!-- Posts the registration fields to submit.php -->
<form action="submit.php" method="post">
<div class="form-group">
<label for="name">Name</label>
<!-- id attributes added: each label's for="..." previously pointed at
     a non-existent id, breaking label/input association -->
<input type="text" id="name" class="form-control" name="name" placeholder="Enter your name">
</div>
<div class="form-group">
<label for="email">Email</label>
<input type="email" id="email" class="form-control" name="email" placeholder="Enter your email">
</div>
<div class="form-group">
<label for="age">Age</label>
<input type="number" id="age" class="form-control" name="age" placeholder="Enter your age" min="18" max="100">
</div>
<div class="form-group">
<input type="submit" class="btn btn-primary" value="Submit">
</div>
</form>
</body>
</html>
/* style.css */
/* Page-wide default font. */
body {
font-family: Arial;
}
/* Vertical spacing between form rows. */
.form-group {
margin-top: 10px;
}
/* Fixed-width text inputs with inner padding. */
.form-control {
width: 300px;
padding: 10px;
}
/* Primary (submit) button: blue background, white text. */
.btn-primary {
background-color: #3498db;
color: #fff;
padding: 10px 20px;
font-size: 16px;
}
|
#!/usr/bin/env bash
# Set environment variables for dev
export APP_NAME=${APP_NAME:-test.com}
export APP_ENV=${APP_ENV:-dev}
export APP_PORT=${APP_PORT:-8888}
export DB_PORT=${DB_PORT:-3399}
export DB_ROOT_PASS=${DB_ROOT_PASS:-root}
export DB_NAME=${DB_NAME:-}
export DB_USER=${DB_USER:-root}
export DB_PASS=${DB_PASS:-root}
# Decide which docker-compose file to use
DIR_NAME="$( dirname "${BASH_SOURCE[0]}")"
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Create docker-compose command to run
COMPOSE="docker-compose -f $DIR/docker-compose.yml --project-name ${APP_NAME}"
# If we pass any arguments...
if [ $# -gt 0 ];then
# If "composer" is used, pass-thru to "composer"
# inside a new container
if [ "$1" == "newproject" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
composer create-project silverstripe/installer ./"${APP_NAME}" ^4
# todo also copy .env file
# && cd "${APP_NAME}" &&
# composer require silverstripe-docker &&
# rm -r ../silverstripe-docker
elif [ "$1" == "composer" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
composer "$@"
elif [ "$1" == "redis" ]; then
shift 1
$COMPOSE run --rm redis redis-cli -h redis
elif [ "$1" == "php" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
php "$@"
elif [ "$1" == "bash" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app bash
elif [ "$1" == "devbuild" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
./vendor/bin/sake dev/build "flush=all"
elif [ "$1" == "sake" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
./vendor/bin/sake dev/tasks/"$@"
# If "test" is used, run unit tests,
# pass-thru any extra arguments to php-unit
elif [ "$1" == "testall" ]; then #tODO add sqlite to the command
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
./vendor/bin/phpunit ./mysite/tests \" \" d=l
elif [ "$1" == "testfunction" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
./vendor/bin/phpunit ./mysite/tests --filter "$@" \" \" d=l
elif [ "$1" == "testsome" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html \
app \
./vendor/bin/phpunit ./mysite/tests/"$@" \" \" d=l
elif [ "$1" == "deletecache" ]; then
shift 1
# Fixed: use -A instead of -a. With -a the listing always contains
# '.' and '..', so the "directory has content" test could never be false.
if [ "$(ls -A ./silverstripe-cache/ )" ]; then
$COMPOSE run --rm \
-w /var/www/html \
app \
rm -r ./silverstripe-cache/*
fi
# Fixed: the original '> echo FLUSHALL' redirected redis-cli output into
# a file literally named 'echo'; FLUSHALL must be a redis-cli argument.
$COMPOSE run --rm redis redis-cli -h redis FLUSHALL
# If "npm" is used, run npm
# from our node container
elif [ "$1" == "npm" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html/themes/base \
node \
npm "$@"
elif [ "$1" == "node" ]; then
shift 1
$COMPOSE run --rm \
-w /var/www/html/ \
node \
bash
# from our node container
# Else, pass-thru args to docker-compose
else
$COMPOSE "$@"
fi
else
$COMPOSE ps
fi
|
#!/usr/bin/env bash
# Regenerate CommonJS + grpc-web (TypeScript d.ts) stubs from the .proto
# sources into src/generated. Fails fast on errors/unset vars (set -eu).
set -eu
PROTO_SRC=./proto
PROTO_DEST=./src/generated
# Ensure the output directory exists before protoc writes into it.
mkdir -p ${PROTO_DEST}
# NOTE(review): $(find ...) is unquoted, so proto paths containing spaces
# would word-split -- fine for conventional layouts, confirm if that matters.
protoc \
-I ${PROTO_SRC} $(find ${PROTO_SRC} -name "*.proto") \
--js_out="import_style=commonjs,binary:${PROTO_DEST}" \
--grpc-web_out="import_style=commonjs+dts,mode=grpcwebtext:${PROTO_DEST}"
|
<gh_stars>0
import React, { Component } from 'react';
import { connect } from 'react-redux';
import classNames from 'classnames';
import withStyles from '@material-ui/core/styles/withStyles';
import SnackbarContent from '@material-ui/core/SnackbarContent';
import Snackbar from '@material-ui/core/Snackbar';
import { changeNotify } from '../../store/actions/notify.action';
// Snackbar styling; the 'success'/'error' keys are selected at runtime by
// the notify.class value from the store (see messageClasses in Notify).
const styles = {
message: {
display: 'flex',
alignItems: 'center'
},
// NOTE(review): '#219' is a 3-digit hex (a dark blue) -- confirm this is
// the intended success colour rather than a typo for e.g. '#219x...'.
success: {
backgroundColor: '#219',
color: '#fff'
},
error: {
backgroundColor: '#f44336',
color: '#fff'
}
}
// Redux-driven snackbar: renders the global notification state
// (open/message/position/duration/class) and clears it on close.
export class Notify extends Component {
// Closing only flips the store's open flag; other fields are left as-is.
handleClose = () => {
this.props.changeNotify({
open: false,
})
}
render() {
const { classes } = this.props;
// Map notify.class ('success'/'error') to the matching style class, if set.
const messageClasses = classNames({
[classes[this.props.notify.class]]: this.props.notify.class
})
return (
<Snackbar
open={this.props.notify.open}
autoHideDuration={this.props.notify.time}
onClose={this.handleClose}
anchorOrigin = {
{
vertical: this.props.notify.vertical,
horizontal: this.props.notify.horizontal,
}
}
>
<SnackbarContent
className={messageClasses}
message={
<span className={classes.message}> {this.props.notify.msg} </span>
}
/>
</Snackbar>
)
}
}
// Expose the notify slice of the store as props.
const mapStateToProps = (state) => ({
notify: state.notifyReducer
})
// Expose the changeNotify action creator as a prop.
const mapDispatchToProps = dispatch =>({
changeNotify: (value) => dispatch(changeNotify(value))
})
export default connect(mapStateToProps, mapDispatchToProps)(withStyles(styles)
(Notify))
|
import legend from '@src/store/legend';
import { InitStoreState, Scale, StateFunc } from '@t/store/store';
import { deepMergedCopy } from '@src/helpers/utils';
import { LineChartOptions, NestedPieChartOptions } from '@t/options';
import Store from '@src/store/store';
describe('Legend Store', () => {
it('should apply default options when legend options not exist', () => {
const state = (legend.state as StateFunc)({
options: { chart: { width: 300, height: 300 } },
series: {
line: [
{
name: 'test',
data: [
{ x: 10, y: 5 },
{ x: 1, y: 2 },
{ x: 3, y: 5 },
],
rawData: [
{ x: 10, y: 5 },
{ x: 1, y: 2 },
{ x: 3, y: 5 },
],
color: '#aaaaaa',
},
],
},
});
expect(state.legend).toEqual({
data: [
{
label: 'test',
checked: true,
active: true,
width: 38,
iconType: 'line',
chartType: 'line',
rowIndex: 0,
columnIndex: 0,
},
],
useSpectrumLegend: false,
useScatterChartIcon: false,
});
});
it('should make legend layout properly when calling the setLegendLayout', () => {
const dispatch = () => {};
const fontTheme = {
fontSize: 11,
fontFamily: 'Arial',
fontWeight: 'normal',
color: '#333333',
};
const initStoreState = {
series: {
line: [
{ name: 'han', data: [1, 4], rawData: [1, 4], color: '#aaaaaa' },
{ name: 'cho', data: [5, 2], rawData: [5, 2], color: '#bbbbbb' },
],
},
options: {
zoomable: false,
},
} as InitStoreState<LineChartOptions>;
const state = {
chart: { width: 300, height: 300 },
layout: {
plot: { width: 250, height: 250, x: 30, y: 10 },
yAxis: { x: 10, y: 10, width: 30, height: 200 },
xAxis: { x: 10, y: 10, width: 250, height: 30 },
},
scale: { yAxis: { limit: { min: 0, max: 5 }, stepSize: 1, stepCount: 1 } } as Scale,
series: {
line: {
data: [
{ name: 'han', data: [1, 4] },
{ name: 'cho', data: [5, 2] },
],
},
},
circleLegend: {},
legend: {
data: [
{
label: 'han',
checked: true,
active: true,
width: 38,
iconType: 'line',
chartType: 'line',
rowIndex: 0,
columnIndex: 0,
},
{
label: 'cho',
checked: true,
active: true,
width: 38,
iconType: 'line',
chartType: 'line',
rowIndex: 0,
columnIndex: 0,
},
],
visible: true,
useSpectrumLegend: false,
useScatterChartIcon: false,
},
axes: {
xAxis: {},
yAxis: {},
},
categories: ['A', 'B'],
options: {
xAxis: { tick: { interval: 2 }, label: { interval: 3 } },
yAxis: { tick: { interval: 4 }, label: { interval: 5 } },
legend: {},
},
theme: {
xAxis: { title: { ...fontTheme }, label: { ...fontTheme } },
yAxis: { title: { ...fontTheme }, label: { ...fontTheme } },
legend: { label: { ...fontTheme } },
},
};
const store = { state, initStoreState } as Store<LineChartOptions>;
legend.action!.setLegendLayout.call({ dispatch }, store);
expect(state.legend).toEqual({
align: 'right',
data: [
{
active: true,
chartType: 'line',
checked: true,
columnIndex: 0,
iconType: 'line',
label: 'han',
rowIndex: 0,
width: 38,
},
{
active: true,
chartType: 'line',
checked: true,
columnIndex: 0,
iconType: 'line',
label: 'cho',
rowIndex: 1,
width: 38,
},
],
height: 78,
width: 94,
showCheckbox: true,
useScatterChartIcon: false,
useSpectrumLegend: false,
visible: true,
});
});
describe('iconType', () => {
const initStoreState = {
options: { chart: { width: 300, height: 300 } },
series: {},
};
const data = [
{
type: 'line',
iconType: 'line',
},
{
type: 'bar',
iconType: 'rect',
},
{
type: 'column',
iconType: 'rect',
},
{
type: 'area',
iconType: 'rect',
},
{
type: 'bubble',
iconType: 'circle',
},
{
type: 'scatter',
iconType: 'circle',
},
];
data.forEach((datum) => {
it(`${datum.type} chart return iconType ${datum.iconType}`, () => {
const series = { [datum.type]: [{ name: 'han' }] };
const state = (legend.state as StateFunc)(deepMergedCopy(initStoreState, { series }));
expect(state.legend!.data.map(({ iconType }) => iconType)).toEqual([datum.iconType]);
});
});
});
describe('using pie donut', () => {
it('should legend data properly for pie donut series', () => {
const state = (legend.state as StateFunc)({
options: { chart: { width: 300, height: 300 } },
series: {
pie: [
{
name: 'pie1',
data: [
{ name: 'A', data: 50 },
{ name: 'B', data: 50 },
],
},
{
name: 'pie2',
data: [
{ name: 'C', data: 60 },
{ name: 'D', data: 40 },
],
},
],
},
});
expect(state.legend!.data).toEqual([
{
label: 'A',
checked: true,
active: true,
width: 35,
iconType: 'rect',
chartType: 'pie',
rowIndex: 0,
columnIndex: 0,
},
{
label: 'B',
checked: true,
active: true,
width: 35,
iconType: 'rect',
chartType: 'pie',
rowIndex: 0,
columnIndex: 0,
},
{
label: 'C',
checked: true,
active: true,
width: 35,
iconType: 'rect',
chartType: 'pie',
rowIndex: 0,
columnIndex: 0,
},
{
label: 'D',
checked: true,
active: true,
width: 35,
iconType: 'rect',
chartType: 'pie',
rowIndex: 0,
columnIndex: 0,
},
]);
});
it('should legend data properly for grouped pie donut series', () => {
const state = (legend.state as StateFunc)({
options: {
chart: { width: 300, height: 300 },
series: { grouped: true },
} as NestedPieChartOptions,
series: {
pie: [
{
name: 'pie1',
data: [
{ name: 'A', data: 50 },
{ name: 'B', data: 50 },
],
},
{
name: 'pie2',
data: [
{ name: 'A1', parentName: 'A', data: 30 },
{ name: 'A2', parentName: 'A', data: 20 },
{ name: 'B1', parentName: 'B', data: 40 },
{ name: 'B2', parentName: 'B', data: 10 },
],
},
],
},
});
expect(state.legend!.data).toEqual([
{
label: 'A',
checked: true,
active: true,
width: 35,
iconType: 'rect',
chartType: 'pie',
rowIndex: 0,
columnIndex: 0,
},
{
label: 'B',
checked: true,
active: true,
width: 35,
iconType: 'rect',
chartType: 'pie',
rowIndex: 0,
columnIndex: 0,
},
]);
});
});
});
|
<reponame>multiplex/multiplex.js<gh_stars>10-100
// Cached references to the native Object and Array prototypes.
export var OBJECT_PROTOTYPE = Object.prototype;
export var ARRAY_PROTOTYPE = Array.prototype;
|
#!/bin/bash
#
# Reconfigure a Wallaby controller to join an existing wireless network
# instead of acting as a Wireless Access Point (WAP). USB networking is
# unaffected. Fill in SSID/PASSWORD below before running.
SSID=""
PASSWORD=""
CURDATE=$(date +%s)

echo "Updating Wallaby to connect to a wireless network..."
echo "(NOTE: Will no longer function as a Wireless Access Point (WAP), but will continue to connect over USB)."
sleep 4

# Refuse to continue with an empty SSID: proceeding would write a useless
# wpa_supplicant config and tear down the access point, leaving the device
# unreachable over wifi. (Previously this only warned and carried on.)
if [ -z "${SSID}" ]; then
    echo "SSID not set in the script."
    echo "Please modify the options at the top of this script with another computer if you have no way to provide input."
    exit 1
fi

BKUPDIR=/home/root/wallaby-pre-wifi-mods-backup

# If a previous backup exists, tar it away first so it is not overwritten.
# (Fixes: original used undefined ${BACKUPDIR} for the tar name.)
if [ -d "$BKUPDIR" ]; then
    TARBAK="${BKUPDIR}-${CURDATE}.tar"
    echo "Backup dir already exists. Tarring old backup as ${TARBAK}"
    tar -cf "${TARBAK}" "${BKUPDIR}"
fi
# Fixes: original ran 'mkdir BKUPDIR' (missing $), creating a literal ./BKUPDIR.
mkdir -p "$BKUPDIR"
echo "Backup directory set to: $BKUPDIR"

echo "backing up /etc/network/interfaces"
cp "/etc/network/interfaces" "$BKUPDIR"
cat > /etc/network/interfaces << EOL
allow-hotplug wlan0
iface wlan0 inet dhcp
wpa-roam /etc/wpa_supplicant/wpa_supplicant-wlan0.conf
iface default inet dhcp
EOL

# (Fixes: original message claimed a dated .bak destination; the file is
# actually copied into the backup directory.)
echo "backing up /etc/wpa_supplicant/wpa_supplicant-wlan0.conf -> ${BKUPDIR}"
cp "/etc/wpa_supplicant/wpa_supplicant-wlan0.conf" "$BKUPDIR"
# Unquoted heredoc on purpose: ${SSID}/${PASSWORD} must expand into the config.
cat > /etc/wpa_supplicant/wpa_supplicant-wlan0.conf << EOL
ctrl_interface=/var/run/wpa_supplicant
ctrl_interface_group=0
update_config=1
# Connect to a WPA2 protected network
network={
ssid="${SSID}"
proto=WPA2
key_mgmt=WPA-PSK
pairwise=CCMP TKIP
group=CCMP TKIP
scan_ssid=1
psk="${PASSWORD}"
priority=10
}
EOL

echo "moving /etc/udhcpd.conf to backup directory"
mv /etc/udhcpd.conf "$BKUPDIR"
echo "moving /usr/bin/wifi_configurator.py to backup directory"
mv /usr/bin/wifi_configurator.py "$BKUPDIR"
echo "moving /etc/systemd/network/wifi.network to backup directory"
mv /etc/systemd/network/wifi.network "$BKUPDIR"
cat > /etc/systemd/network/wifi.network << EOL
[Match]
Name=wlan0
[Network]
DHCP=v4
#[Address]
#Address=192.168.1.76
EOL

# default is: /usr/sbin/wpa_supplicant -c/etc/wpa_supplicant/wpa_supplicant-wlan0.conf -iwlan0
############ INIT SCRIPT ##########
# Helper run by the wificlient systemd unit: cycle wlan0 and start wpa_supplicant.
cat > /etc/init.d/startClientWifi.sh << EOL
touch /home/root/thisRan
ifconfig wlan0 down
sleep 2
wpa_supplicant -B -Dnl80211 -iwlan0 -c/etc/wpa_supplicant/wpa_supplicant-wlan0.conf
sleep 5
ifconfig wlan0 up
ifconfig -a wlan0 | grep 'inet addr:' | wall -n
EOL
###################################
chmod +x /etc/init.d/startClientWifi.sh

echo "moving /etc/systemd/network/ap.network to backup directory"
mv /etc/systemd/network/ap.network "$BKUPDIR"
cat > /etc/systemd/system/wificlient.service << EOL
[Unit]
Description=Controls client wifi connection using wpa_supplicant
After=multi-user.target
[Service]
RemainAfterExit=yes
ExecStart=/bin/sh -c "/etc/init.d/startClientWifi.sh"
ExecStop=/bin/sh -c "pkill wpa_supplicant"
RemainAfterExit=yes
[Install]
WantedBy=graphical.target
EOL
systemctl enable wificlient.service

# Disable the stock AP-mode wifi service.
systemctl disable wifi.service
systemctl disable wifi

echo "Performing network tasks"
update-rc.d -f hostapd remove
systemctl disable hostapd
pkill hostapd
sleep 1
ifconfig wlan0 down
sleep 1
ip addr flush dev wlan0 # Might only need to be run once
sleep 1
wpa_supplicant -B -Dnl80211 -iwlan0 -c/etc/wpa_supplicant/wpa_supplicant-wlan0.conf
sleep 1
ifconfig wlan0 up
sleep 3
echo ""
ifconfig wlan0 | wall -n
echo "DONE"
|
package com.leetcode;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/** Tests for Solution_1185 (LeetCode 1185: Day of the Week). */
public class Solution_1185Test {
    @Test
    public void testDayOfTheWeek() {
        Solution_1185 solution_1185 = new Solution_1185();
        // Fix: the test only printed the result and asserted nothing, so it
        // could never fail. 31 August 2019 was a Saturday.
        assertEquals(solution_1185.dayOfTheWeek(31, 8, 2019), "Saturday");
    }
}
|
use fontconfig_sys as sys;
use std::ptr;
/// Zero-sized handle proving the fontconfig library has been initialized.
pub struct Fontconfig;
/// Safe wrapper around a raw `sys::FcObjectSet`.
pub struct ObjectSet {
    // Define the fields and methods for the ObjectSet struct
}
impl Fontconfig {
    /// The `FcObjectSet` must not be null. This method assumes ownership of the `FcObjectSet`.
    ///
    /// The `&Fontconfig` argument is unused except as proof that the library
    /// was initialized before constructing the wrapper.
    pub fn from_raw(_: &Fontconfig, raw_set: *mut sys::FcObjectSet) -> ObjectSet {
        assert!(!raw_set.is_null());
        // Safety: We assume ownership of the raw_set and ensure it is not null
        let object_set = unsafe { ObjectSet::construct_from_raw(raw_set) };
        // Prevent double-free by setting the raw_set to null
        // NOTE(review): this writes a null pointer *value* into the FcObjectSet
        // storage that `raw_set` points at — not into the caller's pointer
        // variable — so it cannot prevent a double free, and the written type
        // (`*mut _`) does not match `sys::FcObjectSet`. Verify intent and that
        // this compiles against the real `fontconfig_sys` definitions.
        unsafe { ptr::write(raw_set, ptr::null_mut()) };
        object_set
    }
}
impl ObjectSet {
    // Internal constructor: builds the safe wrapper from the raw pointer.
    // NOTE(review): placeholder — `raw_set` is currently unused, so no data is
    // actually taken over from the raw FcObjectSet yet.
    fn construct_from_raw(raw_set: *mut sys::FcObjectSet) -> ObjectSet {
        // Implement the construction of ObjectSet from the raw pointer
        // Example: Extract necessary data from raw_set and construct ObjectSet
        ObjectSet {
            // Construct ObjectSet fields from the raw_set
        }
    }
}
|
#! /bin/bash
# Generate a BIRD config that peers the local node with a BGP route reflector,
# then restart BIRD.

# The bird config file path is different for Red Hat and Debian/Ubuntu.
if [ -f /etc/bird.conf ]; then
    BIRD_CONF=/etc/bird.conf
else
    BIRD_CONF=/etc/bird/bird.conf
fi
BIRD_CONF_TEMPLATE=/usr/share/calico/bird/calico-bird.conf.template

# Require 3 arguments. (Fixes: the argument count was tested twice and the
# failure path used non-portable 'exit -1'; exit statuses are 0-255.)
if [ $# -ne 3 ]; then
    cat <<EOF
Usage: $0 <my-ip-address> <rr-ip-address> <as-number>
where
<my-ip-address> is the external IP address of the local machine
<rr-ip-address> is the IP address of the route reflector that
the local BIRD should peer with
<as-number> is the BGP AS number that the route relector is using.
Please specify exactly these 3 required arguments.
EOF
    exit 1
fi

# Name the arguments.
my_ip_address=$1
rr_ip_address=$2
as_number=$3

# Generate BIRD config file by substituting the placeholders in the template.
# NOTE: arguments containing '/' would break the sed expressions; expected
# inputs (IP addresses, AS numbers) cannot contain one.
mkdir -p "$(dirname "$BIRD_CONF")"
sed -e "
s/@MY_IP_ADDRESS@/${my_ip_address}/;
s/@RR_IP_ADDRESS@/${rr_ip_address}/;
s/@AS_NUMBER@/${as_number}/;
" < "$BIRD_CONF_TEMPLATE" > "$BIRD_CONF"
echo BIRD configuration generated at $BIRD_CONF
service bird restart
echo BIRD restarted
|
<filename>test/unit/word.spec.js<gh_stars>10-100
var chai = require('chai');
var sinon = require('sinon');
chai.use(require('sinon-chai'));
var expect = chai.expect;
var Word = require('../../src/js/word');
var Char = require('../../src/js/char');
// Unit tests for the Word model: construction, char access, removal,
// JSON/string/HTML serialization, prop toggling, splitting and merging.
describe('Word', function () {
  // Construction from Char arrays, strings, and empty/default inputs.
  describe('constructor', function () {
    it('should exist', function () {
      expect(Word).not.to.be.undefined;
    });
    it('should accept an array of Char objects and set their parent reference', function () {
      var chars = [new Char('a'), new Char(' ')],
        word = new Word(chars);
      expect(word.chars).to.be.an.instanceof(Array);
      expect(word.chars).to.equal(chars);
      expect(word.chars[0].parent).to.equal(word);
      expect(word.chars[1].parent).to.equal(word);
    });
    it('should accept a string and convert it into Chars', function () {
      var str = 'chars ',
        word = new Word(str);
      expect(word.chars).to.be.an.instanceof(Array);
      expect(word.chars.length).to.equal(6);
      word.chars.forEach(function (char, idx) {
        expect(char).to.be.an.instanceof(Char);
        expect(char.toString()).to.equal(str[idx]);
        expect(char.parent).to.equal(word);
      });
    });
    it('should accept a parent reference', function () {
      var obj = {},
        word = new Word('a ', obj);
      expect(word.parent).to.equal(obj);
    });
    it('should create an empty Char when passed an empty string', function () {
      var str = '',
        word = new Word('');
      expect(word.chars.length).to.equal(1);
      expect(word.chars[0]).to.be.an.instanceof(Char);
      expect(word.chars[0].toString()).to.be.empty;
      expect(word.chars[0].parent).to.equal(word);
    });
    it('should have an empty char array by default', function () {
      var word = new Word();
      expect(word.chars).to.be.an.instanceof(Array);
      expect(word.chars.length).to.equal(0);
    });
  });
  // Simple accessors.
  describe('getChars', function () {
    it('should return the chars array', function () {
      var chars = [new Char('a'), new Char(' ')],
        word = new Word(chars);
      expect(word.getChars()).to.equal(chars);
    });
  });
  describe('getFirstChar', function () {
    it('should return the first Char', function () {
      var chars = [new Char('a'), new Char(' ')],
        word = new Word(chars);
      expect(word.getFirstChar()).to.equal(chars[0]);
    });
  });
  describe('getLastChar', function () {
    it('should return the last Char', function () {
      var chars = [new Char('a'), new Char(' ')],
        word = new Word(chars);
      expect(word.getLastChar()).to.equal(chars[1]);
    });
  });
  // Removal detaches the Char and cascades to the parent when emptied.
  describe('removeChar', function () {
    it('should remove the provided Char and return it', function () {
      var word = new Word('chars '),
        target = word.getChars()[2];
      expect(word.getChars().length).to.equal(6);
      expect(target.parent).to.equal(word);
      var returned = word.removeChar(target);
      expect(returned).to.equal(target);
      expect(returned.parent).to.be.undefined;
      expect(word.getChars().length).to.equal(5);
      expect(word.toString()).to.equal('chrs ');
    });
    it('should not change anything if the provided Char is not the word', function () {
      var char = new Char('a'),
        word = new Word('chars '),
        returned = word.removeChar(char);
      expect(returned).to.equal(char);
      expect(word.toString()).to.equal('chars ');
    });
    it('should call removeWord on its parent if all Chars are removed', function () {
      var parent = {
          removeWord: sinon.spy()
        },
        word = new Word('a ', parent);
      expect(word.getChars().length).to.equal(2);
      var returned = word.removeChar(word.getChars()[0]);
      expect(returned.parent).to.be.undefined;
      expect(word.getChars().length).to.equal(1);
      expect(parent.removeWord).not.to.have.been.called;
      returned = word.removeChar(word.getChars()[0]);
      expect(returned.parent).to.be.undefined;
      expect(word.getChars().length).to.equal(0);
      expect(parent.removeWord).to.have.been.calledWith(word);
    });
  });
  // JSON serialization: word id plus per-char child nodes.
  describe('toJSON', function () {
    it('should return a JSON representation of the Word when initialized with a string', function () {
      var word = new Word('chars '),
        json = word.toJSON(0);
      expect(json).to.deep.equal({
        name: 'w',
        id: 'w0',
        children: [
          { name: 'c', id: 'c0-0', children: [] },
          { name: 'h', id: 'c0-1', children: [] },
          { name: 'a', id: 'c0-2', children: [] },
          { name: 'r', id: 'c0-3', children: [] },
          { name: 's', id: 'c0-4', children: [] },
          { name: '[ ]', id: 'c0-5', children: [] }
        ]
      });
    });
    it('should return a JSON representation of the Word when initialized with an array of Chars', function () {
      var chars = [new Char('c', null, { 'b': true }), new Char('h'), new Char('a'), new Char('r'), new Char('s'), new Char(' ')],
        word = new Word(chars),
        json = word.toJSON(0);
      expect(json).to.deep.equal({
        name: 'w',
        id: 'w0',
        children: [
          { name: 'c', id: 'c0-0', children: ['b'] },
          { name: 'h', id: 'c0-1', children: [] },
          { name: 'a', id: 'c0-2', children: [] },
          { name: 'r', id: 'c0-3', children: [] },
          { name: 's', id: 'c0-4', children: [] },
          { name: '[ ]', id: 'c0-5', children: [] }
        ]
      });
    });
    it('should return a JSON representation when the Word is empty', function () {
      var word = new Word(undefined),
        json = word.toJSON(1);
      expect(json).to.deep.equal({
        name: 'w',
        id: 'w1',
        children: []
      });
    });
    it('should return a JSON representation when the Word is just an empty string', function () {
      var word = new Word(''),
        json = word.toJSON(0);
      expect(json).to.deep.equal({
        name: 'w',
        id: 'w0',
        children: [{ name: '', id: 'c0-0', children: [] }]
      });
    });
  });
  describe('toString', function () {
    it('should return the characters converted into a string', function () {
      var str = 'chars ',
        word = new Word(str),
        res = word.toString();
      expect(word.getChars()[0]).to.be.an.instanceof(Char);
      expect(res).to.be.a('string');
      expect(res).to.equal(str);
    });
  });
  // HTML serialization: char props become nested wrapping tags.
  describe('toHTML', function () {
    it('should return the characters converted into a string', function () {
      var str = 'chars ',
        word = new Word(str),
        res = word.toHTML();
      expect(word.getChars()[0]).to.be.an.instanceof(Char);
      expect(res).to.be.a('string');
      expect(res).to.equal(str);
    });
    it('should return the characters converted into a string wrapped in tags based on props of Chars', function () {
      var chars = [
          new Char('c', null, { 'b': true }),
          new Char('h', null, { 'i': true }),
          new Char('a'),
          new Char('r', null, { 'sup': true }),
          new Char('s', null, { 'sup': false, 'sub': true }),
          new Char(' '),
          new Char('\n')
        ],
        word = new Word(chars),
        res = word.toHTML();
      expect(res).to.be.a('string');
      expect(res).to.equal('<b><i><sup><sub>chars </sub></sup></i></b>');
    });
  });
  // toggleProp sets the prop true for all Chars unless every Char already has it.
  describe('toggleProp', function () {
    it('should turn on a prop for all child Chars', function () {
      var word = new Word('chars ');
      word.getChars().forEach(function (char) {
        expect(char.props).to.deep.equal({});
      });
      word.toggleProp('b');
      word.getChars().forEach(function (char) {
        expect(char.props).to.deep.equal({ 'b': true });
      });
    });
    it('should set prop to true for all child Chars if only some are true already', function () {
      var chars = [
          new Char('c', null, { 'b': true }),
          new Char('h', null, { 'b': true }),
          new Char('a'),
          new Char('r', null, { 'b': true }),
          new Char('s', null, { 'b': true }),
          new Char(' ')
        ],
        word = new Word(chars);
      word.toggleProp('b');
      word.getChars().forEach(function (char) {
        expect(char.props['b']).to.be.true;
      });
    });
    it('should set prop to false if all child Chars are already true', function () {
      var chars = [
          new Char('c', null, { 'b': true }),
          new Char('h', null, { 'b': true }),
          new Char('a', null, { 'b': true }),
          new Char('r', null, { 'b': true }),
          new Char('s', null, { 'b': true }),
          new Char(' ', null, { 'b': true })
        ],
        word = new Word(chars);
      word.toggleProp('b');
      word.getChars().forEach(function (char) {
        expect(char.props['b']).to.be.false;
      });
    });
  });
  // split breaks a Word at interior spaces/newlines, handing the tail pieces
  // to the parent via insertAfter / splitAndInsertBlocks.
  describe('split', function () {
    it('should split a Word containing a space into 2 Words', function () {
      var parent = {
          otherWord: null,
          refWord: null,
          insertAfter: function (refWord, otherWord) {
            this.refWord = refWord;
            this.otherWord = otherWord;
          }
        },
        word = new Word('chars morechars ', parent);
      expect(word.parent).to.equal(parent);
      word.split();
      expect(word.toString()).to.equal('chars ');
      expect(parent.refWord).to.equal(word);
      expect(parent.otherWord).not.to.be.null;
      expect(parent.otherWord.toString()).to.equal('morechars ');
    });
    it('should split a Word into multiple words if there are multiple spaces before the end of the Word', function () {
      var parent = {
          otherWords: [],
          insertAfter: function (refWord, otherWord) {
            this.otherWords.push(otherWord);
            otherWord.parent = this;
          }
        },
        word = new Word('c h a r s ', parent);
      expect(word.parent).to.equal(parent);
      word.split();
      expect(word.toString()).to.equal('c ');
      expect(parent.otherWords.length).to.equal(4);
      expect(parent.otherWords[0].toString()).to.equal('h ');
      expect(parent.otherWords[1].toString()).to.equal('a ');
      expect(parent.otherWords[2].toString()).to.equal('r ');
      expect(parent.otherWords[3].toString()).to.equal('s ');
    });
    it('should not do anything if the Word does not contain a space or newline before the end of the Word', function () {
      var parent = {
          otherWord: null,
          refWord: null,
          insertAfter: function (refWord, otherWord) {
            this.refWord = refWord;
            this.otherWord = otherWord;
          }
        },
        word = new Word('chars ', parent);
      expect(word.parent).to.equal(parent);
      word.split();
      expect(word.toString()).to.equal('chars ');
      expect(parent.refWord).to.be.null;
      expect(parent.otherWord).to.be.null;
    });
    it('should split a Word into multiple Blocks and Words when it contains newlines and spaces', function () {
      var parent = {
          words: [],
          otherBlocks: [],
          refWord: null,
          splitAndInsertBlocks: function (refWord, wordArrays) {
            this.refWord = refWord;
            wordArrays.forEach(function (wordArray) {
              this.otherBlocks.push(wordArray);
            }, this);
          },
          insertAfter: function (prevWord, word) {
            var index = this.words.indexOf(prevWord);
            this.words.splice(index + 1, 0, word);
            word.parent = this;
          }
        },
        word = new Word('block one\nblock two\nblock three', parent);
      parent.words = [word];
      word.split();
      expect(word.parent).to.equal(parent);
      expect(word.toString()).to.equal('block ');
      expect(parent.words.length).to.equal(2);
      expect(parent.words[0]).to.equal(word);
      var secondWord = parent.words[1];
      expect(secondWord.toString()).to.equal('one\n');
      expect(parent.refWord).to.equal(secondWord);
      expect(parent.otherBlocks.length).to.equal(2);
      var blockTwo = parent.otherBlocks[0];
      expect(blockTwo.length).to.equal(2);
      expect(blockTwo[0].toString()).to.equal('block ');
      expect(blockTwo[1].toString()).to.equal('two\n');
      var blockThree = parent.otherBlocks[1];
      expect(blockThree.length).to.equal(2);
      expect(blockThree[0].toString()).to.equal('block ');
      expect(blockThree[1].toString()).to.equal('three');
    });
  });
  // merge absorbs another Word's Chars (re-splitting afterwards if needed),
  // or delegates to the parents when the Words belong to different parents.
  describe('merge', function () {
    it('should merge Chars from another Word into this Word', function () {
      var parent = {
          removeWord: sinon.spy()
        },
        word = new Word('cha', parent),
        otherWord = new Word('rs ', parent);
      expect(word.parent).to.equal(parent);
      expect(otherWord.parent).to.equal(parent);
      word.merge(otherWord);
      expect(word.toString()).to.equal('chars ');
      expect(word.parent).to.equal(parent);
      word.getChars().forEach(function (char) {
        expect(char.parent).to.equal(word);
      });
      expect(parent.removeWord).to.have.been.calledWith(otherWord);
      expect(otherWord.getChars().length).to.equal(0);
    });
    it('should merge and then split words if needed', function () {
      var parent = {
          otherWords: [],
          insertAfter: function (refWord, otherWord) {
            this.otherWords.push(otherWord);
            otherWord.parent = this;
          },
          removeWord: sinon.spy()
        },
        word = new Word('chars one ', parent),
        splitSpy = sinon.spy(word, 'split'),
        otherWord = new Word('chars word ', parent);
      word.merge(otherWord);
      expect(splitSpy).to.have.been.called;
      expect(word.toString()).to.equal('chars ');
      expect(otherWord.getChars()).to.be.empty;
      expect(parent.otherWords.length).to.equal(3);
      expect(parent.otherWords[0].toString()).to.equal('one ');
      expect(parent.otherWords[1].toString()).to.equal('chars ');
      expect(parent.otherWords[2].toString()).to.equal('word ');
    });
    it('should call merge on its parent if the other Word is not paret of the same parent', function () {
      var parent = {
          merge: sinon.spy()
        },
        word = new Word('cha', parent),
        parentTwo = {},
        wordTwo = new Word('rs ', parentTwo);
      expect(word.parent).to.equal(parent);
      expect(wordTwo.parent).to.equal(parentTwo);
      word.merge(wordTwo);
      expect(parent.merge).to.have.been.calledWith(parentTwo);
    });
  });
  // Insertion before/after a reference Char; invalid references fall back to
  // the beginning/end respectively.
  describe('insertBefore', function () {
    it('should add Chars before the provided char', function () {
      var word = new Word('chs '),
        chars = [new Char('a'), new Char('r')],
        letterS = word.getChars()[2];
      expect(word.getChars().length).to.equal(4);
      word.insertBefore(letterS, chars);
      chars.forEach(function (char) {
        expect(char.parent).to.equal(word);
      });
      expect(word.toString()).to.equal('chars ');
    });
    it('should add Chars at the beginning of the Word if invalid reference is passed', function () {
      var word = new Word('ars '),
        letterH = new Char('h'),
        letterC = new Char('c');
      expect(word.toString()).to.equal('ars ');
      word.insertBefore(letterC, [letterH]);
      expect(letterH.parent).to.equal(word);
      expect(word.toString()).to.equal('hars ');
      word.insertBefore(null, [letterC]);
      expect(letterC.parent).to.equal(word);
      expect(word.toString()).to.equal('chars ');
    });
    it('should do nothing if nothing is passed', function () {
      var word = new Word('chars ');
      word.insertBefore();
      expect(word.toString()).to.equal('chars ');
    });
  });
  describe('insertAfter', function () {
    it('should add Chars after the provided char', function () {
      var word = new Word('chs '),
        chars = [new Char('a'), new Char('r')],
        letterH = word.getChars()[1];
      expect(word.getChars().length).to.equal(4);
      word.insertAfter(letterH, chars);
      chars.forEach(function (char) {
        expect(char.parent).to.equal(word);
      });
      expect(word.toString()).to.equal('chars ');
    });
    it('should add Chars at the end of the Word if invalid reference is passed', function () {
      var word = new Word('char'),
        letterS = new Char('s'),
        letterSpace = new Char(' ');
      expect(word.toString()).to.equal('char');
      word.insertAfter(letterSpace, [letterS]);
      expect(letterS.parent).to.equal(word);
      expect(word.toString()).to.equal('chars');
      word.insertAfter(null, [letterSpace]);
      expect(letterSpace.parent).to.equal(word);
      expect(word.toString()).to.equal('chars ');
    });
    it('should do nothing if nothing is passed', function () {
      var word = new Word('chars ');
      word.insertAfter();
      expect(word.toString()).to.equal('chars ');
    });
  });
});
|
<filename>generator/src/main/java/net/synqg/qg/implicative/ImplicativeType.java
package net.synqg.qg.implicative;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.experimental.Accessors;
/**
 * Types of implications used by verbs as well as phrases (verb-noun collocations)
 *
 * @author kaustubhdholé.
 */
@AllArgsConstructor
@Getter
@Accessors(fluent = true)
public enum ImplicativeType {
    /**
     * cause NP to, force NP to, make NP to
     * Kim forced Mary to leave. --> Kim left.
     */
    POS_POS(Boolean.TRUE, Boolean.TRUE),
    /**
     * refuse to, prevent NP from, keep NP from
     * Kim prevented Mary from leaving. --> Kim did not leave.
     */
    POS_NEG(Boolean.TRUE, Boolean.FALSE),
    /**
     * hesitate to
     * Kim did not hesitate to speak her mind. --> Kim spoke her mind.
     */
    NEG_POS(Boolean.FALSE, Boolean.TRUE),
    /**
     * can (= be able to)
     * Kim could not finish her sentence.
     */
    NEG_NEG(Boolean.FALSE, Boolean.FALSE);

    /**
     * Polarity of the host clause.
     */
    private Boolean mainClausePolarity;

    /**
     * Polarity of the subordinate clause.
     */
    private Boolean entailedPolarity;

    /**
     * Checks whether this type's main-clause polarity matches the given one.
     *
     * @param mainClausePolarity observed polarity of the main clause
     * @return true when the polarities are equal
     */
    public Boolean match(Boolean mainClausePolarity) {
        // Fix: '==' on Boolean compares object identity and is only reliable
        // for the interned Boolean.TRUE/FALSE instances; compare by value.
        return this.mainClausePolarity.equals(mainClausePolarity);
    }

    /**
     * Parses a short lexicon tag ("pp", "pn", "np", "nn", optionally containing
     * '*' characters which are stripped) into an {@link ImplicativeType}.
     *
     * @param type the tag to parse
     * @return the matching implicative type
     * @throws Exception if the tag is not one of the four known values
     */
    public static ImplicativeType fromString(String type) throws Exception {
        type = type.replace("*", "");
        switch (type) {
            case "np":
                return NEG_POS;
            case "nn":
                return NEG_NEG;
            case "pn":
                return POS_NEG;
            case "pp":
                return POS_POS;
            default:
                throw new Exception("Unsupported type: " + type);
        }
    }
}
|
#! /usr/bin/env bash
#
# Given a commit, update all the Copyrights of the changed files.
#
# Copyright 2021, Verizon Media
# SPDX-License-Identifier: Apache-2.0
#
usage="$(basename "$0") <git_commit>"

# Print an error message and abort.
fail()
{
    echo -e "$1"
    exit 1
}

[ $# -eq 1 ] || fail "Provide a git commit to check changed files for.\n\n${usage}"

commit=${1}
tools_dir=$(dirname "$0")
git_root=$(dirname "${tools_dir}")
# Fix: abort if the cd fails instead of silently rewriting files elsewhere.
cd "${git_root}" || fail "Cannot cd to ${git_root}"

current_year=$(date +%Y)

# Files touched by the commit. Guard against an empty list so sed is never
# invoked without file operands. NOTE: word-splitting below means paths
# containing whitespace are not supported.
changed_files=$(git diff-tree --no-commit-id --name-only -r "${commit}")
[ -n "${changed_files}" ] || fail "No files changed in commit ${commit}."

# Rewrite any 20xx copyright year to the current year in each changed file.
sed -i'.sedbak' "s/Copyright 20[[:digit:]][[:digit:]]/Copyright ${current_year}/g" \
    ${changed_files}

# Remove sed's backup files.
find . -name '*.sedbak' -delete
|
// Rhino/JsUnit-style test driver: load factorial.js into the current scope,
// then register two test cases with the harness ('test', 'testCases',
// 'assert', 'eq', 'matches', 'not' are provided by the surrounding runner).
eval(loadFile("src/main/webapp/factorial.js"));
testCases(test,
// 15! = 1307674368000, exactly representable as a double.
function test15() {
assert.that(factorial(15), eq(1307674368000));
},
// Regex matchers: positive and negated substring matches.
function testRegEx() {
var actual = "JUnit in Action";
assert.that(actual, matches(/in/));
assert.that(actual, not(matches(/out/)));
}
);
|
#!/usr/bin/env bash
# Export the registry-mirroring variables (OCP + OLM source/destination
# registries and indexes) for either the hub cluster or a spoke cluster.
set -o pipefail
set -o nounset
set -m

if [[ $# -lt 1 ]]; then
    echo "Usage :"
    echo ' $1: hub|spoke'
    echo "Sample: "
    echo " ${0} hub|spoke"
    exit 1
fi

# variables
# #########
# Load common vars. Fix: with 'nounset' an unset WORKDIR previously aborted
# the script with a cryptic expansion error; fail with a clear message instead.
if [[ -z "${WORKDIR:-}" ]]; then
    echo "WORKDIR must be set so that shared-utils/common.sh can be sourced"
    exit 1
fi
source "${WORKDIR}/shared-utils/common.sh"

echo ">>>> Get the pull secret from hub to file pull-secret"
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"

export REGISTRY=kubeframe-registry
export AUTH_SECRET=../${SHARED_DIR}/htpasswd
export REGISTRY_MANIFESTS=manifests
export SECRET=auth
export REGISTRY_CONFIG=config.yml
export SOURCE_PACKAGES='kubernetes-nmstate-operator,metallb-operator,ocs-operator,local-storage-operator,advanced-cluster-management'
export PACKAGES_FORMATED=$(echo ${SOURCE_PACKAGES} | tr "," " ")
export EXTRA_IMAGES='quay.io/jparrill/registry:2'
export OCP_RELEASE=${OC_OCP_VERSION}
export OCP_RELEASE_FULL=${OCP_RELEASE}.0
# TODO: Change static passwords by dynamic ones
export REG_US=dummy
export REG_PASS=dummy

if [[ ${1} == "hub" ]]; then
    echo ">>>> Get the registry cert and update pull secret for: ${1}"
    echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
    export OCP_RELEASE=$(oc --kubeconfig=${KUBECONFIG_HUB} get clusterversion -o jsonpath={'.items[0].status.desired.version'})
    export OPENSHIFT_RELEASE_IMAGE="quay.io/openshift-release-dev/ocp-release:${OCP_RELEASE}-x86_64"
    export SOURCE_REGISTRY="quay.io"
    export SOURCE_INDEX="registry.redhat.io/redhat/redhat-operator-index:v${OC_OCP_VERSION}"
    export DESTINATION_REGISTRY="$(oc --kubeconfig=${KUBECONFIG_HUB} get route -n ${REGISTRY} ${REGISTRY} -o jsonpath={'.status.ingress[0].host'})"
    ## OLM
    ## NS where the OLM images will be mirrored
    export OLM_DESTINATION_REGISTRY_IMAGE_NS=olm
    ## NS where the OLM INDEX for RH OPERATORS image will be mirrored
    export OLM_DESTINATION_REGISTRY_INDEX_NS=${OLM_DESTINATION_REGISTRY_IMAGE_NS}/redhat-operator-index
    ## OLM INDEX IMAGE
    export OLM_DESTINATION_INDEX="${DESTINATION_REGISTRY}/${OLM_DESTINATION_REGISTRY_INDEX_NS}:v${OC_OCP_VERSION}"
    ## OCP
    ## The NS for INDEX and IMAGE will be the same here, this is why there is only 1
    export OCP_DESTINATION_REGISTRY_IMAGE_NS=ocp4/openshift4
    ## OCP INDEX IMAGE
    export OCP_DESTINATION_INDEX="${DESTINATION_REGISTRY}/${OCP_DESTINATION_REGISTRY_IMAGE_NS}:${OC_OCP_TAG}"
elif [[ ${1} == "spoke" ]]; then
    if [[ ${SPOKE_KUBECONFIG:-} == "" ]]; then
        echo "Avoiding Hub <-> Spoke sync on favor of registry deployment"
    else
        echo ">>>> Filling variables for Registry sync on Spoke"
        echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
        echo "HUB: ${KUBECONFIG_HUB}"
        echo "SPOKE: ${SPOKE_KUBECONFIG}"
        ## Common
        export DESTINATION_REGISTRY="$(oc --kubeconfig=${SPOKE_KUBECONFIG} get route -n ${REGISTRY} ${REGISTRY} -o jsonpath={'.status.ingress[0].host'})"
        ## OCP Sync vars
        export OPENSHIFT_RELEASE_IMAGE="$(oc --kubeconfig=${KUBECONFIG_HUB} get clusterimageset --no-headers $(yq eval ".config.clusterimageset" ${SPOKES_FILE}) -o jsonpath={.spec.releaseImage})"
        ## The NS for INDEX and IMAGE will be the same here, this is why there is only 1
        export OCP_DESTINATION_REGISTRY_IMAGE_NS=ocp4/openshift4
        ## OCP INDEX IMAGE
        export OCP_DESTINATION_INDEX="${DESTINATION_REGISTRY}/${OCP_DESTINATION_REGISTRY_IMAGE_NS}:${OC_OCP_TAG}"
        ## OLM Sync vars
        export SOURCE_REGISTRY="$(oc --kubeconfig=${KUBECONFIG_HUB} get route -n ${REGISTRY} ${REGISTRY} -o jsonpath={'.status.ingress[0].host'})"
        ## NS where the OLM images will be mirrored
        export OLM_DESTINATION_REGISTRY_IMAGE_NS=olm
        ## NS where the OLM INDEX for RH OPERATORS image will be mirrored
        export OLM_DESTINATION_REGISTRY_INDEX_NS=${OLM_DESTINATION_REGISTRY_IMAGE_NS}/redhat-operator-index
        export SOURCE_INDEX="${SOURCE_REGISTRY}/${OLM_DESTINATION_REGISTRY_INDEX_NS}:v${OC_OCP_VERSION}"
        export OLM_DESTINATION_INDEX="${DESTINATION_REGISTRY}/${OLM_DESTINATION_REGISTRY_INDEX_NS}:v${OC_OCP_VERSION}"
    fi
fi
|
// <NAME>, Geometric Tools, Redmond WA 98052
// Copyright (c) 1998-2016
// Distributed under the Boost Software License, Version 1.0.
// http://www.boost.org/LICENSE_1_0.txt
// http://www.geometrictools.com/License/Boost/LICENSE_1_0.txt
// File Version: 3.0.0 (2016/06/19)
#include "SphereMapEffect.h"
using namespace gte;
// Builds the sphere-map effect: compiles the API-appropriate VS/PS pair,
// allocates the PVW and VW matrix constant buffers (initialized to identity),
// and binds the environment texture plus sampler to the pixel shader.
SphereMapEffect::SphereMapEffect(std::shared_ptr<ProgramFactory> const& factory,
    std::shared_ptr<Texture2> const& texture, SamplerState::Filter filter,
    SamplerState::Mode mode0, SamplerState::Mode mode1)
    :
    mTexture(texture),
    mPVWMatrix(nullptr),
    mVWMatrix(nullptr)
{
    // Select the GLSL or HLSL source via the factory's active API index.
    int api = factory->GetAPI();
    mProgram = factory->CreateFromSources(*msVSSource[api], *msPSSource[api], "");
    if (mProgram)
    {
        // Projection*view*world matrix, uploaded each frame by the caller.
        mPVWMatrixConstant = std::make_shared<ConstantBuffer>(
            sizeof(Matrix4x4<float>), true);
        mPVWMatrix = mPVWMatrixConstant->Get<Matrix4x4<float>>();
        *mPVWMatrix = Matrix4x4<float>::Identity();

        // View*world matrix, needed by the VS to compute eye-space reflection.
        mVWMatrixConstant = std::make_shared<ConstantBuffer>(
            sizeof(Matrix4x4<float>), true);
        mVWMatrix = mVWMatrixConstant->Get<Matrix4x4<float>>();
        *mVWMatrix = Matrix4x4<float>::Identity();

        mSampler = std::make_shared<SamplerState>();
        mSampler->filter = filter;
        mSampler->mode[0] = mode0;
        mSampler->mode[1] = mode1;

        mProgram->GetVShader()->Set("PVWMatrix", mPVWMatrixConstant);
        mProgram->GetVShader()->Set("VWMatrix", mVWMatrixConstant);
        // OpenGL binds the texture under the GLSL sampler's name; Direct3D
        // uses a separate texture resource name ("baseTexture").
#if defined(GTE_DEV_OPENGL)
        mProgram->GetPShader()->Set("baseSampler", texture);
#else
        mProgram->GetPShader()->Set("baseTexture", texture);
#endif
        // Sampler-state binding is named "baseSampler" on both APIs.
        mProgram->GetPShader()->Set("baseSampler", mSampler);
    }
}
// GLSL vertex shader: transforms the vertex, reflects the eye direction about
// the camera-space normal, and maps the reflection vector to sphere-map UVs.
std::string const SphereMapEffect::msGLSLVSSource =
"uniform PVWMatrix\n"
"{\n"
"    mat4 pvwMatrix;\n"
"};\n"
"\n"
"uniform VWMatrix\n"
"{\n"
"    mat4 vwMatrix;\n"
"};\n"
"\n"
"layout(location = 0) in vec3 inModelPosition;\n"
"layout(location = 1) in vec3 inModelNormal;\n"
"\n"
"layout(location = 0) out vec2 vertexTCoord;\n"
"\n"
"void main()\n"
"{\n"
"    vec4 modelPosition = vec4(inModelPosition, 1.0f);\n"
"    vec4 modelNormal = vec4(inModelNormal, 0.0f);\n"
"\n"
"#if GTE_USE_MAT_VEC\n"
"    vec4 cameraSpacePosition = vwMatrix * modelPosition;\n"
"    vec3 cameraSpaceNormal = normalize((vwMatrix * modelNormal).xyz);\n"
"    gl_Position = pvwMatrix * modelPosition;\n"
"#else\n"
"    vec4 cameraSpacePosition = modelPosition * vwMatrix;\n"
"    vec3 cameraSpaceNormal = normalize((modelNormal * vwMatrix).xyz);\n"
"    gl_Position = modelPosition * pvwMatrix;\n"
"#endif\n"
"\n"
"    vec3 eyeDirection = normalize(cameraSpacePosition.xyz);\n"
"    vec3 r = reflect(eyeDirection, cameraSpaceNormal);\n"
"\n"
"    float oneMRZ = 1.0f - r.z;\n"
"    float invLength = 1.0f / sqrt(r.x * r.x + r.y * r.y + oneMRZ * oneMRZ);\n"
"    vertexTCoord = 0.5f * (r.xy * invLength + 1.0f);\n"
"}\n";

// GLSL pixel shader: plain textured lookup of the sphere map.
std::string const SphereMapEffect::msGLSLPSSource =
"layout(location = 0) in vec2 vertexTCoord;\n"
"\n"
"layout(location = 0) out vec4 pixelColor;\n"
"\n"
"uniform sampler2D baseSampler;\n"
"\n"
"void main()\n"
"{\n"
"    pixelColor = texture(baseSampler, vertexTCoord);\n"
"}\n";

// HLSL source containing both VSMain and PSMain (same math as the GLSL pair).
std::string const SphereMapEffect::msHLSLSource =
"cbuffer PVWMatrix\n"
"{\n"
"    float4x4 pvwMatrix;\n"
"};\n"
"\n"
"cbuffer VWMatrix\n"
"{\n"
"    float4x4 vwMatrix;\n"
"};\n"
"\n"
"struct VS_INPUT\n"
"{\n"
"    float3 modelPosition : POSITION;\n"
"    float3 modelNormal : NORMAL;\n"
"};\n"
"\n"
"struct VS_OUTPUT\n"
"{\n"
"    float2 vertexTCoord : TEXCOORD0;\n"
"    float4 clipPosition : SV_POSITION;\n"
"};\n"
"\n"
"VS_OUTPUT VSMain(VS_INPUT input)\n"
"{\n"
"    VS_OUTPUT output;\n"
"\n"
"    float4 modelPosition = float4(input.modelPosition, 1.0f);\n"
"    float4 modelNormal = float4(input.modelNormal, 0.0f);\n"
"\n"
"#if GTE_USE_MAT_VEC\n"
"    float4 cameraSpacePosition = mul(vwMatrix, modelPosition);\n"
"    float3 cameraSpaceNormal = normalize(mul(vwMatrix, modelNormal).xyz);\n"
"    output.clipPosition = mul(pvwMatrix, modelPosition);\n"
"#else\n"
"    float4 cameraSpacePosition = mul(modelPosition, vwMatrix);\n"
"    float3 cameraSpaceNormal = normalize(mul(modelNormal, vwMatrix).xyz);\n"
"    output.clipPosition = mul(modelPosition, pvwMatrix);\n"
"#endif\n"
"\n"
"    float3 eyeDirection = normalize(cameraSpacePosition.xyz);\n"
"    float3 r = reflect(eyeDirection, cameraSpaceNormal);\n"
"\n"
"    float oneMRZ = 1.0f - r.z;\n"
"    float invLength = 1.0f / sqrt(r.x * r.x + r.y * r.y + oneMRZ * oneMRZ);\n"
"    output.vertexTCoord = 0.5f * (r.xy * invLength + 1.0f);\n"
"\n"
"    return output;\n"
"}\n"
"\n"
"struct PS_INPUT\n"
"{\n"
"    float2 vertexTCoord : TEXCOORD0;\n"
"};\n"
"\n"
"struct PS_OUTPUT\n"
"{\n"
"    float4 pixelColor : SV_TARGET0;\n"
"};\n"
"\n"
"Texture2D<float4> baseTexture;\n"
"SamplerState baseSampler;\n"
"\n"
"PS_OUTPUT PSMain(PS_INPUT input)\n"
"{\n"
"    PS_OUTPUT output;\n"
"    output.pixelColor = baseTexture.Sample(baseSampler, input.vertexTCoord);\n"
"    return output;\n"
"}\n";

// Per-API source tables, indexed by ProgramFactory::GetAPI() (GLSL, HLSL).
std::string const* SphereMapEffect::msVSSource[] =
{
    &msGLSLVSSource,
    &msHLSLSource
};
std::string const* SphereMapEffect::msPSSource[] =
{
    &msGLSLPSSource,
    &msHLSLSource
};
|
import React, { forwardRef } from 'react'
import PropTypes from 'prop-types'
import { pathStrOr } from 'utils/fp'
import { uniq } from 'ramda'
import { ValidatedFormInputPropTypes } from 'core/components/validatedForm/withFormContext'
import useDataLoader from 'core/hooks/useDataLoader'
import Picklist from 'core/components/Picklist'
import { loadCloudProviderRegionDetails } from 'k8s/components/infrastructure/cloudProviders/actions'
const AzureResourceGroupPicklist = forwardRef(({
cloudProviderId, cloudProviderRegionId, hasError, errorMessage, ...rest
}, ref) => {
const [details, loading] = useDataLoader(loadCloudProviderRegionDetails, { cloudProviderId, cloudProviderRegionId })
const networks = pathStrOr([], '0.virtualNetworks', details)
// Azure might have more than 1 virtualNetwork with the same resourceGroup be sure to use 'uniq'
const options = uniq(networks.map(x => ({ label: x.resourceGroup, value: x.resourceGroup })))
return (
<Picklist
{...rest}
ref={ref}
loading={loading}
options={options}
error={hasError}
helperText={errorMessage}
/>
)
})
AzureResourceGroupPicklist.propTypes = {
id: PropTypes.string.isRequired,
cloudProviderId: PropTypes.string,
cloudProviderRegionId: PropTypes.string,
initialValue: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
onChange: PropTypes.func,
...ValidatedFormInputPropTypes,
}
export default AzureResourceGroupPicklist
|
#!/bin/sh
#
# After a PR merge, Chef Expeditor will bump the PATCH version in the VERSION file.
# It then executes this file to update any other files/components with that new version.
#
set -evx
# Rewrite the VERSION constant in version.rb with the contents of ./VERSION,
# preserving the line's leading whitespace. NOTE(review): `sed -i -r` is
# GNU-sed syntax; this script assumes a Linux build environment.
sed -i -r "s/^(\s*)VERSION = \".+\"/\1VERSION = \"$(cat VERSION)\"/" lib/dep-selector-libgecode/version.rb
# Once Expeditor finishes executing this script, it will commit the changes and push
# the commit as a new tag corresponding to the value in the VERSION file.
|
<!-- Standards-mode document: DOCTYPE prevents quirks-mode rendering, and an
     explicit charset avoids encoding sniffing. -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Upload Photos</title>
</head>
<body>
<h1>Upload Photos</h1>
<!-- multipart/form-data is required for file uploads; the [] name makes PHP
     expose the selection as arrays in $_FILES['photos']. -->
<form action="upload.php" method="post" enctype="multipart/form-data">
    Select images to upload:
    <input type="file" name="photos[]" accept="image/*" multiple>
    <input type="submit" value="Upload files" name="submit">
</form>
</body>
</html>
<?php
// Move each successfully uploaded photo into uploads/ and report it.
// $_FILES['photos'] holds parallel arrays (name/error/tmp_name) because the
// form field is named photos[].
if (isset($_POST['submit'])) {
    $files = $_FILES['photos'];
    foreach ($files['name'] as $key => $name) {
        // basename() strips client-supplied directory components, preventing
        // path traversal such as "../../shell.php" escaping uploads/.
        $safeName = basename($name);
        if ($files['error'][$key] == 0 && move_uploaded_file($files['tmp_name'][$key], "uploads/$safeName")) {
            // Escape the (attacker-controlled) filename before echoing it to
            // avoid reflected XSS.
            echo "The file " . htmlspecialchars($safeName, ENT_QUOTES, 'UTF-8') . " was moved successfully.\n";
        }
    }
}
?>
|
#!/bin/bash
#SBATCH --job-name=songbird # Job name
#SBATCH -p normal # priority
#SBATCH --mail-type=ALL # Mail events (NONE, BEGIN, END, FAIL, ALL)
#SBATCH --mail-user=mcalgaro93@gmail.com # Where to send mail
#SBATCH --nodes=1 # Use one node
#SBATCH --ntasks=1 # Run a single task
#SBATCH --mem-per-cpu=5gb # Memory per processor
#SBATCH --time=24:00:00 # Time limit hrs:min:sec
#SBATCH --output=array_%A-%a.out # Standard output and error log
#SBATCH --array=1-20 # Array range
# This is an example script that combines array tasks with
# bash loops to process many short runs. Array jobs are convenient
# for running lots of tasks, but if each task is short, they
# quickly become inefficient, taking more time to schedule than
# they spend doing any work and bogging down the scheduler for
# all users.
#Set the number of runs that each SLURM task should do
PER_TASK=5
# Calculate the starting and ending values for this task based
# on the SLURM task and the number of runs per task.
START_NUM=$(( ($SLURM_ARRAY_TASK_ID - 1) * $PER_TASK + 1 ))
END_NUM=$(( $SLURM_ARRAY_TASK_ID * $PER_TASK ))
# Print the task and run range
echo This is task $SLURM_ARRAY_TASK_ID, which will do runs $START_NUM to $END_NUM
module purge
module load Anaconda/2019.07
source activate songbird_env
# For one comparison prefix (e.g. ./songbird/16S_gingiva_mucosa_Comparison3):
# convert each subset's OTU table to an HDF5 BIOM file, then fit a songbird
# multinomial model (formula "grp") on it, writing summaries into a
# per-subset directory. Replaces six near-identical copy-pasted stanzas.
process_comparison() {
    local base=$1
    local subset
    for subset in Subset1 Subset2; do
        biom convert -i "${base}_${subset}_otutable.tsv" -o "${base}_${subset}_otutable.biom" --to-hdf5
        mkdir "${base}_${subset}"
        songbird multinomial --input-biom "${base}_${subset}_otutable.biom" \
            --metadata-file "${base}_${subset}_samdata.tsv" \
            --formula "grp" \
            --summary-dir "${base}_${subset}"
    done
}
# Run the loop of runs for this task: every dataset/comparison pair gets the
# same convert-then-fit treatment.
run=$START_NUM
while [ $run -le $END_NUM ]; do
    echo This is SLURM task $SLURM_ARRAY_TASK_ID, run number $run;
    for dataset in \
        16S_subgingival_supragingival 16S_gingiva_mucosa 16S_tonguedorsum_stool \
        WMS_CRC_control WMS_schizophrenia_control WMS_tonguedorsum_stool; do
        process_comparison "./songbird/${dataset}_Comparison${run}"
    done
    run=$((run+1));
done
|
class Gallery:
    """Minimal gallery interface consumed by GalleryView."""

    def ordered_images(self):
        # Returns a list of image objects in the gallery
        # (stub here — concrete galleries supply the actual collection).
        pass
class GalleryView:
    """Renders a gallery's images, optionally paginated.

    Pagination is requested with ``?type=paginated&page=N`` (10 items per
    page); otherwise the full image list is returned.
    """

    def __init__(self, gallery):
        self.gallery = gallery

    def render(self, **kwargs):
        """Return an HttpResponse with the (possibly paginated) image list.

        Expects ``request`` in kwargs; ``remaining`` collects the images
        after the current page (kept for parity with the original code,
        though it is not returned).
        """
        request = kwargs.get('request')
        objects = self.gallery.ordered_images()
        remaining = []
        # Pagination logic
        if request.GET.get('type') == 'paginated':
            try:
                page = int(request.GET.get('page', 1))
            except (TypeError, ValueError):
                # Non-numeric ?page= previously raised ValueError (a 500);
                # fall back to the first page instead.
                page = 1
            # page <= 0 would yield negative slice indices and wrong windows.
            page = max(page, 1)
            items_per_page = 10
            start_index = (page - 1) * items_per_page
            end_index = start_index + items_per_page
            remaining = objects[end_index:]
            objects = objects[start_index:end_index]
        return HttpResponse(objects)
|
#!/bin/bash
#
# This test is for basic NAT functionality: snat, dnat, redirect, masquerade.
#
# Kselftest framework requirement - SKIP code is 4.
ksft_skip=4
ret=0
test_inet_nat=true
# Random suffix so concurrent instances of this test do not collide on
# namespace names.
sfx=$(mktemp -u "XXXXXXXX")
ns0="ns0-$sfx"
ns1="ns1-$sfx"
ns2="ns2-$sfx"
# Delete all three test namespaces; installed as an EXIT trap below.
cleanup()
{
for i in 0 1 2; do ip netns del ns$i-"$sfx";done
}
# Tooling checks: skip (exit 4) rather than fail when nft or ip is missing.
nft --version > /dev/null 2>&1
if [ $? -ne 0 ];then
echo "SKIP: Could not run test without nft tool"
exit $ksft_skip
fi
ip -Version > /dev/null 2>&1
if [ $? -ne 0 ];then
echo "SKIP: Could not run test without ip tool"
exit $ksft_skip
fi
ip netns add "$ns0"
if [ $? -ne 0 ];then
echo "SKIP: Could not create net namespace $ns0"
exit $ksft_skip
fi
trap cleanup EXIT
ip netns add "$ns1"
if [ $? -ne 0 ];then
echo "SKIP: Could not create net namespace $ns1"
exit $ksft_skip
fi
ip netns add "$ns2"
if [ $? -ne 0 ];then
echo "SKIP: Could not create net namespace $ns2"
exit $ksft_skip
fi
# Topology: ns0 is the router. veth0 peers with ns1's eth0 (10.0.1.0/24,
# dead:1::/64) and veth1 peers with ns2's eth0 (10.0.2.0/24, dead:2::/64).
ip link add veth0 netns "$ns0" type veth peer name eth0 netns "$ns1" > /dev/null 2>&1
if [ $? -ne 0 ];then
echo "SKIP: No virtual ethernet pair device support in kernel"
exit $ksft_skip
fi
ip link add veth1 netns "$ns0" type veth peer name eth0 netns "$ns2"
ip -net "$ns0" link set lo up
ip -net "$ns0" link set veth0 up
ip -net "$ns0" addr add 10.0.1.1/24 dev veth0
ip -net "$ns0" addr add dead:1::1/64 dev veth0
ip -net "$ns0" link set veth1 up
ip -net "$ns0" addr add 10.0.2.1/24 dev veth1
ip -net "$ns0" addr add dead:2::1/64 dev veth1
# Client namespaces: .99 host address in each subnet, default route via ns0.
for i in 1 2; do
ip -net ns$i-$sfx link set lo up
ip -net ns$i-$sfx link set eth0 up
ip -net ns$i-$sfx addr add 10.0.$i.99/24 dev eth0
ip -net ns$i-$sfx route add default via 10.0.$i.1
ip -net ns$i-$sfx addr add dead:$i::99/64 dev eth0
ip -net ns$i-$sfx route add default via dead:$i::1
done
# Report a counter whose packet/byte totals differ from what the caller
# expected, then dump the counter's current state for debugging.
#   $1 = namespace, $2 = counter name, $3 = expected "packets N bytes M",
#   $4 = caller-supplied location tag.
bad_counter()
{
	local ns="$1"
	local counter="$2"
	local expect="$3"
	local tag="$4"

	echo "ERROR: $counter counter in $ns has unexpected value (expected $expect) at $tag" >&2
	ip netns exec "$ns" nft list counter inet filter "$counter" >&2
}
# Verify that namespace $1 recorded exactly one ping exchange with ns0:
# IPv4 echo packets are 84 bytes, IPv6 echo packets 104 bytes (the sizes
# asserted throughout this test). Returns non-zero on any mismatch.
# NOTE(review): $ns is assigned without 'local', so it leaks into the
# caller's scope — confirm no caller depends on that.
check_counters()
{
ns=$1
local lret=0
# grep -q produces no output, so $cnt is always empty; only the exit
# status of the pipeline is used.
cnt=$(ip netns exec $ns nft list counter inet filter ns0in | grep -q "packets 1 bytes 84")
if [ $? -ne 0 ]; then
bad_counter $ns ns0in "packets 1 bytes 84" "check_counters 1"
lret=1
fi
cnt=$(ip netns exec $ns nft list counter inet filter ns0out | grep -q "packets 1 bytes 84")
if [ $? -ne 0 ]; then
bad_counter $ns ns0out "packets 1 bytes 84" "check_counters 2"
lret=1
fi
# Same check for the IPv6 counters.
expect="packets 1 bytes 104"
cnt=$(ip netns exec $ns nft list counter inet filter ns0in6 | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter $ns ns0in6 "$expect" "check_counters 3"
lret=1
fi
cnt=$(ip netns exec $ns nft list counter inet filter ns0out6 | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter $ns ns0out6 "$expect" "check_counters 4"
lret=1
fi
return $lret
}
# Verify ns0's counters after a locally generated exchange with $1:
# ns0's own ns0in/ns0out(6) counters must read zero, while its per-peer
# ${ns}in/${ns}out(6) counters must show exactly one ping request/reply
# (84 bytes IPv4, 104 bytes IPv6). Returns non-zero on any mismatch.
check_ns0_counters()
{
local ns=$1
local lret=0
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns0in | grep -q "packets 0 bytes 0")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns0in "packets 0 bytes 0" "check_ns0_counters 1"
lret=1
fi
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns0in6 | grep -q "packets 0 bytes 0")
if [ $? -ne 0 ]; then
# Fix: this call previously omitted the tag argument, so the diagnostic
# ended in a bare "at " with no location.
bad_counter "$ns0" ns0in6 "packets 0 bytes 0" "check_ns0_counters 1b"
lret=1
fi
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns0out | grep -q "packets 0 bytes 0")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns0out "packets 0 bytes 0" "check_ns0_counters 2"
lret=1
fi
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns0out6 | grep -q "packets 0 bytes 0")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns0out6 "packets 0 bytes 0" "check_ns0_counters 3"
lret=1
fi
for dir in "in" "out" ; do
expect="packets 1 bytes 84"
cnt=$(ip netns exec "$ns0" nft list counter inet filter ${ns}${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" $ns$dir "$expect" "check_ns0_counters 4"
lret=1
fi
expect="packets 1 bytes 104"
cnt=$(ip netns exec "$ns0" nft list counter inet filter ${ns}${dir}6 | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: was "$ns$dir6" — $dir6 is an unset variable, so the counter name
# in the diagnostic silently lost its in6/out6 suffix.
bad_counter "$ns0" "$ns${dir}6" "$expect" "check_ns0_counters 5"
lret=1
fi
done
return $lret
}
# Zero every named counter in the inet family across all three namespaces,
# so each sub-test starts from a clean slate.
reset_counters()
{
	local n
	for n in "$ns0" "$ns1" "$ns2"; do
		ip netns exec "$n" nft reset counters inet > /dev/null
	done
}
# IPv6 DNAT on the output hook: pings from ns0 to ns1's address
# (dead:1::99) must be rewritten to ns2 (dead:2::99). $1 is the nft table
# family (ip6 or inet); inet tables need the explicit "ip6" prefix on the
# dnat statement, hence IPF.
test_local_dnat6()
{
local family=$1
local lret=0
local IPF=""
if [ $family = "inet" ];then
IPF="ip6"
fi
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family nat {
chain output {
type nat hook output priority 0; policy accept;
ip6 daddr dead:1::99 dnat $IPF to dead:2::99
}
}
EOF
if [ $? -ne 0 ]; then
echo "SKIP: Could not add add $family dnat hook"
return $ksft_skip
fi
# ping netns1, expect rewrite to netns2
ip netns exec "$ns0" ping -q -c 1 dead:1::99 > /dev/null
if [ $? -ne 0 ]; then
lret=1
echo "ERROR: ping6 failed"
return $lret
fi
# ns0 must see no traffic accounted to ns1 (the packet was rewritten
# before the counters matched)...
expect="packets 0 bytes 0"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns1$dir "$expect" "test_local_dnat6 1"
lret=1
fi
done
# ...and exactly one IPv6 echo pair accounted to ns2.
expect="packets 1 bytes 104"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns2$dir "$expect" "test_local_dnat6 2"
lret=1
fi
done
# expect 0 count in ns1
expect="packets 0 bytes 0"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns0$dir "$expect" "test_local_dnat6 3"
lret=1
fi
done
# expect 1 packet in ns2
expect="packets 1 bytes 104"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns0$dir "$expect" "test_local_dnat6 4"
lret=1
fi
done
test $lret -eq 0 && echo "PASS: ipv6 ping to $ns1 was $family NATted to $ns2"
# Remove the rule so later tests start without the rewrite in place.
ip netns exec "$ns0" nft flush chain ip6 nat output
return $lret
}
# IPv4 DNAT on the output hook: pings from ns0 to ns1 (10.0.1.99) must be
# rewritten to ns2 (10.0.2.99). Afterwards the chain is flushed and the
# same ping is repeated to confirm traffic reaches ns1 again. $1 is the
# nft table family (ip or inet); a failing inet table load disables all
# later inet tests via test_inet_nat=false.
test_local_dnat()
{
local family=$1
local lret=0
local IPF=""
if [ $family = "inet" ];then
IPF="ip"
fi
ip netns exec "$ns0" nft -f /dev/stdin <<EOF 2>/dev/null
table $family nat {
chain output {
type nat hook output priority 0; policy accept;
ip daddr 10.0.1.99 dnat $IPF to 10.0.2.99
}
}
EOF
if [ $? -ne 0 ]; then
if [ $family = "inet" ];then
echo "SKIP: inet nat tests"
test_inet_nat=false
return $ksft_skip
fi
# Fix: message previously read "Could not add add".
echo "SKIP: Could not add $family dnat hook"
return $ksft_skip
fi
# ping netns1, expect rewrite to netns2
ip netns exec "$ns0" ping -q -c 1 10.0.1.99 > /dev/null
if [ $? -ne 0 ]; then
lret=1
echo "ERROR: ping failed"
return $lret
fi
# ns0 must account nothing to ns1 (rewritten) and one echo pair to ns2.
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns1$dir "$expect" "test_local_dnat 1"
lret=1
fi
done
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns2$dir "$expect" "test_local_dnat 2"
lret=1
fi
done
# expect 0 count in ns1
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns0$dir "$expect" "test_local_dnat 3"
lret=1
fi
done
# expect 1 packet in ns2
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns0$dir "$expect" "test_local_dnat 4"
lret=1
fi
done
test $lret -eq 0 && echo "PASS: ping to $ns1 was $family NATted to $ns2"
# Second phase: flush the dnat chain and verify traffic reaches ns1 again.
ip netns exec "$ns0" nft flush chain $family nat output
reset_counters
ip netns exec "$ns0" ping -q -c 1 10.0.1.99 > /dev/null
if [ $? -ne 0 ]; then
lret=1
echo "ERROR: ping failed"
return $lret
fi
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic attributed this ns0 counter to $ns1.
bad_counter "$ns0" ns1$dir "$expect" "test_local_dnat 5"
lret=1
fi
done
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns2$dir "$expect" "test_local_dnat 6"
lret=1
fi
done
# expect 1 count in ns1
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic attributed this ns1 counter to $ns0.
bad_counter "$ns1" ns0$dir "$expect" "test_local_dnat 7"
lret=1
fi
done
# expect 0 packet in ns2
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns0$dir "$expect" "test_local_dnat 8"
lret=1
fi
done
test $lret -eq 0 && echo "PASS: ping to $ns1 OK after $family nat output chain flush"
return $lret
}
# IPv6 masquerade: with a masquerade rule on ns0's veth0, pings from ns2 to
# ns1 must appear in ns1 as coming from ns0. $1 = nft table family,
# $2 = extra masquerade flags (e.g. "fully-random"), passed through to nft.
test_masquerade6()
{
local family=$1
local natflags=$2
local lret=0
ip netns exec "$ns0" sysctl net.ipv6.conf.all.forwarding=1 > /dev/null
# Baseline: plain routed ping must work before masquerade is installed.
ip netns exec "$ns2" ping -q -c 1 dead:1::99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 via ipv6"
# Fix: removed an unreachable "lret=1" that followed this return.
return 1
fi
expect="packets 1 bytes 104"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns2$dir "$expect" "test_masquerade6 1"
lret=1
fi
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_masquerade6 2"
lret=1
fi
done
reset_counters
# add masquerading rule
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family nat {
chain postrouting {
type nat hook postrouting priority 0; policy accept;
meta oif veth0 masquerade $natflags
}
}
EOF
if [ $? -ne 0 ]; then
# Fix: message previously read "Could not add add".
echo "SKIP: Could not add $family masquerade hook"
return $ksft_skip
fi
ip netns exec "$ns2" ping -q -c 1 dead:1::99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 with active $family masquerade $natflags"
lret=1
fi
# ns1 should have seen packets from ns0, due to masquerade
expect="packets 1 bytes 104"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns0$dir "$expect" "test_masquerade6 3"
lret=1
fi
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_masquerade6 4"
lret=1
fi
done
# ns1 should not have seen packets from ns2, due to masquerade
expect="packets 0 bytes 0"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic named the wrong counter (ns0$dir) for this ns2 check.
bad_counter "$ns1" ns2$dir "$expect" "test_masquerade6 5"
lret=1
fi
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns1$dir "$expect" "test_masquerade6 6"
lret=1
fi
done
# Second ping exercises the already-established conntrack entry.
ip netns exec "$ns2" ping -q -c 1 dead:1::99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 with active ipv6 masquerade $natflags (attempt 2)"
lret=1
fi
ip netns exec "$ns0" nft flush chain $family nat postrouting
if [ $? -ne 0 ]; then
echo "ERROR: Could not flush $family nat postrouting" 1>&2
lret=1
fi
test $lret -eq 0 && echo "PASS: $family IPv6 masquerade $natflags for $ns2"
return $lret
}
# IPv4 masquerade: with a masquerade rule on ns0's veth0, pings from ns2 to
# ns1 must appear in ns1 as coming from ns0. $1 = nft table family,
# $2 = extra masquerade flags, passed through to nft.
test_masquerade()
{
local family=$1
local natflags=$2
local lret=0
ip netns exec "$ns0" sysctl net.ipv4.conf.veth0.forwarding=1 > /dev/null
ip netns exec "$ns0" sysctl net.ipv4.conf.veth1.forwarding=1 > /dev/null
# Baseline: plain routed ping must work before masquerade is installed.
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
# Fix: $ns2 was spliced outside the quotes in this message.
echo "ERROR: cannot ping $ns1 from $ns2 $natflags"
lret=1
fi
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns2$dir "$expect" "test_masquerade 1"
lret=1
fi
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_masquerade 2"
lret=1
fi
done
reset_counters
# add masquerading rule
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family nat {
chain postrouting {
type nat hook postrouting priority 0; policy accept;
meta oif veth0 masquerade $natflags
}
}
EOF
if [ $? -ne 0 ]; then
# Fix: message previously read "Could not add add".
echo "SKIP: Could not add $family masquerade hook"
return $ksft_skip
fi
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 with active $family masquerade $natflags"
lret=1
fi
# ns1 should have seen packets from ns0, due to masquerade
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns0$dir "$expect" "test_masquerade 3"
lret=1
fi
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_masquerade 4"
lret=1
fi
done
# ns1 should not have seen packets from ns2, due to masquerade
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic named the wrong counter (ns0$dir) for this ns2 check.
bad_counter "$ns1" ns2$dir "$expect" "test_masquerade 5"
lret=1
fi
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns1$dir "$expect" "test_masquerade 6"
lret=1
fi
done
# Second ping exercises the already-established conntrack entry.
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 with active ip masquerade $natflags (attempt 2)"
lret=1
fi
ip netns exec "$ns0" nft flush chain $family nat postrouting
if [ $? -ne 0 ]; then
echo "ERROR: Could not flush $family nat postrouting" 1>&2
lret=1
fi
test $lret -eq 0 && echo "PASS: $family IP masquerade $natflags for $ns2"
return $lret
}
# IPv6 redirect: ICMPv6 from ns2 to ns1 is redirected to ns0 itself by a
# prerouting rule, so ns1 sees nothing and ns0 answers. $1 = table family.
test_redirect6()
{
local family=$1
local lret=0
ip netns exec "$ns0" sysctl net.ipv6.conf.all.forwarding=1 > /dev/null
# Baseline: plain routed ping must work before the redirect is installed.
ip netns exec "$ns2" ping -q -c 1 dead:1::99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
# Fix: "cannnot" typo.
echo "ERROR: cannot ping $ns1 from $ns2 via ipv6"
lret=1
fi
expect="packets 1 bytes 104"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns2$dir "$expect" "test_redirect6 1"
lret=1
fi
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_redirect6 2"
lret=1
fi
done
reset_counters
# add redirect rule
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family nat {
chain prerouting {
type nat hook prerouting priority 0; policy accept;
meta iif veth1 meta l4proto icmpv6 ip6 saddr dead:2::99 ip6 daddr dead:1::99 redirect
}
}
EOF
if [ $? -ne 0 ]; then
# Fix: message previously read "Could not add add".
echo "SKIP: Could not add $family redirect hook"
return $ksft_skip
fi
ip netns exec "$ns2" ping -q -c 1 dead:1::99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 via ipv6 with active $family redirect"
lret=1
fi
# ns1 should have seen no packets from ns2, due to redirection
expect="packets 0 bytes 0"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic named the wrong counter (ns0$dir) for this ns2 check.
bad_counter "$ns1" ns2$dir "$expect" "test_redirect6 3"
lret=1
fi
done
# ns0 should have seen packets from ns2, due to the redirect
expect="packets 1 bytes 104"
for dir in "in6" "out6" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic pointed at $ns1/ns0$dir instead of $ns0/ns2$dir.
bad_counter "$ns0" ns2$dir "$expect" "test_redirect6 4"
lret=1
fi
done
ip netns exec "$ns0" nft delete table $family nat
if [ $? -ne 0 ]; then
echo "ERROR: Could not delete $family nat table" 1>&2
lret=1
fi
test $lret -eq 0 && echo "PASS: $family IPv6 redirection for $ns2"
return $lret
}
# IPv4 redirect: ICMP from ns2 to ns1 is redirected to ns0 itself by a
# prerouting rule, so ns1 sees nothing and ns0 answers. $1 = table family.
test_redirect()
{
local family=$1
local lret=0
ip netns exec "$ns0" sysctl net.ipv4.conf.veth0.forwarding=1 > /dev/null
ip netns exec "$ns0" sysctl net.ipv4.conf.veth1.forwarding=1 > /dev/null
# Baseline: plain routed ping must work before the redirect is installed.
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2"
lret=1
fi
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: was "$ns2$dir" — that expands to the suffixed namespace name, not
# the counter name ns2$dir.
bad_counter "$ns1" ns2$dir "$expect" "test_redirect 1"
lret=1
fi
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_redirect 2"
lret=1
fi
done
reset_counters
# add redirect rule
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family nat {
chain prerouting {
type nat hook prerouting priority 0; policy accept;
meta iif veth1 ip protocol icmp ip saddr 10.0.2.99 ip daddr 10.0.1.99 redirect
}
}
EOF
if [ $? -ne 0 ]; then
# Fix: message previously read "Could not add add".
echo "SKIP: Could not add $family redirect hook"
return $ksft_skip
fi
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 with active $family ip redirect"
lret=1
fi
# ns1 should have seen no packets from ns2, due to redirection
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic named the wrong counter (ns0$dir) for this ns2 check.
bad_counter "$ns1" ns2$dir "$expect" "test_redirect 3"
lret=1
fi
done
# ns0 should have seen packets from ns2, due to the redirect
expect="packets 1 bytes 84"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
# Fix: diagnostic named ns0$dir instead of the ns2$dir counter checked.
bad_counter "$ns0" ns2$dir "$expect" "test_redirect 4"
lret=1
fi
done
ip netns exec "$ns0" nft delete table $family nat
if [ $? -ne 0 ]; then
echo "ERROR: Could not delete $family nat table" 1>&2
lret=1
fi
test $lret -eq 0 && echo "PASS: $family IP redirection for $ns2"
return $lret
}
# test port shadowing.
# create two listening services, one on router (ns0), one
# on client (ns2), which is masqueraded from ns1 point of view.
# ns2 sends udp packet coming from service port to ns1, on a highport.
# Later, if ns1 uses same highport to connect to ns0:service, packet
# might be port-forwarded to ns2 instead.
# second argument tells if we expect the 'fake-entry' to take effect
# (CLIENT) or not (ROUTER).
test_port_shadow()
{
local test=$1
local expect=$2
local daddrc="10.0.1.99"
local daddrs="10.0.1.1"
local result=""
local logmsg=""
# make shadow entry, from client (ns2), going to (ns1), port 41404, sport 1405.
echo "fake-entry" | ip netns exec "$ns2" timeout 1 socat -u STDIN UDP:"$daddrc":41404,sourceport=1405
# Start the two competing UDP listeners on port 1405: the router's own
# service and the client's "shadow" service (reuseport, behind masquerade).
echo ROUTER | ip netns exec "$ns0" timeout 5 socat -u STDIN UDP4-LISTEN:1405 &
sc_r=$!
echo CLIENT | ip netns exec "$ns2" timeout 5 socat -u STDIN UDP4-LISTEN:1405,reuseport &
sc_c=$!
# Give both background listeners a moment to bind.
sleep 0.3
# ns1 tries to connect to ns0:1405. With default settings this should connect
# to client, it matches the conntrack entry created above.
result=$(echo "data" | ip netns exec "$ns1" timeout 1 socat - UDP:"$daddrs":1405,sourceport=41404)
if [ "$result" = "$expect" ] ;then
echo "PASS: portshadow test $test: got reply from ${expect}${logmsg}"
else
echo "ERROR: portshadow test $test: got reply from \"$result\", not $expect as intended"
# Record the failure in the script-global result code.
ret=1
fi
kill $sc_r $sc_c 2>/dev/null
# flush udp entries for next test round, if any
ip netns exec "$ns0" conntrack -F >/dev/null 2>&1
}
# This prevents port shadow of router service via packet filter,
# packets claiming to originate from service port from internal
# network are dropped.
test_port_shadow_filter()
{
local family=$1
# Drop forwarded packets arriving from the internal side (veth1) that
# claim the router's service port as their source.
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family filter {
chain forward {
type filter hook forward priority 0; policy accept;
meta iif veth1 udp sport 1405 drop
}
}
EOF
# With the filter in place, the reply must come from the ROUTER service.
test_port_shadow "port-filter" "ROUTER"
ip netns exec "$ns0" nft delete table $family filter
}
# This prevents port shadow of router service via notrack.
test_port_shadow_notrack()
{
local family=$1
# Exempt the router's own service traffic (port 1405 on veth0) from
# conntrack in both directions, so the client's fake entry cannot match.
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family raw {
chain prerouting {
type filter hook prerouting priority -300; policy accept;
meta iif veth0 udp dport 1405 notrack
}
chain output {
type filter hook output priority -300; policy accept;
meta oif veth0 udp sport 1405 notrack
}
}
EOF
# With notrack in place, the reply must come from the ROUTER service.
test_port_shadow "port-notrack" "ROUTER"
ip netns exec "$ns0" nft delete table $family raw
}
# This prevents port shadow of router service via sport remap.
test_port_shadow_pat()
{
local family=$1
# Masquerade internal traffic using low source ports to a random high
# port, so it can never collide with the router's service port.
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family pat {
chain postrouting {
type nat hook postrouting priority -1; policy accept;
meta iif veth1 udp sport <= 1405 masquerade to : 1406-65535 random
}
}
EOF
# With source-port remapping in place, the reply must come from ROUTER.
test_port_shadow "pat" "ROUTER"
ip netns exec "$ns0" nft delete table $family pat
}
# Driver for the port-shadowing scenarios: sets up masquerade on the
# router, runs the default case plus the three mitigations. Requires the
# conntrack and socat tools; silently skips if either is missing.
test_port_shadowing()
{
local family="ip"
conntrack -h >/dev/null 2>&1
if [ $? -ne 0 ];then
echo "SKIP: Could not run nat port shadowing test without conntrack tool"
return
fi
socat -h > /dev/null 2>&1
if [ $? -ne 0 ];then
echo "SKIP: Could not run nat port shadowing test without socat tool"
return
fi
ip netns exec "$ns0" sysctl net.ipv4.conf.veth0.forwarding=1 > /dev/null
ip netns exec "$ns0" sysctl net.ipv4.conf.veth1.forwarding=1 > /dev/null
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table $family nat {
chain postrouting {
type nat hook postrouting priority 0; policy accept;
meta oif veth0 masquerade
}
}
EOF
if [ $? -ne 0 ]; then
echo "SKIP: Could not add add $family masquerade hook"
return $ksft_skip
fi
# test default behaviour. Packet from ns1 to ns0 is not redirected
# due to automatic port translation.
test_port_shadow "default" "ROUTER"
# test packet filter based mitigation: prevent forwarding of
# packets claiming to come from the service port.
test_port_shadow_filter "$family"
# test conntrack based mitigation: connections going or coming
# from router:service bypass connection tracking.
test_port_shadow_notrack "$family"
# test nat based mitigation: forwarded packets coming from service port
# are masqueraded with random highport.
test_port_shadow_pat "$family"
ip netns exec "$ns0" nft delete table $family nat
}
# Verify stateless (map-based) NAT: addresses are rewritten in
# prerouting without conntrack, first checked with pings, then with a
# fragmented UDP transfer via socat. Returns 0 on success, 1 on
# failure, $ksft_skip when prerequisites are missing.
test_stateless_nat_ip()
{
local lret=0
ip netns exec "$ns0" sysctl net.ipv4.conf.veth0.forwarding=1 > /dev/null
ip netns exec "$ns0" sysctl net.ipv4.conf.veth1.forwarding=1 > /dev/null
# Sanity check: plain routing must work before the NAT rules go in.
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 before loading stateless rules"
return 1
fi
# Map-based stateless rewrite: ns2 (10.0.2.99) appears to ns1 as
# 10.0.2.2, and replies addressed to 10.0.2.2 are rewritten back.
ip netns exec "$ns0" nft -f /dev/stdin <<EOF
table ip stateless {
map xlate_in {
typeof meta iifname . ip saddr . ip daddr : ip daddr
elements = {
"veth1" . 10.0.2.99 . 10.0.1.99 : 10.0.2.2,
}
}
map xlate_out {
typeof meta iifname . ip saddr . ip daddr : ip daddr
elements = {
"veth0" . 10.0.1.99 . 10.0.2.2 : 10.0.2.99
}
}
chain prerouting {
type filter hook prerouting priority -400; policy accept;
ip saddr set meta iifname . ip saddr . ip daddr map @xlate_in
ip daddr set meta iifname . ip saddr . ip daddr map @xlate_out
}
}
EOF
if [ $? -ne 0 ]; then
echo "SKIP: Could not add ip stateless rules"
return $ksft_skip
fi
reset_counters
ip netns exec "$ns2" ping -q -c 1 10.0.1.99 > /dev/null # ping ns2->ns1
if [ $? -ne 0 ] ; then
echo "ERROR: cannot ping $ns1 from $ns2 with stateless rules"
lret=1
fi
# ns1 should have seen packets from .2.2, due to stateless rewrite.
expect="packets 1 bytes 84"
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0insl | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns0insl "$expect" "test_stateless 1"
lret=1
fi
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns2" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns2" ns1$dir "$expect" "test_stateless 2"
lret=1
fi
done
# ns1 should not have seen packets from ns2, due to masquerade
expect="packets 0 bytes 0"
for dir in "in" "out" ; do
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns2${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns2$dir "$expect" "test_stateless 3"
lret=1
fi
cnt=$(ip netns exec "$ns0" nft list counter inet filter ns1${dir} | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns0" ns1$dir "$expect" "test_stateless 4"
lret=1
fi
done
reset_counters
# The UDP fragmentation part needs socat; skip just that part (keeping
# earlier results) when the tool is unavailable.
socat -h > /dev/null 2>&1
if [ $? -ne 0 ];then
echo "SKIP: Could not run stateless nat frag test without socat tool"
if [ $lret -eq 0 ]; then
return $ksft_skip
fi
ip netns exec "$ns0" nft delete table ip stateless
return $lret
fi
local tmpfile=$(mktemp)
dd if=/dev/urandom of=$tmpfile bs=4096 count=1 2>/dev/null
local outfile=$(mktemp)
# Background receiver in ns1; 4096-byte UDP payload forces IP
# fragmentation, exercising the stateless rewrite on fragments.
ip netns exec "$ns1" timeout 3 socat -u UDP4-RECV:4233 OPEN:$outfile < /dev/null &
sc_r=$!
sleep 1
# re-do with large ping -> ip fragmentation
ip netns exec "$ns2" timeout 3 socat - UDP4-SENDTO:"10.0.1.99:4233" < "$tmpfile" > /dev/null
if [ $? -ne 0 ] ; then
echo "ERROR: failed to test udp $ns1 to $ns2 with stateless ip nat" 1>&2
lret=1
fi
wait
cmp "$tmpfile" "$outfile"
if [ $? -ne 0 ]; then
ls -l "$tmpfile" "$outfile"
echo "ERROR: in and output file mismatch when checking udp with stateless nat" 1>&2
lret=1
fi
rm -f "$tmpfile" "$outfile"
# ns1 should have seen packets from 2.2, due to stateless rewrite.
expect="packets 3 bytes 4164"
cnt=$(ip netns exec "$ns1" nft list counter inet filter ns0insl | grep -q "$expect")
if [ $? -ne 0 ]; then
bad_counter "$ns1" ns0insl "$expect" "test_stateless 5"
lret=1
fi
ip netns exec "$ns0" nft delete table ip stateless
if [ $? -ne 0 ]; then
echo "ERROR: Could not delete table ip stateless" 1>&2
lret=1
fi
test $lret -eq 0 && echo "PASS: IP stateless for $ns2"
return $lret
}
# ip netns exec "$ns0" ping -c 1 -q 10.0.$i.99
# Install identical counter tables in all three namespaces: packets
# are counted per peer address (IPv4 and IPv6) on both input and
# output, keyed through the nsincounter/nsoutcounter maps.
for i in 0 1 2; do
ip netns exec ns$i-$sfx nft -f /dev/stdin <<EOF
table inet filter {
counter ns0in {}
counter ns1in {}
counter ns2in {}
counter ns0out {}
counter ns1out {}
counter ns2out {}
counter ns0in6 {}
counter ns1in6 {}
counter ns2in6 {}
counter ns0out6 {}
counter ns1out6 {}
counter ns2out6 {}
map nsincounter {
type ipv4_addr : counter
elements = { 10.0.1.1 : "ns0in",
10.0.2.1 : "ns0in",
10.0.1.99 : "ns1in",
10.0.2.99 : "ns2in" }
}
map nsincounter6 {
type ipv6_addr : counter
elements = { dead:1::1 : "ns0in6",
dead:2::1 : "ns0in6",
dead:1::99 : "ns1in6",
dead:2::99 : "ns2in6" }
}
map nsoutcounter {
type ipv4_addr : counter
elements = { 10.0.1.1 : "ns0out",
10.0.2.1 : "ns0out",
10.0.1.99: "ns1out",
10.0.2.99: "ns2out" }
}
map nsoutcounter6 {
type ipv6_addr : counter
elements = { dead:1::1 : "ns0out6",
dead:2::1 : "ns0out6",
dead:1::99 : "ns1out6",
dead:2::99 : "ns2out6" }
}
chain input {
type filter hook input priority 0; policy accept;
counter name ip saddr map @nsincounter
icmpv6 type { "echo-request", "echo-reply" } counter name ip6 saddr map @nsincounter6
}
chain output {
type filter hook output priority 0; policy accept;
counter name ip daddr map @nsoutcounter
icmpv6 type { "echo-request", "echo-reply" } counter name ip6 daddr map @nsoutcounter6
}
}
EOF
done
# special case for stateless nat check, counter needs to
# be done before (input) ip defragmentation
ip netns exec ns1-$sfx nft -f /dev/stdin <<EOF
table inet filter {
counter ns0insl {}
chain pre {
type filter hook prerouting priority -400; policy accept;
ip saddr 10.0.2.2 counter name "ns0insl"
}
}
EOF
# Give addresses/routes a moment to settle before probing connectivity.
sleep 3
# test basic connectivity
for i in 1 2; do
ip netns exec "$ns0" ping -c 1 -q 10.0.$i.99 > /dev/null
if [ $? -ne 0 ];then
echo "ERROR: Could not reach other namespace(s)" 1>&2
ret=1
fi
ip netns exec "$ns0" ping -c 1 -q dead:$i::99 > /dev/null
if [ $? -ne 0 ];then
echo "ERROR: Could not reach other namespace(s) via ipv6" 1>&2
ret=1
fi
check_counters ns$i-$sfx
if [ $? -ne 0 ]; then
ret=1
fi
check_ns0_counters ns$i
if [ $? -ne 0 ]; then
ret=1
fi
reset_counters
done
if [ $ret -eq 0 ];then
echo "PASS: netns routing/connectivity: $ns0 can reach $ns1 and $ns2"
fi
reset_counters
# Exercise local DNAT, masquerade and redirect per family; the inet
# variants run only when $test_inet_nat is enabled.
test_local_dnat ip
test_local_dnat6 ip6
reset_counters
$test_inet_nat && test_local_dnat inet
$test_inet_nat && test_local_dnat6 inet
for flags in "" "fully-random"; do
reset_counters
test_masquerade ip $flags
test_masquerade6 ip6 $flags
reset_counters
$test_inet_nat && test_masquerade inet $flags
$test_inet_nat && test_masquerade6 inet $flags
done
reset_counters
test_redirect ip
test_redirect6 ip6
reset_counters
$test_inet_nat && test_redirect inet
$test_inet_nat && test_redirect6 inet
test_port_shadowing
test_stateless_nat_ip
# On failure, print the nft version to aid debugging.
if [ $ret -ne 0 ];then
echo -n "FAIL: "
nft --version
fi
exit $ret
|
package dbis.piglet.codegen.flink.emitter
import dbis.piglet.codegen.{ CodeEmitter, CodeGenContext, CodeGenException }
import dbis.piglet.expr._
import dbis.piglet.op._
import dbis.piglet.plan.DataflowPlan
import dbis.piglet.schema.Schema
import dbis.piglet.codegen.scala_lang.ScalaEmitter
import dbis.piglet.udf.UDFTable
import dbis.piglet.codegen.scala_lang.ForeachEmitter
/**
 * Code emitter for the Pig FOREACH operator targeting Flink Streaming.
 *
 * Extends the plain Scala [[ForeachEmitter]]: generators containing
 * aggregate UDFs are routed through a [[StreamAccumulateEmitter]] (via a
 * synthetic Accumulate operator), while ordinary generators become
 * map/flatMap calls on the input stream.
 */
class StreamForeachEmitter extends ForeachEmitter {
// Plain FOREACH: map each tuple into an instance of the generated schema class.
override def template: String = """ val <out> = <in>.map(t => <class>(<expr>))""".stripMargin
// Nested FOREACH: the expression already builds the output value itself.
override def templateNested: String = """ val <out> = <in>.map(t => <expr>)""".stripMargin
// FOREACH containing FLATTEN on a bag: needs flatMap instead of map.
override def templateFlatMap: String = """ val <out> = <in>.flatMap(t => <expr>)""".stripMargin
// Fragment used inside a window apply: collect each generated tuple.
def templateHelper: String = """ .foreach { t => out.collect(<class>(<expr>)) }""".stripMargin
/**
 * Emits helper code needed before the operator's main code.
 *
 * Only produces output when the generator contains aggregates outside
 * window mode; in that case the helper of [[StreamAccumulateEmitter]]
 * is emitted for an equivalent Accumulate operator. Otherwise "".
 */
override def helper(ctx: CodeGenContext, op: Foreach): String = {
if (!op.windowMode && containsAggregates(op.generator)) {
// Normalize both generator forms to a GeneratorList: for a nested
// plan, the final Generate's expressions are the generator list.
val genL: GeneratorList = op.generator match {
case gl @ GeneratorList(expr) => gl
case GeneratorPlan(plan) => GeneratorList(plan.last.asInstanceOf[Generate].exprs)
}
val e = new StreamAccumulateEmitter
val acc = new Accumulate(op.outputs.head, op.inputs.head, genL)
acc.constructSchema
e.helper(ctx, acc)
} else ""
}
/**
 * Emits the body fragment used when this FOREACH runs inside a window
 * apply function. The "class" parameter is only set for non-nested
 * generators, where results are wrapped in the schema class.
 */
def windowApply(ctx: CodeGenContext, op: Foreach): String = {
var params = Map[String, Any]()
params += "expr" -> emitForeachExpr(ctx, op, false)
if (!op.generator.isNested) params += "class" -> ScalaEmitter.schemaClassName(op.schema.get.className)
CodeEmitter.render(templateHelper, params)
}
/**
 * Emits the main code for a FOREACH operator.
 *
 * Returns "" in window mode (handled by the window apply path).
 * For aggregates the expression itself is the full code; otherwise the
 * map or flatMap template is rendered.
 *
 * @throws CodeGenException if the operator has no schema.
 */
override def code(ctx: CodeGenContext, op: Foreach): String = {
if (op.windowMode) return ""
if (!op.schema.isDefined)
throw CodeGenException("FOREACH requires a schema definition")
val className = ScalaEmitter.schemaClassName(op.schema.get.className)
val aggr = !op.windowMode && containsAggregates(op.generator)
val expr = emitForeachExpr(ctx, op, aggr)
val requiresFlatMap = op.asInstanceOf[Foreach].containsFlatten(onBag = true)
if (aggr) expr
else if (requiresFlatMap)
CodeEmitter.render(templateFlatMap, Map("out" -> op.outPipeName, "in" -> op.inPipeName, "expr" -> expr, "class" -> className))
else
render(Map("out" -> op.outPipeName, "in" -> op.inPipeName, "expr" -> expr, "class" -> className))
}
/**
 * Emits the generator expression of a FOREACH.
 *
 * For a GeneratorList this is either a bag-flatten generator (FLATTEN
 * on a bag), accumulate code (aggregates), or a plain generator; for a
 * GeneratorPlan the nested sub-plan is emitted.
 */
def emitForeachExpr(ctx: CodeGenContext, op: Foreach, aggr: Boolean): String = {
// we need to know if the generator contains flatten on tuples or on bags (which require flatMap)
val requiresPlainFlatten = op.asInstanceOf[Foreach].containsFlatten(onBag = false)
val requiresFlatMap = op.containsFlatten(onBag = true)
op.generator match {
case gl @ GeneratorList(expr) => {
if (requiresFlatMap)
emitBagFlattenGenerator(CodeGenContext(ctx, Map("schema" -> op.inputSchema)), op, expr)
else {
if (aggr) {
// Aggregates: delegate to the accumulate emitter on a
// synthetic Accumulate built from this FOREACH's pipes.
val e = new StreamAccumulateEmitter
val acc = new Accumulate(op.outputs.head, op.inputs.head, gl)
acc.constructSchema
e.code(ctx, acc)
} else
emitGenerator(CodeGenContext(ctx, Map("schema" -> op.inputSchema)), expr)
}
}
case GeneratorPlan(plan) => {
val subPlan = op.subPlan.get
emitNestedPlan(ctx, parent = op, plan = subPlan, aggr)
}
}
}
/**
 * Generates Scala code for a nested plan, i.e. statements within nested FOREACH.
 *
 * @param parent the parent FOREACH statement
 * @param plan the dataflow plan representing the nested statements
 * @param aggr generate clause contains aggregates
 * @return the generated code
 */
def emitNestedPlan(ctx: CodeGenContext, parent: PigOperator, plan: DataflowPlan, aggr: Boolean): String = {
val schema = parent.inputSchema
require(parent.schema.isDefined)
val className = ScalaEmitter.schemaClassName(parent.schema.get.className)
"{\n" + plan.operators.map {
case n @ Generate(expr) =>
if (aggr) {
val e = new StreamAccumulateEmitter
val acc = new Accumulate(n.outputs.head, n.inputs.head, GeneratorList(expr))
acc.constructSchema
e.code(ctx, acc)
} else s"""${className}(${emitGenerator(CodeGenContext(ctx, Map("schema" -> schema, "namedRef" -> true)), expr)})"""
case n @ ConstructBag(out, ref) => ref match {
case DerefTuple(r1, r2) => {
// there are two options of ConstructBag
// 1. r1 refers to the input pipe of the outer operator (for automatically
// inserted ConstructBag operators)
if (r1.toString == parent.inPipeName) {
val pos = findFieldPosition(schema, r2)
// println("pos = " + pos)
s"""val ${n.outPipeName} = t._$pos.toList"""
} else {
// 2. r1 refers to a field in the schema
val p1 = findFieldPosition(schema, r1)
val p2 = findFieldPosition(ScalaEmitter.tupleSchema(schema, r1), r2)
// println("pos2 = " + p1 + ", " + p2)
s"""val ${n.outPipeName} = t._$p1.map(l => l._$p2).toList"""
}
}
case _ => "" // should not happen
}
case n @ Distinct(out, in, windowMode) => s"""val ${n.outPipeName} = ${n.inPipeName}.distinct"""
case n @ Filter(out, in, pred, windowMode) => {
val e = new StreamFilterEmitter
e.code(ctx, n)
}
case OrderBy(out, in, orderSpec, windowMode) => throw CodeGenException("nested ORDER BY not implemented")
case _ => ""
}.mkString("\n") + "}"
}
/**
 * Returns true if any generator expression calls an aggregate UDF
 * (looked up via [[UDFTable]]); false otherwise.
 */
def containsAggregates(gen: ForeachGenerator): Boolean = {
var exprs = gen match {
case GeneratorList(expr) => expr
case GeneratorPlan(plan) => plan.last.asInstanceOf[Generate].exprs
}
exprs.foreach { e =>
e.expr match {
case Func(f, _) => UDFTable.findFirstUDF(f) match {
case Some(udf) if udf.isAggregate => return true
case _ =>
}
case _ =>
}
}
return false
}
}
/** Companion object providing a shared, lazily created emitter instance. */
object StreamForeachEmitter {
lazy val instance = new StreamForeachEmitter
}
|
package io.opensphere.heatmap;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.RadialGradientPaint;
import java.awt.image.BufferedImage;
import org.jdesktop.swingx.image.AbstractFilter;
import org.jdesktop.swingx.image.GaussianBlurFilter;
/**
 * Creates heat maps: per-point intensity gradients are combined additively,
 * the accumulated pixel values are mapped through a color gradient, and the
 * result is blurred.
 */
public class HeatmapCreator
{
/**
 * Creates a buffered image from the model.
 *
 * Pipeline: paint an additive intensity gradient around each point
 * (and over lines/polygons), map each pixel's accumulated value to a
 * gradient color, then blur the result.
 *
 * @param model the model
 * @param options the options
 * @return the buffered image
 */
public BufferedImage createImage(HeatmapModel model, HeatmapOptions options)
{
int width = model.getImageSize().width;
int height = model.getImageSize().height;
BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_4BYTE_ABGR);
Graphics2D g = (Graphics2D)image.getGraphics();
// Scale the spot size with viewer altitude (converted to km) so spots
// stay visible when zoomed out. NOTE(review): the .4 exponent, /25.0
// divisor and final /10 look empirically tuned - confirm against the
// intended visual behaviour before changing.
double altitude = model.getMapManager().getStandardViewer().getAltitude() / 1000;
double sizeAdjustment = 1 + Math.pow(altitude, .4) / 25.0;
int size = (int)(options.getSize() * sizeAdjustment) / 10;
combineGradients(model, g, size, options.getIntensity());
mapToGradient(image, options.getGradient().getGradients());
BufferedImage blur = blur(image);
return blur;
}
/**
 * Draws a ring-and-crosshair marker for each point in the model, in
 * three passes (thick black, thinner white, thin red) so the marker
 * remains visible on any background.
 *
 * @param model the model supplying the point locations
 * @param options the options supplying the marker radius
 * @param g the graphics to draw into
 */
public void drawCircles(HeatmapModel model, HeatmapOptions options, Graphics2D g)
{
// Render points
int radius = options.getSize();
int diameter = radius * 2;
model.forEachValue((Point coord, Integer count) ->
{
g.setPaint(Color.BLACK);
g.setStroke(new BasicStroke(5, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
g.drawOval(coord.x - radius, coord.y - radius, diameter, diameter);
g.drawLine(coord.x, coord.y - radius, coord.x, coord.y + radius);
g.drawLine(coord.x-radius, coord.y, coord.x + radius, coord.y);
g.setPaint(Color.WHITE);
g.setStroke(new BasicStroke(3, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
g.drawOval(coord.x - radius, coord.y - radius, diameter, diameter);
g.drawLine(coord.x, coord.y - radius, coord.x, coord.y + radius);
g.drawLine(coord.x-radius, coord.y, coord.x + radius, coord.y);
g.setPaint(Color.RED);
g.setStroke(new BasicStroke(1, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
g.drawOval(coord.x - radius, coord.y - radius, diameter, diameter);
g.drawLine(coord.x, coord.y - radius, coord.x, coord.y + radius);
g.drawLine(coord.x-radius, coord.y, coord.x + radius, coord.y);
});
}
/**
 * Paints a gradient around each pixel, combining them additively to produce
 * a new image representing pixel values.
 *
 * @param model the model
 * @param g the image graphics
 * @param size the size of the element boundary used to generate
 * intersections.
 * @param intensity the number of points needed for the maximum intensity.
 */
private void combineGradients(HeatmapModel model, Graphics2D g, float size, int intensity)
{
int radius = (int)size;
int diameter = radius * 2;
// Radial gradient from the per-point color at the center (set per
// point below) down to black (value 0) at the edge; the intensity
// value is encoded in the RGB bits, not the alpha channel.
float[] gradientFractions = { 0f, 1f };
Color[] gradientColors = new Color[] { null, Color.BLACK };
double maxCount = intensity;
// composite function: just keep adding color
// (AddComposite is defined elsewhere in this package; presumably it
// sums channel values so overlapping spots accumulate intensity.)
g.setComposite(new AddComposite());
// Render points
model.forEachValue((Point coord, Integer count) ->
{
double percent = count.intValue() / maxCount;
gradientColors[0] = new Color(rgbFromPercent(percent));
g.setPaint(new RadialGradientPaint(coord, size, gradientFractions, gradientColors));
g.fillOval(coord.x - radius, coord.y - radius, diameter, diameter);
});
// Render lines & polygons
if (!model.getPolylines().isEmpty() || !model.getPolygons().isEmpty())
{
// Shapes get a flat color; higher intensity settings lower it.
final double lowColorPercent = 1.05 - .1 * (intensity / 50.0);
Color shapeColor = new Color(rgbFromPercent(lowColorPercent));
g.setColor(shapeColor);
for (Polygon polygon : model.getPolygons())
{
g.fillPolygon(polygon);
}
g.setStroke(new BasicStroke(size, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
for (Polygon polygon : model.getPolylines())
{
g.drawPolyline(polygon.xpoints, polygon.ypoints, polygon.npoints);
}
}
}
/**
 * Maps the value of each pixel to a gradient color and updates the image.
 *
 * @param image the image
 * @param gradient the gradient
 */
private void mapToGradient(BufferedImage image, int[] gradient)
{
for (int y = 0, h = image.getHeight(); y < h; y++)
{
for (int x = 0, w = image.getWidth(); x < w; x++)
{
int rgb = image.getRGB(x, y);
// Only remap pixels that accumulated some value. NOTE(review):
// getRGB returns a signed int, so any pixel with the high alpha
// bit set is negative and skipped here - presumably the additive
// composite leaves alpha low for painted pixels; confirm.
if (rgb > 0)
{
double percent = percentFromRgb(rgb);
int color = getColor(percent, gradient);
image.setRGB(x, y, color);
}
}
}
}
/**
 * Blurs the image using a filter.
 *
 * @param image the image to blur.
 * @return a new {@link BufferedImage} in which the contents of the supplied
 * image have been blurred.
 */
private BufferedImage blur(BufferedImage image)
{
BufferedImage filteredImage = new BufferedImage(image.getWidth(), image.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
// Fixed 8-pixel Gaussian radius; the filter writes into filteredImage.
AbstractFilter filter = new GaussianBlurFilter(8);
filteredImage = filter.filter(image, filteredImage);
return filteredImage;
}
/**
 * Converts a percent into an rgb value.
 *
 * Maps 0.0..1.0 linearly onto the 24-bit range 0..0xFFFFFF (no alpha).
 *
 * @param percent the percent
 * @return the rgb value
 */
private static int rgbFromPercent(double percent)
{
return (int)Math.round(0xFFFFFF * percent);
}
/**
 * Converts an rgb value into a percent.
 *
 * Inverse of {@link #rgbFromPercent(double)}; the alpha byte is masked off.
 *
 * @param rgb the rgb value
 * @return the percent
 */
private static double percentFromRgb(int rgb)
{
return (rgb & 0x00FFFFFF) / (double)0xFFFFFF;
}
/**
 * Gets the color for the given percent and gradients.
 *
 * @param percent the percent
 * @param gradient the gradients
 * @return the color in rgb
 */
private static int getColor(double percent, int[] gradient)
{
// Bucket the percent into a gradient index, clamped to the last entry.
int gradientIndex = (int)(gradient.length * percent);
int color1 = gradient[Math.min(gradientIndex, gradient.length - 1)];
/* Interpolate between the two colors that surround the value. We'll
 * leave the first three indices alone since they produce a pink
 * artifact with the current algorithm. */
if (gradientIndex > 2 && gradientIndex < gradient.length - 1)
{
int color2 = gradient[gradientIndex + 1];
double binPercent = gradientIndex / (double)gradient.length;
double nextBinPercent = (gradientIndex + 1) / (double)gradient.length;
color1 = interpolateColor(color1, color2, percent - binPercent, nextBinPercent - binPercent);
}
return color1;
}
/**
 * Interpolates between two colors, channel by channel (ARGB).
 *
 * @param color1 the first color
 * @param color2 the second color
 * @param step the distance of the value into the current gradient bin
 * @param max the width of the gradient bin
 * @return the interpolated color
 */
private static int interpolateColor(int color1, int color2, double step, double max)
{
// long mask so the alpha byte of the int doesn't sign-extend.
final long alphaMask = 0xFF000000L;
long a1 = (color1 & alphaMask) >> 24;
int r1 = (color1 & 0xFF0000) >> 16;
int g1 = (color1 & 0xFF00) >> 8;
int b1 = color1 & 0xFF;
long a2 = (color2 & alphaMask) >> 24;
int r2 = (color2 & 0xFF0000) >> 16;
int g2 = (color2 & 0xFF00) >> 8;
int b2 = color2 & 0xFF;
int a3 = (int)Math.floor(interpolate(a1, a2, step, max)) & 0xFF;
int r3 = (int)Math.floor(interpolate(r1, r2, step, max)) & 0xFF;
int g3 = (int)Math.floor(interpolate(g1, g2, step, max)) & 0xFF;
int b3 = (int)Math.floor(interpolate(b1, b2, step, max)) & 0xFF;
// Reassemble ARGB.
int toReturn = ((a3 << 8 | r3) << 8 | g3) << 8 | b3;
return toReturn;
}
/**
 * Interpolates between two values by the fraction step/max.
 *
 * @param begin the first value
 * @param end the second value
 * @param step the distance into the interval
 * @param max the width of the interval
 * @return the interpolated value
 */
private static double interpolate(long begin, long end, double step, double max)
{
long diff = end > begin ? end - begin : begin - end;
double multiplier = Math.abs(step / max);
double interpolateBy = diff * multiplier;
if (interpolateBy > diff)
{
// NOTE(review): '>>' binds looser than '-'/'+', so these compute
// (end - diff) >> 1 and (end + diff) >> 1, i.e. half of begin and
// half of the sum respectively - confirm that halving the endpoint
// rather than clamping to it is the intended behaviour.
return end > begin ? end - diff >> 1 : end + diff >> 1;
}
else
{
return end > begin ? begin + interpolateBy : end + interpolateBy;
}
}
}
|
<?php
// Collect user-supplied values from the form submission, defaulting to
// empty strings when absent so undefined-index notices are avoided.
// NOTE(review): consider validating the email format before insertion.
$name = isset($_POST['name']) ? $_POST['name'] : '';
$email = isset($_POST['email']) ? $_POST['email'] : '';

// Establishing a connection to the database
$conn = mysqli_connect("localhost", "username", "password", "dbname");
if($conn === false){
    die("ERROR: Could not connect. " . mysqli_connect_error());
}

// Use a prepared statement with bound parameters so user input can
// never be interpreted as SQL (prevents SQL injection).
$sql = "INSERT INTO users (name, email) VALUES (?, ?)";
if($stmt = mysqli_prepare($conn, $sql)){
    mysqli_stmt_bind_param($stmt, "ss", $name, $email);
    if(mysqli_stmt_execute($stmt)){
        echo "Records inserted successfully.";
    } else{
        echo "ERROR: Could not execute $sql. " . mysqli_stmt_error($stmt);
    }
    mysqli_stmt_close($stmt);
} else{
    echo "ERROR: Could not prepare $sql. " . mysqli_error($conn);
}

// Close connection
mysqli_close($conn);
?>
|
import re
from collections import Counter
from typing import Dict
def count_word_occurrences(file_path: str) -> Dict[str, int]:
    """Count occurrences of each non-stop-word in a text file.

    Args:
        file_path: Path to a UTF-8 text file.

    Returns:
        Mapping from lowercased word to its number of occurrences,
        excluding a small built-in set of English stop words.
    """
    stop_words = {"the", "and", "is", "it", "a", "an", "in", "on", "at", "to", "of", "for", "with", "as"}
    # Explicit encoding avoids platform-dependent defaults when decoding.
    with open(file_path, 'r', encoding='utf-8') as file:
        text = file.read().lower()
    # \b\w+\b extracts alphanumeric runs, dropping punctuation.
    words = re.findall(r'\b\w+\b', text)
    return dict(Counter(word for word in words if word not in stop_words))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.