text stringlengths 1 1.05M |
|---|
<reponame>phedny/Mammon<gh_stars>1-10
package org.mammon.math;
/**
 * This interface can be used for generic typing, to bind classes of an actual
 * implementation compile-time.
 *
 * @param <F>
 *            the concrete finite field implementation type (self-referential
 *            bound, so a field and its elements stay within one implementation).
 */
public interface FiniteField<F extends FiniteField<F>> {

    /**
     * @return the zero element of this finite field.
     */
    Element<F> getZero();

    /**
     * @return the unit (one) element of this finite field.
     */
    Element<F> getOne();

    /**
     * @return a random element from this finite field.
     */
    Element<F> getRandomElement();

    /**
     * This interface represents an element from a finite field.
     *
     * @param <F>
     *            the concrete finite field implementation this element belongs to.
     */
    public interface Element<F extends FiniteField<F>> {

        /**
         * @return the finite field this element belongs to.
         */
        F getFiniteField();

        /**
         * @param other
         *            an element from the same finite field to add together.
         * @return the sum of this element and the other element.
         */
        Element<F> add(Element<F> other);

        /**
         * @return the opposite of this element.
         */
        Element<F> getOpposite();

        /**
         * @param other
         *            an element from the same finite field to multiply
         *            together.
         * @return the product of this element and the other element.
         */
        Element<F> multiply(Element<F> other);

        /**
         * @return the inverse of this element.
         */
        Element<F> getInverse();

        /**
         * @param exponent
         *            an element from the same finite field to use as exponent.
         * @return the exponentiation of this element by the exponent.
         */
        FiniteField.Element<F> exponentiate(FiniteField.Element<F> exponent);

        /**
         * @param <G>
         *            A generic <code>Group</code>
         * @param groupElement
         *            element of the <code>Group</code> which gets raised to the
         *            <code>this</code> power.
         * @return the exponentiation of <code>groupElement</code> to
         *         <code>this</code>.
         */
        <G extends Group<G>> Group.Element<G> raise(Group.Element<G> groupElement);
    }
}
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.event.feed.windows;
import brooklyn.event.AttributeSensor;
import brooklyn.event.feed.PollConfig;
import com.google.common.base.Function;
import com.google.common.base.Functions;
/**
 * Poll configuration for a Windows performance counter feed: binds a sensor to
 * the name of the counter that should be sampled for it.
 */
public class WindowsPerformanceCounterPollConfig<T> extends PollConfig<Object, T, WindowsPerformanceCounterPollConfig<T>> {

    /** Name of the Windows performance counter polled for this sensor. */
    private String performanceCounterName;

    @SuppressWarnings({ "unchecked", "rawtypes" })
    public WindowsPerformanceCounterPollConfig(AttributeSensor<T> sensor) {
        super(sensor);
        // Counter values are passed through to the sensor unchanged.
        onSuccess((Function) Functions.identity());
    }

    /** Copy constructor. */
    public WindowsPerformanceCounterPollConfig(WindowsPerformanceCounterPollConfig<T> other) {
        super(other);
        this.performanceCounterName = other.performanceCounterName;
    }

    public String getPerformanceCounterName() {
        return this.performanceCounterName;
    }

    /** Fluent setter for the performance counter name. */
    public WindowsPerformanceCounterPollConfig<T> performanceCounterName(String val) {
        this.performanceCounterName = val;
        return this;
    }

    @Override
    public String toString() {
        return "windowsPerformanceCounter[" + this.performanceCounterName + "]";
    }
}
|
<gh_stars>10-100
import * as AssertionError from "assertion-error";
import * as ts from "ts-morph";
import { ClassDeclarations } from "./ClassDeclarations";
import { Exports } from "./Exports";
import { Imports } from "./Imports";
/**
 * Assertion-oriented wrapper around a ts-morph source file node.
 */
export class SourceFile {
  constructor(private _node: ts.SourceFile) {}

  /** The class declarations for the source file. */
  get classes(): ClassDeclarations {
    return new ClassDeclarations(this._node.getClasses());
  }

  /** The export interfaces for the source file. */
  get exports(): Exports {
    return new Exports(this._node.getExportedDeclarations());
  }

  /** The imports for the source file. */
  get imports(): Imports {
    return new Imports(this._node.getImportDeclarations());
  }

  /** The underlying AST node. */
  get node(): ts.SourceFile {
    return this._node;
  }

  /** Assert the file path of the source file matches the expected value. */
  filePath(expected: string | RegExp, msg = "Unexpected file path."): this {
    const actual = this._node.getFilePath();
    // Guard clause: a matching path needs no assertion error.
    if (actual.match(expected)) {
      return this;
    }
    throw new AssertionError(
      msg,
      { actual, expected, showDiff: false },
      this.filePath
    );
  }
}
|
def longest_word(string):
    """Return the length of the longest whitespace-separated word in ``string``.

    Returns 0 for an empty or all-whitespace string (the original raised
    ``ValueError`` from ``max()`` on an empty sequence in that case).
    """
    return max((len(word) for word in string.split()), default=0)
<filename>src/pages/Category/Category.js
/*
* @Author: liuyr
* @Date: 2019-09-11 15:16:42
* @Last Modified by: liuyr
* @Last Modified time: 2019-09-14 11:18:01
*/
import React, { PureComponent } from 'react'
import { connect } from 'dva';
import styles from './Category.less';
import { Button, Table, Modal, message } from 'antd';
import config from '@/utils/config';
import CategoryForm from './CategoryForm'
const { confirm } = Modal;
@connect(state => state.category)
class Category extends PureComponent {
  state = {
    // ids of the rows selected for batch deletion
    ids: [],
    // modal title; handleOk also compares against this string to decide
    // between "add" and "update", so it doubles as a mode flag
    modalTitle: '添加分类信息',
    // whether the modal is visible
    visible: false,
    // data used to pre-fill the form inside the modal
    category: {}
  }

  /**
   * Lifecycle hook that runs after the component has mounted.
   * @memberof Category
   */
  componentDidMount() {
    // load the category list
    this.props.dispatch({
      type: "category/getCategoryDataA"
    });
  }

  /**
   * Delete a single category.
   * @param {number} id id of the record to delete
   * @returns void
   * @memberof Category
   */
  toDelete = (id) => {
    // id is the id of the record being deleted;
    // dispatch the async delete action only after the user confirms
    const t = this;
    confirm({
      title: '是否确定删除?',
      content: '',
      okText: '确定',
      okType: 'danger',
      cancelText: '取消',
      onOk() {
        t.props.dispatch({
          type: 'category/deleteCategoryByIdA',
          payload: { id }
        });
      },
      onCancel() {
        console.log('Cancel');
      },
    });
  }

  /**
   * Batch-delete the categories selected in the table.
   * @returns void
   * @memberof Category
   */
  toBatchDelete = () => {
    const { ids } = this.state;
    if (ids.length > 0) {
      // confirm first; on OK dispatch the async batch-delete action
      const t = this;
      confirm({
        title: '是否确定批量删除?',
        content: '',
        okText: '确定',
        okType: 'danger',
        cancelText: '取消',
        onOk() {
          t.props.dispatch({
            type: 'category/batchDeleteCategoryA',
            payload: { ids: ids.join(',') }
          });
        },
        onCancel() {
          console.log('Cancel');
        },
      });
    } else {
      // nothing selected: prompt the user to pick rows first
      message.warning('请选择要删除的数据!');
    }
  }

  /**
   * Open the modal in "add" mode.
   * @returns void
   * @memberof Category
   */
  toAdd = () => {
    // set the "add" title, show the modal, and clear the form data
    this.setState({
      modalTitle: '添加分类信息',
      visible: true,
      category: {}
    });
  }

  /**
   * Open the modal in "update" mode.
   * @param {Object} record the record being edited
   * @returns void
   * @memberof Category
   */
  toUpdate = (record) => {
    // set the "update" title, show the modal, and pre-fill the form data
    this.setState({
      modalTitle: '修改分类信息',
      visible: true,
      category: record
    });
  }

  /**
   * OK button handler of the modal: validates the form, then dispatches
   * either the save or the update action depending on the modal title.
   * @param {Object} e event object
   * @returns void
   * @memberof Category
   */
  handleOk = (e) => {
    e.preventDefault();
    this.form.validateFieldsAndScroll((err, values) => {
      if (!err) {
        // console.log('form values:', values);
        // dispatch the async save/update action
        if (this.state.modalTitle === '添加分类信息') {
          this.props.dispatch({
            type: 'category/saveCategoryA',
            payload: values
          });
        } else {
          this.props.dispatch({
            type: 'category/updateCategoryA',
            payload: values
          });
        }
        this.handleCancel();
      }
    });
  }

  /**
   * Cancel button handler of the modal: hide it.
   * @returns void
   * @memberof Category
   */
  handleCancel = () => {
    this.setState({
      visible: false
    })
  }

  /**
   * Captures the child form instance; only with this reference can the
   * parent component submit/validate the form.
   * @param {Object} form form instance
   * @returns void
   * @memberof Category
   */
  formRef = (form) => {
    this.form = form;
  }

  /**
   * Render the toolbar, category table and modal.
   * @returns JSX element
   * @memberof Category
   */
  render() {
    const { modalTitle, visible, category } = this.state;
    const t = this;
    const rowSelection = {
      onChange: (selectedRowKeys) => {
        // selectedRowKeys is an array with the id of every row whose
        // checkbox the user ticked in the table
        // console.log(selectedRowKeys);
        t.setState({
          ids: selectedRowKeys
        })
      }
    };
    const columns = [{
      title: '编号',
      dataIndex: 'id',
      align: 'center'
    }, {
      title: '名称',
      dataIndex: 'name',
      align: 'center',
    }, {
      title: '数量',
      dataIndex: 'num',
      align: 'center',
    }, {
      title: '父类型ID',
      dataIndex: 'parentId',
      align: 'center',
    }, {
      title: '操作',
      dataIndex: '',
      align: 'center',
      width: 180,
      render: (text, record) => {
        return (
          <div>
            <Button type="dashed" onClick={this.toUpdate.bind(this, record)}>修改</Button>
            <Button
              type="link"
              onClick={this.toDelete.bind(this, record.id)}
            >删除</Button>
          </div>
        );
      }
    }];
    return (
      <div className={styles.category}>
        <div>
          <Button type="primary" onClick={this.toAdd}>添加</Button>
          <Button type="danger" onClick={this.toBatchDelete}>批量删除</Button>
          <Button>导出</Button>
        </div>
        <div className={styles.tableDiv}>
          <Table
            bordered
            size="small"
            rowKey="id"
            rowSelection={rowSelection}
            columns={columns}
            dataSource={this.props.categoryData} />
        </div>
        {/* modal */}
        <Modal
          title={modalTitle}
          visible={visible}
          onOk={this.handleOk}
          onCancel={this.handleCancel}
        >
          {/* form component */}
          <CategoryForm
            category={category}
            ref={this.formRef}
          ></CategoryForm>
        </Modal>
      </div>
    )
  }
}
export default Category;
|
<reponame>sbcgua/abapmerge
import PackageInfo from "../package.json";
/**
 * Renders the ABAP marker interface that abapmerge appends to merged output,
 * stamped with the tool version and the generation time.
 */
export default class AbapmergeMarker {
  public render(): string {
    const version = PackageInfo.version;
    const generatedAt = new Date().toJSON();
    return `
****************************************************
INTERFACE lif_abapmerge_marker.
* abapmerge ${ version } - ${ generatedAt }
ENDINTERFACE.
****************************************************`;
  }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.timestamp = exports.write = exports.read = exports.parsePoData = exports.poGenerator = void 0;
var _fs = require("fs");
var _child_process = require("child_process");
// Babel helper: emulate object spread by copying all own enumerable string
// and symbol keys of every source argument onto `target`.
function _objectSpread(target) {
  for (var argIdx = 1; argIdx < arguments.length; argIdx++) {
    var source = arguments[argIdx] != null ? arguments[argIdx] : {};
    var ownKeys = Object.keys(source);
    if (typeof Object.getOwnPropertySymbols === 'function') {
      // Only symbols that are enumerable take part in spread semantics.
      ownKeys = ownKeys.concat(
        Object.getOwnPropertySymbols(source).filter(function (sym) {
          return Object.getOwnPropertyDescriptor(source, sym).enumerable;
        })
      );
    }
    ownKeys.forEach(function (key) {
      _defineProperty(target, key, source[key]);
    });
  }
  return target;
}
// Babel helper: assign `value` to `obj[key]`. An existing key is redefined via
// Object.defineProperty (enumerable/configurable/writable); a new key is set
// with plain assignment. Returns `obj` for chaining.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
// Serialize an { msgctxt: msgid } map into PO-file entries, one per key,
// separated by blank lines. msgstr is always left empty.
const poGenerator = obj =>
  Object.keys(obj)
    .map(key => `msgctxt "${key}"\nmsgid "${obj[key]}"\nmsgstr ""\n`)
    .join('\n');
exports.poGenerator = poGenerator;
// Parse PO-file text into an { msgctxt: translation } map. For each entry the
// msgstr wins when non-empty, otherwise the msgid is used as fallback.
const parsePoData = data => {
  // Entries in a PO file are separated by blank lines.
  const entries = data.split('\n\n').map(chunk => {
    const item = {
      msgctxt: '',
      msgid: '',
      msgstr: ''
    };
    let lastKey = '';
    for (const line of chunk.split('\n')) {
      const res = line.match(/^(\w+) "(.*)"$/);
      if (res) {
        const [, key, value] = res;
        lastKey = key;
        item[key] = value;
      } else if (line != '') {
        // Continuation line: strip quotes and append to the previous key.
        item[lastKey] += line.replace(/"/g, "");
      }
    }
    return item;
  });
  // Later duplicate contexts overwrite earlier ones, matching the original
  // left-to-right spread/reduce merge.
  return Object.fromEntries(
    entries.map(item => [item.msgctxt, item.msgstr || item.msgid])
  );
};
exports.parsePoData = parsePoData;
// Read a file synchronously and return its contents as a UTF-8 string.
const read = file => (0, _fs.readFileSync)(file, 'utf8');
exports.read = read;
// Write string content to a file synchronously, encoded as UTF-8.
const write = (file, content) => (0, _fs.writeFileSync)(file, content, {
  encoding: 'utf8'
});
exports.write = write;
// Commit time (unix seconds) of the most recent git commit in the current
// working directory. NOTE(review): shells out to `git`, so this requires a
// git checkout at runtime.
const timestamp = () => {
  const res = (0, _child_process.execSync)('git log --format=%ct -n 1', {
    encoding: 'utf8'
  }).trim();
  return parseInt(res, 10);
};
exports.timestamp = timestamp;
|
<reponame>automation555/rumble
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Authors: <NAME>, <NAME>
*
*/
package org.rumbledb.expressions.postfix;
import org.rumbledb.compiler.VisitorConfig;
import org.rumbledb.exceptions.ExceptionMetadata;
import org.rumbledb.exceptions.OurBadException;
import org.rumbledb.expressions.AbstractNodeVisitor;
import org.rumbledb.expressions.ExecutionMode;
import org.rumbledb.expressions.Expression;
import org.rumbledb.expressions.Node;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Expression node for a dynamic (postfix) function call: a main expression
 * that evaluates to the callee, applied to a list of argument expressions.
 * A null entry in the argument list denotes a "?" placeholder argument.
 */
public class DynamicFunctionCallExpression extends Expression {

    private static final long serialVersionUID = 1L;

    // Expression that must evaluate to the function item being called.
    private Expression mainExpression;
    // Call arguments; null entries represent "?" placeholders (see print()).
    private List<Expression> arguments;

    /**
     * @param mainExpression expression producing the callee; must not be null.
     * @param arguments argument expressions; a null list is treated as empty.
     * @param metadata source location for error reporting.
     */
    public DynamicFunctionCallExpression(
            Expression mainExpression,
            List<Expression> arguments,
            ExceptionMetadata metadata
    ) {
        super(metadata);
        if (mainExpression == null) {
            throw new OurBadException("Main expression cannot be null in a postfix expression.");
        }
        this.mainExpression = mainExpression;
        this.arguments = arguments;
        if (this.arguments == null) {
            this.arguments = new ArrayList<>();
        }
    }

    public List<Expression> getArguments() {
        return this.arguments;
    }

    @Override
    public List<Node> getChildren() {
        List<Node> result = new ArrayList<>();
        result.add(this.mainExpression);
        // Placeholder ("?") arguments are null and are not children.
        result.addAll(this.arguments.stream().filter(arg -> arg != null).collect(Collectors.toList()));
        return result;
    }

    /**
     * DynamicFunctionCall is always locally evaluated as execution mode cannot be determined at static analysis phase.
     * This behavior is different from all other postfix extensions, hence no override is required.
     */
    @Override
    public <T> T accept(AbstractNodeVisitor<T> visitor, T argument) {
        return visitor.visitDynamicFunctionCallExpression(this, argument);
    }

    public Expression getMainExpression() {
        return this.mainExpression;
    }

    /** Debug pretty-printer: node name, execution mode, inferred type, args. */
    public void print(StringBuffer buffer, int indent) {
        for (int i = 0; i < indent; ++i) {
            buffer.append(" ");
        }
        buffer.append(getClass().getSimpleName());
        buffer.append(" | " + this.highestExecutionMode);
        buffer.append(" | " + (this.inferredSequenceType == null ? "not set" : this.inferredSequenceType));
        buffer.append("\n");
        for (Expression arg : this.arguments) {
            if (arg == null) {
                for (int i = 0; i < indent; ++i) {
                    buffer.append(" ");
                }
                buffer.append("?\n");
            } else {
                arg.print(buffer, indent + 1);
            }
        }
    }

    @Override
    public void serializeToJSONiq(StringBuffer sb, int indent) {
        indentIt(sb, indent);
        this.mainExpression.serializeToJSONiq(sb, indent + 1);
        // TODO always ending with \n might be an issue here
        // NOTE(review): unlike print(), null ("?") arguments are not handled
        // here and would NPE — confirm whether they can reach serialization.
        sb.append("(");
        if (this.arguments != null) {
            for (int i = 0; i < this.arguments.size(); i++) {
                this.arguments.get(i).serializeToJSONiq(sb, 0);
                // BUG FIX: the original appended ") " after the last argument
                // and then appended ")\n" again after the loop, emitting a
                // doubled closing parenthesis ("...) )").
                if (i < this.arguments.size() - 1) {
                    sb.append(", ");
                }
            }
        }
        sb.append(")\n");
    }

    @Override
    public void initHighestExecutionMode(VisitorConfig visitorConfig) {
        if (this.arguments.size() == 0) {
            this.highestExecutionMode = ExecutionMode.LOCAL;
            return;
        }
        // Inherit the execution mode of the first argument.
        this.highestExecutionMode = this.arguments.get(0).getHighestExecutionMode(visitorConfig);
    }
}
|
#!/bin/bash
# Pull the given branch, rebuild the Go binary, and restart the service.
# Usage: ./deploy.sh <branch>
if [ -z "$1" ]; then
	echo "请输入分支参数"
	exit 1
fi
echo "git pull origin $1"
# BUG FIX: quote "$1" so a branch name with special characters is passed
# as a single argument.
git pull origin "$1"
echo "build..."
go build
export GIN_MODE=release
echo "重启服务"
pm2 restart zhuque
pm2 logs
|
#!/bin/bash
set -eo pipefail -o nounset

## Locate the active conda environment root and derive the recipe install dir.
if [[ -z $(conda info --envs | grep "*" | grep -o "\/.*") ]]; then
    export CONDA_ROOT=$(conda info --root)
    env_dir=$CONDA_ROOT
    export RECIPE_DIR=$CONDA_ROOT/share/ggd/Homo_sapiens/hg19/hg19-protein-coding-features-ensembl-v1/1
elif [[ $(conda info --envs | grep "*" | grep -o "\/.*") == "base" ]]; then
    export CONDA_ROOT=$(conda info --root)
    env_dir=$CONDA_ROOT
    export RECIPE_DIR=$CONDA_ROOT/share/ggd/Homo_sapiens/hg19/hg19-protein-coding-features-ensembl-v1/1
else
    env_dir=$(conda info --envs | grep "*" | grep -o "\/.*")
    export CONDA_ROOT=$env_dir
    export RECIPE_DIR=$env_dir/share/ggd/Homo_sapiens/hg19/hg19-protein-coding-features-ensembl-v1/1
fi

## Package directory of this recipe inside the conda pkgs cache.
PKG_DIR=`find "$CONDA_SOURCE_PREFIX/pkgs/" -name "$PKG_NAME-$PKG_VERSION*" | grep -v ".tar.bz2" | grep "$PKG_VERSION.*$PKG_BUILDNUM$"`

## Re-create the recipe directory and run the packaged recipe script in it.
if [ -d "$RECIPE_DIR" ]; then
    rm -r "$RECIPE_DIR"
fi
mkdir -p "$RECIPE_DIR"
(cd "$RECIPE_DIR" && bash "$PKG_DIR/info/recipe/recipe.sh")
cd "$RECIPE_DIR"

## Iterate over new files and replace file name with data package name and data version
for f in *; do
    ext="${f#*.}"
    # BUG FIX: was 'filename="{f%%.*}"' (missing $), which stored the literal
    # text "{f%%.*}". The variable is currently unused, but now expands.
    filename="${f%%.*}"
    if [[ ! -f "hg19-protein-coding-features-ensembl-v1.$ext" ]]
    then
        (mv "$f" "hg19-protein-coding-features-ensembl-v1.$ext")
    fi
done

## Add environment variables
#### File
## (find fix: -maxdepth is a global option and must precede -type, otherwise
## GNU find warns on every invocation.)
if [[ `find $RECIPE_DIR -maxdepth 1 -type f | wc -l | sed 's/ //g'` == 1 ]] ## If only one file
then
    recipe_env_file_name="ggd_hg19-protein-coding-features-ensembl-v1_file"
    recipe_env_file_name="$(echo "$recipe_env_file_name" | sed 's/-/_/g' | sed 's/\./_/g')"
    file_path="$(find $RECIPE_DIR -maxdepth 1 -type f)"
elif [[ `find $RECIPE_DIR -maxdepth 1 -type f | wc -l | sed 's/ //g'` == 2 ]] ## If two files
then
    indexed_file=`find $RECIPE_DIR -maxdepth 1 -type f \( -name "*.tbi" -or -name "*.fai" -or -name "*.bai" -or -name "*.crai" -or -name "*.gzi" \)`
    if [[ ! -z "$indexed_file" ]] ## If index file exists
    then
        recipe_env_file_name="ggd_hg19-protein-coding-features-ensembl-v1_file"
        recipe_env_file_name="$(echo "$recipe_env_file_name" | sed 's/-/_/g' | sed 's/\./_/g')"
        file_path="$(echo $indexed_file | sed 's/\.[^.]*$//')" ## remove index extension
    fi
fi

#### Dir
recipe_env_dir_name="ggd_hg19-protein-coding-features-ensembl-v1_dir"
recipe_env_dir_name="$(echo "$recipe_env_dir_name" | sed 's/-/_/g' | sed 's/\./_/g')"

## Register activate/deactivate hooks that export/unset the dir (and file)
## environment variables for this data package.
activate_dir="$env_dir/etc/conda/activate.d"
deactivate_dir="$env_dir/etc/conda/deactivate.d"
mkdir -p "$activate_dir"
mkdir -p "$deactivate_dir"
echo "export $recipe_env_dir_name=$RECIPE_DIR" >> "$activate_dir/env_vars.sh"
echo "unset $recipe_env_dir_name" >> "$deactivate_dir/env_vars.sh"

#### File
## If the file env variable exists, set the env file var
if [[ ! -z "${recipe_env_file_name:-}" ]]
then
    echo "export $recipe_env_file_name=$file_path" >> "$activate_dir/env_vars.sh"
    echo "unset $recipe_env_file_name" >> "$deactivate_dir/env_vars.sh"
fi

echo 'Recipe successfully built!'
|
import * as React from 'react'
import { connect } from 'react-redux'
import {
FormGroup,
InputField,
Tooltip,
useHoverTooltip,
UseHoverTooltipTargetProps,
} from '@opentrons/components'
import { getWellsDepth } from '@opentrons/shared-data'
import {
getIsTouchTipField,
getIsDelayPositionField,
} from '../../../../form-types'
import { i18n } from '../../../../localization'
import { selectors as stepFormSelectors } from '../../../../step-forms'
import stepFormStyles from '../../StepEditForm.css'
import styles from './TipPositionInput.css'
import { TipPositionModal } from './TipPositionModal'
import { getDefaultMmFromBottom } from './utils'
import { BaseState } from '../../../../types'
import { FieldProps } from '../../types'
// Own props: form-field wiring plus the labware whose well depth bounds the
// tip position input.
interface OP extends FieldProps {
  labwareId?: string | null
  className?: string
}
// Props derived from redux state (injected via connect's mapStateToProps).
interface SP {
  mmFromBottom: number | null
  wellDepthMm: number
}
type Props = OP & SP
/**
 * Read-only input showing the tip position (mm from the well bottom).
 * Clicking the field opens TipPositionModal, where the value is edited.
 */
function TipPositionInput(props: Props): JSX.Element {
  const [isModalOpen, setModalOpen] = React.useState(false)

  const handleOpen = (): void => {
    // NOTE(review): truthiness check — a well depth of 0 also keeps the modal
    // closed; confirm that is intended.
    if (props.wellDepthMm) {
      setModalOpen(true)
    }
  }
  const handleClose = (): void => {
    setModalOpen(false)
  }
  const {
    disabled,
    name,
    mmFromBottom,
    tooltipContent,
    wellDepthMm,
    updateValue,
    isIndeterminate,
  } = props
  // Field kind determines which wrapper layout is used below.
  const isTouchTipField = getIsTouchTipField(name)
  const isDelayPositionField = getIsDelayPositionField(name)
  let value: number | string = ''
  if (wellDepthMm !== null) {
    // show default value for field in parens if no mmFromBottom value is selected
    value =
      mmFromBottom !== null
        ? mmFromBottom
        : getDefaultMmFromBottom({ name, wellDepthMm })
  }
  const [targetProps, tooltipProps] = useHoverTooltip()
  return (
    <>
      <Tooltip {...tooltipProps}>{tooltipContent}</Tooltip>
      {/* Modal mounts only while open; it writes back via updateValue. */}
      {isModalOpen && (
        <TipPositionModal
          name={name}
          closeModal={handleClose}
          wellDepthMm={wellDepthMm}
          mmFromBottom={mmFromBottom}
          updateValue={updateValue}
          isIndeterminate={isIndeterminate}
        />
      )}
      <Wrapper
        targetProps={targetProps}
        disabled={disabled}
        isTouchTipField={isTouchTipField}
        isDelayPositionField={isDelayPositionField}
      >
        <InputField
          disabled={disabled}
          className={props.className || stepFormStyles.small_field}
          readOnly
          onClick={handleOpen}
          value={String(value)}
          isIndeterminate={isIndeterminate}
          units={i18n.t('application.units.millimeter')}
          id={`TipPositionField_${name}`}
        />
      </Wrapper>
    </>
  )
}
// Props for the layout wrapper around the tip position input.
interface WrapperProps {
  isTouchTipField: boolean
  isDelayPositionField: boolean
  children: React.ReactNode
  disabled: boolean
  targetProps: UseHoverTooltipTargetProps
}

// Touch-tip and delay-position fields render bare inside a div; every other
// field gets a labelled FormGroup around the input.
const Wrapper = (props: WrapperProps): JSX.Element =>
  props.isTouchTipField || props.isDelayPositionField ? (
    <div {...props.targetProps}>{props.children}</div>
  ) : (
    <span {...props.targetProps}>
      <FormGroup
        label={i18n.t('form.step_edit_form.field.tip_position.label')}
        disabled={props.disabled}
        className={styles.well_order_input}
      >
        {props.children}
      </FormGroup>
    </span>
  )
// Derive the well depth and current mm-from-bottom value from redux state.
const mapSTP = (state: BaseState, ownProps: OP): SP => {
  const { labwareId, value } = ownProps
  let wellDepthMm = 0
  if (labwareId != null) {
    const labwareDef = stepFormSelectors.getLabwareEntities(state)[labwareId].def
    // NOTE: only taking depth of first well in labware def, UI not currently equipped for multiple depths
    if (labwareDef.wells['A1']) {
      wellDepthMm = getWellsDepth(labwareDef, ['A1'])
    }
  }
  return {
    wellDepthMm,
    mmFromBottom: typeof value === 'number' ? value : null,
  }
}
export const TipPositionField = connect(mapSTP, () => ({}))(TipPositionInput)
|
#!/bin/bash
set -euo pipefail

# Publish the R lambda layers to every supported AWS region.
# Usage: release.sh <version> <unused> <aws-profile>
# NOTE(review): $2 is deliberately skipped — the profile has always been read
# from $3; confirm against the calling scripts before renumbering.
if [[ -z ${1+x} ]];
then
    echo 'version number required'
    exit 1
else
    VERSION=$1
fi
if [[ -z ${3+x} ]];
then
    echo 'AWS Profile required'
    exit 1
else
    PROFILE=$3
fi

# Package and deploy the layer stack for one region, then list the published
# layer versions (runtime, recommended, awspack).
function releaseToRegion {
    version=$1
    region=$2
    profile=$3
    bucket="aws-lambda-r-runtime.$region"
    echo "publishing layers to region $region"
    sam package \
        --output-template-file packaged.yaml \
        --s3-bucket ${bucket} \
        --s3-prefix R-${version} \
        --region ${region} \
        --profile ${profile}
    version_="${version//\./_}"
    stack_name=r-${version//\./-}
    sam deploy \
        --template-file packaged.yaml \
        --stack-name ${stack_name} \
        --parameter-overrides Version=${version_} \
        --no-fail-on-empty-changeset \
        --region ${region} \
        --profile ${profile} \
        --capabilities CAPABILITY_IAM
    echo "Published layers:"
    aws cloudformation describe-stack-resources \
        --stack-name ${stack_name} \
        --query "StackResources[?ResourceType=='AWS::Lambda::LayerVersion'].PhysicalResourceId" \
        --region ${region} \
        --profile ${profile}
}

regions=(
us-east-1 us-east-2
us-west-1 us-west-2
ap-south-1
ap-northeast-1 ap-northeast-2
ap-southeast-1 ap-southeast-2
ca-central-1
eu-central-1
eu-north-1
eu-west-1 eu-west-2 eu-west-3
sa-east-1
)
for region in "${regions[@]}"
do
    # BUG FIX: the profile argument was missing here; with 'set -u' the unbound
    # $3 inside releaseToRegion aborted the script on the first region.
    releaseToRegion ${VERSION} ${region} ${PROFILE}
done
|
<reponame>raiden101/eda<gh_stars>0
import React, { Component, Fragment } from "react";
import "./Login.css";
import LoginForm from "./LoginForm/LoginForm";
import { Route, Switch } from "react-router-dom";
class Login extends Component {
componentDidMount() {
localStorage.setItem("auth", "");
}
render() {
return (
<Fragment>
<div className="login">
<Switch>
<Route path={this.props.match.url} exact>
<LoginForm type="user" />
</Route>
<Route path={this.props.match.url + "/admin"}>
<LoginForm type="admin" />
</Route>
</Switch>
</div>
</Fragment>
);
}
}
export default Login;
|
# This shell script executes Slurm jobs for running predictions
# with Justin Salamon's ICASSP 2017 convolutional neural network
# on BirdVox-70k.
# Trial ID: 3.
# Augmentation kind: all-but-noise.
# Each job name encodes a held-out test unit and the unit being predicted —
# presumably a leave-one-unit-out scheme; confirm against the sbatch files.
sbatch 040_aug-all-but-noise_test-unit01_trial-3_predict-unit07.sbatch
sbatch 040_aug-all-but-noise_test-unit01_trial-3_predict-unit10.sbatch
sbatch 040_aug-all-but-noise_test-unit01_trial-3_predict-unit01.sbatch
sbatch 040_aug-all-but-noise_test-unit02_trial-3_predict-unit10.sbatch
sbatch 040_aug-all-but-noise_test-unit02_trial-3_predict-unit01.sbatch
sbatch 040_aug-all-but-noise_test-unit02_trial-3_predict-unit02.sbatch
sbatch 040_aug-all-but-noise_test-unit03_trial-3_predict-unit01.sbatch
sbatch 040_aug-all-but-noise_test-unit03_trial-3_predict-unit02.sbatch
sbatch 040_aug-all-but-noise_test-unit03_trial-3_predict-unit03.sbatch
sbatch 040_aug-all-but-noise_test-unit05_trial-3_predict-unit02.sbatch
sbatch 040_aug-all-but-noise_test-unit05_trial-3_predict-unit03.sbatch
sbatch 040_aug-all-but-noise_test-unit05_trial-3_predict-unit05.sbatch
sbatch 040_aug-all-but-noise_test-unit07_trial-3_predict-unit03.sbatch
sbatch 040_aug-all-but-noise_test-unit07_trial-3_predict-unit05.sbatch
sbatch 040_aug-all-but-noise_test-unit07_trial-3_predict-unit07.sbatch
sbatch 040_aug-all-but-noise_test-unit10_trial-3_predict-unit05.sbatch
sbatch 040_aug-all-but-noise_test-unit10_trial-3_predict-unit07.sbatch
sbatch 040_aug-all-but-noise_test-unit10_trial-3_predict-unit10.sbatch
|
package com.devaneios.turmadeelite.controllers;
import com.devaneios.turmadeelite.dto.*;
import com.devaneios.turmadeelite.entities.Teacher;
import com.devaneios.turmadeelite.security.guards.IsManager;
import com.devaneios.turmadeelite.security.guards.IsTeacher;
import com.devaneios.turmadeelite.services.TeacherService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import lombok.AllArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@RestController
@RequestMapping("/api/teachers")
@AllArgsConstructor
public class TeacherController {

    // Service holding all teacher-related business logic; injected by Lombok's
    // @AllArgsConstructor.
    private final TeacherService teacherService;

    // Create a teacher account; first-access instructions are sent by e-mail
    // (per the Swagger description). The caller's principal identifies the
    // requesting school/admin.
    // NOTE(review): unlike the other mutating endpoints, this POST has no
    // @IsManager guard — confirm whether that is intentional.
    @Operation(summary = "Cadastrar um professor e enviar instruções para realizar o primeiro acesso do mesmo")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "201",
                    description = "Usuário professor criado com sucesso. Enviada instruções de primeiro accesso para o e-mail"
            ),
            @ApiResponse(
                    responseCode = "409",
                    description = "E-mail já foi cadastrado"
            )
    })
    @PostMapping
    ResponseEntity<?> registerTeacher(@RequestBody TeacherCreateDTO teacherCreateDTO, Authentication authentication){
        this.teacherService.createTeacherUser(
                teacherCreateDTO.getEmail(),
                teacherCreateDTO.getName(),
                teacherCreateDTO.getLanguage(),
                teacherCreateDTO.getIsActive(),
                (String) authentication.getPrincipal());
        return new ResponseEntity<>(HttpStatus.CREATED);
    }

    // Paginated listing of the teachers of the authenticated manager's school.
    @Operation(summary = "Listar uma página de professores")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Página de professores retornada com sucesso"
            )
    })
    @IsManager
    @GetMapping
    ResponseEntity<Page<SchoolUserViewDTO>> getTeacherById(
            @RequestParam int size,
            @RequestParam int pageNumber,
            Authentication authentication
    ){
        Page<Teacher> paginatedTeachers = this.teacherService.getPaginatedTeachers(
                size,
                pageNumber,
                (String) authentication.getPrincipal()
        );
        Page<SchoolUserViewDTO> response = paginatedTeachers.map(SchoolUserViewDTO::new);
        return ResponseEntity.ok(response);
    }

    // Name-based search; an empty result list is returned instead of 404.
    // NOTE(review): no @IsManager guard here, unlike the email search below —
    // confirm whether that is intentional.
    @Operation(summary = "Visualizar uma lista de professores buscando pelo nome")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Professores encontrados com sucesso"
            ),
    })
    @GetMapping("/name/{name}")
    ResponseEntity<List<SchoolUserViewDTO>> getTeachersByNameSimilarity(@PathVariable String name){
        List<Teacher> paginatedTeacher = this.teacherService.getTeachersByNameSimilarity(name).orElse(new ArrayList<>());
        List<SchoolUserViewDTO> response = paginatedTeacher.stream().map(SchoolUserViewDTO::new).collect(Collectors.toList());
        return ResponseEntity.ok(response);
    }

    // Update an existing teacher by id.
    @Operation(summary = "Cadastrar um professor e enviar instruções para realizar o primeiro acesso do mesmo")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "201",
                    description = "Usuário professor criado com sucesso. Enviada instruções de primeiro accesso para o e-mail"
            ),
            @ApiResponse(
                    responseCode = "409",
                    description = "E-mail já foi cadastrado"
            )
    })
    @IsManager
    @PutMapping("/{id}")
    ResponseEntity<?> updateTeacher(
            @RequestBody TeacherCreateDTO teacherCreateDTO,
            @PathVariable Long id,
            Authentication authentication
    ){
        this.teacherService.updateTeacherUser(
                teacherCreateDTO.getEmail(),
                teacherCreateDTO.getName(),
                teacherCreateDTO.getLanguage(),
                teacherCreateDTO.getIsActive(),
                id,
                (String) authentication.getPrincipal());
        return new ResponseEntity<>(HttpStatus.OK);
    }

    // Fetch a single teacher by id, scoped to the caller's school.
    @Operation(summary = "Encontrar um professor pelo seu id e a escola na qual está associado")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Professor encontrado e retornado com sucesso"
            ),
            @ApiResponse(
                    responseCode = "404",
                    description = "Professor com id especificado não encontrado"
            ),
    })
    @IsManager
    @GetMapping("/{id}")
    ResponseEntity<SchoolUserViewDTO> getTeacherById(@PathVariable Long id,Authentication authentication){
        Teacher teacher = this.teacherService.findTeacherById(id,(String) authentication.getPrincipal());
        return ResponseEntity.ok(new SchoolUserViewDTO(teacher));
    }

    // Substring search by e-mail, scoped to the caller's school.
    @Operation(summary = "Encontrar professores da escola através do email ou de parte do email")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Professores encontrados e retornados com sucesso"
            ),
    })
    @IsManager
    @GetMapping("/email/{email}")
    @ResponseBody List<SchoolUserViewDTO> getTeachersByEmail(@PathVariable String email, Authentication authentication){
        return this.teacherService
                .findTeachersByEmailSubstring(
                        email,
                        (String) authentication.getPrincipal())
                .stream()
                .map(SchoolUserViewDTO::new)
                .collect(Collectors.toList());
    }

    // Teacher dashboard: posted vs. delivered activities per class.
    @Operation(summary = "Visualizar uma lista de atividades postadas e entregues por turma")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Atividades postadas e entregues encontradas com sucesso"
            ),
    })
    @IsTeacher
    @GetMapping("/dash")
    ResponseEntity<List<ActivityPostDeliveryDTO>> getPostDeliveryActivities(Authentication authentication){
        return ResponseEntity.ok(this.teacherService.getPostDeliveryActivities((String) authentication.getPrincipal()));
    }

    // Teacher dashboard: students ordered by punctuation.
    @Operation(summary = "Visualizar uma lista de alunos ordenada por pontuação")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Pontuações de cada aluno encontradas com sucesso"
            ),
    })
    @IsTeacher
    @GetMapping("/punctuations")
    ResponseEntity<List<StudentPunctuationDTO>> getStudentPunctuations(Authentication authentication){
        return ResponseEntity.ok(this.teacherService.getStudentPunctuations((String) authentication.getPrincipal()));
    }

    // Manager report: number of posted activities per teacher.
    @Operation(summary = "Listar a quantidade de atividades postadas por professor")
    @ApiResponses(value = {
            @ApiResponse(
                    responseCode = "200",
                    description = "Lista das atividades postadas por professor retornada com sucesso"
            )
    })
    @IsManager
    @GetMapping("activities-by-teacher")
    ResponseEntity<List<ActivityByTeacherDTO>> getActivitiesByTeacher(Authentication authentication) throws IOException {
        return ResponseEntity.ok(this.teacherService.getActivitiesByTeacher((String)authentication.getPrincipal()));
    }
}
|
package Chapter1_2Low;
//Exercise 1.2.12
public class SmartDate2 {
@SuppressWarnings("unused")
private final int year;
@SuppressWarnings("unused")
private final int month;
@SuppressWarnings("unused")
private final int day;
private static final int YEARFIRSTTWO = 20;
private static final int DAYPERWEEK = 7;
public SmartDate2(int year, int month, int day) throws Exception {
if (year < 0 || month < 0 || day < 0) {
Exception exception = new Exception("年月日要大于0");
throw exception;
}
if (month > 12) {
Exception exception = new Exception("月份要小于等于12");
throw exception;
}
switch (month) {
case 3:
case 5:
case 7:
case 8:
case 10:
case 12:
case 1: {
if (day > 31) {
Exception exception = new Exception(month + "月小于31号");
throw exception;
}
}
break;
case 2: {
if (day > 29) {
Exception exception = new Exception(month + "月小于31号");
throw exception;
}
int leapYear = year % 4;
if (leapYear != 0) {
if (day > 28) {
throw new Exception(month + "月小于29号");
}
}
}
break;
case 4:
case 6:
case 9:
case 11: {
if (day > 30) {
throw new Exception(month + "月小于30号");
}
}
break;
default:
break;
}
this.day = day;
this.year = year;
this.month = month;
}
public String dayOfTheWeek() {
String resultWeek = "";
int tempMonth = this.month;
int tempYear = this.year;
int tempDay = this.day;
if (this.month == 1 || this.month == 2) {
tempMonth += 12;
tempYear--;
}
int y = tempYear - YEARFIRSTTWO * 100;
int floor1 = (int) Math.floor(y / 4);
int floor2 = (int) (YEARFIRSTTWO / 4);
int floor3 = (int) Math.floor(26 * (tempMonth + 1) / 10);
int w = y + floor1 + floor2 - 2 * YEARFIRSTTWO + floor3 + tempDay - 1;
int key = w % DAYPERWEEK;
if (key < 0) {
key = key + 7;
}
switch (key) {
case 0:
resultWeek = "星期日";
break;
case 1:
resultWeek = "星期一";
break;
case 2:
resultWeek = "星期二";
break;
case 3:
resultWeek = "星期三";
break;
case 4:
resultWeek = "星期四";
break;
case 5:
resultWeek = "星期五";
break;
case 6:
resultWeek = "星期六";
break;
default:
break;
}
return resultWeek;
}
public String toString() {
return "" + month + "/" + day + "/" + year;
}
public static void main(String[] args) throws Exception {
SmartDate2 date = new SmartDate2(2012, 2, 28);
String week = date.dayOfTheWeek();
System.out.println(date + " is: " + week);
}
}
|
#!/bin/sh
# Mirror the release artifacts into the docs-merge assets folder.
# -r: recurse; --checksum: compare by content, not mtime/size, so unchanged
# files are skipped even after a fresh build; .DS_Store junk is excluded.
rsync -r --checksum --exclude=.DS_Store release/* ../../docs-merge/docs/src/assets/files/device-restore
|
<gh_stars>0
/**
* Classe de gestion des pages du ContactController
*
* <pre>
* Julien 13/02/15 Création
* </pre>
* @author Julien
* @version 1.0
* @package Rubizz
*/
/**
 * Page controller for the Contact pages. Holds no construction-time state;
 * all behaviour lives on the prototype.
 */
function Contact()
{
    // ==== Constructor (intentionally empty) ====
} // Default
Contact.prototype = {
    /**
     * Handlers wired once the page has finished loading.
     */
    ready: function () {
        /* ==== "Pin" menu handling ==== */
        $(document).on('click', 'div[id^=div_RBZ_pin_], span[id^=span_RBZ_pin_]', function() {
            // The numeric id is the last underscore-separated token of the element id.
            var arraySelector = $(this).attr('id').split('_');
            var id = arraySelector[arraySelector.length - 1];
            /* ---- Close the currently open menu if it differs from the clicked pin's menu ---- */
            $("div[id^=div_RBZ_menu_]").each(function() {
                if ($(this).attr('id') != "div_RBZ_menu_" + id) {
                    if ($(this).hasClass('RBZ_displayed')) {
                        $(this).removeClass('RBZ_displayed');
                        $(this).hide();
                    }
                }
            });
            /* ---- Toggle the menu matching the clicked pin ---- */
            if ($("#div_RBZ_menu_" + id).hasClass('RBZ_displayed')) {
                $("#div_RBZ_menu_" + id).removeClass('RBZ_displayed');
                $("#div_RBZ_menu_" + id).hide();
            } else {
                $("#div_RBZ_menu_" + id).addClass('RBZ_displayed');
                $("#div_RBZ_menu_" + id).show();
            }
        });
        $(document).on('click', 'div[id^=div_RBZ_pin2_]', function() {
            var arraySelector = $(this).attr('id').split('_');
            var id = arraySelector[arraySelector.length - 1];
            /* ---- Close the currently open menu if it differs from the clicked pin's menu ---- */
            $("div[id^=div_RBZ_menu2_]").each(function() {
                if ($(this).attr('id') != "div_RBZ_menu2_" + id) {
                    if ($(this).hasClass('RBZ_displayed')) {
                        $(this).removeClass('RBZ_displayed');
                        $(this).hide();
                    }
                }
            });
            /* ---- Toggle the menu matching the clicked pin ---- */
            if ($("#div_RBZ_menu2_" + id).hasClass('RBZ_displayed')) {
                $("#div_RBZ_menu2_" + id).removeClass('RBZ_displayed');
                $("#div_RBZ_menu2_" + id).hide();
            } else {
                $("#div_RBZ_menu2_" + id).addClass('RBZ_displayed');
                $("#div_RBZ_menu2_" + id).show();
            }
        });
    }, // ready
    /**
     * Handlers for the contact-import providers (Gmail/Outlook/Yahoo/email)
     * and the add-contact form.
     */
    handleProvider: function () {
        $(document).on('click', '#a_RBZ_importGmail, #a_RBZ_importOutlook, #a_RBZ_importYahoo', function() {
            var popup = window.open($(this).attr('hrefbis'), '', 'height=400,width=600');
            // Bug fix: the original called focus() on an undefined "newwindow"
            // variable, throwing a ReferenceError instead of focusing the popup.
            if (window.focus) { popup.focus(); }
            return false;
        });
        $(document).on('click', '#a_RBZ_importEmail', function() {
            goPopup.ajaxPopup($(this).attr('hrefbis'));
            return false;
        });
        $(document).on("click", "#a_RBZ_addContact", function() {
            // Each appended email row gets a fresh index substituted into the
            // Symfony-style "__name__" prototype placeholder.
            Contact.prototype._contactId++;
            var lsProto = $("#div_RBZ_emailsContainer").attr('data-prototype').replace(/__name__/g, Contact.prototype._contactId);
            $("#div_RBZ_emailsContainer").append(lsProto);
            return false;
        });
        $(document).on("click", "#button_RBZ_contactSubmit", function() {
            $('#form_RBZ_addContact').submit();
            return false;
        });
        /**
         * Validation of the form inside the contact popup (AJAX submit).
         */
        $(document).on("submit", "#form_RBZ_addContact", function() {
            var loData = new FormData($(this)[0]);
            console.log('submit');
            $.ajax({
                url: $(this).attr('action'),
                type: 'POST',
                data: loData,
                cache: false,
                contentType: false,
                processData: false,
                success: function (psReturn) {
                    // Server answers the literal string 'OK' on success,
                    // otherwise an error fragment to display.
                    if ('OK' == psReturn) {
                        $("#div_RBZ_commonPopup").remove();
                        $("#div_RBZ_commonShadow").hide();
                        return false;
                    } else {
                        $("#div_RBZ_error").text('');
                        $("#div_RBZ_error").append(psReturn);
                    }
                }
            });
            return false;
        });
    },
    /* ==== Closing of the welcome popup ==== */
    closePopupWelcome: function () {
        /* ---- Close the (welcome) popup ---- */
        $(document).on('click', '#img_RBZ_welcomePopupClose', function() {
            $('#div_RBZ_welcomePopupInside').remove();
            return false;
        });
    },
    // Monotonic counter used to index dynamically added email rows.
    _contactId: 0
}; // Contact.prototype
//==== Single shared Contact instance used by the pages ====
var goContact = new Contact();
|
# Sets reasonable macOS defaults.
#
# Or, in other words, set shit how I like in macOS.
#
# The original idea (and a couple settings) were grabbed from:
#   https://github.com/mathiasbynens/dotfiles/blob/master/.macos
#
# Can be checked on oficial docs as well:
#   https://developer.apple.com/documentation/devicemanagement
#
# Run ./set-defaults.sh and you'll be good to go.

# Hide Safari's bookmark bar.
defaults write com.apple.Safari ShowFavoritesBar -bool false

# Set up Safari for development.
defaults write com.apple.Safari IncludeInternalDebugMenu -bool true
defaults write com.apple.Safari IncludeDevelopMenu -bool true
defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true
defaults write com.apple.Safari "com.apple.Safari.ContentPageGroupIdentifier.WebKit2DeveloperExtrasEnabled" -bool true
defaults write NSGlobalDomain WebKitDeveloperExtras -bool true

# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode2 -bool true

# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint2 -bool true

# Save to disk (not to iCloud) by default
defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false

# Automatically quit printer app once the print jobs complete
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true

###############################################################################
# Keyboard & Input                                                            #
###############################################################################

# Disable press-and-hold for keys in favor of key repeat.
defaults write -g ApplePressAndHoldEnabled -bool false

# Enable full keyboard access for all controls
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3

# Set a blazingly fast keyboard repeat rate
defaults write NSGlobalDomain KeyRepeat -int 1
defaults write NSGlobalDomain InitialKeyRepeat -int 15

# Automatically illuminate built-in MacBook keyboard in low light
defaults write com.apple.BezelServices kDim -bool true
# Turn off keyboard illumination when computer is not used for 5 minutes
defaults write com.apple.BezelServices kDimTime -int 300

# Disable auto-correct
defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false

###############################################################################
# Trackpad, mouse, Bluetooth accessories                                      #
###############################################################################

# Trackpad: enable tap to click for this user and for the login screen
defaults write com.apple.AppleMultitouchTrackpad Clicking -bool true
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true
defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1

# Trackpad: map bottom right corner to right-click
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadCornerSecondaryClick -int 2
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadRightClick -bool true
defaults -currentHost write NSGlobalDomain com.apple.trackpad.trackpadCornerClickBehavior -int 1
defaults -currentHost write NSGlobalDomain com.apple.trackpad.enableSecondaryClick -bool true

# Trackpad: swipe between pages with three fingers
defaults write NSGlobalDomain AppleEnableSwipeNavigateWithScrolls -bool true
defaults -currentHost write NSGlobalDomain com.apple.trackpad.threeFingerHorizSwipeGesture -int 1
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadThreeFingerHorizSwipeGesture -int 1

# Increase sound quality for Bluetooth headphones/headsets
defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Min (editable)" -int 40

###############################################################################
# Screen                                                                      #
###############################################################################

# Require password immediately after sleep or screen saver begins
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0

# Save screenshots to the desktop
defaults write com.apple.screencapture location -string "${HOME}/Desktop"

# Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF)
defaults write com.apple.screencapture type -string "png"

# Disable shadow in screenshots
defaults write com.apple.screencapture disable-shadow -bool true

# Enable subpixel font rendering on non-Apple LCDs
defaults write NSGlobalDomain AppleFontSmoothing -int 2

###############################################################################
# Finder                                                                      #
###############################################################################

# Finder: show hidden files by default
defaults write com.apple.finder AppleShowAllFiles -bool true

# Finder: show all filename extensions
defaults write NSGlobalDomain AppleShowAllExtensions -bool true

# Finder: show status bar
defaults write com.apple.finder ShowStatusBar -bool true

# Finder: show path bar
defaults write com.apple.finder ShowPathbar -bool true

# Finder: allow text selection in Quick Look
defaults write com.apple.finder QLEnableTextSelection -bool true

# Display full POSIX path as Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true

# Keep folders on top when sorting by name
defaults write com.apple.finder _FXSortFoldersFirst -bool true

# When performing a search, search the current folder by default
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"

# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false

# Avoid creating .DS_Store files on network or USB volumes
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true

# Use AirDrop over every interface. srsly this should be a default.
# (Typed as -bool for consistency with the rest of this script.)
defaults write com.apple.NetworkBrowser BrowseAllInterfaces -bool true

# Show the ~/Library folder.
chflags nohidden ~/Library

# Set the Finder prefs for showing a few different volumes on the Desktop.
defaults write com.apple.finder ShowExternalHardDrivesOnDesktop -bool true
defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true

# Always open everything in Finder's list view.
# Use list view in all Finder windows by default
# Four-letter codes for the other view modes: `icnv`, `clmv`, `Flwv`
# (The previous duplicate, untyped write of this key was removed.)
defaults write com.apple.finder FXPreferredViewStyle -string "Nlsv"

###############################################################################
# Dock                                                                        #
###############################################################################

# Show indicator lights for open applications in the Dock
defaults write com.apple.dock show-process-indicators -bool true

# Set the icon size of Dock items to 28 pixels
defaults write com.apple.dock tilesize -int 28

# Set the orientation of the dock: bottom, left, right
defaults write com.apple.dock orientation "left"

# Don't show recently used applications in the Dock
# (Domain case fixed: was "com.Apple.Dock", inconsistent with every other
# "com.apple.dock" write in this section.)
defaults write com.apple.dock show-recents -bool false

# Enable highlight hover effect for the grid view of a stack (Dock)
defaults write com.apple.dock mouse-over-hilite-stack -bool true

# Change minimize/maximize window effect
defaults write com.apple.dock mineffect -string "scale"

# Minimize windows into their application’s icon
defaults write com.apple.dock minimize-to-application -bool true

###############################################################################
# Mail                                                                        #
###############################################################################

# Display emails in threaded mode
defaults write com.apple.mail DraftsViewerAttributes -dict-add "DisplayInThreadedMode" -string "yes"

# Copy email addresses as `foo@example.com` instead of `Foo Bar <foo@example.com>` in Mail.app
defaults write com.apple.mail AddressesIncludeNameOnPasteboard -bool false

# Mark all messages as read when opening a conversation
defaults write com.apple.mail ConversationViewMarkAllAsRead -bool true

# Automatically check for new message (not every 5 minutes)
defaults write com.apple.mail AutoFetch -bool true
defaults write com.apple.mail PollTime -string "-1"

# Show most recent message at the top in conversations
defaults write com.apple.mail ConversationViewSortDescending -bool true

###############################################################################
# Calendar                                                                    #
###############################################################################

# Show week numbers (10.8 only)
defaults write com.apple.iCal "Show Week Numbers" -bool true

# Week starts on monday
defaults write com.apple.iCal "first day of week" -int 1

###############################################################################
# Activity Monitor                                                            #
###############################################################################

# Show the main window when launching Activity Monitor
defaults write com.apple.ActivityMonitor OpenMainWindow -bool true

# Show all processes in Activity Monitor
defaults write com.apple.ActivityMonitor ShowCategory -int 0

# Sort Activity Monitor results by CPU usage
defaults write com.apple.ActivityMonitor SortColumn -string "CPUUsage"
defaults write com.apple.ActivityMonitor SortDirection -int 0

###############################################################################
# Software Updates                                                            #
###############################################################################

# Enable the automatic update check
defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool true

# Check for software updates daily, not just once per week
defaults write com.apple.SoftwareUpdate ScheduleFrequency -bool true

# Download newly available updates in background
defaults write com.apple.SoftwareUpdate AutomaticDownload -bool true

# Install System data files & security updates
defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -bool true

# Turn on app auto-update
defaults write com.apple.commerce AutoUpdate -bool true

# Allow the App Store to reboot machine on macOS updates
defaults write com.apple.commerce AutoUpdateRestartRequired -bool true

###############################################################################
# Kill affected applications                                                  #
###############################################################################

for app in "Activity Monitor" \
	"Calendar" \
	"Dock" \
	"Finder"; do
	# POSIX-portable redirection: "&>" is a bashism that, under plain sh,
	# backgrounds the command instead of redirecting both streams.
	killall "${app}" >/dev/null 2>&1
done

echo "Done. Note that some of these changes require a logout/restart to take effect."
|
<reponame>251205668/Magic_Shop
import { parseSpecValue } from "../../utils/sku"
import { Cart } from "../../model/cart"
// components/car-item/index.js
// Shared cart model instance used by every method of this component.
const cart = new Cart()
// components/cart-item: renders a single cart entry (spec text, discount flag,
// stock/online state, quantity) and forwards user actions to the Cart model.
Component({
  /**
   * 组件的属性列表
   */
  properties: {
    // The cart entry to render — presumably { sku, count, checked, skuId };
    // TODO confirm against model/cart.
    cartItem:Object
  },
  observers:{
    // Re-derive all display state whenever the bound cart item changes.
    cartItem:function(cartItem){
      if(!cartItem){
        return
      }
      const specStr = parseSpecValue(cartItem.sku.specs)
      // Treat any non-empty discount_price as "discounted".
      const discount = cartItem.sku.discount_price ?true:false
      const soldOut = cart.isSoldOut(cartItem)
      const online = cart.isOnline(cartItem)
      this.setData({
        specStr,
        discount,
        soldOut,
        online,
        stock: cartItem.sku.stock,
        skuCount: cartItem.count,
        isChecked:cartItem.checked
      })
    }
  },
  /**
   * 组件的初始数据
   */
  data: {
    specStr:String,
    discount:Boolean,
    soldOut:Boolean,
    online:Boolean,
    stock: 99,
    skuCount:1,
    isChecked:true
  },
  /**
   * 组件的方法列表
   */
  methods: {
    // Remove this item from the cart model, clear the local binding, and
    // notify the parent page.
    onDelete(){
      const skuId = this.properties.cartItem.skuId
      cart.removeItem(skuId)
      this.setData({
        cartItem:null
      })
      this.triggerEvent("deleteItem",{skuId})
    },
    // Mirror the checkbox state locally and toggle it in the cart model.
    selectCheckBox(e){
      const checked = e.detail.checked
      this.setData({
        isChecked:checked
      })
      cart.checkItem(this.properties.cartItem.skuId)
      this.triggerEvent("checkItem",{})
    },
    // Propagate a quantity change to the cart model and the parent page.
    // NOTE(review): "replcaSkuCount" looks misspelled — confirm it matches
    // the Cart model's actual method name before renaming anything.
    onChangeCount(e){
      const count = e.detail.count
      const skuId = this.properties.cartItem.skuId
      cart.replcaSkuCount(skuId,count)
      this.triggerEvent("changeCount",{
        count
      })
    },
    // Navigate to the product detail page for this item's SPU.
    todetail(){
      const spuId = this.properties.cartItem.sku.spu_id
      wx.navigateTo({
        url: `/pages/detail/index?pid=${spuId}`
      })
    }
  }
})
|
<filename>test/util/grpc_matcher.go
// Copyright 2021 The Rode Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package util
import (
"fmt"
"github.com/onsi/gomega/types"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
// haveGrpcStatusMatcher is a Gomega matcher that checks whether an error
// carries a specific gRPC status code. "actual" is recorded by Match so the
// failure messages can report what was actually observed.
type haveGrpcStatusMatcher struct {
	expected codes.Code
	actual   codes.Code
}
// HaveGrpcStatus returns a matcher asserting that an error's gRPC status code
// equals the expected code.
func HaveGrpcStatus(expected codes.Code) types.GomegaMatcher {
	return &haveGrpcStatusMatcher{expected: expected}
}
// Match extracts the gRPC status from actual (which must be a status error),
// records its code for use in the failure messages, and compares it against
// the expected code.
func (h *haveGrpcStatusMatcher) Match(actual interface{}) (bool, error) {
	statusError, err := toGrpcStatus(actual)
	if err != nil {
		return false, err
	}
	h.actual = statusError.Code()
	return h.actual == h.expected, nil
}
// FailureMessage reports both the expected and the observed status code, each
// rendered as name and numeric value (e.g. "NotFound (5)").
func (h *haveGrpcStatusMatcher) FailureMessage(_ interface{}) string {
	return fmt.Sprintf("Expected gRPC status code to be %[1]s (%[1]d), but was %[2]s (%[2]d)", h.expected, h.actual)
}
// NegatedFailureMessage fires when the codes matched but the assertion was
// negated; expected and actual are equal at that point, so reporting h.actual
// is equivalent to reporting h.expected.
func (h *haveGrpcStatusMatcher) NegatedFailureMessage(_ interface{}) string {
	return fmt.Sprintf("Expected gRPC status code not to equal %[1]s (%[1]d)", h.actual)
}
// toGrpcStatus coerces a matcher input into a gRPC *status.Status. It fails
// with a descriptive error when the value is nil, is not an error, or is an
// error that does not wrap a gRPC status.
func toGrpcStatus(actual interface{}) (*status.Status, error) {
	if actual == nil {
		return nil, fmt.Errorf("expected a gRPC status, but was nil")
	}
	asError, ok := actual.(error)
	if !ok {
		return nil, fmt.Errorf("expected %v to be an error, but was of type %[1]T", actual)
	}
	st, ok := status.FromError(asError)
	if !ok {
		return nil, fmt.Errorf("'%v' was an error, but not a gRPC status", asError)
	}
	return st, nil
}
|
// Sorts the given list in place, ascending by grade (natural ordering of the
// grade type). NOTE(review): throws NullPointerException if the list holds a
// null student or a null grade — confirm callers guarantee non-null.
public void sortStudentsByGrade(List<Student> students) {
    Collections.sort(students, new Comparator<Student>() {
        @Override
        public int compare(Student s1, Student s2) {
            // Delegates to the grade's own compareTo (grade is assumed Comparable).
            return s1.grade.compareTo(s2.grade);
        }
    });
}
# Sync local ~/.config app configs into this repo, then commit.
# NOTE(review): "fish" is removed below but never copied back, and "cmus" is
# copied but never removed — confirm both lists are intentional.
# remove old configs
rm -r bspwm rofi fish polybar dunst kitty sxhkd
# copy new configs from .config
cp -r ~/.config/bspwm .
cp -r ~/.config/cmus .
cp -r ~/.config/rofi .
cp -r ~/.config/polybar .
cp -r ~/.config/dunst .
cp -r ~/.config/kitty .
cp -r ~/.config/sxhkd .
# make an automatic commit
git add . && git commit -m "This is an automatic commit to keep the repo up to date"
|
def gcd(a, b):
    """Greatest common divisor via Euclid's algorithm (iterative form)."""
    while a != 0:
        a, b = b % a, a
    return b


a = 15
b = 20
print("GCD of", a, "and", b, "is", gcd(a, b))
<reponame>deLibertate/Insurances<gh_stars>0
package dao;
import data.Client;
/**
 * Basic CRUD DAO contract for client entities.
 *
 * Bug fix: the type parameter was previously named {@code Client}, which
 * shadowed the imported {@code data.Client} class — the import was dead and
 * the method signatures misleading. Renaming it to {@code T} is source- and
 * binary-compatible for implementors (erasure is unchanged, and existing
 * {@code IClientDao<Client>} bindings still resolve to {@code data.Client}).
 *
 * @param <T> the entity type handled by this DAO (typically {@link Client})
 */
public interface IClientDao<T> {

    /** Persists a new entity. */
    void create(T entity);

    /** Loads the entity with the given id. */
    T read(long id);

    /** Updates an existing entity. */
    void update(T entity);

    /** Removes the entity with the given id. */
    void delete(long id);

    /** Inserts the entity under an explicit id. */
    void insert(T man, int id);
}
|
<gh_stars>100-1000
/*
* Copyright © 2021 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*
*/
import {
blsAggregate,
blsAggregateVerify,
blsFastAggregateVerify,
blsSign,
blsSkToPk,
blsVerify,
blsPopProve,
blsPopVerify,
} from '../../src/bls_lib';
import { getAllFiles, hexToBuffer, loadSpecFile } from '../helpers';
/** Shape of an eth2 "sign" fixture: private key + message in; hex signature out, or null for an invalid case. */
interface EthSignSpec {
	input: {
		privkey: string;
		message: string;
	};
	output: string | null;
}

/** Shape of an eth2 "verify" fixture: pubkey/message/signature in; expected verification result out. */
interface EthVerifySpec {
	input: {
		pubkey: string;
		message: string;
		signature: string;
	};
	output: boolean;
}

/** Shape of an eth2 "aggregate" fixture: hex signatures in; aggregated hex signature out. */
interface EthAggrSpec {
	input: string[];
	output: string;
}

/** Shape of an eth2 "aggregate_verify" fixture: one message per pubkey. */
interface EthAggrVerifySpec {
	input: { pubkeys: string[]; messages: string[]; signature: string };
	output: boolean;
}

/** Shape of an eth2 "fast_aggregate_verify" fixture: a single shared message. */
interface EthFastAggrVerifySpec {
	input: { pubkeys: string[]; message: string; signature: string };
	output: boolean;
}
// Conformance tests for the BLS wrappers, driven by eth2 and project-local
// spec fixture files loaded from disk (see getAllFiles/loadSpecFile helpers).
describe('bls_lib', () => {
	describe('blsSkToPk', () => {
		// One generated case per fixture file: secret key in, public key out.
		describe.each(getAllFiles(['bls_specs/sk_to_pk']))('%s', ({ path }) => {
			it('should convert to valid pk', () => {
				const { input, output } = loadSpecFile<{ input: string; output: string }>(path);
				expect(blsSkToPk(hexToBuffer(input)).toString('hex')).toEqual(
					hexToBuffer(output).toString('hex'),
				);
			});
		});
	});

	describe('blsSign', () => {
		// Signing with the zero private key is not a use case according to the BLS specifications
		describe.each(getAllFiles(['eth2_bls_specs/sign', 'bls_specs/sign'], /sign_case_zero_privkey/))(
			'%s',
			({ path }) => {
				it('should generate valid signature', () => {
					const {
						input: { privkey, message },
						output,
					} = loadSpecFile<EthSignSpec>(path);

					const signature = blsSign(hexToBuffer(privkey), hexToBuffer(message));

					expect(signature.toString('hex')).toEqual(hexToBuffer(output).toString('hex'));
				});
			},
		);
	});

	describe('blsVerify', () => {
		// Covers both valid and tampered signatures; "output" carries the expected boolean.
		describe.each(getAllFiles(['eth2_bls_specs/verify', 'bls_specs/verify']))('%s', ({ path }) => {
			it('should verify signatures', () => {
				const {
					input: { pubkey, message, signature },
					output,
				} = loadSpecFile<EthVerifySpec>(path);

				const verify = blsVerify(hexToBuffer(pubkey), hexToBuffer(message), hexToBuffer(signature));

				expect(verify).toEqual(output);
			});
		});
	});

	describe('blsAggregate', () => {
		describe.each(getAllFiles(['eth2_bls_specs/aggregate', 'bls_specs/aggregate']))(
			'%s',
			({ path }) => {
				it('should aggregate signatures', () => {
					const { input, output } = loadSpecFile<EthAggrSpec>(path);

					const signature = blsAggregate(input.map(hexToBuffer));

					if (signature) {
						expect(signature.toString('hex')).toEqual(hexToBuffer(output).toString('hex'));
					} else {
						// In one of eth2 specs, they refer null as INVALID case
						const expectedOutput = output ?? false;
						expect(signature).toEqual(expectedOutput);
					}
				});
			},
		);
	});

	describe('blsAggregateVerify', () => {
		// Multi-message verification: pubkeys[i] is checked against messages[i].
		describe.each(getAllFiles(['eth2_bls_specs/aggregate_verify']))('%s', ({ path }) => {
			it('should verify messages', () => {
				const {
					input: { pubkeys, messages, signature },
					output,
				} = loadSpecFile<EthAggrVerifySpec>(path);

				const verify = blsAggregateVerify(
					pubkeys.map(hexToBuffer),
					messages.map(hexToBuffer),
					hexToBuffer(signature),
				);

				expect(verify).toEqual(output);
			});
		});
	});

	describe('blsFastAggregateVerify', () => {
		// The ignored test case "fast_aggregate_verify_infinity_pubkey" contains pk at infinity (identify point)
		// Since implementation standard https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-04#section-3.3.4
		// specifies to not validate public keys in "FastAggregateVerify"
		// so we why our implementation returns "true" and eth2 specs mentioned it as "false"
		describe.each(
			getAllFiles(
				['eth2_bls_specs/fast_aggregate_verify', 'bls_specs/fast_aggregate_verify'],
				/fast_aggregate_verify_infinity_pubkey/,
			),
		)('%s', ({ path }) => {
			it('should verify message', () => {
				const {
					input: { pubkeys, message, signature },
					output,
				} = loadSpecFile<EthFastAggrVerifySpec>(path);

				const verify = blsFastAggregateVerify(
					pubkeys.map(hexToBuffer),
					hexToBuffer(message),
					hexToBuffer(signature),
				);

				expect(verify).toEqual(output);
			});
		});
	});

	describe('blsPopProve', () => {
		// Proof-of-possession generation from a secret key.
		describe.each(getAllFiles(['bls_specs/pop_prove']))('%s', ({ path }) => {
			it('should create valid proof of possession', () => {
				const { input, output } = loadSpecFile<{ input: string; output: string }>(path);

				expect(blsPopProve(hexToBuffer(input)).toString('hex')).toEqual(
					hexToBuffer(output).toString('hex'),
				);
			});
		});
	});

	describe('blsPopVerify', () => {
		// Proof-of-possession verification against a public key.
		describe.each(getAllFiles(['bls_specs/pop_verify']))('%s', ({ path }) => {
			it('should verify proof of possession', () => {
				const {
					input: { pk, proof },
					output,
				} = loadSpecFile<{ input: { pk: string; proof: string }; output: boolean }>(path);

				expect(blsPopVerify(hexToBuffer(pk), hexToBuffer(proof))).toEqual(output);
			});
		});
	});
});
|
<gh_stars>1-10
package com.google.developers.event.http;
import com.google.api.client.auth.oauth2.AuthorizationCodeFlow;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.extensions.appengine.auth.oauth2.AbstractAppEngineAuthorizationCodeServlet;
import com.google.api.client.http.*;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.util.store.DataStoreFactory;
import com.google.api.services.plus.Plus;
import com.google.api.services.plus.model.Person;
import com.google.appengine.api.datastore.*;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.developers.api.CellFeedProcessor;
import com.google.developers.api.GoogleOAuth2;
import com.google.developers.api.SpreadsheetManager;
import com.google.developers.event.ChapterSpreadsheet;
import com.google.developers.event.DevelopersSharedModule;
import com.google.gdata.data.spreadsheet.CellEntry;
import com.google.gdata.util.ServiceException;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
 * Handles DELETE requests that revoke the current user's stored Google OAuth2
 * refresh token: looks the token up in the chapter spreadsheet by the caller's
 * Google+ id, clears the spreadsheet cell, revokes the token against Google's
 * OAuth2 revocation endpoint, and finally deletes the caller's "Person"
 * entity from the App Engine datastore.
 *
 * Created by renfeng on 8/3/15.
 */
@Singleton
public class OAuth2RevokeServlet extends AbstractAppEngineAuthorizationCodeServlet
        implements ChapterSpreadsheet {

    private static final Logger logger = LoggerFactory
            .getLogger(OAuth2RevokeServlet.class);

    private final HttpTransport transport;
    private final JsonFactory jsonFactory;
    private final OAuth2Utils oauth2Utils;
    private final DataStoreFactory dataStoreFactory;

    @Inject
    public OAuth2RevokeServlet(
            HttpTransport transport, JsonFactory jsonFactory,
            OAuth2Utils oauth2Utils, DataStoreFactory dataStoreFactory) {
        this.transport = transport;
        this.jsonFactory = jsonFactory;
        this.oauth2Utils = oauth2Utils;
        this.dataStoreFactory = dataStoreFactory;
    }

    /**
     * Revokes the caller's refresh token. The response status mirrors the
     * status returned by Google's revocation endpoint when a token was found;
     * when no matching spreadsheet row exists, the response status is left
     * unchanged.
     */
    @Override
    protected void doDelete(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {

        SpreadsheetManager spreadsheetManager = new SpreadsheetManager(getCredential());

        /*
         * get G+ ID
         * https://developers.google.com/+/web/api/rest/latest/people/get
         */
//		AuthorizationCodeFlow authFlow = initializeFlow();
//		Credential credential = authFlow.loadCredential(getUserId(req));
        Credential credential = getCredential();

        // Build the Plus object using the credentials
        Plus plus = new Plus.Builder(transport, jsonFactory, credential)
                .setApplicationName(GoogleOAuth2.APPLICATION_NAME).build();
        // Make the API call
        Person profile = plus.people().get("me").execute();
        final String gplusId = profile.getId();
        logger.trace("https://plus.google.com/" + gplusId);

        // Holds the REFRESH_TOKEN cell of the row whose GPLUS_ID matches the
        // caller. NOTE(review): a ThreadLocal is used here only as a mutable
        // holder written from the processor callback — confirm this is not
        // relied upon across threads.
        final ThreadLocal<CellEntry> cellEntryThreadLocal = new ThreadLocal<>();

        CellFeedProcessor processor = new CellFeedProcessor(spreadsheetManager.getService()) {

            // Cells of the row currently being scanned, keyed by column name.
            Map<String, CellEntry> cellMap = new HashMap<>();

            @Override
            protected boolean processDataRow(Map<String, String> valueMap, URL cellFeedURL)
                    throws IOException, ServiceException {

                // Stop scanning once the caller's row is found; remember its
                // refresh-token cell.
                if (gplusId.equals(valueMap.get(GPLUS_ID))) {
                    cellEntryThreadLocal.set(cellMap.get(REFRESH_TOKEN));
                    return false;
                }

                cellMap = new HashMap<>();

                return true;
            }

            @Override
            protected void processDataColumn(CellEntry cell, String columnName) {
                cellMap.put(columnName, cell);
            }
        };
        try {
            processor.process(spreadsheetManager.getWorksheet(DevelopersSharedModule.getMessage("chapter")),
                    GPLUS_ID, REFRESH_TOKEN, CHAPTER_PAGE);
        } catch (ServiceException e) {
            logger.error("failed to load refresh token for chapter, " + gplusId, e);
        }

        CellEntry cellEntry = cellEntryThreadLocal.get();
        if (cellEntry != null) {
            // Clear the stored token first, then revoke it with Google.
            String token = cellEntry.getCell().getInputValue();
            cellEntry.changeInputValueLocal(null);
            try {
                cellEntry.update();

                HttpRequestFactory factory = transport.createRequestFactory();
                GenericUrl url = new GenericUrl("https://accounts.google.com/o/oauth2/revoke?token=" + token);
                HttpRequest request = factory.buildGetRequest(url);
                // Surface non-200 responses via the status code instead of an exception.
                request.setThrowExceptionOnExecuteError(false);
                HttpResponse response = request.execute();
                if (response.getStatusCode() == 200) {
                    /*
                     * The token can be an access token or a refresh token.
                     * If the token is an access token and it has a corresponding refresh token,
                     * the refresh token will also be revoked.
                     *
                     * If the revocation is successfully processed, then the status code of the response is 200.
                     * For error conditions, a status code 400 is returned along with an error code.
                     *
                     * https://developers.google.com/identity/protocols/OAuth2WebServer
                     */
                }
                resp.setStatus(response.getStatusCode());
            } catch (ServiceException e) {
                logger.error("failed to save refresh token for chapter, " + gplusId, e);
            }
        }

        // Get the Datastore Service
        DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();

        UserService userService = UserServiceFactory.getUserService();
        String email = userService.getCurrentUser().getEmail();
        // Delete every "Person" entity whose name equals the caller's email.
        Query q = new Query("Person").setFilter(new Query.FilterPredicate("name", Query.FilterOperator.EQUAL, email));

        // Use PreparedQuery interface to retrieve results
        PreparedQuery pq = datastore.prepare(q);
        for (Entity result : pq.asIterable()) {
            datastore.delete(result.getKey());
        }
    }

    @Override
    protected AuthorizationCodeFlow initializeFlow() throws ServletException, IOException {
        return oauth2Utils.initializeFlow();
    }

    @Override
    protected String getRedirectUri(HttpServletRequest req) throws ServletException, IOException {
        return oauth2Utils.getRedirectUri(req);
    }
}
|
package com.huatuo.net.thread;
import java.util.ArrayList;
import java.util.HashMap;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.content.Context;
import android.os.Handler;
import com.huatuo.base.MyApplication;
import com.huatuo.dictionary.MsgId;
import com.huatuo.net.http.ActionResponse;
import com.huatuo.net.http.HttpAgent;
/**
 * Background task that fetches one page of the current user's bill/detail
 * list ("user/bill/get") and reports the outcome to the supplied Handler
 * (DOWN_DATA_S on success, NET_NOT_CONNECT / DOWN_DATA_F otherwise).
 */
public class GetMingXiList implements Runnable {

    private Handler mHandler;          // receives result message ids
    private Context mContext;          // used by HttpAgent
    private ArrayList<JSONObject> mingXiList;  // parsed rows of the "data" array
    private String pageStart;          // paging: first row index (as string)
    private String pageOffset;         // paging: page size (as string)
    private JSONObject outObj;         // raw response body for callers

    public GetMingXiList(Context mContext, Handler mHandler,String pageStart,String pageOffset) {
        this.mContext = mContext;
        this.mHandler = mHandler;
        mingXiList = new ArrayList<JSONObject>();
        this.pageStart = pageStart;
        this.pageOffset = pageOffset;
    }

    @Override
    public void run() {
        try {
            // Build the request payload: current user id plus paging window.
            HashMap<String, String> inJsonObject = new HashMap<String, String>();
            inJsonObject.put("userID", MyApplication.getUserID());
            inJsonObject.put("pageStart", pageStart);
            inJsonObject.put("pageOffset", pageOffset);
            HttpAgent httpAgent = new HttpAgent("user/bill/get", inJsonObject, mContext);
            ActionResponse response = httpAgent.sendRequest(null);
            int code = response.getCode();
            // Parse rows before signalling, so the handler can read them immediately.
            initRsultData(response);
            if (code == 0) {
                mHandler.sendEmptyMessage(MsgId.DOWN_DATA_S);
            } else if (code == MsgId.NET_NOT_CONNECT) {
                mHandler.sendEmptyMessage(MsgId.NET_NOT_CONNECT);
            } else {
                mHandler.sendEmptyMessage(MsgId.DOWN_DATA_F);
            }
        } catch (Exception e) {
            // LogUtil.e(Tag, "LoginInvokeItem run" + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Parses the response body and collects each element of its "data" array.
     * NOTE(review): the method name looks misspelled ("initRsultData") — kept
     * as-is since renaming is out of scope for a documentation pass.
     */
    private void initRsultData(ActionResponse actionRespons) {
        outObj = actionRespons.getRsbody();
        // Rows of the bill/detail list (the "data" array).
        JSONArray mingXiListArray = outObj.optJSONArray("data");
        JSONObject json;
        if (mingXiListArray == null || mingXiListArray.length() == 0) {
            return;
        } else {
            for (int i = 0; i < mingXiListArray.length(); i++) {
                try {
                    json = mingXiListArray.getJSONObject(i);
                } catch (JSONException e) {
                    // Skip malformed rows instead of aborting the whole page.
                    e.printStackTrace();
                    continue;
                }
                mingXiList.add(json);
            }
        }
    }

    /**
     * Returns the parsed detail rows (empty until run() completes).
     */
    public ArrayList<JSONObject> getMingXiList() {
        return mingXiList;
    }

    // Raw response body, for callers that need fields beyond "data".
    public JSONObject getOutObj(){
        return outObj;
    }
}
|
const express = require('express');
const router = express.Router();
const bodyParser = require('body-parser');
const { getNetworkInfo, getChannels } = require('../lnd');
const { groupChannels, getLocalChannel } = require('../routing');

router.use(bodyParser.json());

// Shared error responder: all endpoints report failures the same way.
const sendError = (res, e) => res.status(500).json({ message: e.message });

// Fetch all channels and return one side of the grouping
// ('a' = source side, 'b' = destination side). Deduplicates the
// previously copy-pasted bodies of the two channel endpoints.
async function groupedChannels(side) {
  const channelsData = await getChannels();
  return groupChannels(channelsData.channels)[side];
}

router.get('/get_ln_info', async (req, res, next) => {
  try {
    const info = await getNetworkInfo();
    return res.status(200).json(info);
  } catch (e) {
    return sendError(res, e);
  }
});

router.get('/get_src_channels', async (req, res, next) => {
  try {
    return res.status(200).json(await groupedChannels('a'));
  } catch (e) {
    return sendError(res, e);
  }
});

router.get('/get_dst_channels', async (req, res, next) => {
  try {
    return res.status(200).json(await groupedChannels('b'));
  } catch (e) {
    return sendError(res, e);
  }
});

router.get('/get_local_channel', async (req, res, next) => {
  // NOTE(review): 400 (bad request) / 404 (not found) would be more accurate
  // than 500 below, but the original status codes are preserved so existing
  // clients keep working.
  if (typeof req.query.channel_id === 'undefined') {
    return res.status(500).json({
      message: 'The channel id is required',
    });
  }
  try {
    const channel = await getLocalChannel(req.query.channel_id);
    if (channel) {
      return res.status(200).json(channel);
    }
    return res.status(500).json({
      message: 'Channel not found',
    });
  } catch (e) {
    return sendError(res, e);
  }
});

module.exports = router;
|
<reponame>maohuang81/newrelic-cli
/*
* Copyright 2017-2018 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package get
import (
"context"
"fmt"
"os"
"github.com/spf13/cobra"
"github.com/IBM/newrelic-cli/newrelic"
"github.com/IBM/newrelic-cli/tracker"
"github.com/IBM/newrelic-cli/utils"
)
// monitorCmd implements `nr get monitor <id>`: fetch one Synthetics monitor
// by id and print it as JSON (or YAML when --output=yaml is given).
var monitorCmd = &cobra.Command{
	Use:     "monitor",
	Short:   "Display a single monitor by id.",
	Example: "nr get monitor <id>",
	Args: func(cmd *cobra.Command, args []string) error {
		if len(args) != 1 {
			var err = fmt.Errorf("length of [flags] should be 1 instead of %d", len(args))
			fmt.Println(err)
			os.Exit(1)
			return err
		}
		return nil
	},
	Run: func(cmd *cobra.Command, args []string) {
		id := args[0]
		monitor, err, _ := GetMonitorByID(id)
		// Bug fix: check the fetch error before any flag parsing. The
		// original reused `err` for flags.GetString, so a fetch failure
		// could be masked by a successful flag read and Print would then
		// receive a nil monitor.
		if err != nil {
			fmt.Println(err)
			os.Exit(1)
			return
		}
		// Default to JSON output; switch to YAML when requested.
		var printer utils.Printer = &utils.JSONPrinter{}
		if flags := cmd.Flags(); flags.Lookup("output") != nil {
			output, ferr := flags.GetString("output")
			if ferr == nil && output == "yaml" {
				printer = &utils.YAMLPrinter{}
			}
		}
		printer.Print(monitor, os.Stdout)
		os.Exit(0)
	},
}
// GetMonitorByID fetches a single Synthetics monitor by its id. For scripted
// monitors (SCRIPT_BROWSER / SCRIPT_API) the monitor script is also fetched
// and attached to the returned monitor; a 404 on the script fetch is treated
// as "no script yet" and substituted with an empty script.
// Returns the monitor, any error, and a tracker.ReturnValue describing the
// outcome for batch processing.
func GetMonitorByID(id string) (*newrelic.Monitor, error, tracker.ReturnValue) {
	fmt.Printf("Enter GetMonitorByID() func, monitor id: %s\n", id)
	client, err := utils.GetNewRelicClient("synthetics")
	if err != nil {
		fmt.Println(err)
		ret := tracker.ToReturnValue(false, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, err, tracker.ERR_CREATE_NR_CLINET, "")
		return nil, err, ret
	}
	monitor, resp, err := client.SyntheticsMonitors.GetByID(context.Background(), id)
	if err != nil {
		fmt.Println(err)
		ret := tracker.ToReturnValue(false, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, err, tracker.ERR_REST_CALL, "")
		return nil, err, ret
	}
	tracker.AppendRESTCallResult(client.SyntheticsMonitors, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, resp.StatusCode, "monitor id: "+id+", monitor name: "+(*monitor.Name))
	if *monitor.Type == "SCRIPT_BROWSER" || *monitor.Type == "SCRIPT_API" {
		monitorID := *monitor.ID
		scriptText, scriptResp, err := client.SyntheticsScript.GetByID(context.Background(), monitorID)
		// Bug fix: guard against a nil response before touching StatusCode.
		// The original dereferenced scriptResp.StatusCode before checking
		// err, which panics when the request itself fails.
		if scriptResp == nil {
			fmt.Println(err)
			ret := tracker.ToReturnValue(false, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, err, tracker.ERR_REST_CALL, "")
			return nil, err, ret
		}
		tracker.AppendRESTCallResult(client.SyntheticsScript, tracker.OPERATION_NAME_GET_MONITOR_SCRIPT, scriptResp.StatusCode, "monitor id: "+monitorID+", monitor name: "+(*monitor.Name))
		if scriptResp.StatusCode == 404 {
			// 404 means the monitor has no script; use an empty one.
			fmt.Printf("Response status code: %d. Get one monitor script, monitor id '%s', monitor name '%s'\n", scriptResp.StatusCode, monitorID, *monitor.Name)
			s := new(newrelic.Script)
			s.ScriptText = new(string)
			scriptText = s
		} else {
			if err != nil {
				fmt.Println(err)
				ret := tracker.ToReturnValue(false, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, err, tracker.ERR_REST_CALL, "")
				return nil, err, ret
			}
			if scriptResp.StatusCode >= 400 {
				fmt.Printf("Response status code: %d. Get one monitor script, monitor id '%s', monitor name '%s'\n", scriptResp.StatusCode, monitorID, *monitor.Name)
				ret := tracker.ToReturnValue(false, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, tracker.ERR_REST_CALL_NOT_2XX, tracker.ERR_REST_CALL_NOT_2XX, "monitor id: "+monitorID+", monitor name: "+(*monitor.Name))
				return nil, err, ret
			}
		}
		monitor.Script = scriptText
	}
	ret := tracker.ToReturnValue(true, tracker.OPERATION_NAME_GET_MONITOR_BY_ID, nil, nil, "")
	return monitor, err, ret
}
// Package-level cache used by IsMonitorNameExists: the full monitor list is
// fetched once via GetMonitors() and reused across subsequent name lookups.
var isAllMonitorsFetched bool = false
var allMonitors []*newrelic.Monitor
// IsMonitorNameExists reports whether a monitor with the given name exists,
// returning the matching monitor when found. The full monitor list is
// fetched lazily on first call and cached in package-level state.
func IsMonitorNameExists(monitorName string) (bool, *newrelic.Monitor, error, tracker.ReturnValue) {
	if !isAllMonitorsFetched {
		fetched, err, rv := GetMonitors()
		if !rv.IsContinue {
			return false, nil, err, rv
		}
		allMonitors = fetched
		isAllMonitorsFetched = true
	}
	okRet := tracker.ToReturnValue(true, tracker.OPERATION_NAME_CHECK_MONITOR_NAME_EXISTS, nil, nil, "")
	for _, candidate := range allMonitors {
		if *candidate.Name == monitorName {
			return true, candidate, nil, okRet
		}
	}
	return false, nil, nil, okRet
}
// GetMonitorByName looks a monitor up by name via the cached monitor list.
// Returns nil (with a success ReturnValue) when no monitor has that name.
func GetMonitorByName(monitorName string) (*newrelic.Monitor, error, tracker.ReturnValue) {
	found, monitor, err, rv := IsMonitorNameExists(monitorName)
	if !rv.IsContinue {
		return nil, err, rv
	}
	rv = tracker.ToReturnValue(true, tracker.OPERATION_NAME_GET_MONITOR_BY_NAME, nil, nil, "")
	if !found {
		return nil, err, rv
	}
	return monitor, err, rv
}
// init registers the monitor subcommand under `nr get`.
func init() {
	GetCmd.AddCommand(monitorCmd)
	// Here you will define your flags and configuration settings.
	// Cobra supports Persistent Flags which will work for this command
	// and all subcommands, e.g.:
	// userCmd.PersistentFlags().String("foo", "", "A help for foo")
	// Cobra supports local flags which will only run when this command
	// is called directly, e.g.:
}
|
from xbmcswift2.plugin import Plugin
class PluginManager:
    """Simple in-memory registry of plugin objects, looked up by ``name``."""

    def __init__(self):
        # Registered plugins, kept in registration order.
        self.plugins = []

    def register_plugin(self, plugin):
        """
        Register a plugin in the manager.

        Args:
            plugin: Plugin object to be registered.
        """
        self.plugins.append(plugin)

    def get_plugin(self, plugin_name):
        """
        Retrieve a plugin by name.

        Args:
            plugin_name: Name of the plugin to retrieve.

        Returns:
            Plugin object with the matching name, or None if not found.
        """
        # First match wins, mirroring registration order.
        return next(
            (candidate for candidate in self.plugins if candidate.name == plugin_name),
            None,
        )
#! /bin/bash -e
#
# Xcode build script to generate the CBL-C exported symbols list.
cd "$SRCROOT/src/exports/"
TEMP_FILE="$DERIVED_FILE_DIR/exports.txt"
RESULT="$DERIVED_FILE_DIR/CBL.exp"
echo "Generating $RESULT"
# Concatenate the per-component export lists into one working file.
cat CBL_Exports.txt Fleece_Exports.txt Dart_Exports.txt Fleece_Apple_Exports.txt >"$TEMP_FILE"
# Enterprise Edition configurations export additional symbols.
if [ "$CONFIGURATION" == "Debug_EE" ] || [ "$CONFIGURATION" == "Release_EE" ]; then
cat CBL_EE_Exports.txt >>"$TEMP_FILE"
fi
# Prefix each symbol line with an underscore (Mach-O C symbol convention);
# lines not starting with an identifier character are dropped.
awk '/^[A-Za-z_]/ { print "_" $0; next }' <"$TEMP_FILE" >"$RESULT"
|
require 'rails_helper'
RSpec.describe FlatReasonsForRejectionPresenter, type: :presenter do
  # A rejected application whose structured_rejection_reasons are fully
  # populated by the :with_structured_rejection_reasons factory trait.
  let(:application_choice) do
    create(
      :application_choice,
      :with_structured_rejection_reasons,
    )
  end
  describe '.build_from_structured_rejection_reasons.new' do
    it 'creates an object based on the provided rejected ApplicationChoice' do
      flat_rejection_reasons = FlatReasonsForRejectionPresenter.build_from_structured_rejection_reasons(ReasonsForRejection.new(application_choice.structured_rejection_reasons))
      # One flat human-readable key per structured rejection field; the
      # expected values mirror the factory trait's data.
      expect(flat_rejection_reasons).to eq(
        {
          'Something you did' => true,
          'Didn’t reply to our interview offer' => true,
          'Didn’t attend interview' => true,
          'Something you did other reason - details' => 'Persistent scratching',
          'Candidate behaviour - what to improve' => 'Not scratch so much',
          'Quality of application' => true,
          'Personal statement' => true,
          'Personal statement - what to improve' => 'Use a spellchecker',
          'Subject knowledge' => true,
          'Subject knowledge - what to improve' => 'Claiming to be the \'world\'s leading expert\' seemed a bit strong',
          'Quality of application - what to improve' => 'Study harder',
          'Quality of application other reason - details' => 'Lights on but nobody home',
          'Qualifications' => true,
          'No Maths GCSE grade 4 (C) or above, or valid equivalent' => false,
          'No English GCSE grade 4 (C) or above, or valid equivalent' => true,
          'No Science GCSE grade 4 (C) or above, or valid equivalent (for primary applicants)' => false,
          'No degree' => false,
          'Qualifications other reason - details' => 'All the other stuff',
          'Performance at interview' => true,
          'Performance at interview - what to improve' => 'Be fully dressed',
          'Course full' => false,
          'They offered you a place on another course' => false,
          'Honesty and professionalism' => true,
          'Information given on application form false or inaccurate' => true,
          'Information given on application form false or inaccurate - details' => 'Fake news',
          'Evidence of plagiarism in personal statement or elsewhere' => false,
          'Evidence of plagiarism in personal statement or elsewhere - details' => nil,
          'References didn’t support application' => true,
          'References didn’t support application - details' => 'Clearly not a popular student',
          'Honesty and professionalism other reason - details' => nil,
          'Safeguarding issues' => true,
          'Information disclosed by candidate makes them unsuitable to work with children' => false,
          'Information disclosed by candidate makes them unsuitable to work with children - details' => nil,
          'Information revealed by our vetting process makes the candidate unsuitable to work with children' => false,
          'Information revealed by our vetting process makes the candidate unsuitable to work with children - details' => nil,
          'Safeguarding issues other reason - details' => 'We need to run further checks',
          'Additional advice' => false,
          'Future applications' => false,
          'why are you rejecting this application details' => nil,
        },
      )
    end
  end
  describe '.build_top_level_reasons' do
    it 'creates a string containing the rejection reasons' do
      # Only the top-level (category) reasons, newline-separated.
      rejection_export_line = FlatReasonsForRejectionPresenter.build_top_level_reasons(application_choice.structured_rejection_reasons)
      expect(rejection_export_line).to eq(
        "Something you did\nHonesty and professionalism\nPerformance at interview\nQualifications\nQuality of application\nSafeguarding issues",
      )
    end
  end
end
|
<filename>recipes/rabbitmq-server.rb<gh_stars>10-100
#
# Cookbook:: openstack-ops-messaging
# Recipe:: rabbitmq-server
#
# Copyright:: 2013-2021, Chef Software, Inc.
# Copyright:: 2013-2021, AT&T Services, Inc.
# Copyright:: 2013-2021, <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Reopen the Chef Recipe class so the Openstack cookbook helpers
# (get_password, bind_address, ...) are available in this recipe.
class ::Chef::Recipe
  include ::Openstack
end
# RabbitMQ credentials and vhost for the OpenStack services.
user = node['openstack']['mq']['user']
pass = get_password 'user', user
vhost = node['openstack']['mq']['vhost']
bind_mq = node['openstack']['bind_service']['mq']
# (Removed a redundant `bind_mq = bind_mq` self-assignment here.)
bind_mq_address = bind_address bind_mq

# Used by OpenStack#rabbit_servers/#rabbit_server
node.default['openstack']['mq']['listen'] = bind_mq_address

if node['openstack']['mq']['rabbitmq']['use_ssl']
  if node['rabbitmq']['ssl_port'] != bind_mq['port']
    node.default['rabbitmq']['ssl_port'] = bind_mq['port']
  else
    # NOTE(review): this branch fires when the SSL port already equals the
    # requested bind port, which looks inverted relative to the message —
    # confirm the intended TCP-vs-SSL port conflict check.
    Chef::Log.error "Unable to listen on the port #{bind_mq['port']} for RabbitMQ TCP, which is listened on by SSL!"
  end
else
  node.default['rabbitmq']['port'] = bind_mq['port']
end
node.default['rabbitmq']['address'] = bind_mq_address
node.default['rabbitmq']['nodename'] = "#{user}@#{node['hostname']}"

# Clustering
if node['openstack']['mq']['cluster']
  node.default['rabbitmq']['clustering']['enable'] = node['openstack']['mq']['cluster']
  node.default['rabbitmq']['erlang_cookie'] = get_password 'service', 'rabbit_cookie'
  if node['openstack']['mq']['search_for_cluster_disk_nodes']
    # Every node in this environment running this recipe becomes a cluster
    # member, sorted by hostname for a deterministic node list.
    qs = "recipes:openstack-ops-messaging\\:\\:rabbitmq-server AND chef_environment:#{node.chef_environment}"
    node.default['rabbitmq']['clustering']['use_auto_clustering'] = true
    node.default['rabbitmq']['clustering']['cluster_nodes'] =
      search(:node, qs).sort_by { |n| n['hostname'] }.map do |n|
        { name: "#{user}@#{n['hostname']}" }
      end
  end
end

include_recipe 'rabbitmq'

if node['openstack']['mq']['rabbitmq']['enable_mgmt_console']
  include_recipe 'rabbitmq::mgmt_console'
else
  rabbitmq_plugin 'rabbitmq_management' do
    action :disable
  end
end

# Bug fix: the password argument had been replaced by a broken <PASSWORD>
# placeholder; restore the `pass` variable retrieved above.
rabbitmq_user 'add openstack rabbit user' do
  user user
  password pass
  sensitive true
end

rabbitmq_user 'change openstack rabbit user password' do
  user user
  password pass
  sensitive true
  action :change_password
end

rabbitmq_vhost 'add openstack rabbit vhost' do
  vhost vhost
  sensitive true
end

rabbitmq_user 'set openstack user permissions' do
  user user
  vhost vhost
  permissions '.* .* .*'
  sensitive true
  action :set_permissions
end

# Necessary for graphing.
rabbitmq_user 'set rabbit administrator tag' do
  user user
  tag 'administrator'
  sensitive true
  action :set_tags
end
|
# Schema migrator.
require 'miguel/schema'
require 'miguel/dumper'
module Miguel

  # Class for generating database migration from one schema to another.
  #
  # Works by diffing two Schema objects and emitting Sequel migration code
  # into a Dumper.
  class Migrator

    private

    # Separate items in before and after arrays into old items, same items and new items.
    def separate( before, after )
      # Note that we have to use ==, so we can't use & and - operators which use eql?.
      same = after.select{ |x| before.find{ |y| x == y } }
      [
        before.reject{ |x| same.find{ |y| x == y } },
        same,
        after.reject{ |x| same.find{ |y| x == y } },
      ]
    end

    # Iterate over matching pairs of named items.
    # Both lists are expected to contain the same names in the same order;
    # a name mismatch raises an error.
    def each_pair( name, from_items, to_items )
      for from, to in from_items.zip( to_items )
        fail "invalid #{name} pair #{from.name} -> #{to.name}" unless from.name == to.name
        yield from, to
      end
    end

    # Convert foreign keys from given tables into [ table name, foreign key ] pairs for easier comparison.
    def prepare_keys( tables )
      result = []
      for table in tables
        for key in table.foreign_keys
          result << [ table.name, key ]
        end
      end
      result
    end

    # Convert [ table name, foreign key ] pairs into hash of foreign keys per table.
    def split_keys( table_keys )
      result = {}
      for name, key in table_keys
        ( result[ name ] ||= [] ) << key
      end
      result
    end

    # Generate code for altering given foreign keys.
    # Emits one alter_table block per table, yielding each key to the block.
    def dump_foreign_keys( out, table_keys, &block )
      for name, keys in split_keys( table_keys )
        out.dump "alter_table #{name.inspect}" do
          keys.each &block
        end
      end
    end

    # Generate code for adding given foreign keys.
    def dump_add_foreign_keys( out, table_keys )
      dump_foreign_keys( out, table_keys ) do |key|
        out << "add_foreign_key #{key.out_columns}, #{key.out_table_name}#{key.out_canonic_opts}"
      end
    end

    # Generate code for dropping given foreign keys.
    def dump_drop_foreign_keys( out, table_keys )
      dump_foreign_keys( out, table_keys ) do |key|
        out << "drop_foreign_key #{key.out_columns} # #{key.out_table_name}#{key.out_canonic_opts}"
      end
    end

    # Generate code for adding given tables.
    def dump_add_tables( out, tables )
      for table in tables
        out.dump "create_table #{table.out_name}" do
          for column in table.columns
            column.dump( out )
          end
          for index in table.indexes
            index.dump( out )
          end
          # No foreign keys here - those are added in a separate pass later.
        end
      end
    end

    # Generate code for dropping given tables.
    def dump_drop_tables( out, tables )
      for table in tables
        out << "drop_table #{table.out_name}"
      end
    end

    # Generate code for dropping given indexes.
    def dump_drop_indexes( out, indexes )
      for index in indexes
        out << "drop_index #{index.out_columns}#{index.out_canonic_opts(' # ')}"
      end
    end

    # Generate code for adding given indexes.
    def dump_add_indexes( out, indexes )
      for index in indexes
        out << "add_index #{index.out_columns}#{index.out_canonic_opts}"
      end
    end

    # Generate code for dropping given columns.
    def dump_drop_columns( out, columns )
      for column in columns
        if column.primary_key_constraint?
          out << "drop_constraint #{column.out_name}, :type => :primary_key#{column.out_opts(' # ')}"
        else
          out << "drop_column #{column.out_name} # #{column.out_type}#{column.out_opts}"
        end
      end
    end

    # Generate code for adding given columns.
    def dump_add_columns( out, columns )
      for column in columns
        if column.type == :primary_key
          out << "add_primary_key #{column.out_name}#{column.out_opts}"
        else
          out << "add_column #{column.out_name}, #{column.out_type}#{column.out_default_opts}"
        end
      end
    end

    # Generate code for altering given column.
    # Statement order matters: NULL is permitted before the type change and
    # forbidden only after it, so intermediate states stay valid.
    def dump_alter_column( out, from, to )
      if from.allow_null != to.allow_null && to.allow_null
        out << "set_column_allow_null #{to.out_name}"
      end
      if from.canonic_type != to.canonic_type || from.type_opts != to.type_opts
        out << "set_column_type #{to.out_name}, #{to.out_type}#{to.out_opts}"
      end
      if from.default != to.default
        out << "set_column_default #{to.out_name}, #{to.out_default}"
      end
      if from.allow_null != to.allow_null && ! to.allow_null
        out << "set_column_not_null #{to.out_name}"
      end
    end

    # Generate code for altering given columns.
    def dump_alter_columns( out, from_columns, to_columns )
      each_pair( :column, from_columns, to_columns ) do |from, to|
        dump_alter_column( out, from, to )
      end
    end

    # Generate code for altering given table.
    def dump_alter_table( out, from, to )
      old_indexes, same_indexes, new_indexes = separate( from.indexes, to.indexes )
      from_names = from.column_names
      to_names = to.column_names
      old_names, same_names, new_names = separate( from_names, to_names )
      old_columns = from.named_columns( old_names )
      new_columns = to.named_columns( new_names )
      from_columns = from.named_columns( same_names )
      to_columns = to.named_columns( same_names )
      # Of the columns present on both sides, keep only those which changed.
      from_columns, same_columns, to_columns = separate( from_columns, to_columns )
      # Nothing to emit when there are no changes at all.
      return if [ old_indexes, new_indexes, old_columns, new_columns, to_columns ].all?{ |x| x.empty? }
      out.dump "alter_table #{to.out_name}" do
        dump_drop_indexes( out, old_indexes )
        dump_drop_columns( out, old_columns )
        dump_alter_columns( out, from_columns, to_columns )
        dump_add_columns( out, new_columns )
        dump_add_indexes( out, new_indexes )
      end
    end

    # Generate code for altering given tables.
    def dump_alter_tables( out, from_tables, to_tables )
      each_pair( :table, from_tables, to_tables ) do |from, to|
        dump_alter_table( out, from, to )
      end
    end

    public

    # Generate code for changing one schema to another.
    # Foreign keys are dropped first and added last, so the table drops and
    # alterations in between never violate referential constraints.
    def changes( from, to, out = Dumper.new )
      from_keys = prepare_keys( from.tables )
      to_keys = prepare_keys( to.tables )
      old_keys, same_keys, new_keys = separate( from_keys, to_keys )
      from_names = from.table_names
      to_names = to.table_names
      old_names, same_names, new_names = separate( from_names, to_names )
      old_tables = from.named_tables( old_names )
      new_tables = to.named_tables( new_names )
      from_tables = from.named_tables( same_names )
      to_tables = to.named_tables( same_names )
      dump_drop_foreign_keys( out, old_keys )
      dump_drop_tables( out, old_tables )
      dump_alter_tables( out, from_tables, to_tables )
      dump_add_tables( out, new_tables )
      dump_add_foreign_keys( out, new_keys )
      out
    end

    # Generate one way Sequel migration.
    def change_migration( from, to, out = Dumper.new )
      out.dump "Sequel.migration" do
        out.dump "change" do
          changes( from, to, out )
        end
      end
    end

    # Generate both ways Sequel migration.
    def full_migration( from, to, out = Dumper.new )
      out.dump "Sequel.migration" do
        out.dump "up" do
          changes( from, to, out )
        end
        out.dump "down" do
          changes( to, from, out )
        end
      end
    end

    alias migration full_migration

  end

end
# EOF #
|
#!/bin/bash
# Run a single dieharder RNG test:
#   -d 0  : test number 0
#   -g 45 : generator number 45 (see `dieharder -g -1` for the list)
#   -S ...: fixed seed so the run is reproducible
dieharder -d 0 -g 45 -S 2676169160
|
<filename>src/app/tabs/tabs.component.ts
import {AfterViewInit, Component, OnDestroy, OnInit} from '@angular/core';
import {DealWithTabService, Tab} from '../deal-with-tab.service';
import {Router} from '@angular/router';
@Component({
  selector: 'app-tabs',
  templateUrl: './tabs.component.html',
  styleUrls: ['./tabs.component.css']
})
export class TabsComponent implements OnInit, OnDestroy {
  /** Tabs currently rendered in the tab bar. */
  showedTabs = [];

  constructor(private router: Router, private dealWithTabService: DealWithTabService) {}

  ngOnInit() {
    // Subscribe to the tab-list updates; `ngEvents` is presumably a global
    // event bus provided elsewhere at runtime — TODO confirm.
    ngEvents.on('getTabList', (tabList) => this.getTabList(tabList));
  }

  /** Replace the displayed tab list. */
  getTabList(tabList) {
    this.showedTabs = tabList;
  }

  /** Close a tab and drop its cached state via the shared service. */
  deleteTabCache(tab) {
    this.dealWithTabService.deleteTabCache(tab);
  }

  ngOnDestroy() {
    // Unsubscribe so a re-created component does not register twice.
    ngEvents.off('getTabList');
  }

  /** Navigate to the module/route represented by the given tab. */
  jump(tab: Tab) {
    this.router.navigate([tab.module], { queryParams: tab.queryParams });
  }
}
|
#!/bin/bash -eu
#
# Copyright 2015 The Bazel Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# TODO(bazel-team) test that modifying the source in a non-interface
# changing way results in the same -interface.jar.
DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
## Inputs
# Positional arguments: paths to the JDK tools, zip utilities and ijar itself.
JAVAC=$1
shift
JAVA=$1
shift
JAR=$1
shift
JAVAP=$1
shift
IJAR=$1
shift
LANGTOOLS8=$1
shift
UNZIP=$1
shift
ZIP=$1
shift
ZIP_COUNT=$1
shift
## Test framework
source ${DIR}/testenv.sh || { echo "testenv.sh not found!" >&2; exit 1; }
# Remove all temporary test artifacts on exit.
function cleanup() {
  rm -fr "${TEST_TMPDIR:-sentinel}"/*
}
trap cleanup EXIT
## Tools
# Ensure that tooling path is absolute if not in PATH.
[[ "$JAVAC" =~ ^(/|[^/]+$) ]] || JAVAC="$PWD/$JAVAC"
[[ "$JAR" =~ ^(/|[^/]+$) ]] || JAR="$PWD/$JAR"
[[ "$IJAR" =~ ^(/|[^/]+$) ]] || IJAR="$PWD/$IJAR"
[[ "$UNZIP" =~ ^(/|[^/]+$) ]] || UNZIP="$PWD/$UNZIP"
[[ "$ZIP" =~ ^(/|[^/]+$) ]] || ZIP="$PWD/$ZIP"
[[ "$JAVAP" =~ ^(/|[^/]+$) ]] || JAVAP="$PWD/$JAVAP"
[[ "$ZIP_COUNT" =~ ^(/|[^/]+$) ]] || ZIP_COUNT="$PWD/$ZIP_COUNT"
IJAR_SRCDIR=$(dirname ${IJAR})
# Paths of the jars produced/consumed by the tests below.
A_JAR=$TEST_TMPDIR/A.jar
A_INTERFACE_JAR=$TEST_TMPDIR/A-interface.jar
A_ZIP_JAR=$TEST_TMPDIR/A_zip.jar
A_ZIP_INTERFACE_JAR=$TEST_TMPDIR/A_zip-interface.jar
W_JAR=$TEST_TMPDIR/W.jar
BOTTLES_JAR=$TEST_TMPDIR/bottles.jar
JAR_WRONG_CENTRAL_DIR=$IJAR_SRCDIR/test/libwrongcentraldir.jar
IJAR_WRONG_CENTRAL_DIR=$TEST_TMPDIR/wrongcentraldir_interface.jar
OBJECT_JAVA=$IJAR_SRCDIR/test/Object.java
OBJECT_JAR=$TEST_TMPDIR/object.jar
OBJECT_IJAR=$TEST_TMPDIR/object_interface.jar
TYPEANN2_JAR=$IJAR_SRCDIR/test/libtypeannotations2.jar
TYPEANN2_IJAR=$TEST_TMPDIR/typeannotations2_interface.jar
TYPEANN2_JAVA=$IJAR_SRCDIR/test/TypeAnnotationTest2.java
INVOKEDYNAMIC_JAR=$IJAR_SRCDIR/test/libinvokedynamic.jar
INVOKEDYNAMIC_IJAR=$TEST_TMPDIR/invokedynamic_interface.jar
METHODPARAM_JAR=$IJAR_SRCDIR/test/libmethodparameters.jar
METHODPARAM_IJAR=$TEST_TMPDIR/methodparameters_interface.jar
NESTMATES_JAR=$IJAR_SRCDIR/test/nestmates/nestmates.jar
NESTMATES_IJAR=$TEST_TMPDIR/nestmates_interface.jar
RECORDS_JAR=$IJAR_SRCDIR/test/records/records.jar
RECORDS_IJAR=$TEST_TMPDIR/records_interface.jar
SEALED_JAR=$IJAR_SRCDIR/test/sealed/sealed.jar
SEALED_IJAR=$TEST_TMPDIR/sealed_interface.jar
SOURCEDEBUGEXT_JAR=$IJAR_SRCDIR/test/source_debug_extension.jar
SOURCEDEBUGEXT_IJAR=$TEST_TMPDIR/source_debug_extension.jar
CENTRAL_DIR_LARGEST_REGULAR=$IJAR_SRCDIR/test/largest_regular.jar
CENTRAL_DIR_SMALLEST_ZIP64=$IJAR_SRCDIR/test/smallest_zip64.jar
CENTRAL_DIR_ZIP64=$IJAR_SRCDIR/test/definitely_zip64.jar
KEEP_FOR_COMPILE=$IJAR_SRCDIR/test/keep_for_compile_lib.jar
#### Setup
# set_file_length FILE SIZE
#
# Sets the file size for FILE, truncating if necessary, creating a
# sparse file if possible, preserving original contents if they fit.
function set_file_length() {
  # Open in append mode and truncate/extend to the requested size, then
  # verify the resulting size (statfmt comes from testenv.sh).
  perl -e 'open(FH, ">>$ARGV[0]") && truncate(FH, $ARGV[1]) or fail $!' "$@" &&
  [[ "$(statfmt $1)" == "$2" ]] ||
  fail "set_file_length failed"
}
# check_consistent_file_contents FILE
#
# Checks that all files created with the given filename have identical
# contents across invocations. The first call records the content hash
# keyed by the filename hash; later calls for the same filename must match.
expected_output=""
function check_consistent_file_contents() {
  local actual="$(cat $1 | ${MD5SUM} | awk '{ print $1; }')"
  local filename="$(echo $1 | ${MD5SUM} | awk '{ print $1; }')"
  # Bug fix: the lookup used "$(unknown)" — a nonexistent command expanding
  # to nothing — instead of "${filename}", so the check never actually keyed
  # on the filename. (Also removed the unused `expected` local.)
  if (echo "${expected_output}" | grep -q "^${filename} "); then
    # Seen this filename before: its recorded hash must match.
    echo "${expected_output}" | grep -q "^${filename} ${actual}$" || {
      ls -l "$1"
      fail "output file contents differ"
    }
  else
    # First sighting: record the filename/content hash pair.
    expected_output="$expected_output$filename $actual
"
  fi
}
# Create a fresh classes directory before each test.
function set_up() {
  mkdir -p $TEST_TMPDIR/classes
}
# Remove compiled classes after each test.
function tear_down() {
  rm -fr $TEST_TMPDIR/classes
}
#### Tests
function test_output_bigger_than_input() {
# Tests that ijar does not crash when output ijar is bigger than the input jar
$JAVAC -g -d $TEST_TMPDIR/classes \
$IJAR_SRCDIR/test/WellCompressed*.java ||
fail "javac failed"
$JAR cf $W_JAR -C $TEST_TMPDIR/classes . || fail "jar failed"
W_INTERFACE_JAR=$TEST_TMPDIR/W-interface.jar
$IJAR $W_JAR $W_INTERFACE_JAR || fail "ijar failed"
# Check that the interface jar is bigger than the original jar.
W_JAR_SIZE=$(statfmt $W_JAR)
W_INTERFACE_JAR_SIZE=$(statfmt $W_INTERFACE_JAR)
[[ $W_INTERFACE_JAR_SIZE -gt $W_JAR_SIZE ]] || fail "interface jar should be bigger"
}
function test_class_more_64k() {
# Tests that ijar can handle class bodies longer than 64K
# First, generate the input file
BOTTLES_JAVA=$TEST_TMPDIR/BottlesOnTheWall.java
echo "public class BottlesOnTheWall {" > $BOTTLES_JAVA
for i in $(seq 1 16384); do
echo " public int getBottleOnTheWall${i}() { return ${i}; }" >> $BOTTLES_JAVA
done
echo "}" >> $BOTTLES_JAVA
$JAVAC -g -d $TEST_TMPDIR/classes $BOTTLES_JAVA || fail "javac failed"
BOTTLES_INTERFACE_JAR=$TEST_TMPDIR/bottles-interface.jar
# Test ijar calls
for flag0 in '' '0'; do
$JAR c"${flag0}"f $BOTTLES_JAR -C $TEST_TMPDIR/classes . || fail "jar failed"
$IJAR $BOTTLES_JAR $BOTTLES_INTERFACE_JAR || fail "ijar failed"
check_consistent_file_contents $BOTTLES_INTERFACE_JAR
done
# Check that ijar can produce class files with a body longer than 64K by
# calling ijar itself on the output file to make sure that it is valid
BOTTLES_INTERFACE_INTERFACE_JAR=$TEST_TMPDIR/bottles-interface-interface.jar
$IJAR $BOTTLES_INTERFACE_JAR $BOTTLES_INTERFACE_INTERFACE_JAR ||
fail "ijar cannot produce class files with body longer than 64K"
}
function test_ijar_output() {
# Numerous check on the output created by ijar.
# Compiles A.java, builds A.jar and A-interface.jar
$JAVAC -g -d $TEST_TMPDIR/classes $IJAR_SRCDIR/test/A.java ||
fail "javac failed"
$JAR cf $A_JAR -C $TEST_TMPDIR/classes . || fail "jar failed"
$IJAR $A_JAR $A_INTERFACE_JAR || fail "ijar failed."
# Check that the number of entries is 5:
# A, A.PrivateInner, A.PublicInner, A.MyAnnotation,
# A.RuntimeAnnotation
# (Note: even private inner classes are retained, so we don't need to change
# the types of members.)
local expected=5
local lines
lines=$($JAR tvf $A_INTERFACE_JAR | wc -l)
check_eq $expected $lines "Interface jar should have $expected entries!"
# Check that no private class members are found:
lines=$($JAVAP -private -classpath $A_JAR A | grep -c priv || true)
check_eq 2 $lines "Input jar should have 2 private members!"
lines=$($JAVAP -private -classpath $A_INTERFACE_JAR A | grep -c priv || true)
check_eq 0 $lines "Interface jar should have no private members!"
lines=$($JAVAP -private -classpath $A_INTERFACE_JAR A | grep -c clinit || true)
check_eq 0 $lines "Interface jar should have no class initializers!"
# Check that no code is found:
lines=$($JAVAP -c -private -classpath $A_JAR A | grep -c Code: || true)
check_eq 5 $lines "Input jar should have 5 method bodies!"
lines=$($JAVAP -c -private -classpath $A_INTERFACE_JAR A | grep -c Code: || true)
check_eq 0 $lines "Interface jar should have no method bodies!"
# Check that constants from code are no longer present:
$JAVAP -c -private -classpath $A_JAR A | grep -sq foofoofoofoo ||
fail "Input jar should have code constants!"
$JAVAP -c -private -classpath $A_INTERFACE_JAR A | grep -sq foofoofoofoo &&
fail "Interface jar should have no code constants!"
# Check (important, this!) that the interface jar is still sufficient
# for compiling:
$JAVAC -Xlint -classpath $A_INTERFACE_JAR -g -d $TEST_TMPDIR/classes \
$IJAR_SRCDIR/test/B.java 2>$TEST_log || fail "Can't compile B!"
# Test compilation of B yielded deprecation message:
expect_log 'deprecatedMethod.*in A has been deprecated' \
"ijar has lost @Deprecated annotation!"
# Run the dynamic checks in B.main().
$JAVA -classpath $TEST_TMPDIR/classes B || exit 1
# Check idempotence of ijar transformation:
A_INTERFACE_INTERFACE_JAR=$TEST_TMPDIR/A-interface-interface.jar
$IJAR $A_INTERFACE_JAR $A_INTERFACE_INTERFACE_JAR || fail "ijar failed."
cmp $A_INTERFACE_JAR $A_INTERFACE_INTERFACE_JAR ||
fail "ijar transformation is not idempotent"
# Check that -interface.jar contains nothing but .class files:
check_eq 0 $($JAR tf $A_INTERFACE_JAR | grep -cv \\.class$ || true) \
"Interface jar should contain only .class files!"
# Check that -interface.jar timestamps are normalized:
check_eq 0 $(TZ=UTC $JAR tvf $A_INTERFACE_JAR |
grep -cv 'Fri Jan 01 00:00:00 UTC 2010' || true) \
"Interface jar contained non-zero timestamps!"
# Check that compile-time constants in A are still annotated as such in ijar:
$JAVAP -classpath $TEST_TMPDIR/classes -c B | grep -sq 'ldc2_w.*123' ||
fail "ConstantValue not propagated to class B!"
# Check that a jar compressed with zip results in the same interface jar as a
# jar compressed with jar
rm -fr $TEST_TMPDIR/classes
mkdir -p $TEST_TMPDIR/classes || fail "mkdir $TEST_TMPDIR/classes failed"
$JAVAC -g -d $TEST_TMPDIR/classes $IJAR_SRCDIR/test/A.java ||
fail "javac failed"
$JAR cf $A_JAR $TEST_TMPDIR/classes/A.class || fail "jar failed"
$ZIP $A_ZIP_JAR $TEST_TMPDIR/classes/A.class || fail "zip failed"
$IJAR $A_JAR $A_INTERFACE_JAR || fail "ijar failed"
$IJAR $A_ZIP_JAR $A_ZIP_INTERFACE_JAR || fail "ijar failed"
cmp $A_INTERFACE_JAR $A_ZIP_INTERFACE_JAR || \
fail "ijars from jar and zip are different"
}
# Helper: exercise ijar against optionally enlarged inputs.
# $1 (optional): pad every .class file under $TEST_TMPDIR/classes to this many bytes.
# $2 (optional): also add a zeroes.data entry of this many bytes to the jar.
# Each jar is built twice (default-compressed and stored, via the jar "0" flag);
# the resulting interface jar must be byte-identical across runs.
function do_test_large_file() {
  # Compiles A.java, builds A.jar and A-interface.jar
  $JAVAC -g -d $TEST_TMPDIR/classes $IJAR_SRCDIR/test/A.java ||
    fail "javac failed"
  # First a check without large file to have something to compare to.
  for flag0 in '' '0'; do
    $JAR c"${flag0}"f $A_JAR -C $TEST_TMPDIR/classes . || fail "jar failed"
    $IJAR $A_JAR $A_INTERFACE_JAR || fail "ijar failed."
    check_consistent_file_contents $A_INTERFACE_JAR
  done
  # Then create larges files
  extra_args=""
  if [[ -n "${1-}" ]]; then
    # Pad the class files themselves.
    for file in $(find $TEST_TMPDIR/classes -name '*.class'); do
      set_file_length "$file" "$1"
    done
  fi
  if [[ -n "${2-}" ]]; then
    # Add one large non-class entry to the input jar.
    set_file_length $TEST_TMPDIR/zeroes.data "$2"
    extra_args="-C $TEST_TMPDIR zeroes.data"
  fi
  for flag0 in '' '0'; do
    $JAR c"${flag0}"f $A_JAR $extra_args -C $TEST_TMPDIR/classes . || fail "jar failed"
    $IJAR $A_JAR $A_INTERFACE_JAR || fail "ijar failed."
    check_consistent_file_contents $A_INTERFACE_JAR
  done
}
function test_large_files() {
  # Ensure input files larger than INITIAL_BUFFER_SIZE work.
  # TODO(martinrb): remove maximum .class file size limit (MAX_BUFFER_SIZE)
  # Pads each .class file to 1MiB, then 15MiB (see do_test_large_file $1).
  for size in $((1024*1024)) $((15*1024*1024)); do
    do_test_large_file $size
  done
}
# Create a huge (~2.2Gb) input jar to test "large file" correctness
function test_z_2gb_plus_data_file() {
  # This is slow because only writing a 2.2Gb file on a SSD drive is >10s and
  # jaring it takes >16s.
  # The z letter in the function name is to ensure that method is last in the
  # method list so it has more chance to be alone on a shard.
  # Only the auxiliary data entry ($2) is enlarged; class files keep their size.
  # NOTE(review): 22*102 MiB ~= 2.19GiB; "102" may be a typo for "100", but
  # either value crosses the ~2.2GB mark described above -- confirm intent.
  do_test_large_file '' $((22*102*1024*1024))
}
# Create an output jar with upper bound on size > 2GB
function test_upper_bound_up_2gb() {
  # 200 classes padded to 15MiB each makes the input (and ijar's size upper
  # bound for the output) exceed 2GB, per the comment above this test.
  DIR=$TEST_TMPDIR/ManyLargeClasses
  mkdir -p $DIR/classes
  for i in $(seq 200); do
    printf "class C${i} {}\n" > $DIR/C${i}.java
  done
  (cd $DIR && $JAVAC -d classes *.java)
  for i in $(seq 200); do
    set_file_length $DIR/classes/C${i}.class $((15*1024*1024))
  done
  $JAR cf $DIR/ManyLargeClasses.jar -C $DIR/classes . || fail "jar failed"
  $IJAR $DIR/ManyLargeClasses.jar $DIR/ManyLargeClasses.ijar || fail "ijar failed."
}
function test_empty_jar() {
  # Regression test for jar file without classes (javac doesn't like an empty ijar).
  # Build a jar containing only a single zero-length non-class entry.
  >$TEST_TMPDIR/empty
  $ZIP $TEST_TMPDIR/noclasses.jar $TEST_TMPDIR/empty >/dev/null 2>&1
  # With no output argument, ijar writes <input>-interface.jar.
  $IJAR $TEST_TMPDIR/noclasses.jar || fail "ijar failed"
  # The output must contain at least one entry ...
  $UNZIP -qql $TEST_TMPDIR/noclasses-interface.jar 2>/dev/null | grep -q . ||
    fail "noclasses-interface.jar is completely empty!"
  # ... and must be usable on a javac classpath.
  $JAVAC -classpath $TEST_TMPDIR/noclasses-interface.jar \
    -d $TEST_TMPDIR/classes \
    $IJAR_SRCDIR/test/A.java ||
    fail "javac noclasses-interface.jar failed"
  rm $TEST_TMPDIR/{empty,noclasses.jar,noclasses-interface.jar}
}
function test_wrong_centraldir() {
  # Check that a JAR file can be parsed even if the central directory file count
  # is wrong
  $IJAR $JAR_WRONG_CENTRAL_DIR $IJAR_WRONG_CENTRAL_DIR || fail "ijar failed"
  # The fixture is expected to yield exactly 2 listed entries; none may be lost.
  IJAR_FILES=$($UNZIP -qql $IJAR_WRONG_CENTRAL_DIR | wc -l | xargs echo)
  if [[ $IJAR_FILES != 2 ]]; then
    fail "ijar removed files"
  fi
}
function test_type_annotation() {
  # Check that constant pool references used by JSR308 type annotations are
  # preserved
  $IJAR $TYPEANN2_JAR $TYPEANN2_IJAR || fail "ijar failed"
  $JAVAP -classpath $TYPEANN2_IJAR -v Util >& $TEST_log || fail "javap failed"
  expect_log "RuntimeVisibleTypeAnnotations" "RuntimeVisibleTypeAnnotations not preserved!"
  # The interface jar must also still work as a compile-time dependency.
  cp $TYPEANN2_JAVA $TEST_TMPDIR/TypeAnnotationTest2.java
  $JAVAC $TEST_TMPDIR/TypeAnnotationTest2.java -cp $TYPEANN2_IJAR ||
    fail "javac failed"
}
function test_invokedynamic() {
  # Check that ijar works on classes with invokedynamic
  $IJAR $INVOKEDYNAMIC_JAR $INVOKEDYNAMIC_IJAR || fail "ijar failed"
  # '|| true' keeps the command substitution's status 0 when grep matches nothing.
  lines=$($JAVAP -c -private -classpath $INVOKEDYNAMIC_JAR ClassWithLambda | grep -c Code: || true)
  check_eq 4 $lines "Input jar should have 4 method bodies!"
  # After ijar, all method bodies (Code attributes) must be gone.
  lines=$($JAVAP -c -private -classpath $INVOKEDYNAMIC_IJAR ClassWithLambda | grep -c Code: || true)
  check_eq 0 $lines "Interface jar should have no method bodies!"
}
function test_object_class() {
  # Check that Object.class can be processed
  # Compile our own copy of java.lang.Object and run it through ijar.
  mkdir -p $TEST_TMPDIR/java/lang
  cp $OBJECT_JAVA $TEST_TMPDIR/java/lang/.
  $JAVAC -source 8 -target 8 $TEST_TMPDIR/java/lang/Object.java || fail "javac failed"
  $JAR cf $OBJECT_JAR -C $TEST_TMPDIR java/lang/Object.class || fail "jar failed"
  $IJAR $OBJECT_JAR $OBJECT_IJAR || fail "ijar failed"
}
function test_corrupted_end_of_centraldir() {
  # Check that the tool detects and reports a corrupted end of central directory
  # record condition
  CORRUPTED_JAR=$TEST_TMPDIR/corrupted.jar
  # First make the jar one byte longer
  cp $JAR_WRONG_CENTRAL_DIR $CORRUPTED_JAR
  chmod +w $CORRUPTED_JAR
  echo >> $CORRUPTED_JAR
  echo "Abort trap is expected" # Show on the log that we expect failure.
  # '|| status=$?' captures the failing exit code without tripping errexit.
  $IJAR $CORRUPTED_JAR 2> $TEST_log && fail "ijar should have failed" || status=$?
  check_ne 0 $status
  expect_log "missing end of central directory record"
  # Then make the jar one byte shorter than the original one
  # (subtract 2: the echo above had already appended one byte to the copy).
  let "NEW_SIZE = `statfmt $CORRUPTED_JAR` - 2"
  set_file_length $CORRUPTED_JAR $NEW_SIZE
  $IJAR $CORRUPTED_JAR 2> $TEST_log && fail "ijar should have failed" || status=$?
  check_ne 0 $status
  expect_log "missing end of central directory record"
}
function test_inner_class_argument() {
  # Builds lib.jar whose public API (B.b) takes a nested class A.A2 as a
  # parameter, then verifies a downstream compile of C against that jar
  # resolves the inner-class reference.
  # NOTE(review): this test never invokes $IJAR on lib.jar -- possibly C was
  # meant to compile against an interface jar of lib.jar; confirm the intent.
  cd $TEST_TMPDIR
  mkdir -p a b c
  cat > a/A.java <<EOF
package a;
public class A {
public static class A2 {
public int n;
}
}
EOF
  cat > b/B.java <<EOF
package b;
import a.A;
public class B {
public static void b(A.A2 arg) {
System.out.println(arg.n);
}
}
EOF
  cat > c/C.java <<EOF
package c;
import b.B;
public class C {
public static void c() {
B.b(null);
}
}
EOF
  $JAVAC a/A.java b/B.java
  $JAR cf lib.jar {a,b}/*.class
  $JAVAC -cp lib.jar c/C.java
}
function test_inner_class_pruning() {
  # one/A references no nested classes of L, two/A calls L.I.number() and
  # three/A calls L.I.J.number() -- but only from inside method bodies.
  # Since ijar strips method bodies (and prunes the now-unused inner-class
  # constant pool entries), all three interface jars must be byte-identical.
  cd $TEST_TMPDIR
  mkdir -p lib/l {one,two,three}/a
  cat > lib/l/L.java <<EOF
package l;
public class L {
public static class I {
public static class J {
public static int number() {
return 3;
}
}
public static int number() {
return 2;
}
}
}
EOF
  cat > one/a/A.java <<EOF
package a;
public class A {
public static void message() {
System.out.println("hello " + 1);
}
}
EOF
  cat > two/a/A.java <<EOF
package a;
import l.L;
public class A {
public static void message() {
System.out.println("hello " + L.I.number());
}
}
EOF
  cat > three/a/A.java <<EOF
package a;
import l.L;
public class A {
public static void message() {
System.out.println("hello " + L.I.J.number());
}
}
EOF
  $JAVAC lib/l/L.java
  (cd lib; $JAR cf lib.jar l/*.class)
  $JAVAC one/a/A.java
  (cd one; $JAR cf one.jar a/*.class)
  $JAVAC two/a/A.java -classpath lib/lib.jar
  (cd two; $JAR cf two.jar a/*.class)
  $JAVAC three/a/A.java -classpath lib/lib.jar
  (cd three; $JAR cf three.jar a/*.class)
  # BUG FIX: the original ran ijar three times on one/one.jar, which made the
  # cmp checks below pass vacuously.  Each variant jar must be ijar'd itself.
  $IJAR one/one.jar one/one-ijar.jar
  $IJAR two/two.jar two/two-ijar.jar
  $IJAR three/three.jar three/three-ijar.jar
  cmp one/one-ijar.jar two/two-ijar.jar
  cmp one/one-ijar.jar three/three-ijar.jar
}
function test_method_parameters_attribute() {
  # Check that Java 8 MethodParameters attributes are preserved
  # (javap -v prints attribute names; expect_log scans $TEST_log for them).
  $IJAR $METHODPARAM_JAR $METHODPARAM_IJAR || fail "ijar failed"
  $JAVAP -classpath $METHODPARAM_IJAR -v methodparameters.Test >& $TEST_log \
    || fail "javap failed"
  expect_log "MethodParameters" "MethodParameters not preserved!"
}
function test_nestmates_attribute() {
  # Check that Java 11 NestMates attributes are preserved
  $IJAR $NESTMATES_JAR $NESTMATES_IJAR || fail "ijar failed"
  # The host class must keep its NestMembers attribute ...
  $JAVAP -classpath $NESTMATES_IJAR -v NestTest >& $TEST_log \
    || fail "javap failed"
  expect_log "NestMembers" "NestMembers not preserved!"
  # ... and the nested class NestTest$P must keep its NestHost attribute.
  $JAVAP -classpath $NESTMATES_IJAR -v 'NestTest$P' >& $TEST_log \
    || fail "javap failed"
  expect_log "NestHost" "NestHost not preserved!"
}
function test_records_attribute() {
  # Check that Java 16 Records attributes are preserved
  # (removed a stray debugging "ls $IJAR $RECORDS_JAR" that only cluttered
  # the test log)
  $IJAR $RECORDS_JAR $RECORDS_IJAR || fail "ijar failed"
  $JAVAP -classpath $RECORDS_IJAR -v RecordTest >& $TEST_log \
    || fail "javap failed"
  expect_log "Record" "Records not preserved!"
}
function test_sealed_attribute() {
  # Check that Java 16 PermittedSubclasses attributes are preserved
  # (removed a stray debugging "ls $IJAR $SEALED_JAR" that only cluttered
  # the test log)
  $IJAR $SEALED_JAR $SEALED_IJAR || fail "ijar failed"
  $JAVAP -classpath $SEALED_IJAR -v SealedTest >& $TEST_log \
    || fail "javap failed"
  expect_log "PermittedSubclasses" "PermittedSubclasses not preserved!"
}
function test_source_debug_extension_attribute() {
  # Check that SourceDebugExtension attributes are dropped without a warning
  # (i.e. the attribute is known to ijar, just deliberately not emitted).
  $IJAR $SOURCEDEBUGEXT_JAR $SOURCEDEBUGEXT_IJAR >& $TEST_log || fail "ijar failed"
  expect_not_log "skipping unknown attribute"
  $JAVAP -classpath $SOURCEDEBUGEXT_IJAR -v sourcedebugextension.Test >& $TEST_log \
    || fail "javap failed"
  expect_not_log "SourceDebugExtension" "SourceDebugExtension preserved!"
}
function test_keep_for_compile() {
  # With --strip_jar, entries that must be kept for compilation retain their
  # method bodies; here the Kotlin null-check intrinsic calls must survive.
  $IJAR --strip_jar $KEEP_FOR_COMPILE $TEST_TMPDIR/keep.jar \
    || fail "ijar failed"
  # Count the surviving Intrinsics.checkParameterIsNotNull call sites.
  lines=$($JAVAP -classpath $TEST_TMPDIR/keep.jar -c -p \
    functions.car.CarInlineUtilsKt |
    grep -c "// Method kotlin/jvm/internal/Intrinsics.checkParameterIsNotNull" ||
    true)
  check_eq 2 $lines "Output jar should have kept method body"
}
function test_central_dir_largest_regular() {
  # 65535 entries: the largest count representable without ZIP64 extensions.
  $IJAR $CENTRAL_DIR_LARGEST_REGULAR $TEST_TMPDIR/ijar.jar || fail "ijar failed"
  $ZIP_COUNT $TEST_TMPDIR/ijar.jar 65535 || fail
}
function test_central_dir_smallest_zip64() {
  # 65536 entries: the smallest count that forces a ZIP64 central directory.
  $IJAR $CENTRAL_DIR_SMALLEST_ZIP64 $TEST_TMPDIR/ijar.jar || fail "ijar failed"
  $ZIP_COUNT $TEST_TMPDIR/ijar.jar 65536 || fail
}
function test_central_dir_zip64() {
  # A comfortably ZIP64-sized central directory (70000 entries) round-trips.
  $IJAR $CENTRAL_DIR_ZIP64 $TEST_TMPDIR/ijar.jar || fail "ijar failed"
  $ZIP_COUNT $TEST_TMPDIR/ijar.jar 70000 || fail
}
run_suite "ijar tests"
|
#!/bin/bash
#/*
# * This file is part of TangoMan Provisions package.
# *
# * Copyright (c) 2021 "Matthias Morin" <mat@tangoman.io>
# *
# * This source file is subject to the MIT license that is bundled
# * with this source code in the file LICENSE.
# */
#/**
# * KeePassXC
# *
# * Password manager
# *
# * @category security
# * @link https://keepassxc.org
# */
# Resolve this script's directory so the relative source below works from any cwd.
CURDIR=$(dirname "$(realpath "${BASH_SOURCE[0]}")")
# shellcheck source=/dev/null
# Provides the alert_primary / echo_info helpers used below.
. "${CURDIR}/../tools/src/colors/colors.sh"
alert_primary 'Install KeePassXC'
# echo_info prints the command about to run; the next line executes it.
echo_info 'sudo apt-get install --assume-yes keepassxc'
sudo apt-get install --assume-yes keepassxc
# create shortcut on desktop
echo_info 'cp -p /usr/share/applications/org.keepassxc.KeePassXC.desktop ~/Desktop'
cp -p /usr/share/applications/org.keepassxc.KeePassXC.desktop ~/Desktop
|
<filename>src/main/java/com/doodl6/demo/pattern/Interpreter.java
package com.doodl6.demo.pattern;
import java.util.Arrays;
import java.util.stream.Collectors;
/**
* 解释器模式
*/
/**
 * 解释器模式 (Interpreter pattern demo): interprets a whitespace-separated
 * list of decimal integers and prints the parsed numbers and their sum.
 */
public class Interpreter {

    private final NumberExpression expression = new NumberExpression();

    /**
     * Parses {@code info} as space-separated integers and prints both the
     * parsed array and the total.
     *
     * @param info space-separated decimal integers, e.g. {@code "1 2 3"}
     */
    public void plusNumber(String info) {
        Integer[] numberArray = expression.interpret(info);
        System.out.println("解析到的数字数组为:" + Arrays.stream(numberArray).map(String::valueOf).collect(Collectors.joining(",")));
        // Reduce with an explicit identity: no Optional.get() that would throw
        // NoSuchElementException on an empty stream.
        System.out.println("计算总和为:" + Arrays.stream(numberArray).reduce(0, Integer::sum));
    }

    /** Abstract expression: interprets a textual description into numbers. */
    public interface Expression {
        Integer[] interpret(String info);
    }

    /** Terminal expression: splits on single spaces and parses each token. */
    public static class NumberExpression implements Expression {
        @Override
        public Integer[] interpret(String info) {
            // Integer.valueOf replaces the deprecated Integer(String)
            // constructor and reuses the small-value cache.
            return Arrays.stream(info.split(" ")).map(Integer::valueOf).toArray(Integer[]::new);
        }
    }

    public static void main(String[] args) {
        Interpreter interpreter = new Interpreter();
        interpreter.plusNumber("111 333 44 5 6 7 8");
    }
}
|
# SwAV self-supervised pre-training: ResNet-18 backbone on CIFAR-100,
# 200 epochs, SGD+LARS with warmup-cosine schedule, fp16, W&B logging.
# NOTE(review): "--our_loss False" passes the literal string "False"; if the
# flag is parsed as a store-style boolean this will NOT disable it -- confirm
# how main_pretrain.py parses booleans.
# NOTE(review): the trailing backslash on the last line continues into EOF;
# harmless now, but fragile if more lines are appended after it.
python3 ../../../main_pretrain.py \
    --dataset cifar100 \
    --backbone resnet18 \
    --data_dir ~/workspace/trung_database/datasets/ \
    --max_epochs 200 \
    --gpus 0 \
    --accelerator gpu \
    --precision 16 \
    --optimizer sgd \
    --lars \
    --grad_clip_lars \
    --eta_lars 0.02 \
    --scheduler warmup_cosine \
    --lr 0.6 \
    --min_lr 0.0006 \
    --classifier_lr 0.1 \
    --weight_decay 1e-6 \
    --batch_size 256 \
    --num_workers 4 \
    --crop_size 32 \
    --brightness 0.8 \
    --contrast 0.8 \
    --saturation 0.8 \
    --hue 0.2 \
    --gaussian_prob 0.0 0.0 \
    --num_crops_per_aug 1 1 \
    --name swav_res18 \
    --project CIFAR100-200ep \
    --entity kaistaim \
    --wandb \
    --save_checkpoint \
    --method swav \
    --proj_hidden_dim 2048 \
    --queue_size 3840 \
    --proj_output_dim 128 \
    --num_prototypes 3000 \
    --epoch_queue_starts 50 \
    --freeze_prototypes_epochs 2 \
    --knn_eval \
    --lam 0.1 \
    --tau_decor 0.1 \
    --our_loss False \
|
import importlib
def main():
    """Dynamically load the configured worker class, instantiate it, run its
    execute_task() method, and print the result."""
    worker_path = "uvicorn.workers.UvicornWorker"  # Configuration setting
    # "pkg.mod.Class" -> ("pkg.mod", "Class")
    module_name, class_name = worker_path.rsplit('.', 1)
    # Resolve the class at runtime from its dotted path.
    module = importlib.import_module(module_name)
    worker_cls = getattr(module, class_name)
    # Instantiate and execute.
    worker = worker_cls()
    print(worker.execute_task())


if __name__ == "__main__":
    main()
#!/bin/bash
#
# Copyright 2017,2018 Istio Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Init script downloads or updates envoy and the go dependencies. Called from Makefile, which sets
# the needed environment variables.
# Fail fast: abort on any error, unset variable, or pipeline failure.
set -o errexit
set -o nounset
set -o pipefail
# TODO(nmittler): Remove these variables and require that this script be run from the Makefile
# Set GOPATH to match the expected layout
GO_TOP=$(cd "$(dirname "$0")"/../../../..; pwd)
export OUT_DIR=${OUT_DIR:-${GO_TOP}/out}
# Current version is 2.9.1, with 2.10RC available
# 2.7.2 was released in Nov 2017.
# 2.10 adds proper support for CRD - we will test with it
# For pre-2.10,
HELM_VER=${HELM_VER:-v2.9.1}
#HELM_VER=${HELM_VER:-v2.10.0-rc.1}
export GOPATH=${GOPATH:-$GO_TOP}
# Normally set by Makefile
export ISTIO_BIN=${ISTIO_BIN:-${GOPATH}/bin}
# Set the architecture. Matches logic in the Makefile.
export GOARCH=${GOARCH:-'amd64'}
# Determine the OS. Matches logic in the Makefile.
# Map bash's $OSTYPE (e.g. "linux-gnu", "darwin18") onto GOOS-style names.
LOCAL_OS=${OSTYPE}
case $LOCAL_OS in
  "linux"*)
    LOCAL_OS='linux'
    ;;
  "darwin"*)
    LOCAL_OS='darwin'
    ;;
  *)
    echo "This system's OS ${LOCAL_OS} isn't recognized/supported"
    exit 1
    ;;
esac
export GOOS=${GOOS:-${LOCAL_OS}}
# test scripts seem to like to run this script directly rather than use make
export ISTIO_OUT=${ISTIO_OUT:-${ISTIO_BIN}}
# install helm if not present, it must be the local version.
# The version.helm.<HELM_VER> marker file (touched below) makes this
# download-and-install section idempotent across runs.
if [ ! -f "${ISTIO_OUT}/version.helm.${HELM_VER}" ] ; then
  TD=$(mktemp -d)
  # Install helm. Please keep it in sync with .circleci
  cd "${TD}" && \
    curl -Lo "${TD}/helm.tgz" "https://storage.googleapis.com/kubernetes-helm/helm-${HELM_VER}-${LOCAL_OS}-amd64.tar.gz" && \
    tar xfz helm.tgz && \
    mv ${LOCAL_OS}-amd64/helm "${ISTIO_OUT}/helm-${HELM_VER}" && \
    cp "${ISTIO_OUT}/helm-${HELM_VER}" "${ISTIO_OUT}/helm" && \
    rm -rf "${TD}" && \
    touch "${ISTIO_OUT}/version.helm.${HELM_VER}"
fi
|
<filename>day25-springboot/src/main/java/cn/st/Application.java
package cn.st;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* @description:
* @author: st
* @create: 2021-02-24 16:54
**/
// Spring Boot entry point: enables auto-configuration and component scanning
// rooted at this class's package (cn.st).
@SpringBootApplication
public class Application {
    public static void main(String[] args) {
        // Bootstraps the Spring application context.
        SpringApplication.run(Application.class, args);
    }
}
|
#!/bin/sh
koopa_print() {
    # """
    # Print each argument on its own line, interpreting backslash escapes
    # (printf '%b').  With no arguments, print a single blank line.
    # @note Updated 2020-07-05.
    #
    # printf vs. echo
    # - http://www.etalabs.net/sh_tricks.html
    # - https://unix.stackexchange.com/questions/65803
    # - https://www.freecodecamp.org/news/
    #       how-print-newlines-command-line-output/
    # """
    local line
    # Guard clause: nothing to print, emit one newline.
    [ "$#" -eq 0 ] && { printf '\n'; return 0; }
    for line in "$@"; do
        printf '%b\n' "$line"
    done
    return 0
}
|
#!/bin/bash
# Rebuild the Hugo site and push the generated "public" checkout to GitHub.
# Usage: ./deploy.sh [commit message]
set -e
# Make sure the public checkout is up to date before regenerating into it.
cd public
git fetch
git pull
cd ..
# Uncomment the following line to force push
# rm -rf public/*
echo -e "\033[0;32mDeploying updates to GitHub...\033[0m"
# Build the project.
hugo -t academic # if using a theme, replace with `hugo -t <YOURTHEME>`
# Go To Public folder
cd public/
# Add changes to git.
git add .
# Commit changes.
# IDIOM FIX: $(date) instead of legacy backticks.
msg="rebuilding site $(date)"
if [ $# -eq 1 ]
  then msg="$1"
fi
git commit -m "$msg"
# Push source and build repos.
git push origin master
# Come Back up to the Project Root
cd ..
|
#!/bin/bash
#set -x
# Launch the Twitter Scala job with a 28GB JVM heap.
# $1: PATH to use for the run; $2: classpath handed to scala;
# $3-$6: forwarded job arguments.
export JAVA_OPTS=-Xmx28G
export LD_LIBRARY_PATH=/home/ubuntu/lib/BIDMat/lib:/usr/local/lib
export PATH=$1
export ALL_LIBS=$2
# 41652230 and "machines" are fixed first/last arguments to the job.
scala -cp $ALL_LIBS Twitter 41652230 $3 $4 $5 $6 machines
|
import java.util.ArrayList;
class District {
    private int districtNo;        // identifier assigned at construction
    private int totalPop;          // running sum of member town populations
    private Location centerOfPop;  // population-weighted centroid of member towns
    private ArrayList<Town> towns; // towns assigned to this district

    public District(int num) {
        this.districtNo = num;
        this.totalPop = 0;
        this.centerOfPop = null;   // no centroid until the first town is added
        this.towns = new ArrayList<Town>();
    }

    /**
     * Adds a town to this district: tags the town with the district number and
     * updates the population-weighted center incrementally.
     *
     * NOTE(review): for the first town, centerOfPop ALIASES that town's own
     * Location object; subsequent additions then mutate it via setLat/setLng,
     * silently moving the first town's coordinates.  A defensive copy would
     * fix this -- confirm Location's constructor/copy API before changing.
     */
    public void addTown(Town town) {
        town.setDistrict(districtNo);
        if (centerOfPop == null) this.centerOfPop = town.getLocation();
        else {
            // Weighted average of the existing centroid and the new town.
            double newLat = (this.centerOfPop.getLat() * totalPop + town.getLocation().getLat() * town.getPopulation()) / (totalPop + town.getPopulation());
            double newLng = (this.centerOfPop.getLng() * totalPop + town.getLocation().getLng() * town.getPopulation()) / (totalPop + town.getPopulation());
            centerOfPop.setLat(newLat);
            centerOfPop.setLng(newLng);
        }
        totalPop += town.getPopulation();
        towns.add(town);
    }

    public int getDistrictNo() {
        return districtNo;
    }

    public int getTotalPop() {
        return totalPop;
    }

    public Location getCenterOfPop() {
        return centerOfPop;
    }

    /** CSV-ish dump: one line per town (name,district,lat,lng,population). */
    public String toString(){
        // String s = "\nDistrict " + districtNo + ". Total Population: " + totalPop + "\n ALL TOWNS:";
        // for(Town t:towns){
        //    s += "\n -" + t.getName() + "| Population: " + t.getPopulation();
        // }
        String s = "";
        for(Town t:towns){
            s += "\n" + t.getName() + "," + districtNo+","+t.getLocation().getLat()+","+t.getLocation().getLng()+","+t.getPopulation();
        }
        return s;
    }

    /** Distance from this district's centroid to the given town. */
    public double distTo(Town town){
        //uses the Location.distTo(Location) method in Location.java
        return centerOfPop.distTo(town.getLocation());
    }
}
|
<reponame>benoitc/pypy
# a couple of support functions which
# help with generating Python source.
# XXX This module provides a similar, but subtly different, functionality
# XXX several times over, which used to be scattered over four modules.
# XXX We should try to generalize and single out one approach to dynamic
# XXX code compilation.
import sys, os, inspect, new
import autopath, py
def render_docstr(func, indent_str='', closing_str=''):
    """Render a docstring (or the docstring of an object) as source text.

    The docstring is backslash-escaped and wrapped in whichever triple-quote
    style yields the shorter result, prefixed by ``indent_str`` and followed
    by ``closing_str``.  Returns ``None`` when there is no docstring.  The
    text is deliberately NOT re-indented, so it lines up flush left.
    """
    if isinstance(func, str):
        doc = func
    else:
        doc = func.__doc__
    if doc is None:
        return None
    # Escape backslashes first, then the quote character of each candidate.
    doc = doc.replace('\\', r'\\')
    candidates = []
    for quote in ('"""', "'''"):
        rendered = indent_str + quote + doc.replace(quote[0], "\\" + quote[0]) + quote + closing_str
        candidates.append(rendered)
    first, second = candidates
    # Prefer the double-quoted form unless single quoting is strictly shorter.
    if len(second) < len(first):
        return second
    return first
class NiceCompile(object):
    """ Compiling parameterized strings in a way that debuggers
    are happy. We provide correct line numbers and a real
    __file__ attribute.
    """
    def __init__(self, namespace_or_filename):
        # Accept either a source filename or a namespace dict (typically a
        # module's globals()) from which __file__ is taken.
        if type(namespace_or_filename) is str:
            srcname = namespace_or_filename
        else:
            srcname = namespace_or_filename.get('__file__')
        if not srcname:
            # assume the module was executed from the
            # command line.
            srcname = os.path.abspath(sys.argv[-1])
        self.srcname = srcname
        # Prefer the .py source over a compiled .pyc/.pyo path.
        if srcname.endswith('.pyc') or srcname.endswith('.pyo'):
            srcname = srcname[:-1]
        if os.path.exists(srcname):
            self.srcname = srcname
            # Read the whole source so __call__ can locate the template's
            # true starting line.  NOTE: py2-era file() builtin.
            self.srctext = file(srcname).read()
        else:
            # missing source, what to do?
            self.srctext = None
    def __call__(self, src, args=None):
        """ instance NiceCompile (src, args) -- formats src with args
        and returns a code object ready for exec. Instead of <string>,
        the code object has correct co_filename and line numbers.
        Indentation is automatically corrected.
        """
        if self.srctext:
            try:
                # Locate the template text inside the real source file to
                # recover its true starting line number.
                p = self.srctext.index(src)
            except ValueError:
                msg = "Source text not found in %s - use a raw string" % self.srcname
                raise ValueError(msg)
            prelines = self.srctext[:p].count("\n") + 1
        else:
            prelines = 0
        # adjust indented def
        for line in src.split('\n'):
            content = line.strip()
            if content and not content.startswith('#'):
                break
        # see if first line is indented
        if line and line[0].isspace():
            # fake a block
            prelines -= 1
            src = 'if 1:\n' + src
        # Pad with blank lines so compiled line numbers match the file.
        if args is not None:
            src = '\n' * prelines + src % args
        else:
            src = '\n' * prelines + src
        c = compile(src, self.srcname, "exec")
        # preserve the arguments of the code in an attribute
        # of the code's co_filename
        if self.srcname:
            srcname = MyStr(self.srcname)
            if args is not None:
                srcname.__sourceargs__ = args
            c = newcode_withfilename(c, srcname)
        return c
def getsource(object):
    """Like ``inspect.getsource``, but also recovers the formatting arguments
    of generated code whose filename (a ``MyStr``) carries ``__source__`` /
    ``__sourceargs__`` attributes.

    Returns ``None`` when the source cannot be retrieved.
    """
    name = inspect.getfile(object)
    if hasattr(name, '__source__'):
        # Generated code: the filename object itself holds the template.
        src = str(name.__source__)
    else:
        try:
            src = inspect.getsource(object)
        except Exception:
            # Usually IOError or IndentationError, but rarely others such
            # as IndexError -- treat all of them as "no source available".
            return None
    if hasattr(name, '__sourceargs__'):
        # Re-apply the original % formatting arguments.
        src = src % name.__sourceargs__
    return src
## the following is stolen from py.code.source.py for now.
## XXX discuss whether and how to put this functionality
## into py.code.source.
#
# various helper functions
#
class MyStr(str):
    """Plain ``str`` subclass whose instances accept extra attributes
    (used to attach ``__sourceargs__`` metadata to code filenames)."""
def newcode(fromcode, **kwargs):
    """Clone a code object, overriding any ``co_*`` fields given as keywords.

    Python 2 only: relies on the removed ``new`` module and on the py2
    positional argument order of ``new.code``.
    """
    # Default every co_* attribute from the original code object.
    names = [x for x in dir(fromcode) if x[:3] == 'co_']
    for name in names:
        if name not in kwargs:
            kwargs[name] = getattr(fromcode, name)
    return new.code(
             kwargs['co_argcount'],
             kwargs['co_nlocals'],
             kwargs['co_stacksize'],
             kwargs['co_flags'],
             kwargs['co_code'],
             kwargs['co_consts'],
             kwargs['co_names'],
             kwargs['co_varnames'],
             kwargs['co_filename'],
             kwargs['co_name'],
             kwargs['co_firstlineno'],
             kwargs['co_lnotab'],
             kwargs['co_freevars'],
             kwargs['co_cellvars'],
             )
def newcode_withfilename(co, co_filename):
    """Return a copy of code object ``co`` whose ``co_filename`` -- and that
    of every nested code constant, recursively -- is ``co_filename``."""
    newconstlist = []
    cotype = type(co)
    for c in co.co_consts:
        if isinstance(c, cotype):
            # Recurse into nested code objects (functions, classes, ...).
            c = newcode_withfilename(c, co_filename)
        newconstlist.append(c)
    return newcode(co, co_consts = tuple(newconstlist),
                   co_filename = co_filename)
# ____________________________________________________________
import __future__
def compile2(source, filename='', mode='exec', flags=
             __future__.generators.compiler_flag, dont_inherit=0):
    """
    A version of compile() that caches the code objects it returns.
    It uses py.code.compile() to allow the source to be displayed in tracebacks.
    """
    key = (source, filename, mode, flags)
    # Fast path: an identical compilation was already performed.
    if key in compile2_cache:
        return compile2_cache[key]
    co = py.code.compile(source, filename, mode, flags)
    compile2_cache[key] = co
    return co

# Cache keyed by (source, filename, mode, flags).
compile2_cache = {}
# ____________________________________________________________
def compile_template(source, resultname):
    """Compiles the source code (a string or a list/generator of lines)
    which should be a definition for a function named 'resultname'.
    The caller's global dict and local variable bindings are captured.

    Python 2 only: uses the py2 ``exec ... in`` statement syntax below.
    """
    if not isinstance(source, py.code.Source):
        if isinstance(source, str):
            lines = [source]
        else:
            lines = list(source)
        lines.append('')
        source = py.code.Source('\n'.join(lines))
    # Capture the caller's frame so its local bindings can be re-created.
    caller = sys._getframe(1)
    locals = caller.f_locals
    if locals is caller.f_globals:
        # Module level: there are no locals to capture.
        localnames = []
    else:
        localnames = locals.keys()
        localnames.sort()
    values = [locals[key] for key in localnames]
    # Wrap the definition in an outer function taking the captured locals as
    # parameters, so they behave like closure variables for 'resultname'.
    source = source.putaround(
        before = "def container(%s):" % (', '.join(localnames),),
        after = "# no unindent\n return %s" % resultname)
    d = {}
    exec source.compile() in caller.f_globals, d
    container = d['container']
    return container(*values)
# ____________________________________________________________
def func_with_new_name(func, newname, globals=None):
    """Make a renamed copy of a function.

    Python 2 only: relies on the removed ``new`` module and the py2
    ``func_*`` attribute names.
    """
    if globals is None:
        globals = func.func_globals
    f = new.function(func.func_code, globals,
                     newname, func.func_defaults,
                     func.func_closure)
    if func.func_dict:
        # Copy (not share) the attribute dict so mutating the copy's
        # attributes does not affect the original function.
        f.func_dict = {}
        f.func_dict.update(func.func_dict)
    return f
def func_renamer(newname):
    """Decorator factory: the returned decorator replaces the decorated
    function with a copy renamed to ``newname``."""
    def rename(func):
        return func_with_new_name(func, newname)
    return rename
# 256-entry translation table: ASCII letters and digits map to themselves,
# everything else maps to '_'.
PY_IDENTIFIER = ''.join([(('0' <= chr(i) <= '9' or
                           'a' <= chr(i) <= 'z' or
                           'A' <= chr(i) <= 'Z') and chr(i) or '_')
                         for i in range(256)])

# Generated identifiers are clipped to this many characters.
PY_IDENTIFIER_MAX = 120

def valid_identifier(stuff):
    """Coerce an arbitrary object's string form into a valid Python
    identifier, clipped to PY_IDENTIFIER_MAX characters."""
    text = str(stuff).translate(PY_IDENTIFIER)
    # Prepend '_' when empty or starting with a digit.
    if (not text) or ('0' <= text[0] <= '9'):
        text = '_' + text
    return text[:PY_IDENTIFIER_MAX]
# Code-object co_flags bits (see the CPython code object documentation).
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008

def has_varargs(func):
    """True if the function (or code object) declares ``*args``."""
    code = getattr(func, 'func_code', func)
    return bool(code.co_flags & CO_VARARGS)

def has_varkeywords(func):
    """True if the function (or code object) declares ``**kwargs``."""
    code = getattr(func, 'func_code', func)
    return bool(code.co_flags & CO_VARKEYWORDS)
def nice_repr_for_func(fn, name=None):
    """Return ``"(module:firstlineno)name"`` for a function-like object,
    using '?', -1 and 'UNKNOWN' as placeholders for whatever cannot be
    determined.  A ``class_`` attribute, when present, qualifies the name."""
    module_name = getattr(fn, '__module__', None)
    if name is None:
        name = getattr(fn, '__name__', None)
    bound_class = getattr(fn, 'class_', None)
    if bound_class is not None and name is not None:
        name = "%s.%s" % (bound_class.__name__, name)
    try:
        lineno = fn.func_code.co_firstlineno
    except AttributeError:
        lineno = -1
    if not module_name:
        module_name = '?'
    if not name:
        name = 'UNKNOWN'
    return "(%s:%d)%s" % (module_name, lineno, name)
|
import React, { useState } from 'react';
import { BrowserRouter, Link, Route } from 'react-router-dom';
// Minimal book-search UI: a controlled input whose current value is baked into
// the /search?q=... link; BookSearchResults is mounted on the /search route.
function BooksSearchApp() {
  const [search, setSearch] = useState('');

  // Keep the input controlled by mirroring its value into state.
  const handleChange = e => {
    setSearch(e.target.value);
  };

  // Navigation happens via the <Link>, not via form submission.
  const handleSubmit = e => {
    e.preventDefault();
  };

  return (
    <BrowserRouter>
      <h1>Books search</h1>
      <form onSubmit={handleSubmit}>
        <input
          type="text"
          placeholder="Search"
          value={search}
          onChange={handleChange}
        />
        <Link to={`/search?q=${search}`}>Search</Link>
      </form>
      <Route path="/search" component={BookSearchResults} />
    </BrowserRouter>
  );
}

export default BooksSearchApp;
function BookSearchResults() {
const params = new URLSearchParams(window.location.search);
const query = params.get('q');
return <div>List of books matching {query}!</div>;
} |
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.amazon.sync;
import java.sql.Timestamp;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.ofbiz.base.util.*;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.condition.EntityCondition;
import org.ofbiz.entity.condition.EntityConditionList;
import org.ofbiz.entity.condition.EntityOperator;
import org.ofbiz.entity.util.EntityListIterator;
import org.ofbiz.entity.util.EntityUtil;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.ServiceUtil;
import org.opentaps.amazon.AmazonConstants;
import org.opentaps.amazon.AmazonUtil;
import org.opentaps.common.util.UtilMessage;
/**
* Batch updating services to synchronize Opentaps model with Amazon.
* These services populate and flag as updated the AmazonProduct and
* AmazonProductPrice. These serve as an alternative to using ECAs
* to update the Amazon flag tables.
*/
public final class AmazonSyncServices {
private AmazonSyncServices() { }
private static final String MODULE = AmazonSyncServices.class.getName();
public static final String batchUpdateAmazonService = "opentaps.amazon.batchUpdateAmazon";
/**
 * Batch updating service that initializes new products
 * in the Opentaps Amazon system and checks if any Amazon
 * published products should be updated. This does not
 * handle price or inventory updates.
 *
 * @param dctx a <code>DispatchContext</code> value
 * @param context the service context <code>Map</code>
 * @return the service response <code>Map</code>
 */
public static Map<String, Object> batchUpdateAmazonProducts(DispatchContext dctx, Map<String, Object> context) {
    Delegator delegator = dctx.getDelegator();
    GenericValue userLogin = (GenericValue) context.get("userLogin");
    Timestamp now = UtilDateTime.nowTimestamp();
    Locale locale = (Locale) context.get("locale");
    try {
        // Find the most recent completed run of this service (newest first).
        List<GenericValue> runs = delegator.findByAnd("AmazonBatchUpdateHistory", UtilMisc.toMap("serviceName", batchUpdateAmazonService), UtilMisc.toList("completedTimestamp DESC"));
        GenericValue lastRunHistory = EntityUtil.getFirst(runs);
        // get the last time this service was completed, otherwise we're creating all amazon records from scratch
        Timestamp lastRun = null;
        if (lastRunHistory == null) {
            Debug.logInfo("First batch update for Amazon model.", MODULE);
        } else {
            lastRun = lastRunHistory.getTimestamp("completedTimestamp");
            Debug.logInfo("Last batch update for Amazon model completed on [" + lastRun + "]. Starting next batch update.", MODULE);
        }
        // If the service has never been run, assume the earliest possible date
        if (lastRun == null) {
            lastRun = new Timestamp(0);
        }
        // build a static condition for searching ProductContent images by the configured types
        EntityCondition baseConditions = EntityConditionList.makeCondition(EntityOperator.AND,
                EntityCondition.makeCondition("productContentTypeId", EntityOperator.IN, AmazonConstants.imageTypes.values()),
                EntityCondition.makeCondition("lastUpdatedStamp", EntityOperator.GREATER_THAN_EQUAL_TO, lastRun),
                EntityUtil.getFilterByDateExpr()
        );
        // create or update AmazonProduct and AmazonProductImage
        // find all products
        EntityListIterator iterator = delegator.findListIteratorByCondition("Product", null, null, null);
        try {
            GenericValue product;
            while ((product = iterator.next()) != null) {
                String productId = product.getString("productId");
                Timestamp salesDiscontinuationDate = product.getTimestamp("salesDiscontinuationDate");
                Timestamp lastModified = product.getTimestamp("lastUpdatedStamp");
                GenericValue amazonProduct = product.getRelatedOne("AmazonProduct");
                GenericValue amazonProductImage = delegator.findByPrimaryKey("AmazonProductImage", UtilMisc.toMap("productId", productId));
                boolean discontinue = UtilValidate.isNotEmpty(salesDiscontinuationDate) && salesDiscontinuationDate.before(now);
                // we'll update the Amazon model if the product has been discontinued or created/updated since the last run
                if (amazonProduct != null) {
                    if (AmazonUtil.isAmazonProductDeleted(amazonProduct)) {
                        Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProduct_ProductDeleted", locale), MODULE);
                        continue;
                    } else if (discontinue) {
                        AmazonUtil.markAmazonProductAsDeleted(amazonProduct);
                    } else if (lastModified.after(lastRun)) {
                        AmazonUtil.markAmazonProductAsUpdated(amazonProduct);
                        // Assume the images changed
                        AmazonUtil.createOrUpdateAmazonProductImage(delegator, productId, amazonProductImage);
                    } else {
                        // check if the ProductContent changed
                        List<EntityCondition> contentConditions = UtilMisc.toList(EntityCondition.makeCondition("productId", EntityOperator.EQUALS, productId), baseConditions);
                        List<GenericValue> contents = delegator.findByAnd("ProductContent", contentConditions);
                        if (UtilValidate.isNotEmpty(contents)) {
                            AmazonUtil.createOrUpdateAmazonProductImage(delegator, productId, amazonProductImage);
                        }
                    }
                    amazonProduct.store();
                } else {
                    // TODO: Evaluate whether this is a good idea or not... companies will each have their own logic which governs whether a product should be posted to Amazon. Maybe we need an AMZN_PROD_PENDING statusId?
                    // delegator.storeAll(AmazonUtil.createAmazonProductRecords(delegator, productId));
                }
            }
        } finally {
            // BUG FIX: always release the iterator; the original only closed it
            // on the success path, leaking it whenever the loop threw.
            iterator.close();
        }
        // mark service as completed
        GenericValue history = delegator.makeValue("AmazonBatchUpdateHistory",
                UtilMisc.toMap(
                        "historyId", delegator.getNextSeqId("AmazonBatchUpdateHistory"),
                        "serviceName", batchUpdateAmazonService,
                        "userLoginId", userLogin.get("userLoginId"),
                        "completedTimestamp", UtilDateTime.nowTimestamp())
        );
        history.create();
    } catch (GeneralException e) {
        return UtilMessage.createAndLogServiceError(e, MODULE);
    }
    return ServiceUtil.returnSuccess();
}
/**
* Updates AmazonProductPrice statusId to reflect changes to corresponding ProductPrice records.
* @param dctx a <code>DispatchContext</code> value
* @param context the service context <code>Map</code>
* @return the service response <code>Map</code>
*/
public static Map<String, Object> updateAmazonProductPrices(DispatchContext dctx, Map<String, Object> context) {
Delegator delegator = dctx.getDelegator();
Locale locale = (Locale) context.get("locale");
String productId = (String) context.get("productId");
String productStoreGroupId = (String) context.get("productStoreGroupId");
Map<String, Object> result = ServiceUtil.returnSuccess();
try {
// Ignore if the productStoreGroup isn't correct for Amazon
if (!AmazonConstants.priceProductStoreGroup.equalsIgnoreCase(productStoreGroupId)) {
Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProductPriceUpdate_WrongStoreGroup", UtilMisc.toMap("productId", productId), locale), MODULE);
return result;
}
// Ignore if no AmazonProductPrice record exists
GenericValue amazonProductPrice = delegator.findByPrimaryKey("AmazonProductPrice", UtilMisc.toMap("productId", productId));
if (UtilValidate.isEmpty(amazonProductPrice)) {
Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProductPriceUpdate_NoRecord", UtilMisc.toMap("productId", productId), locale), MODULE);
return result;
}
// Ignore if the AmazonProduct is marked deleted
if (AmazonUtil.isAmazonProductDeleted(delegator, productId)) {
Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProductPriceUpdate_ProductDeleted", UtilMisc.toMap("productId", productId), locale), MODULE);
return result;
}
AmazonUtil.markAmazonProductPriceAsUpdated(amazonProductPrice);
amazonProductPrice.store();
} catch (GenericEntityException e) {
return UtilMessage.createAndLogServiceError(e, MODULE);
}
return ServiceUtil.returnSuccess();
}
/**
* Updates AmazonProductInventory statusId to reflect changes to corresponding product inventory levels or ProductFacility records.
* @param dctx a <code>DispatchContext</code> value
* @param context the service context <code>Map</code>
* @return the service response <code>Map</code>
*/
public static Map<String, Object> updateAmazonProductInventory(DispatchContext dctx, Map<String, Object> context) {
Delegator delegator = dctx.getDelegator();
Locale locale = (Locale) context.get("locale");
String productId = (String) context.get("productId");
String facilityId = (String) context.get("facilityId");
Map<String, Object> result = ServiceUtil.returnSuccess();
try {
// Ignore if no AmazonProductInventory record exists
GenericValue amazonProductInventory = delegator.findByPrimaryKey("AmazonProductInventory", UtilMisc.toMap("productId", productId));
if (UtilValidate.isEmpty(amazonProductInventory)) {
Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProductInventoryUpdate_NoRecord", UtilMisc.toMap("productId", productId), locale), MODULE);
return result;
}
// Ignore if the AmazonProduct is marked deleted
if (AmazonUtil.isAmazonProductDeleted(delegator, productId)) {
Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProductInventoryUpdate_ProductDeleted", UtilMisc.toMap("productId", productId), locale), MODULE);
return result;
}
// Sanity check on the Amazon setup
GenericValue productStore = delegator.findByPrimaryKey("ProductStore", UtilMisc.toMap("productStoreId", AmazonConstants.productStoreId));
if (productStore == null) {
// this is a failure for this service, but should not cause a global rollback
return UtilMessage.createAndLogServiceFailure(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_ProductStoreNotConfigured", UtilMisc.toMap("productStoreId", AmazonConstants.productStoreId), locale), MODULE);
} else if (UtilValidate.isEmpty(productStore.getString("inventoryFacilityId"))) {
// this is a serious configuration error. If there is an amazon store but it has no facility, then it should cause a global rollback
return UtilMessage.createAndLogServiceError(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_InvalidAmazonProductStore", UtilMisc.toMap("productStoreId", AmazonConstants.productStoreId), locale), MODULE);
}
// Ignore if the facility is incorrect for Amazon
if (!productStore.getString("inventoryFacilityId").equals(facilityId)) {
Debug.logInfo(UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_IgnoringProductInventoryUpdate_WrongFacility", UtilMisc.toMap("productId", productId), locale), MODULE);
return result;
}
AmazonUtil.markAmazonProductInventoryAsUpdated(amazonProductInventory);
amazonProductInventory.store();
} catch (GenericEntityException e) {
return UtilMessage.createAndLogServiceError(e, MODULE);
}
return ServiceUtil.returnSuccess();
}
}
|
/**
 * Classic iterative binary search over an ascending-sorted int array.
 */
public class BinarySearch {

    /**
     * Searches {@code array} (which must be sorted in ascending order) for
     * {@code target}.
     *
     * @param array  sorted input array (may be empty)
     * @param target value to look for
     * @return the index of {@code target} in {@code array}, or -1 if absent
     */
    public static int binarySearch(int[] array, int target) {
        int leftIndex = 0;
        int rightIndex = array.length - 1;
        while (leftIndex <= rightIndex) {
            // BUG FIX: (leftIndex + rightIndex) / 2 can overflow int for
            // indices near Integer.MAX_VALUE; this form cannot overflow.
            int midIndex = leftIndex + (rightIndex - leftIndex) / 2;
            if (array[midIndex] == target) {
                return midIndex;
            } else if (array[midIndex] > target) {
                // target is in the left half (exclusive of mid)
                rightIndex = midIndex - 1;
            } else {
                // target is in the right half (exclusive of mid)
                leftIndex = midIndex + 1;
            }
        }
        return -1; // not found
    }

    /** Small demo driver. */
    public static void main(String[] args) {
        int[] array = {1, 3, 5, 7, 8, 9, 10};
        int target = 3;
        int result = binarySearch(array, target);
        if (result == -1) {
            System.out.println("Target not found");
        } else {
            System.out.println("Target found at index: " + result);
        }
    }
}
package usecase
import (
"encoding/json"
"os"
"time"
"github.com/omurilo/codebank/domain"
"github.com/omurilo/codebank/dto"
"github.com/omurilo/codebank/infrastructure/kafka"
)
// UseCaseTransaction wires the transaction persistence layer together with
// the Kafka producer used to broadcast processed transactions.
type UseCaseTransaction struct {
	TransactionRepository domain.TransactionRepository
	// KafkaProducer is NOT set by NewUseCaseTransaction; it must be
	// assigned separately — NOTE(review): confirm wiring at the call site.
	KafkaProducer kafka.KafkaProducer
}
// NewUseCaseTransaction builds a UseCaseTransaction around the given
// repository. The KafkaProducer field is left zero-valued and must be set
// by the caller before ProcessTransaction publishes anything.
func NewUseCaseTransaction(transactionRepository domain.TransactionRepository) UseCaseTransaction {
	return UseCaseTransaction{TransactionRepository: transactionRepository}
}
// ProcessTransaction resolves the credit card referenced by the DTO,
// validates and persists the transaction, then publishes the processed
// result (as JSON) to the Kafka topic named by the KafkaTransactionsTopic
// environment variable. It returns the stored transaction, or a zero
// transaction and a non-nil error on failure.
func (u UseCaseTransaction) ProcessTransaction(transactionDto dto.Transaction) (domain.Transaction, error) {
	creditCard := u.hydrateCreditCard(transactionDto)
	ccBalanceAndLimit, err := u.TransactionRepository.GetCreditCard(*creditCard)
	if err != nil {
		return domain.Transaction{}, err
	}
	// Copy the authoritative id/limit/balance from the stored card.
	creditCard.ID = ccBalanceAndLimit.ID
	creditCard.Limit = ccBalanceAndLimit.Limit
	creditCard.Balance = ccBalanceAndLimit.Balance
	t := u.newTransaction(transactionDto, ccBalanceAndLimit)
	t.ProcessAndValidate(creditCard)
	err = u.TransactionRepository.SaveTransaction(*t, *creditCard)
	if err != nil {
		return *t, err
	}
	// Echo the persisted identity/status back onto the DTO before publishing.
	transactionDto.ID = t.ID
	transactionDto.CreatedAt = t.CreatedAt
	transactionDto.Status = t.Status
	transactionJson, err := json.Marshal(transactionDto)
	if err != nil {
		// BUG FIX: previously returned a nil error here, silently
		// swallowing the marshalling failure.
		return domain.Transaction{}, err
	}
	err = u.KafkaProducer.Publish(string(transactionJson), os.Getenv("KafkaTransactionsTopic"))
	if err != nil {
		// BUG FIX: previously returned a nil error here, silently
		// swallowing the publish failure.
		return domain.Transaction{}, err
	}
	return *t, nil
}
// hydrateCreditCard copies the card-identification fields from the incoming
// DTO onto a freshly constructed domain credit card (id/limit/balance are
// filled in later from the repository).
func (u UseCaseTransaction) hydrateCreditCard(transactionDto dto.Transaction) *domain.CreditCard {
	card := domain.NewCreditCard()
	card.CVV = transactionDto.CVV
	card.ExpirationYear = transactionDto.ExpirationYear
	card.ExpirationMonth = transactionDto.ExpirationMonth
	card.Number = transactionDto.Number
	card.Name = transactionDto.Name
	return card
}
// newTransaction builds a domain transaction from the DTO, stamped with the
// current time and linked to the resolved credit card's id.
func (u UseCaseTransaction) newTransaction(transactionDto dto.Transaction, creditCard domain.CreditCard) *domain.Transaction {
	tx := domain.NewTransaction()
	tx.CreatedAt = time.Now()
	tx.Description = transactionDto.Description
	tx.Store = transactionDto.Store
	tx.Amount = transactionDto.Amount
	tx.CreditCardId = creditCard.ID
	return tx
}
|
#!/usr/bin/env bash
# Run the gqlpycgen test suite with nose: colored output, doctests and an
# HTML coverage report under htmlcov/. Optional first argument selects a
# subset of tests (passed through as --tests=...).
BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PACKAGE_NAME="gqlpycgen"
with_rednose='--rednose --force-color'
with_coverage="--cover-html-dir=${BASE_DIR}/htmlcov --with-coverage --cover-html --cover-package=${PACKAGE_NAME} --cover-erase --cover-branches"
with_doctest='--with-doctest'
# BUG FIX: quote "$1" — the unquoted form broke `test -z` and word-split or
# globbed arguments containing spaces or wildcards.
test -z "${1:-}" || dotests="--tests=${1}"
# Option variables are intentionally left unquoted so each flag word-splits.
exec nosetests ${with_rednose} -s -v ${with_doctest} ${with_coverage} --where "${BASE_DIR}/tests" ${dotests}
# py.test --cov-report term-missing --cov-report html --cov=$PACKAGE_NAME tests
|
<reponame>Joejhona/locojoe<filename>src/store/modules/auth.js
/* eslint-disable promise/param-names */
import { AUTH_REQUEST, AUTH_ERROR, AUTH_SUCCESS, AUTH_LOGOUT } from '../actions/auth'
import { USER_REQUEST } from '../actions/user'
import { getTokenUP } from '@/services/GraphqlService'
import { apitoken } from '@/services/api'
//import Router from '@/router'

// Restore any previously issued token so a page reload keeps the session.
const state = { token: localStorage.getItem('user-token') || '', status: '', hasLoadedOnce: false }
//const state = { token: '', status: '', hasLoadedOnce: false }

const getters = {
  isAuthenticated: state => !!state.token,
  authStatus: state => state.status,
}

const actions = {
  // Exchange username/password for a JWT, persist it, and install it as the
  // default Authorization header for authenticated API calls.
  [AUTH_REQUEST]: ({commit, dispatch}, user) => {
    return new Promise((resolve, reject) => {
      commit(AUTH_REQUEST)
      // BUG FIX: the password was not forwarded to the token service
      // (placeholder left where user.password belongs).
      getTokenUP({username: user.username, password: user.password})
        .then(resp => {
          let token = resp.data.data.tokenAuth.token
          // SECURITY: do not console.log the raw JWT — it is a bearer credential.
          localStorage.setItem('user-token', token)
          apitoken.defaults.headers.common['Authorization'] = `JWT ${token}`
          commit(AUTH_SUCCESS, token)
          dispatch(USER_REQUEST)
          resolve(resp)
          //Router.push('/indexjoe')
        })
        .catch(err => {
          commit(AUTH_ERROR, err)
          // Drop any stale token so route guards don't treat the user as
          // authenticated after a failed login.
          localStorage.removeItem('user-token')
          reject(err)
        })
    })
  },
  [AUTH_LOGOUT]: ({commit}) => {
    return new Promise((resolve) => {
      commit(AUTH_LOGOUT)
      localStorage.removeItem('user-token')
      resolve()
    })
  }
}

const mutations = {
  [AUTH_REQUEST]: (state) => {
    state.status = 'loading'
  },
  [AUTH_SUCCESS]: (state, resp) => {
    state.status = 'success'
    state.token = resp
    state.hasLoadedOnce = true
  },
  [AUTH_ERROR]: (state) => {
    state.status = 'error'
    state.hasLoadedOnce = true
  },
  [AUTH_LOGOUT]: (state) => {
    state.token = ''
  }
}

export default {
  state,
  getters,
  actions,
  mutations,
}
import arbolDistanciaGenetica2 as ae
import random
import math
# Per-year increase in the probability of dying once an individual has
# passed its genetic life expectancy (used in Individuo.isAlive).
INCREMENTOVIDA = 0.05
class Individuo():
    """Base class for a simulated individual in the evolutionary ecosystem.

    Tracks position in the species' genetic-distance tree (nodo), genome
    (cromosoma), energy, happiness, food inventory and pregnancy state
    (cinta), and implements the per-turn decision/action machinery.

    NOTE(review): several actions dispatched from getDecision/actuar
    (wannaProtec, wannaHide, wannaCombatir, silflar, defender, esconder,
    combatir) are only defined on subclasses; the option list offered to an
    individual is assumed to match its subclass's abilities.
    """

    def __init__(self, nodo, especie, edad, sexo, cromosoma):
        self.nodo = nodo
        # Pregnancy marker: None = not pregnant/engaged, 1 = inseminating
        # partner, or a (partner, due_age) tuple for the carrying parent.
        self.cinta = None
        self.edad = edad
        self.sexo = sexo # 1 = male
        self.felicidad = 0
        self.especie = especie
        # Starting energy equals the constitution gene.
        self.energia = cromosoma["constitucion"]
        self.inventario = 0
        self.cromosoma = cromosoma
        # Expected genome keys:
        #{
        #'fuerza' : cromosoma["fuerza"],
        #'destreza' : cromosoma["destreza"],
        #'constitucion' : cromosoma["constitucion"],
        #'velocidad' : cromosoma["velocidad"],
        #'inteligencia' : cromosoma["inteligencia"],
        #'percepcion' : cromosoma["percepcion"],
        #'esperanzaVida' : cromosoma["esperanzaVida"],
        #'fecundidad' : cromosoma["fecundidad"],
        #'madurezSexual' cromosoma["madurezSexual"]}
        self.habilidades = self.crearHabilidades()

    def calcularDistancias(self,individuo):
        # Kinship weight: genetic-tree distance to another member of the
        # same species; members of other species contribute 0.
        if individuo.especie is self.especie:
            return self.especie.getDistancia(self.nodo.indice, individuo.nodo.indice)
        return 0

    def fMutar(self, atributo):
        # Mutate a gene value by a small random step, clamped at 0.
        aux = atributo+random.randint(-2, 2)
        return aux if aux >=0 else 0

    # Skills
    def crearHabilidades(self):
        # Initial skill levels; iniciativa only counts once sexually mature.
        return {
            "Silflar": 0,
            "iniciativa": self.cromosoma["percepcion"] if self.edad >= self.cromosoma["madurezSexual"] else 0,
            "Defender": 0}

    def getBonus(self, habilidad):
        # Skill level = number of doublings starting from 10 needed to
        # reach the raw skill counter (roughly log2(habilidad / 10)).
        i = 10
        level = 0
        while i < habilidad:
            i += i
            level+= 1
        return level

    def getCapacidadCarga(self):
        # Carrying capacity grows with age, capped at +10.
        return (self.cromosoma["fuerza"] + self.cromosoma["constitucion"]+min(self.edad,10))

    def getCapacidadEnergia(self):
        # Maximum energy reserve, also age-boosted up to +10.
        return (self.cromosoma["constitucion"]*2+min(self.edad,10))

    # Reproduction
    # ALL ACTION METHODS MUST SUBTRACT THEIR ENERGY COST AND RAISE felicidad
    def isFertil(self):
        # Fertile when sexually mature and not already pregnant/engaged.
        return ((not self.cinta) and (self.edad >= self.cromosoma["madurezSexual"]))

    def danzaDelVientre(self):
        """Courtship display ("belly dance"): costs energy, no direct effect."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.energia -= 4
        print (self.nodo.indice)
        print ("DdV")

    def procrear(self,individuo):
        """Attempt to mate with `individuo`. On success the carrying parent's
        cinta records (father, due_age) and the male's cinta is set to 1."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        if self.isFertil() and individuo.isFertil():
            self.energia -= 4
            self.felicidad += 10
            #individuo.energia -= 4  (already paid during the courtship dance)
            individuo.felicidad += 10
            if self.sexo: # male
                individuo.cinta = (self, individuo.edad + 4) # second item is the age at which it gives birth
                self.cinta = 1 # we don't want promiscuous males
            else:
                self.cinta = (individuo, self.edad + 4)
                individuo.cinta = 1
        else:
            self.energia -= 4
        print (self.nodo.indice)
        print ("procrear")

    def cruce(self, cromosoma1, cromosoma2):
        """Uniform crossover with mutation: each gene is taken (mutated via
        fMutar) from one of the two parents at random."""
        genes = {}
        keys = list(cromosoma1.keys())
        for key in keys:
            if random.randrange(0,2):
                genes[key] = self.fMutar(cromosoma1[key])
            else:
                genes[key] = self.fMutar(cromosoma2[key])
        return genes

    def parir(self): # Will need changing for litters
        """Give birth once the due age stored in cinta is reached. Litter
        size is the parents' average fecundity, rounded up. Returns a
        (possibly empty) list of offspring."""
        crias = []
        if self.cinta:
            print (self.nodo.indice)
            self.felicidad += 10
            if self.sexo:
                self.cinta = None
            elif self.cinta[1] == self.edad:
                for i in range(0, math.ceil((self.cromosoma["fecundidad"]+self.cinta[0].cromosoma["fecundidad"])/2)):
                    nodo = ae.Nodo(self.nodo, self.cinta[0].nodo, self.especie)
                    print ("PARIENDO")
                    print(nodo.indice)
                    genes = self.cruce(self.cromosoma, self.cinta[0].cromosoma)
                    crias.append(Individuo(nodo, self.especie, 0, random.randrange(0,2), genes))
                self.cinta = None
        return crias

    # Feeding
    def comer(self):
        """Eat from the inventory up to the energy cap; pregnant individuals
        only absorb half of the meal's energy."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        merienda = min(self.getCapacidadEnergia()-self.energia, self.inventario)
        self.energia += merienda if not self.cinta else merienda/2
        self.inventario -= merienda
        self.felicidad += 2
        print (self.nodo.indice)
        print ("comer")

    def darComida(self, receptor):
        """Give as much food as the receiver can still carry; the happiness
        gain is weighted by kinship distance (kin selection)."""
        if (not self.isAlive()) or (not receptor.isAlive()):
            self.felicidad -= 2*self.calcularDistancias(receptor)
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.energia -= 2
        espacio = receptor.getCapacidadCarga()-receptor.inventario
        regalo = min(espacio,self.inventario)
        self.inventario -= regalo
        receptor.inventario += regalo
        self.felicidad += regalo*self.calcularDistancias(receptor)
        print (self.nodo.indice)
        print ("DarComida")

    def llorar(self):
        """Hunger cry: a cheap signal asking relatives for food."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.energia -= 1
        print (self.nodo.indice)
        print ("llorar")

    # Living
    def isAlive(self):
        """Alive while energy > 0. Past life expectancy there is a growing
        random chance of dying; energy is forced to -1 as a death marker."""
        if self.energia > 0:
            muerte = (self.edad - self.cromosoma["esperanzaVida"])*INCREMENTOVIDA
            if muerte >= random.random():
                #self.nodo.muerteFamiliar()
                self.energia = -1
                return False
            return True
        else:
            #self.nodo.muerteFamiliar()
            return False

    def crecer(self):
        # Age one step; happiness decays each year, with a reset to 50 at
        # the year sexual maturity is reached (iniciativa also kicks in).
        self.edad+=1
        self.felicidad -= 5
        if self.edad == self.cromosoma["madurezSexual"]:
            self.habilidades["iniciativa"] += self.cromosoma["percepcion"]
            self.felicidad = 50

    def vivir(self):
        # One life tick: age, then possibly give birth.
        self.crecer()
        return self.parir()

    def serComido(self, dolor):
        """In this version hunted individuals are completely devoured.
        `dolor` is a bonus for making good use of the meat.
        NOTE(review): `(0 or 1)` evaluates to 1, so the check is effectively
        `self.cinta != 1` — likely meant `self.cinta not in (0, 1)`; also
        `bonus` is computed but never added to `comida` — confirm intent."""
        bonus = 5 if self.cinta != (0 or 1) else 0
        comida = self.cromosoma["constitucion"] + min(self.edad, self.cromosoma["esperanzaVida"]/3)+dolor+self.energia
        self.energia =-1
        return comida

    def migrar(self, viaje, viajes):
        """Queue a migration in the shared `viajes` list; migrants are fully
        vulnerable while moving. Offspring conceived before the move may be
        born at the old node."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.energia -= 2
        viajes.append(viaje)
        print (self.nodo.indice)
        print ("migrar")

    # Actions
    def getDecision(self, opciones, orgia, llorones, victimas):
        """Score every offered option and return the winning (action, target)
        tuple; also registers this individual in the shared group lists
        (orgia: courting, llorones: hunger criers, victimas: hunted prey)."""
        decisiones = [] # tuples of (option, fitness)
        for opcion in opciones:
            if opcion[0] == None:
                pass
            elif opcion[0] == "Reproducirse": # Reproducirse and DdV share the same scorer
                decisiones.append(self.wannaFuck(opcion))
            elif opcion[0] == "DanzaDelVientre":
                decisiones.append(self.wannaFuck(opcion))
            elif opcion[0] == "Comer":
                decisiones.append(self.wannaEat(opcion))
            elif opcion[0] == "Silflar":
                decisiones.append(self.wannaSilflar(opcion))
            elif opcion[0] == "DarComida":
                decisiones.append(self.wannaDarComida(opcion))
            elif opcion[0] == "Defender":
                decisiones.append(self.wannaProtec(opcion))
            elif opcion[0] == "Esconder":
                decisiones.append(self.wannaHide(opcion))
            elif opcion[0] == "AlarmaHambre":
                decisiones.append(self.wannaCry(opcion))
            elif opcion[0] == "Descansar":
                decisiones.append(self.wannaDescansar(opcion))
            elif opcion[0] == "Migrar":
                decisiones.append(self.wannaMigrar(opcion))
            elif opcion[0] == "Combatir":
                decisiones.append(self.wannaCombatir(opcion))
        decision = self.compararOpciones(decisiones)
        if decision[0] == "DanzaDelVientre":
            orgia.append(self)
        elif decision[0] == "AlarmaHambre":
            llorones.append(self)
        elif decision[0] == "Silflar" and isinstance(self, Zorro):
            victimas.append(decision[1])
        # Still to add: wolf behaviour; inheritance-based dispatch remains to be designed.
        return decision

    def actuar(self, decision, viajes):
        """Execute the previously chosen (action, target) decision.
        (The liveness check could arguably have lived here instead of in
        each action method.)"""
        if decision[0] == "Reproducirse": # Reproducirse and DanzaDelVientre share the scorer
            self.procrear(decision[1])
        elif decision[0] == "DanzaDelVientre":
            self.danzaDelVientre()
        elif decision[0] == "Comer":
            self.comer()
        elif decision[0] == "Silflar":
            self.silflar(decision[1])
        elif decision[0] == "DarComida":
            self.darComida(decision[1])
        elif decision[0] == "Defender":
            self.defender(decision[1])
        elif decision[0] == "Esconder":
            self.esconder()
        elif decision[0] == "AlarmaHambre":
            self.llorar()
        elif decision[0] == "Migrar":
            self.migrar(decision[1], viajes)
        elif decision[0] == "Combatir":
            self.combatir(decision[1])

    def compararOpciones(self,opciones):
        # Highest-fitness option wins; note only the option (not its
        # fitness) is returned.
        opciones.sort(key=lambda opciones: opciones[1], reverse=True)
        #print ("Opciones")
        #print (opciones)
        return opciones[0][0]

    def wannaFuck(self, opcion):
        """TODO: decide how to properly score the urge to reproduce."""
        #if self.energia > getCapacidadEnergia*0.70:
        #    return (opcion, 5)
        #return (opcion, 2)
        # Only worthwhile when both energy and food stores are comfortable.
        if self.energia > self.getCapacidadEnergia()*0.60 and self.inventario > self.getCapacidadCarga()*0.60:
            return (opcion, 9)
        return (opcion, 0)

    def wannaEat(self, opcion):
        # Priority rises as energy reserves drop.
        if self.energia < self.getCapacidadEnergia()*0.25:
            return (opcion, 10)
        if self.energia < self.getCapacidadEnergia()*0.5:
            return (opcion, 5)
        if self.energia < self.getCapacidadEnergia()*0.75:
            return (opcion, 2)
        return (opcion, 0)

    def wannaSilflar(self, opcion):
        # Priority rises as the food inventory empties.
        if self.inventario < self.getCapacidadCarga()*0.25:
            return (opcion, 8)
        if self.inventario < self.getCapacidadCarga()*0.5:
            return (opcion, 3)
        if self.inventario < self.getCapacidadCarga()*0.75:
            return (opcion, 1)
        # Still missing: hunger criers motivating foraging; for now gathering food alone is satisfying.
        return (opcion, 0)

    def wannaDarComida(self, opcion): # opcion[1] carries the list of criers
        # Pick the crier whose kinship-weighted reward is highest.
        # NOTE(review): raises IndexError if the crier list is empty — confirm callers guard this.
        opciones = []
        for lloron in opcion[1]:
            opciones.append(self.evaluarCuidarLloron(lloron))
        opciones.sort(key=lambda opciones: opciones[1], reverse=True)
        decision = opciones[0]
        return decision

    def wannaCry(self, opcion):
        # Crying for food has a fixed medium priority.
        return (opcion, 5)

    def wannaDescansar(self,opcion):
        # Resting is the zero-priority fallback.
        return (opcion, 0)

    def wannaMigrar(self,opcion):
        # Unhappy individuals are keener to move on.
        if self.felicidad < 20:
            return (opcion, 3)
        return (opcion, 1)

    def evaluarCuidarLloron(self, lloron):
        # Kinship-weighted reward for feeding a crying relative.
        recompensa = self.energia * self.calcularDistancias(lloron)*2
        return (("DarComida", lloron), recompensa)
class Conejo(Individuo):
    """Rabbit (prey species): can hide from predators and warn/defend kin."""
    def __init__(self, nodo, especie, edad, sexo, cromosoma):
        super(Conejo, self).__init__(nodo, especie, edad, sexo, cromosoma)
        self.escondido = False  # hidden rabbits escape being hunted this turn
    def evaluarCuidarVictima(self, victima):
        # Score defending a threatened rabbit; kinship raises the reward.
        # NOTE(review): distance 1 presumably identifies the rabbit itself,
        # which must not "defend" itself — confirm against getDistancia.
        distancia = self.calcularDistancias(victima)
        if distancia == 1:
            return (("Defender", victima), 0)
        recompensa = self.energia * self.calcularDistancias(victima)*3 # Age is ignored: it would add too much complexity
        return (("Defender", victima), recompensa)
    def wannaProtec(self, opcion):
        # Pick the most rewarding victim to defend from opcion[1].
        opciones = []
        for victima in opcion[1]:
            opciones.append(self.evaluarCuidarVictima(victima))
        opciones.sort(key=lambda opciones: opciones[1], reverse=True)
        decision = opciones[0]
        return decision
    def wannaHide(self,opcion):
        # Hiding outranks every other option (highest other score is 10).
        return (opcion, 11)
    def silflar(self, victima):
        """Forage ("silflay"): consume the victim, add the yield to the
        inventory (capped at carrying capacity) and train the Silflar skill."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.energia -= 2
        self.felicidad += 2
        comida = victima.serComido(self.cromosoma["destreza"]+ self.cromosoma["inteligencia"]*2+self.getBonus(self.habilidades["Silflar"]))
        self.habilidades["Silflar"] += comida
        self.inventario = min(self.inventario+comida, self.getCapacidadCarga())
        print (self.nodo.indice)
        print ("silflar")
    def parir(self): # Will need changing for litters
        """Same as Individuo.parir but offspring are Conejo instances and
        giving birth/turn start cancels hiding."""
        crias = []
        self.escondido = False # THIS SHOULD NOT LIVE HERE (per-turn reset of hiding)
        if self.cinta:
            self.felicidad += 10
            if self.sexo:
                self.cinta = None
            elif self.cinta[1] == self.edad:
                for i in range(0, math.ceil((self.cromosoma["fecundidad"]+self.cinta[0].cromosoma["fecundidad"])/2)):
                    nodo = ae.Nodo(self.nodo, self.cinta[0].nodo, self.especie)
                    print ("PARIENDO")
                    print(nodo.indice)
                    genes = self.cruce(self.cromosoma, self.cinta[0].cromosoma)
                    crias.append(Conejo(nodo, self.especie, 0, random.randrange(0,2), genes))
                self.cinta = None
        return crias
    def esconder(self):
        # Hide for this turn; predators targeting us will fail.
        self.escondido = True
    def defender(self, receptor):
        """Warn a threatened rabbit: both hide; costly, kin-rewarding."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.esconder()
        receptor.esconder()
        self.energia -= 5
        self.felicidad += 5*self.calcularDistancias(receptor)
        print (self.nodo.indice)
        print ("defender")
class Zorro(Individuo):
    """Fox (predator): its silflar is hunting a rabbit, which may escape by
    being hidden."""
    def __init__(self, nodo, especie, edad, sexo, cromosoma):
        super(Zorro, self).__init__(nodo, especie, edad, sexo, cromosoma)
    def silflar(self, victima):
        """For foxes this is hunting: the energy cost is paid up front and
        the hunt fails (no food, no happiness) if the prey is hidden."""
        if not self.isAlive():
            print (self.nodo.indice)
            print ("Ha muerto")
            return None
        self.energia -= 2
        if victima.escondido:
            print ("Sa escapado")
            return
        self.felicidad += 2
        comida = victima.serComido(self.cromosoma["destreza"]+self.getBonus(self.habilidades["Silflar"]))
        self.habilidades["Silflar"] += comida
        self.inventario = min(self.inventario+comida, self.getCapacidadCarga())
        print (self.nodo.indice)
        print ("silflar")
    def parir(self): # Will need changing for litters
        """Same as Individuo.parir but offspring are Zorro instances."""
        crias = []
        if self.cinta:
            self.felicidad += 10
            if self.sexo:
                self.cinta = None
            elif self.cinta[1] == self.edad:
                for i in range(0, math.ceil((self.cromosoma["fecundidad"]+self.cinta[0].cromosoma["fecundidad"])/2)):
                    nodo = ae.Nodo(self.nodo, self.cinta[0].nodo, self.especie)
                    print ("PARIENDO")
                    print(nodo.indice)
                    genes = self.cruce(self.cromosoma, self.cinta[0].cromosoma)
                    crias.append(Zorro(nodo, self.especie, 0, random.randrange(0,2), genes))
                self.cinta = None
        return crias
class DawkinsEEE(Individuo):
    """Hawk/dove player for the evolutionarily stable strategy (ESS)
    experiment popularized by Dawkins. The per-round payoff accumulates in
    felicidad and is reset every turn."""
    def __init__(self, nodo, especie, edad, sexo, cromosoma):
        super(DawkinsEEE, self).__init__(nodo, especie, edad, sexo, cromosoma)
        # cromosoma["estrategia"]:
        #0 is dove
        #1 is hawk
    def vivir(self):
        # Reset the round payoff, then reproduce asexually.
        self.felicidad = 0
        return self.parir()
    def parir(self):
        # Asexual copy born already mature, carrying the same strategy.
        # NOTE(review): `genes` aliases the parent's cromosoma dict — any
        # in-place mutation would affect both; confirm this is intended.
        nodo = ae.Nodo(None, None, self.especie)
        genes = self.cromosoma
        return [DawkinsEEE(nodo, self.especie, self.cromosoma["madurezSexual"], random.randrange(0,2), genes)]
    def combatir(self,victimas):
        """Play one hawk/dove round against each opponent, applying the
        classic payoff matrix to both players' felicidad."""
        #print("combate")
        #print(str(self.cromosoma["estrategia"]) + " VS " + str(victima.cromosoma["estrategia"]))
        #print(str(self.felicidad) + " " + str(victima.felicidad))
        for victima in victimas:
            if self.cromosoma["estrategia"] == 0:
                if victima.cromosoma["estrategia"] == 0: # dove VS dove: coin flip, loser wastes time
                    if random.randrange(0,2):
                        self.felicidad += 40
                        victima.felicidad -= 10
                    else:
                        victima.felicidad += 40
                        self.felicidad -= 10
                else: # dove VS hawk: dove retreats, hawk takes all
                    victima.felicidad += 50
            else:
                if victima.cromosoma["estrategia"] == 0: # hawk VS dove
                    self.felicidad += 50
                else: # hawk VS hawk: coin flip, loser badly injured
                    if random.randrange(0,2):
                        self.felicidad += 50
                        victima.felicidad -= 100
                    else:
                        victima.felicidad += 50
                        self.felicidad -= 100
            print(str(self.felicidad) + " " + str(victima.felicidad))
    def wannaCombatir(self, opcion):
        # Fighting dominates all other options in this experiment.
        return (opcion, 100)
|
#!/bin/sh
# Load a cloudant data dump into the container, then remove the archive.
echo "backup script is not running(loading data to container...)"
# BUG FIX: abort if the data directory is missing; previously tar/rm would
# have run in whatever directory the script was started from.
cd /srv || exit 1
tar -xvf cloudant.tar
rm cloudant.tar
exit
|
#!/usr/bin/env bash
# vim:tw=0:ts=2:sw=2:et:norl:ft=sh
# Author: Landon Bouma (landonb @ retrosoft . com)
# Project: https://github.com/landonb/home-fries#🍟
# License: MIT
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
function is_mount_type_crypt () {
  local curious_path="$1"
  # Succeed (status 0) when the given mountpoint is backed by a dm-crypt
  # device according to `lsblk` TYPE/MOUNTPOINT output; nonzero otherwise.
  # The function's status is simply grep's match status.
  lsblk --output TYPE,MOUNTPOINT \
    | grep --quiet "^crypt \\+${curious_path}\$"
}
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
main () {
  # No-op placeholder: this file is meant to be sourced for its functions;
  # running it directly does nothing.
  :
}

main "$@"
# Remove the entry point so it does not leak into a sourcing shell.
unset -f main
|
<reponame>YaasinAJDI/MaterialJournal
package com.ajdi.yassin.materialjournal.utils;
import android.content.Context;
import androidx.annotation.NonNull;
import com.ajdi.yassin.materialjournal.data.source.NotesRepository;
import com.ajdi.yassin.materialjournal.data.source.local.AppDatabase;
import com.ajdi.yassin.materialjournal.data.source.local.NotesLocalDataSource;
/**
* Enables injection of production implementations for
* {@link NotesRepository} at compile time.
*
* @author <NAME>
*/
public class Injection {

    /**
     * Builds the production {@link NotesRepository}, backed by the local
     * Room database's notes DAO.
     */
    public static NotesRepository provideNotesRepository(@NonNull Context context) {
        AppDatabase database = AppDatabase.getInstance(context);
        NotesLocalDataSource localDataSource = NotesLocalDataSource.getInstance(database.notesDao());
        return NotesRepository.getInstance(localDataSource);
    }
}
|
package jsonresult
import (
"encoding/hex"
"github.com/incognitochain/incognito-chain/common"
"github.com/incognitochain/incognito-chain/common/base58"
)
// GetPublicKeyFromPaymentAddress is an RPC result exposing the same public
// key in three encodings: base58check, raw bytes (as ints) and hex.
type GetPublicKeyFromPaymentAddress struct {
	PublicKeyInBase58Check string
	PublicKeyInBytes       []int
	PublicKeyInHex         string
}
// Init populates all three encodings of the given public key bytes.
func (obj *GetPublicKeyFromPaymentAddress) Init(publicKeyInBytes []byte) {
	obj.PublicKeyInBase58Check = base58.Base58Check{}.Encode(publicKeyInBytes, common.ZeroByte)
	obj.PublicKeyInHex = hex.EncodeToString(publicKeyInBytes)
	// Preallocate to the known length instead of growing the slice on
	// every append.
	obj.PublicKeyInBytes = make([]int, 0, len(publicKeyInBytes))
	for _, v := range publicKeyInBytes {
		obj.PublicKeyInBytes = append(obj.PublicKeyInBytes, int(v))
	}
}
|
<gh_stars>1-10
package git
import (
"github.com/lightsing/makehttps/config"
"github.com/sirupsen/logrus"
"os/exec"
)
// Update runs `git pull --rebase --stat origin <branch>` inside the
// configured repository path, streaming the command's stdout and stderr
// through pipeStdout, and returns the command's exit error (if any).
// NOTE(review): the error results of StderrPipe/StdoutPipe are forwarded to
// pipeStdout unchecked — presumably pipeStdout tolerates a failed pipe;
// confirm its signature handles (io.ReadCloser, error).
func Update(config config.GitConfig) error {
	// git pull --rebase --stat origin master
	logrus.Infof("Updating %s:%s", gitNameRegex.FindString(config.Upstream), config.Branch)
	args := []string{"pull", "--rebase", "--stat", "origin", config.Branch}
	cmd := exec.Command("git", args...)
	cmd.Dir = config.Path
	go pipeStdout(cmd.StderrPipe())
	go pipeStdout(cmd.StdoutPipe())
	return cmd.Run()
}
|
#!/bin/bash
# Fail fast on errors, unset variables and pipeline failures.
set -euo pipefail
# Run the test suite under the Maven profile supplied via $PROFILE
# (required: `set -u` aborts if it is unset), using an isolated local
# Maven repository so CI runs do not share state.
MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/r2dbc-h2-maven-repository" ./mvnw -P${PROFILE} clean dependency:list test -Dsort -B
|
<filename>src/main/java/pulse/ui/frames/dialogs/FormattedInputDialog.java
package pulse.ui.frames.dialogs;
import static java.awt.BorderLayout.SOUTH;
import static java.awt.Toolkit.getDefaultToolkit;
import static javax.swing.BorderFactory.createEmptyBorder;
import static javax.swing.JOptionPane.ERROR_MESSAGE;
import static javax.swing.JOptionPane.YES_NO_OPTION;
import static javax.swing.JOptionPane.showOptionDialog;
import static javax.swing.SwingUtilities.getWindowAncestor;
import static pulse.ui.Messages.getString;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.text.ParseException;
import javax.swing.AbstractAction;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFormattedTextField;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSeparator;
import javax.swing.KeyStroke;
import javax.swing.SwingConstants;
import javax.swing.text.DefaultFormatterFactory;
import pulse.math.Segment;
import pulse.properties.NumericProperty;
import pulse.properties.NumericPropertyFormatter;
import pulse.ui.components.controllers.ConfirmAction;
@SuppressWarnings("serial")
public class FormattedInputDialog extends JDialog {
private final static int WIDTH = 550;
private final static int HEIGHT = 130;
private JFormattedTextField ftf;
private ConfirmAction confirmAction;
public FormattedInputDialog(NumericProperty p) {
this.setDefaultCloseOperation(HIDE_ON_CLOSE);
this.setMinimumSize(new Dimension(WIDTH, HEIGHT));
setLocationRelativeTo(null);
setTitle("Choose " + p.getAbbreviation(false));
var northPanel = new JPanel();
northPanel.setBorder(createEmptyBorder(5, 5, 5, 5));
northPanel.setLayout(new GridLayout());
northPanel.add(new JLabel(p.getDescriptor(true)));
northPanel.add(new JSeparator());
northPanel.add(ftf = initFormattedTextField(p));
add(northPanel, BorderLayout.CENTER);
//
var btnPanel = new JPanel();
var okBtn = new JButton("OK");
var cancelBtn = new JButton("Cancel");
btnPanel.add(okBtn);
btnPanel.add(cancelBtn);
add(btnPanel, SOUTH);
//
cancelBtn.addActionListener(event -> {
close();
});
okBtn.addActionListener(event -> {
confirmAction.onConfirm();
close();
});
}
private void close() {
this.setVisible(false);
}
private JFormattedTextField initFormattedTextField(NumericProperty p) {
var numFormatter = new NumericPropertyFormatter(p, true, false);
var inputTextField = new JFormattedTextField(numFormatter);
inputTextField.setValue(p);
inputTextField.setHorizontalAlignment(SwingConstants.CENTER);
// React when the user presses Enter while the editor is
// active. (Tab is handled as specified by
// JFormattedTextField's focusLostBehavior property.)
inputTextField.getInputMap().put(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), "check"); //$NON-NLS-1$
inputTextField.getActionMap().put("check", new AbstractAction() { //$NON-NLS-1$
@Override
public void actionPerformed(ActionEvent e) {
if (!inputTextField.isEditValid()) { // The text is invalid.
if (userSaysRevert(inputTextField, numFormatter, p)) { // reverted
inputTextField.postActionEvent(); // inform the editor
}
} else
try { // The text is valid,
inputTextField.commitEdit(); // so use it.
inputTextField.postActionEvent(); // stop editing
} catch (ParseException exc) {
}
}
});
inputTextField.setColumns(10);
return inputTextField;
}
/** Registers the callback fired when the user presses OK. */
public void setConfirmAction(ConfirmAction confirmAction) {
    this.confirmAction = confirmAction;
}
/** @return the currently registered OK callback (may be {@code null}). */
public ConfirmAction getConfirmAction() {
    return confirmAction;
}
/**
 * @return the numeric value currently committed in the text field; the
 *         field's value object is a NumericProperty, which is unwrapped here.
 */
public Number value() {
    return (Number) ((NumericProperty) ftf.getValue()).getValue();
}
/**
 * Beeps, selects the invalid text and asks the user whether to keep editing
 * or revert to the last valid value.
 *
 * @return {@code true} if the user chose to revert (the field's display text
 *         is then restored from its last committed value), {@code false} to
 *         continue editing.
 */
private static boolean userSaysRevert(JFormattedTextField inputTextField, NumericPropertyFormatter numFormatter,
        NumericProperty p) {
    getDefaultToolkit().beep();
    inputTextField.selectAll();
    Object[] options = {getString("SimpleInputFrame.Edit"), //$NON-NLS-1$
            getString("SimpleInputFrame.Revert")}; //$NON-NLS-1$
    var answer = showOptionDialog(getWindowAncestor(inputTextField),
            "The value must be a " + p.getValue().getClass().getSimpleName() + " between " //$NON-NLS-1$
                    + numFormatter.getBounds().getMinimum() + " and " //$NON-NLS-1$
                    + numFormatter.getBounds().getMaximum() + ".\n" //$NON-NLS-1$
                    + getString("SimpleInputFrame.MessageLine1") //$NON-NLS-1$
                    + getString("SimpleInputFrame.MessageLine2"), //$NON-NLS-1$
            "Invalid Text Entered", //$NON-NLS-1$
            YES_NO_OPTION, ERROR_MESSAGE, null, options, options[1]);
    if (answer == 1) { // Revert! (options[1] was selected)
        // setValue(getValue()) re-renders the field's text from its last
        // committed value, discarding the invalid edit.
        inputTextField.setValue(inputTextField.getValue());
        return true;
    }
    return false;
}
}
|
<reponame>kelvinatsplunk/splunk-operator
// Copyright (c) 2018-2020 Splunk Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package reconcile
import (
"context"
"reflect"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
logf "sigs.k8s.io/controller-runtime/pkg/log"
//stdlog "log"
//"github.com/go-logr/stdr"
"github.com/splunk/splunk-operator/pkg/splunk/resources"
)
// kubernetes logger used by the splunk.reconcile package
var log = logf.Log.WithName("splunk.reconcile")

// simple stdout logger, used for debugging
//var log = stdr.New(stdlog.New(os.Stderr, "", stdlog.LstdFlags|stdlog.Lshortfile)).WithName("splunk.reconcile")

// ResourceObject combines runtime.Object with access to its ObjectMeta,
// which is all this package needs to create/update arbitrary resources.
type ResourceObject interface {
	runtime.Object
	GetObjectMeta() metav1.Object
}

// ControllerClient embeds the Kubernetes controller-runtime client; declared
// as a local interface so callers (and tests) can substitute implementations.
type ControllerClient interface {
	client.Client
}
// CreateResource creates a new Kubernetes resource using the REST API.
// An "already exists" response is treated as success, making the call
// effectively idempotent.
func CreateResource(client ControllerClient, obj ResourceObject) error {
	meta := obj.GetObjectMeta()
	scopedLog := log.WithName("CreateResource").WithValues(
		"name", meta.GetName(),
		"namespace", meta.GetNamespace())

	if err := client.Create(context.TODO(), obj); err != nil && !errors.IsAlreadyExists(err) {
		scopedLog.Error(err, "Failed to create resource")
		return err
	}

	scopedLog.Info("Created resource")
	return nil
}
// UpdateResource updates an existing Kubernetes resource using the REST API.
// NOTE(review): the IsAlreadyExists check mirrors CreateResource; an Update
// would not normally return that error -- confirm this is intentional.
func UpdateResource(client ControllerClient, obj ResourceObject) error {
	meta := obj.GetObjectMeta()
	scopedLog := log.WithName("UpdateResource").WithValues(
		"name", meta.GetName(),
		"namespace", meta.GetNamespace())

	if err := client.Update(context.TODO(), obj); err != nil && !errors.IsAlreadyExists(err) {
		scopedLog.Error(err, "Failed to update resource")
		return err
	}

	scopedLog.Info("Updated resource")
	return nil
}
// MergePodUpdates looks for material differences between a Pod's current
// config and a revised config. It merges material changes from revised to
// current. This enables us to minimize updates. It returns true if there
// are material differences between them, or false otherwise.
func MergePodUpdates(current *corev1.PodTemplateSpec, revised *corev1.PodTemplateSpec, name string) bool {
result := MergePodSpecUpdates(¤t.Spec, &revised.Spec, name)
if MergePodMetaUpdates(¤t.ObjectMeta, &revised.ObjectMeta, name) {
result = true
}
return result
}
// MergePodMetaUpdates looks for material differences between a Pod's current
// meta data and a revised meta data. It merges material changes from revised
// to current. This enables us to minimize updates. It returns true if there
// are material differences between them, or false otherwise.
func MergePodMetaUpdates(current *metav1.ObjectMeta, revised *metav1.ObjectMeta, name string) bool {
	scopedLog := log.WithName("MergePodMetaUpdates").WithValues("name", name)
	changed := false

	// Annotations: replaced wholesale when they differ.
	if !reflect.DeepEqual(current.Annotations, revised.Annotations) {
		scopedLog.Info("Container Annotations differ",
			"current", current.Annotations,
			"revised", revised.Annotations)
		current.Annotations = revised.Annotations
		changed = true
	}

	// Labels: same treatment as annotations.
	if !reflect.DeepEqual(current.Labels, revised.Labels) {
		scopedLog.Info("Container Labels differ",
			"current", current.Labels,
			"revised", revised.Labels)
		current.Labels = revised.Labels
		changed = true
	}

	return changed
}
// MergePodSpecUpdates looks for material differences between a Pod's current
// desired spec and a revised spec. It merges material changes from revised to
// current. This enables us to minimize updates. It returns true if there
// are material differences between them, or false otherwise.
func MergePodSpecUpdates(current *corev1.PodSpec, revised *corev1.PodSpec, name string) bool {
scopedLog := log.WithName("MergePodUpdates").WithValues("name", name)
result := false
// check for changes in Affinity
if resources.CompareByMarshall(current.Affinity, revised.Affinity) {
scopedLog.Info("Pod Affinity differs",
"current", current.Affinity,
"revised", revised.Affinity)
current.Affinity = revised.Affinity
result = true
}
// check for changes in SchedulerName
if current.SchedulerName != revised.SchedulerName {
scopedLog.Info("Pod SchedulerName differs",
"current", current.SchedulerName,
"revised", revised.SchedulerName)
current.SchedulerName = revised.SchedulerName
result = true
}
// check for changes in container images; assume that the ordering is same for pods with > 1 container
if len(current.Containers) != len(revised.Containers) {
scopedLog.Info("Pod Container counts differ",
"current", len(current.Containers),
"revised", len(revised.Containers))
current.Containers = revised.Containers
result = true
} else {
for idx := range current.Containers {
// check Image
if current.Containers[idx].Image != revised.Containers[idx].Image {
scopedLog.Info("Pod Container Images differ",
"current", current.Containers[idx].Image,
"revised", revised.Containers[idx].Image)
current.Containers[idx].Image = revised.Containers[idx].Image
result = true
}
// check Ports
if resources.CompareContainerPorts(current.Containers[idx].Ports, revised.Containers[idx].Ports) {
scopedLog.Info("Pod Container Ports differ",
"current", current.Containers[idx].Ports,
"revised", revised.Containers[idx].Ports)
current.Containers[idx].Ports = revised.Containers[idx].Ports
result = true
}
// check VolumeMounts
if resources.CompareVolumeMounts(current.Containers[idx].VolumeMounts, revised.Containers[idx].VolumeMounts) {
scopedLog.Info("Pod Container VolumeMounts differ",
"current", current.Containers[idx].VolumeMounts,
"revised", revised.Containers[idx].VolumeMounts)
current.Containers[idx].VolumeMounts = revised.Containers[idx].VolumeMounts
result = true
}
// check Resources
if resources.CompareByMarshall(¤t.Containers[idx].Resources, &revised.Containers[idx].Resources) {
scopedLog.Info("Pod Container Resources differ",
"current", current.Containers[idx].Resources,
"revised", revised.Containers[idx].Resources)
current.Containers[idx].Resources = revised.Containers[idx].Resources
result = true
}
}
}
return result
}
|
<reponame>bernardobranco/ucl-search-engine
import time
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.db.models import Count
from engine.models import WebPage
import hashlib
class Command(BaseCommand):
    """Print crawl statistics for the WebPage table.

    Outputs the number of HTML pages, the counts of 2xx/4xx/5xx responses,
    and the full HTTP-status distribution (most frequent first).
    """

    def handle(self, *args, **options):
        # Let the database count rows (.count() -> SQL COUNT) instead of
        # len(queryset), which fetched and materialised every row.
        num_html = WebPage.objects.filter(
            content_type__contains='text/html').count()
        print('NUM HTML', num_html)

        # Response counts per status-code class.
        num_2 = WebPage.objects.filter(status__startswith=2).count()
        num_4 = WebPage.objects.filter(status__startswith=4).count()
        num_5 = WebPage.objects.filter(status__startswith=5).count()
        print([num_2, num_4, num_5])

        # Full status distribution, most frequent first. Build the
        # "{'a', 'b'}" summary with join() -- the original concatenation
        # loop emitted a spurious leading ", " before the first element.
        results = WebPage.objects.values('status').annotate(
            num=Count('status')).order_by('-num')
        cstat = '{' + ', '.join("'%s'" % res['status'] for res in results) + '}'
        nums = [res['num'] for res in results]
        print(cstat)
        print(nums)
|
##############
# Install deps
##############
# Install BIND 9, which this host runs as a forwarding DNS proxy.
apt-get update
apt-get -y install bind9
##
## Zone configuration for the DNS proxy.
## The here-doc delimiter is quoted ("__EOF__"), so ${domain}, ${ip} and
## ${region} are NOT expanded by the shell -- they are template
## placeholders substituted before this script runs (e.g. by a
## templating tool such as Terraform templatefile). TODO confirm.
##
cat <<"__EOF__" > /etc/bind/named.conf.default-zones
// prime the server with knowledge of the root servers
zone "." {
	type hint;
	file "/etc/bind/db.root";
};
// be authoritative for the localhost forward and reverse zones, and for
// broadcast zones as per RFC 1912
zone "localhost" {
	type master;
	file "/etc/bind/db.local";
};
zone "${domain}" {
	type forward;
	forward first;
	forwarders { ${ip}; };
};
zone "${region}.amazonaws.com" {
	type forward;
	forward first;
	forwarders { 10.0.0.2; };
};
__EOF__
chmod 644 /etc/bind/named.conf.default-zones
##
## Global options for the DNS proxy.
## NOTE(review): acl "trusted" is defined as { any; } below, so recursion
## is open to every client that can reach this host -- confirm it is only
## reachable from inside the private network.
##
cat <<"__EOF__" > /etc/bind/named.conf.options
options {
	directory "/var/cache/bind";
	dnssec-validation auto;
	auth-nxdomain no;    # conform to RFC1035
	listen-on-v6 { any; };
	recursion yes;                 # enables resursive queries
	allow-recursion { trusted; };  # allows recursive queries from "trusted" clients
	listen-on { any; };     # ns1 private IP address - listen on private network only
	allow-transfer { none; };      # disable zone transfers by default
	forwarders {
		8.8.8.8;
		8.8.4.4;
		10.0.0.2;
		${ip};
	};
};
acl "trusted" {
	any;
};
__EOF__
chmod 644 /etc/bind/named.conf.options
##
## Defaults for the bind9 service (IPv4 only, no resolvconf).
##
cat <<"__EOF__" > /etc/default/bind9
# run resolvconf?
RESOLVCONF=no
# startup options for the server
OPTIONS="-u bind -4"
__EOF__
chmod 644 /etc/default/bind9
# Apply the new configuration.
service bind9 restart
import pyquery
class State:
    # Plain attribute bag used by HTMLParser to cache lazily-built objects.
    pass  # Define the State class if not already defined
class HTMLParser:
    """Thin wrapper exposing a lazily-constructed PyQuery over an HTML string."""

    def __init__(self, html_content):
        self.html_content = html_content
        self.state = State()

    @property
    def query(self):
        """Return the cached PyQuery document, building it on first access."""
        try:
            return self.state.pyquery
        except AttributeError:
            self.state.pyquery = pyquery.PyQuery(self.html_content)
            return self.state.pyquery
import UIKit
/// Minimal demo controller: a single centred button that shows an alert
/// when tapped.
class ViewController: UIViewController {
    let button = UIButton()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupViews()
    }

    /// Builds the view hierarchy: white background, centred "Press Me"
    /// button wired to `didTapButton`.
    func setupViews() {
        view.backgroundColor = .white
        view.addSubview(button)
        // Required when using Auto Layout constraints programmatically.
        button.translatesAutoresizingMaskIntoConstraints = false
        button.setTitle("Press Me", for: .normal)
        button.setTitleColor(.blue, for: .normal)
        NSLayoutConstraint.activate([
            button.centerYAnchor.constraint(equalTo: view.centerYAnchor),
            button.centerXAnchor.constraint(equalTo: view.centerXAnchor)
        ])
        button.addTarget(self, action: #selector(didTapButton), for: .touchUpInside)
    }

    /// Shows a confirmation alert when the button is tapped.
    @objc func didTapButton() {
        print("Button pressed")
        let alert = UIAlertController(title: "Success!", message: "You pressed the button!", preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "Ok", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }
}
<filename>FreeRTOS/Demo/WIN32-MingW/code_coverage_additions.c<gh_stars>1-10
/*
* FreeRTOS Kernel V10.2.1
* Copyright (C) 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* http://www.FreeRTOS.org
* http://aws.amazon.com/freertos
*
* 1 tab == 4 spaces!
*/
/*
 * Contains sundry tests to exercise code that is not touched by the standard
 * demo tasks (which are predominantly test tasks). Some tests are included
 * here because they can only be executed when configASSERT() is not defined.
 */
#include "FreeRTOS.h"
#include "task.h"
#include "timers.h"
#include "event_groups.h"
#include "semphr.h"
#include "stream_buffer.h"
#include "message_buffer.h"
/*-----------------------------------------------------------*/
/*
* Try creating static objects with one of the mandatory parameters set to NULL.
* This can't be done in the standard demos as asserts() will get hit.
*/
static BaseType_t prvStaticAllocationsWithNullBuffers( void );
/*
 * Code coverage analysis is performed with tracing turned off, so this
 * function executes the trace-specific utility functions that would not
 * otherwise be executed.
 */
static BaseType_t prvTraceUtils( void );
/*
* The queue peek standard demo does not cover the case where an attempt to peek
* times out, so test that case.
*/
static BaseType_t prvPeekTimeout( void );
/*
* Calls various interrupt safe functions designed to query the state of a
* queue.
*/
static BaseType_t prvQueueQueryFromISR( void );
/*
* Hits a few paths in tasks state and status query functions not otherwise hit
* by standard demo and test files.
*/
static BaseType_t prvTaskQueryFunctions( void );
/*
* None of the standard demo tasks use the task tags - exercise them here.
*/
static BaseType_t prvTaskTags( void );
/*
* Exercises a few of the query functions that are not otherwise exercised in
* the standard demo and test functions.
*/
static BaseType_t prvTimerQuery( void );
/*-----------------------------------------------------------*/
/* Verify the static-allocation APIs reject NULL buffer parameters (these
paths can only run when configASSERT() is not defined). Returns pdPASS on
success. */
static BaseType_t prvStaticAllocationsWithNullBuffers( void )
{
uint32_t ulReturned = 0;
BaseType_t xReturn = pdPASS;
UBaseType_t uxDummy = 10;

	/* Don't expect to create any of the objects as a NULL parameter is always
	passed in place of a required buffer.  Hence if all passes then none of the
	|= will be against 0, and ulReturned will still be zero at the end of this
	function. */
	ulReturned |= ( uint32_t ) xEventGroupCreateStatic( NULL );

	/* Try creating a task twice, once with puxStackBuffer NULL, and once with
	pxTaskBuffer NULL.  (Note the handle is accumulated via a uint32_t cast;
	only its NULL/non-NULL-ness matters here.) */
	ulReturned |= ( uint32_t ) xTaskCreateStatic( NULL, /* Task to run, not needed as the task is not created. */
												  "Dummy", /* Task name. */
												  configMINIMAL_STACK_SIZE,
												  NULL,
												  tskIDLE_PRIORITY,
												  NULL,
												  ( StaticTask_t * ) &xReturn ); /* Dummy value just to pass a non NULL value in - won't get used. */

	ulReturned |= ( uint32_t ) xTaskCreateStatic( NULL, /* Task to run, not needed as the task is not created. */
												  "Dummy", /* Task name. */
												  configMINIMAL_STACK_SIZE,
												  NULL,
												  tskIDLE_PRIORITY,
												  ( StackType_t * ) &xReturn, /* Dummy value just to pass a non NULL value in - won't get used. */
												  NULL );

	ulReturned |= ( uint32_t ) xQueueCreateStatic( uxDummy,
												   uxDummy,
												   ( uint8_t * ) &xReturn, /* Dummy value just to pass a non NULL value in - won't get used. */
												   NULL );

	/* Try creating a stream buffer twice, once with pucStreamBufferStorageArea
	set to NULL, and once with pxStaticStreamBuffer set to NULL. */
	ulReturned |= ( uint32_t ) xStreamBufferCreateStatic( uxDummy,
														  uxDummy,
														  NULL,
														  ( StaticStreamBuffer_t * ) &xReturn ); /* Dummy value just to pass a non NULL value in - won't get used. */

	ulReturned |= ( uint32_t ) xStreamBufferCreateStatic( uxDummy,
														  uxDummy,
														  ( uint8_t * ) &xReturn, /* Dummy value just to pass a non NULL value in - won't get used. */
														  NULL );

	/* Try to create a task with a stack that is too large to be allocated. */
	if( xTaskCreate( NULL, "TooLarge", configTOTAL_HEAP_SIZE, NULL, tskIDLE_PRIORITY, NULL ) != errCOULD_NOT_ALLOCATE_REQUIRED_MEMORY )
	{
		xReturn = pdFAIL;
	}

	if( ulReturned != 0 )
	{
		/* Something returned a non-NULL value. */
		xReturn = pdFAIL;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Exercise the trace utility accessors (set/get "numbers" and type queries)
for event groups, queues, tasks, stream buffers and message buffers, which
are not touched when tracing is off.  Returns pdPASS on success. */
static BaseType_t prvTraceUtils( void )
{
EventGroupHandle_t xEventGroup;
QueueHandle_t xQueue;
BaseType_t xReturn = pdPASS;
const UBaseType_t xNumber = ( UBaseType_t ) 100, xQueueLength = ( UBaseType_t ) 1;
UBaseType_t uxValue;
TaskHandle_t xTaskHandle;
StreamBufferHandle_t xStreamBuffer;
MessageBufferHandle_t xMessageBuffer;

	/* Exercise the event group trace utilities. */
	xEventGroup = xEventGroupCreate();

	if( xEventGroup != NULL )
	{
		vEventGroupSetNumber( xEventGroup, xNumber );

		/* A NULL handle must read back as 0, a real handle as the value set. */
		if( uxEventGroupGetNumber( NULL ) != 0 )
		{
			xReturn = pdFAIL;
		}
		if( uxEventGroupGetNumber( xEventGroup ) != xNumber )
		{
			xReturn = pdFAIL;
		}

		vEventGroupDelete( xEventGroup );
	}
	else
	{
		xReturn = pdFAIL;
	}

	/* Exercise the queue trace utilities. */
	xQueue = xQueueCreate( xQueueLength, ( UBaseType_t ) sizeof( uxValue ) );
	if( xQueue != NULL )
	{
		vQueueSetQueueNumber( xQueue, xNumber );
		if( uxQueueGetQueueNumber( xQueue ) != xNumber )
		{
			xReturn = pdFAIL;
		}
		if( ucQueueGetQueueType( xQueue ) != queueQUEUE_TYPE_BASE )
		{
			xReturn = pdFAIL;
		}
		vQueueDelete( xQueue );
	}
	else
	{
		xReturn = pdFAIL;
	}

	/* Exercise the task trace utilities.  Value of 100 is arbitrary, just want
	to check the value that is set is also read back. */
	uxValue = 100;
	xTaskHandle = xTaskGetCurrentTaskHandle();
	vTaskSetTaskNumber( xTaskHandle, uxValue );
	if( uxTaskGetTaskNumber( xTaskHandle ) != uxValue )
	{
		xReturn = pdFAIL;
	}
	if( uxTaskGetTaskNumber( NULL ) != 0 )
	{
		xReturn = pdFAIL;
	}

	/* Timer trace util functions are exercised in prvTimerQuery(). */

	/* Exercise the stream buffer utilities.  Try creating with a trigger level
	of 0, it should then get capped to 1. */
	xStreamBuffer = xStreamBufferCreate( sizeof( uint32_t ), 0 );
	if( xStreamBuffer != NULL )
	{
		vStreamBufferSetStreamBufferNumber( xStreamBuffer, uxValue );
		if( uxStreamBufferGetStreamBufferNumber( xStreamBuffer ) != uxValue )
		{
			xReturn = pdFALSE;
		}
		if( ucStreamBufferGetStreamBufferType( xStreamBuffer ) != 0 )
		{
			/* "Is Message Buffer" flag should have been 0. */
			xReturn = pdFALSE;
		}
		vStreamBufferDelete( xStreamBuffer );
	}
	else
	{
		xReturn = pdFALSE;
	}

	xMessageBuffer = xMessageBufferCreate( sizeof( uint32_t ) );
	if( xMessageBuffer != NULL )
	{
		if( ucStreamBufferGetStreamBufferType( xMessageBuffer ) == 0 )
		{
			/* "Is Message Buffer" flag should have been 1. */
			xReturn = pdFALSE;
		}
		vMessageBufferDelete( xMessageBuffer );
	}
	else
	{
		xReturn = pdFALSE;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Cover the xQueuePeek() timeout path, which the standard queue-peek demo
never hits.  Returns pdPASS on success. */
static BaseType_t prvPeekTimeout( void )
{
QueueHandle_t xHandle;
const UBaseType_t xQueueLength = 1;
BaseType_t xReturn = pdPASS;
TickType_t xBlockTime = ( TickType_t ) 2;
UBaseType_t uxReceived;

	/* Create the queue just to try peeking it while it is empty. */
	xHandle = xQueueCreate( xQueueLength, ( UBaseType_t ) sizeof( xQueueLength ) );

	if( xHandle != NULL )
	{
		if( uxQueueMessagesWaiting( xHandle ) != 0 )
		{
			xReturn = pdFAIL;
		}

		/* Ensure peeking from the queue times out as the queue is empty. */
		if( xQueuePeek( xHandle, &uxReceived, xBlockTime ) != pdFALSE )
		{
			xReturn = pdFAIL;
		}

		vQueueDelete( xHandle );
	}
	else
	{
		xReturn = pdFAIL;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Exercise the interrupt-safe queue query functions (empty/full/count) on a
one-slot queue, before and after filling it.  Returns pdPASS on success. */
static BaseType_t prvQueueQueryFromISR( void )
{
BaseType_t xReturn = pdPASS, xValue = 1;
const UBaseType_t xISRQueueLength = ( UBaseType_t ) 1;
const char *pcISRQueueName = "ISRQueue";
QueueHandle_t xISRQueue = NULL;

	xISRQueue = xQueueCreate( xISRQueueLength, ( UBaseType_t ) sizeof( BaseType_t ) );

	if( xISRQueue != NULL )
	{
		/* Registry round-trip: the name read back must match what was set. */
		vQueueAddToRegistry( xISRQueue, pcISRQueueName );
		if( strcmp( pcQueueGetName( xISRQueue ), pcISRQueueName ) )
		{
			xReturn = pdFAIL;
		}

		/* Expect the queue to be empty here. */
		if( uxQueueMessagesWaitingFromISR( xISRQueue ) != 0 )
		{
			xReturn = pdFAIL;
		}

		if( xQueueIsQueueEmptyFromISR( xISRQueue ) != pdTRUE )
		{
			xReturn = pdFAIL;
		}

		if( xQueueIsQueueFullFromISR( xISRQueue ) != pdFALSE )
		{
			xReturn = pdFAIL;
		}

		/* Now fill the queue - it only has one space. */
		if( xQueueSendFromISR( xISRQueue, &xValue, NULL ) != pdPASS )
		{
			xReturn = pdFAIL;
		}

		/* Check it now reports as full. */
		if( uxQueueMessagesWaitingFromISR( xISRQueue ) != 1 )
		{
			xReturn = pdFAIL;
		}

		if( xQueueIsQueueEmptyFromISR( xISRQueue ) != pdFALSE )
		{
			xReturn = pdFAIL;
		}

		if( xQueueIsQueueFullFromISR( xISRQueue ) != pdTRUE )
		{
			xReturn = pdFAIL;
		}

		vQueueDelete( xISRQueue );
	}
	else
	{
		xReturn = pdFAIL;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Hit the less-travelled paths of the task state/status query functions:
vTaskGetInfo() with and without the high-water mark, priority queries,
xTaskAbortDelay() on the idle task, and uxTaskGetSystemState() with both an
undersized and a full-size array.  Returns pdPASS on success. */
static BaseType_t prvTaskQueryFunctions( void )
{
static TaskStatus_t xStatus, *pxStatusArray;
TaskHandle_t xTimerTask, xIdleTask;
BaseType_t xReturn = pdPASS;
UBaseType_t uxNumberOfTasks, uxReturned, ux;
uint32_t ulTotalRunTime1, ulTotalRunTime2;
const uint32_t ulRunTimeTollerance = ( uint32_t ) 0xfff;

	/* Obtain task status with the stack high water mark and without the
	state. */
	vTaskGetInfo( NULL, &xStatus, pdTRUE, eRunning );

	if( uxTaskGetStackHighWaterMark( NULL ) != xStatus.usStackHighWaterMark )
	{
		xReturn = pdFAIL;
	}

	if( uxTaskGetStackHighWaterMark2( NULL ) != ( configSTACK_DEPTH_TYPE ) xStatus.usStackHighWaterMark )
	{
		xReturn = pdFAIL;
	}

	/* Now obtain a task status without the high water mark but with the state,
	which in the case of the suspended timer task should be reported below. */
	xTimerTask = xTimerGetTimerDaemonTaskHandle();
	vTaskSuspend( xTimerTask ); /* Should never suspend Timer task normally!. */
	vTaskGetInfo( xTimerTask, &xStatus, pdFALSE, eInvalid );
	if( xStatus.eCurrentState != eSuspended )
	{
		xReturn = pdFAIL;
	}
	if( xStatus.uxBasePriority != uxTaskPriorityGetFromISR( xTimerTask ) )
	{
		xReturn = pdFAIL;
	}
	if( xStatus.uxBasePriority != ( configMAX_PRIORITIES - 1 ) )
	{
		xReturn = pdFAIL;
	}
	xTaskResumeFromISR( xTimerTask );
	vTaskGetInfo( xTimerTask, &xStatus, pdTRUE, eInvalid );
	if( ( xStatus.eCurrentState != eReady ) && ( xStatus.eCurrentState != eBlocked ) )
	{
		xReturn = pdFAIL;
	}

	if( uxTaskGetStackHighWaterMark( xTimerTask ) != xStatus.usStackHighWaterMark )
	{
		xReturn = pdFAIL;
	}

	if( uxTaskGetStackHighWaterMark2( xTimerTask ) != ( configSTACK_DEPTH_TYPE ) xStatus.usStackHighWaterMark )
	{
		xReturn = pdFAIL;
	}

	/* Attempting to abort a delay in the idle task should be guaranteed to
	fail as the idle task should never block. */
	xIdleTask = xTaskGetIdleTaskHandle();
	if( xTaskAbortDelay( xIdleTask ) != pdFAIL )
	{
		xReturn = pdFAIL;
	}

	/* Create an array of task status objects large enough to hold information
	on the number of tasks at this time - note this may change at any time if
	higher priority tasks are executing and creating tasks. */
	uxNumberOfTasks = uxTaskGetNumberOfTasks();
	pxStatusArray = ( TaskStatus_t * ) pvPortMalloc( uxNumberOfTasks * sizeof( TaskStatus_t ) );

	if( pxStatusArray != NULL )
	{
		/* Pass part of the array into uxTaskGetSystemState() to ensure it doesn't
		try using more space than there is available. */
		uxReturned = uxTaskGetSystemState( pxStatusArray, uxNumberOfTasks / ( UBaseType_t ) 2, NULL );
		if( uxReturned != ( UBaseType_t ) 0 )
		{
			xReturn = pdFAIL;
		}

		/* Now do the same but passing in the complete array size, this is done
		twice to check for a difference in the total run time. */
		uxTaskGetSystemState( pxStatusArray, uxNumberOfTasks, &ulTotalRunTime1 );
		memset( ( void * ) pxStatusArray, 0xaa, uxNumberOfTasks * sizeof( TaskStatus_t ) );
		uxReturned = uxTaskGetSystemState( pxStatusArray, uxNumberOfTasks, &ulTotalRunTime2 );
		if( ( ulTotalRunTime2 - ulTotalRunTime1 ) > ulRunTimeTollerance )
		{
			xReturn = pdFAIL;
		}

		/* Basic sanity check of array contents. */
		for( ux = 0; ux < uxReturned; ux++ )
		{
			if( pxStatusArray[ ux ].eCurrentState >= ( UBaseType_t ) eInvalid )
			{
				xReturn = pdFAIL;
			}
			if( pxStatusArray[ ux ].uxCurrentPriority >= ( UBaseType_t ) configMAX_PRIORITIES )
			{
				xReturn = pdFAIL;
			}
		}

		vPortFree( pxStatusArray );
	}
	else
	{
		xReturn = pdFAIL;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Task-tag hook used by prvTaskTags(): simply echoes its parameter back so
the caller can verify the hook was invoked with the expected value. */
static BaseType_t prvDummyTagFunction( void *pvParameter )
{
	return ( BaseType_t ) pvParameter;
}
/*-----------------------------------------------------------*/
/* Exercise the application task-tag API (set/get/call, plus the FromISR
variants), first on another task's handle and then on the calling task via a
NULL handle.  Returns pdPASS on success. */
static BaseType_t prvTaskTags( void )
{
BaseType_t xReturn = pdPASS, xParameter = ( BaseType_t ) 0xDEADBEEF;
TaskHandle_t xTask;

	/* First try with the handle of a different task.  Use the timer task for
	convenience. */
	xTask = xTimerGetTimerDaemonTaskHandle();

	vTaskSetApplicationTaskTag( xTask, prvDummyTagFunction );
	if( xTaskGetApplicationTaskTag( xTask ) != prvDummyTagFunction )
	{
		xReturn = pdFAIL;
	}
	else
	{
		/* The hook echoes its parameter, so calling it must return exactly
		what was passed in. */
		if( xTaskCallApplicationTaskHook( xTask, ( void * ) xParameter ) != xParameter )
		{
			xReturn = pdFAIL;
		}
		if( xTaskCallApplicationTaskHook( xTask, ( void * ) NULL ) != pdFAIL )
		{
			xReturn = pdFAIL;
		}
	}

	/* Try FromISR version too. */
	if( xTaskGetApplicationTaskTagFromISR( xTask ) != prvDummyTagFunction )
	{
		xReturn = pdFAIL;
	}

	/* Now try with a NULL handle, so using this task. */
	vTaskSetApplicationTaskTag( NULL, NULL );
	if( xTaskGetApplicationTaskTag( NULL ) != NULL )
	{
		xReturn = pdFAIL;
	}
	if( xTaskGetApplicationTaskTagFromISR( NULL ) != NULL )
	{
		xReturn = pdFAIL;
	}

	vTaskSetApplicationTaskTag( NULL, prvDummyTagFunction );
	if( xTaskGetApplicationTaskTag( NULL ) != prvDummyTagFunction )
	{
		xReturn = pdFAIL;
	}
	else
	{
		if( xTaskCallApplicationTaskHook( NULL, ( void * ) xParameter ) != xParameter )
		{
			xReturn = pdFAIL;
		}
		if( xTaskCallApplicationTaskHook( NULL, ( void * ) NULL ) != pdFAIL )
		{
			xReturn = pdFAIL;
		}
	}

	/* Try FromISR version too. */
	if( xTaskGetApplicationTaskTagFromISR( NULL ) != prvDummyTagFunction )
	{
		xReturn = pdFAIL;
	}

	/* Leave the calling task's tag cleared so later tests see a clean state. */
	vTaskSetApplicationTaskTag( NULL, NULL );
	if( xTaskGetApplicationTaskTag( NULL ) != NULL )
	{
		xReturn = pdFAIL;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Exercise the timer query functions (period, name, timer number) that the
standard demos do not otherwise hit.  The timer is never started, so a NULL
callback is acceptable.  Returns pdPASS on success. */
static BaseType_t prvTimerQuery( void )
{
TimerHandle_t xTimer;
BaseType_t xReturn = pdPASS;
const char *pcTimerName = "TestTimer";
const TickType_t xTimerPeriod = ( TickType_t ) 100;
const UBaseType_t uxTimerNumber = ( UBaseType_t ) 55;

	xTimer = xTimerCreate(	pcTimerName,
							xTimerPeriod,
							pdFALSE,
							( void * ) xTimerPeriod,
							NULL ); /* Not actually going to start timer so NULL callback is ok. */

	if( xTimer != NULL )
	{
		if( xTimerGetPeriod( xTimer ) != xTimerPeriod )
		{
			xReturn = pdFAIL;
		}

		if( strcmp( pcTimerGetName( xTimer ), pcTimerName ) != 0 )
		{
			xReturn = pdFAIL;
		}

		vTimerSetTimerNumber( xTimer, uxTimerNumber );
		if( uxTimerGetTimerNumber( xTimer ) != uxTimerNumber )
		{
			xReturn = pdFAIL;
		}

		xTimerDelete( xTimer, portMAX_DELAY );
	}
	else
	{
		xReturn = pdFAIL;
	}

	return xReturn;
}
/*-----------------------------------------------------------*/
/* Entry point: runs every coverage-only test above and returns pdPASS only
if all of them passed.  The &= works because each test returns pdPASS (1)
or pdFAIL (0), so any failure clears the accumulator. */
BaseType_t xRunCodeCoverageTestAdditions( void )
{
BaseType_t xReturn = pdPASS;

	xReturn &= prvStaticAllocationsWithNullBuffers();
	xReturn &= prvTraceUtils();
	xReturn &= prvPeekTimeout();
	xReturn &= prvQueueQueryFromISR();
	xReturn &= prvTaskQueryFunctions();
	xReturn &= prvTaskTags();
	xReturn &= prvTimerQuery();

	return xReturn;
}
/*-----------------------------------------------------------*/
|
#include <stdio.h>
#include <stdlib.h>

/* Number of integers read from the user and round-tripped through the file. */
#define max 5

int main(){
    int vet[max];   /* values typed by the user          */
    int vetB[max];  /* values read back from entrada.txt */
    int num;
    FILE *fp = NULL;

    /* Read `max` integers from stdin. */
    for(int i = 0; i < max; i++){
        printf("digite um numero: ");
        scanf("%d", &vet[i]);
    }

    /* Write them to the file as text, one per line. */
    if(fp = fopen("entrada.txt", "w")){
        for(int i = 0; i < max; i++){
            fprintf(fp, "%d \n", vet[i]);
        }
        printf("dados adicionados no arquivo com sucesso!");
        fclose(fp);
    }else{
        printf("deu erro!");
    }

    /* Read the values back.  The file was written as *text* with fprintf,
     * so it must be read back with fscanf.  The original code called
     * fread(num, ...) which (a) passed the int by value instead of &num
     * (undefined behaviour) and (b) treated the text file as raw binary
     * ints.  Also bound j so vetB can never overflow. */
    if(fp = fopen("entrada.txt", "r")){
        int j = 0;
        while(j < max && fscanf(fp, "%d", &num) == 1){
            vetB[j] = num;
            j++;
        }
        fclose(fp);
    }else{
        printf("erro ao abrir o arquivo!");
    }

    printf("\nvetor a: \n");
    for(int i = 0; i < max; i++){
        printf("%d ", vet[i]);
    }
    printf("\nvetor b: \n");
    for(int i = 0; i < max; i++){
        printf("%d ", vetB[i]);
    }
    return 0;
}
|
<filename>src/main/java/tae/cosmetics/gui/util/packet/server/SPacketRecipeBookModule.java<gh_stars>10-100
package tae.cosmetics.gui.util.packet.server;
import java.awt.Color;
import net.minecraft.network.play.server.SPacketRecipeBook;
import tae.cosmetics.gui.util.packet.AbstractPacketModule;
/**
 * Packet-inspector module that renders the fields of an
 * {@link SPacketRecipeBook} (open flag, state, filtering flag) on screen.
 */
public class SPacketRecipeBookModule extends AbstractPacketModule {
    // The captured packet whose fields are displayed.
    private SPacketRecipeBook packet;

    public SPacketRecipeBookModule(SPacketRecipeBook packet, long timestamp) {
        super("Recipe Book information.", timestamp, packet);
        this.packet = packet;
    }

    /**
     * Draws the packet name, and -- unless minimized -- one line per field,
     * spaced 14px apart.
     */
    @Override
    public void drawText(int x, int y) {
        fontRenderer.drawString("SPacketRecipeBook", x, y, Color.WHITE.getRGB());
        if(!minimized) {
            fontRenderer.drawString("Open: " + packet.isGuiOpen(), x, y + 14, Color.WHITE.getRGB());
            fontRenderer.drawString("State: " + packet.getState(), x, y + 28, Color.WHITE.getRGB());
            fontRenderer.drawString("Filtering: " + packet.isFilteringCraftable(), x, y + 42, Color.WHITE.getRGB());
        }
    }

    // NOTE(review): the meaning of type() is defined by AbstractPacketModule
    // (not visible here); this module always reports false.
    @Override
    public boolean type() {
        return false;
    }
}
|
#pragma once
#include <iostream>
#include <memory>
#include "unittest.h"
/// Node of a binary tree holding an int payload; children are held by
/// shared_ptr so subtrees are reference-counted.
class BinaryNode {
public:
  typedef std::shared_ptr<BinaryNode> Ptr;

  // Construct a leaf (children null) or an interior node; the raw child
  // pointers are adopted into shared_ptrs (see constructor definitions).
  BinaryNode(int iData);
  BinaryNode(int iData, BinaryNode *iLeft, BinaryNode *iRight);

  int mData;   // payload
  Ptr mLeft;   // left child, may be null
  Ptr mRight;  // right child, may be null
};
typedef BinaryNode::Ptr BinaryNodePtr;
/// Unit-test fixture exercising balance and BST-ordering checks on
/// hand-built binary trees.
class BinaryTree : public UnitTest
{
public:
  // Tree builders used as fixtures for the checks below.
  BinaryNodePtr buildUnbalancedTree();
  BinaryNodePtr buildBalancedTree();

  // Balance check; the helper also reports subtree depth via `depth`.
  bool isBalanced(BinaryNodePtr node);
  bool isBalancedHelper(BinaryNodePtr node, int &depth);

  //! Check if the ordering of the nodes is violated during traversal
  bool processNode(BinaryNodePtr n, int &prev);

  //! Check if the tree is a sorted Binary Search Tree.
  bool isSorted(BinaryNodePtr node);
  bool isSortedRecursive(BinaryNodePtr node, int &prev);

  // UnitTest entry point.
  virtual TestResult test() override;
};
|
<filename>omf-impl/src/main/java/org/om/core/impl/persistence/interceptor/handler/collection/wrapper/ReferenceListWrapper.java
package org.om.core.impl.persistence.interceptor.handler.collection.wrapper;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.om.core.api.mapping.CollectionMapping;
import org.om.core.api.session.Session;
/**
* List handler that handles reference list types.
*
* @author <NAME>
*
*/
public class ReferenceListWrapper<T> implements List<T> {
// Backing collection of raw reference keys; resolved lazily via the session.
private final Collection<?> collection;
// Mapping metadata for this collection property (currently held, not read here).
private final CollectionMapping collectionMapping;
// Concrete type the references resolve to.
private final Class<?> implementationType;
// Session used to resolve each reference on access.
private final Session session;

/**
 * Wraps a collection of reference keys as a read-only {@link List} whose
 * elements are resolved through the session on demand.
 */
public ReferenceListWrapper(Session session, CollectionMapping collectionMapping, Class<?> implementationType, Collection<?> result) {
    this.session = session;
    this.collectionMapping = collectionMapping;
    this.implementationType = implementationType;
    this.collection = result;
}
@Override
public void add(int index, T element) {
throw new UnsupportedOperationException();
}
@Override
public boolean add(T e) {
throw new UnsupportedOperationException();
}
@Override
public boolean addAll(Collection<? extends T> c) {
throw new UnsupportedOperationException();
}
@Override
public boolean addAll(int index, Collection<? extends T> c) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
@Override
public boolean contains(Object o) {
throw new UnsupportedOperationException();
}
@Override
public boolean containsAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public T get(int index) {
if (index < 0 || index >= size())
throw new IndexOutOfBoundsException(Integer.toString(index));
int i = 0;
for (Object o : collection) {
if (index == i)
return (T) session.get(implementationType, o);
i++;
}
throw new IllegalStateException();
}
@Override
public int indexOf(Object o) {
throw new UnsupportedOperationException();
}
@Override
public boolean isEmpty() {
return collection.isEmpty();
}
@Override
public Iterator<T> iterator() {
return new ReferenceHandlingIterator(session, implementationType, collection.iterator());
}
@Override
public int lastIndexOf(Object o) {
throw new UnsupportedOperationException();
}
@Override
public ListIterator<T> listIterator() {
throw new UnsupportedOperationException();
}
@Override
public ListIterator<T> listIterator(int index) {
throw new UnsupportedOperationException();
}
@Override
public T remove(int index) {
throw new UnsupportedOperationException();
}
@Override
public boolean remove(Object o) {
throw new UnsupportedOperationException();
}
@Override
public boolean removeAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public T set(int index, T element) {
throw new UnsupportedOperationException();
}
@Override
public int size() {
return collection.size();
}
@Override
public List<T> subList(int fromIndex, int toIndex) {
throw new UnsupportedOperationException();
}
@Override
public Object[] toArray() {
throw new UnsupportedOperationException();
}
@Override
public <T> T[] toArray(T[] a) {
throw new UnsupportedOperationException();
}
}
|
# Submit an OVHcloud AI Training job:
#   -v       mounts three volumes: input sentences, output dir, run artifacts
#   --image  the container image to execute
#   -g 1     requests one GPU
ovhai job run \
  -v sentiment_fr:/app/data/sentiment_sentences \
  -v sentiment_fr_output:/app/data/output \
  -v sentiment_fr_run:/app/runs \
  --image gsalouovh/sentiment:main \
  -g 1
|
import os
import re
class DirectoryScanner:
    """Recursively scan a directory tree, partitioning entries into
    non-empty files, directories, and empty (zero-byte) files.

    Results are stored sorted on the instance as *base names*:
      - ``files``:   non-empty regular files
      - ``dirs``:    directories (each is also descended into)
      - ``empties``: zero-byte regular files

    :param top_dir: root directory to scan.
    :param ignores: optional iterable of regex patterns (strings or
        pre-compiled patterns). Entries whose base name matches any
        pattern (via ``re.match``, anchored at the start) are skipped.
        Previously ``self.ignores`` was hard-coded to ``[]`` and could
        never take effect; this parameter makes it usable while keeping
        the old no-argument behavior intact.
    """

    def __init__(self, top_dir, ignores=None):
        self.files = []
        self.dirs = []
        self.empties = []
        # Accept both plain pattern strings and pre-compiled regexes.
        self.ignores = [re.compile(p) if isinstance(p, str) else p
                        for p in (ignores or [])]
        self._scan(top_dir)
        self._sort_results()

    def _do_ignore(self, item):
        # True when the base name matches any ignore pattern.
        return any(pattern.match(item) for pattern in self.ignores)

    def _scan(self, dr):
        # NOTE: only base names are recorded, so identically named entries
        # in different subdirectories are indistinguishable in the results
        # (preserved from the original behavior).
        for item in os.listdir(dr):
            if self._do_ignore(item):
                continue
            item_path = os.path.join(dr, item)
            if os.path.isfile(item_path):
                if os.path.getsize(item_path) == 0:
                    self.empties.append(item)
                else:
                    self.files.append(item)
            elif os.path.isdir(item_path):
                self.dirs.append(item)
                self._scan(item_path)

    def _sort_results(self):
        self.files.sort()
        self.dirs.sort()
        self.empties.sort()
# Example usage — guarded so that importing this module does not attempt
# to scan the placeholder path (which would raise FileNotFoundError).
if __name__ == '__main__':
    scanner = DirectoryScanner('/path/to/directory')
    print(scanner.files)
    print(scanner.dirs)
    print(scanner.empties)
# import mock
# from django import test
# from django.conf import settings
# from model_bakery import baker
#
# from devilry.devilry_examiner.views.dashboard import crinstance_dashboard
#
#
# class TestCradminInstanceDashboard(test.TestCase):
# def test_get_rolequeryset_not_matching_requestuser(self):
# requestuser = baker.make(settings.AUTH_USER_MODEL)
# mockrequest = mock.MagicMock()
# mockrequest.user = mock.MagicMock()
# mockrequest.user.id = requestuser.id + 1
# crinstance = crinstance_dashboard.CrAdminInstance(request=mockrequest)
# self.assertEqual(0, crinstance.get_rolequeryset().count())
#
# def test_get_rolequeryset_matching_requestuser(self):
# requestuser = baker.make(settings.AUTH_USER_MODEL)
# mockrequest = mock.MagicMock()
# mockrequest.user = requestuser
# crinstance = crinstance_dashboard.CrAdminInstance(request=mockrequest)
# self.assertEqual(1, crinstance.get_rolequeryset().count())
|
<?php
// A class that forbids external cloning by declaring __clone private.
class TestClass
{
    private function __clone()
    {
        echo "Cloning is not allowed";
    }
}

// Attempts to clone the given object. For a TestClass instance the
// `clone` below is illegal from this scope because __clone is private:
// PHP raises a fatal error (\Error in PHP 7+), and the echo inside
// __clone never executes.
function testCloneMethod($object)
{
    $clonedObject = clone $object;
}

// Test the behavior of the __clone method
$testObject = new TestClass();
testCloneMethod($testObject);
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
NUM_NODES=4
DISK=./kube/kube.vmdk
GUEST_ID=debian7_64Guest
ENABLE_UUID=TRUE
INSTANCE_PREFIX=kubernetes
MASTER_TAG="${INSTANCE_PREFIX}-master"
NODE_TAG="${INSTANCE_PREFIX}-minion"
MASTER_NAME="${INSTANCE_PREFIX}-master"
MASTER_MEMORY_MB=1024
MASTER_CPU=1
NODE_NAMES=($(eval echo ${INSTANCE_PREFIX}-minion-{1..${NUM_NODES}}))
NODE_IP_RANGES="10.244.0.0/16" # Min Prefix supported is 16
MASTER_IP_RANGE="${MASTER_IP_RANGE:-10.246.0.0/24}"
NODE_MEMORY_MB=2048
NODE_CPU=1
SERVICE_CLUSTER_IP_RANGE="10.244.240.0/20" # formerly PORTAL_NET
# Optional: Enable node logging.
ENABLE_NODE_LOGGING=false
LOGGING_DESTINATION=elasticsearch
# Optional: When set to true, Elasticsearch and Kibana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_LOGGING=false
ELASTICSEARCH_LOGGING_REPLICAS=1
# Optional: Cluster monitoring to setup as part of the cluster bring up:
# none - No cluster monitoring setup
# influxdb - Heapster, InfluxDB, and Grafana
# google - Heapster, Google Cloud Monitoring, and Google Cloud Logging
ENABLE_CLUSTER_MONITORING="${KUBE_ENABLE_CLUSTER_MONITORING:-influxdb}"
# Optional: Install cluster DNS.
ENABLE_CLUSTER_DNS="${KUBE_ENABLE_CLUSTER_DNS:-true}"
DNS_SERVER_IP="10.244.240.240"
DNS_DOMAIN="cluster.local"
DNS_REPLICAS=1
# Optional: Install Kubernetes UI
ENABLE_CLUSTER_UI="${KUBE_ENABLE_CLUSTER_UI:-true}"
# We need to configure subject alternate names (SANs) for the master's certificate
# we generate. While users will connect via the external IP, pods (like the UI)
# will connect via the cluster IP, from the SERVICE_CLUSTER_IP_RANGE.
# In addition to the extra SANs here, we'll also add one for the service IP.
MASTER_EXTRA_SANS="DNS:kubernetes,DNS:kubernetes.default,DNS:kubernetes.default.svc,DNS:kubernetes.default.svc.${DNS_DOMAIN}"
# Optional: if set to true, kube-up will configure the cluster to run e2e tests.
E2E_STORAGE_TEST_ENVIRONMENT=${KUBE_E2E_STORAGE_TEST_ENVIRONMENT:-false}
|
import keras
from keras.models import Sequential
from keras.layers import Dense
# Create model: a small fully-connected regression network —
# 7 input features -> two hidden layers of 32 ReLU units -> 1 linear output.
model = Sequential()
model.add(Dense(32, input_dim=7, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(1, activation='linear'))
# Compile model with mean-squared-error loss (regression) and Adam.
model.compile(loss='mse', optimizer='adam', metrics=['mse'])
# Fit model on training data.
# NOTE(review): X_train, y_train, X_test, y_test are not defined in this
# snippet — they must be created before this line or it raises NameError.
model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=100, batch_size=32)
var Foreman = require('foreman');
var Horde = require('horde');

// Register the per-tick game handler; Foreman invokes it once per tick.
Foreman.tick((state) => {
  console.log('Ready to Work!');
});
|
#!/bin/bash
# NOTE: bash (not POSIX sh) is required here: `set -o pipefail`, the
# `function` keyword, the ERR trap, and the array syntax used below
# (RSYNC_PROTECT_TMP_FILES) are all bashisms that fail under a strict
# POSIX /bin/sh such as dash. The original `#!/bin/sh` shebang only
# worked where /bin/sh happened to be bash.
set -e
set -u
set -o pipefail

# Print the script path and failing line number on any unexpected error.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
# If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
# frameworks to, so exit 0 (signalling the script phase was successful).
exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# Copies a vendored/built framework into the app's Frameworks folder,
# strips architectures the current build does not target, re-signs it,
# and (for Xcode < 7) embeds the Swift runtime dylibs it links against.
# $1: path (or basename) of the .framework to install.
install_framework()
{
  # Locate the framework: prefer the built-products dir, then its
  # basename there, then the literal path.
  # NOTE(review): if none of the three candidates is readable, `source`
  # stays unset and the later "${source}" expansion aborts under `set -u`
  # — confirm whether that is the intended failure mode.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # Resolve the framework's executable, following one level of symlink.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# Copies a vendored dSYM bundle into DERIVED_FILES_DIR, strips invalid
# architectures from its DWARF binary, and moves (or fakes) the result in
# DWARF_DSYM_FOLDER_PATH so Xcode's input/output tracking stays satisfied.
# $1: path to the .framework.dSYM bundle. Silently does nothing if unreadable.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into the target's temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 when the binary
    # was processed, 0 when no matching architectures were found.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# Copies the bcsymbolmap file of a vendored framework into BUILT_PRODUCTS_DIR.
# $1: path to the .bcsymbolmap file.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # BUG FIX: the inner double quotes of this echo were unescaped, so the
    # string was split into alternating quoted/unquoted chunks and the
    # logged command did not match the rsync actually executed. Escaped
    # them to match the sibling echoes in install_framework/install_dsym.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# Signs $1 with the expanded code-sign identity, unless signing is
# explicitly disabled via CODE_SIGNING_REQUIRED / CODE_SIGNING_ALLOWED.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # When parallel signing is enabled the command is backgrounded with
    # '&'; the bottom of the script `wait`s for all backgrounded jobs.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes architectures from the fat binary $1 that are not in $ARCHS.
# Communicates its outcome through the global STRIP_BINARY_RETVAL:
#   1 - binary processed (possibly with slices removed)
#   0 - binary shares no architecture with the current build (warned, untouched)
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active configuration. Debug and Release
# currently install the same set; CocoaPods generates one branch per
# configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/CocoaAsyncSocket/CocoaAsyncSocket.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ZZMakeWiFiManager/ZZMakeWiFiManager.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/CocoaAsyncSocket/CocoaAsyncSocket.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/ZZMakeWiFiManager/ZZMakeWiFiManager.framework"
fi
# Wait for any codesign jobs that were backgrounded by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
package com.bitsys.common.http.entity;
import java.util.Map.Entry;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.protocol.HTTP;
import com.bitsys.common.http.header.ContentType;
import com.bitsys.common.http.util.ConversionUtils;
/**
* This class is an entity representing a list of URL-encoded pairs. This is
* typically used in HTTP POST requests.
*/
public class FormEntity extends StringEntity
{
   /**
    * Constructs a <code>FormEntity</code>.
    * <p>
    * The pairs are URL-encoded with the HTTP default content charset and
    * the entity's content type is set to
    * <code>application/x-www-form-urlencoded</code>.
    *
    * @param parameters the form parameter key-value pairs.
    */
   public FormEntity(final Iterable<? extends Entry<String, String>> parameters)
   {
      super(URLEncodedUtils.format(ConversionUtils.toNameValuePairs(parameters), HTTP.DEF_CONTENT_CHARSET),
            ContentType.APPLICATION_FORM_URLENCODED);
   }
}
|
import * as React from 'react';
import { StandardProps } from '..';
import { Orientation } from './Stepper';
import { TransitionProps } from '../transitions/transition';
/**
 * Props for the StepContent component — the collapsible content area of a
 * step inside a Stepper.
 */
export interface StepContentProps
  extends StandardProps<React.HTMLAttributes<HTMLDivElement>, StepContentClasskey> {
  /** Whether this step's content is expanded (presumably injected by the parent Step). */
  active?: boolean;
  /** NOTE(review): presumably forwarded from the parent Stepper — confirm against the implementation. */
  alternativeLabel?: boolean;
  /** Content rendered inside the step. */
  children: React.ReactNode;
  /** Whether the step has been completed. */
  completed?: boolean;
  /** Whether this is the last step. */
  last?: boolean;
  /** Whether the step is optional. */
  optional?: boolean;
  /** Stepper orientation; see the Orientation type from './Stepper'. */
  orientation?: Orientation;
  /** Transition component used to expand/collapse the content. */
  TransitionComponent?: React.ComponentType<TransitionProps>;
  /** Transition duration, or 'auto' to let the transition compute it. */
  transitionDuration?: TransitionProps['timeout'] | 'auto';
  /** Extra props spread onto the transition component. */
  TransitionProps?: TransitionProps;
}
/** CSS class keys accepted by StepContent's `classes` prop. */
export type StepContentClasskey = 'root' | 'last' | 'transition';
declare const StepContent: React.ComponentType<StepContentProps>;
export default StepContent;
|
#!/bin/bash
# ========== Experiment Seq. Idx. 417 / 20.5.3 / N. 4/6/3 - _S=20.5.3 D1_N=4 a=-1 b=-1 c=1 d=-1 e=-1 f=1 D3_N=6 g=1 h=1 i=-1 D4_N=3 j=3 ==========
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 417 / 20.5.3 / N. 4/6/3 - _S=20.5.3 D1_N=4 a=-1 b=-1 c=1 d=-1 e=-1 f=1 D3_N=6 g=1 h=1 i=-1 D4_N=3 j=3 ==========\n\n'
if [[ "Yes" == "No" ]]; then
echo 'FATAL: This treatment did not include an SVM layer.'>&2
echo ' Something very wrong happened!'>&2
exit 161
fi
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
DATASET_DIR="$JBHI_DIR/data/edra-dermoscopic-seg.598.tfr"
MODEL_DIR="$JBHI_DIR/models/deep.4"
SVM_DIR="$JBHI_DIR/svm-models"
SVM_PREFIX="$SVM_DIR/deep.4.layer.6.svm"
SVM_PATH="$SVM_PREFIX.pkl"
FEATURES_DIR="$JBHI_DIR/features"
TEST_FEATURES_PREFIX="$FEATURES_DIR/deep.4.layer.6.test.3.index.1185.test"
TEST_FEATURES_PATH="$TEST_FEATURES_PREFIX.feats.pkl"
RESULTS_DIR="$JBHI_DIR/results"
RESULTS_PREFIX="$RESULTS_DIR/deep.4.layer.6.test.3.index.1185.svm"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODEL_DIR/finish.txt:$SVM_PREFIX.finish.txt"
START_PATH="$RESULTS_PREFIX.start.txt"
FINISH_PATH="$RESULTS_PREFIX.finish.txt"
LOCK_PATH="$RESULTS_PREFIX.running.lock"
LAST_OUTPUT="$RESULTS_PATH"
# ...creates mid-way checkpoint after the expensive test features extraction
SEMIFINISH_PATH="$TEST_FEATURES_PREFIX.finish.txt"
# EXPERIMENT_STATUS=1
# STARTED_BEFORE=No
mkdir -p "$FEATURES_DIR"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
echo 'FATAL: there are uncommitted changes in your git sources file' >&2
echo ' for reproducibility, experiments only run on committed changes' >&2
echo >&2
echo ' Git status returned:'>&2
echo "$GIT_STATUS" >&2
exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
if [[ -e "$FINISH_PATH" ]]; then
echo 'INFO: this experiment has already finished' >&2
exit 163
fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
input_missing=No
for input_to_check in ${tokens_of_input[*]}; do
if [[ ! -e "$input_to_check" ]]; then
echo "ERROR: input $input_to_check missing for this experiment" >&2
input_missing=Yes
fi
done
if [[ "$input_missing" != No ]]; then
exit 164
fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# EXIT trap: releases the run lock (if this process acquired it) and maps
# the script's status-code convention onto log messages:
#   165 -> another process holds the lock (benign, discontinue quietly)
#   160 -> experiment finished successfully
#   anything else -> failure; the FINISH marker is removed so the
#                    scheduler will re-run this experiment later.
function finish_trap {
  if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
    rmdir "$LOCK_PATH" &> /dev/null
  fi
  if [[ "$FINISH_STATUS" == "165" ]]; then
    echo 'WARNING: experiment discontinued because other process holds its lock' >&2
  else
    if [[ "$FINISH_STATUS" == "160" ]]; then
      echo 'INFO: experiment finished successfully' >&2
    else
      [[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
      echo 'ERROR: an error occurred while executing the experiment' >&2
    fi
  fi
  exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
LOCK_SUCCESS=Yes
else
echo 'WARNING: this experiment is already being executed elsewhere' >&2
FINISH_STATUS="165"
exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
if [[ -e "$START_PATH" ]]; then
echo 'WARNING: this experiment is being restarted' >&2
STARTED_BEFORE=Yes
fi
#...marks start
date -u >> "$START_PATH"
echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
#...gets closest checkpoint file
MODEL_CHECKPOINT=$(ls "$MODEL_DIR/"model.ckpt-*.index | \
sed 's/.*ckpt-\([0-9]*\)\..*/\1/' | \
sort -n | \
awk -v c=1 -v t=30000 \
'NR==1{d=$c-t;d=d<0?-d:d;v=$c;next}{m=$c-t;m=m<0?-m:m}m<d{d=m;v=$c}END{print v}')
MODEL_PATH="$MODEL_DIR/model.ckpt-$MODEL_CHECKPOINT"
echo "$MODEL_PATH" >> "$START_PATH"
if [[ ! -f "$SEMIFINISH_PATH" ]]; then
#...performs preliminary feature extraction
echo Extracting SVM test features with "$MODEL_PATH"
python \
"$SOURCES_GIT_DIR/predict_image_classifier.py" \
--model_name="resnet_v2_101_seg" \
--checkpoint_path="$MODEL_PATH" \
--dataset_name=skin_lesions \
--task_name=label \
--dataset_split_name=test \
--preprocessing_name=dermatologic \
--aggressive_augmentation="False" \
--add_rotations="False" \
--minimum_area_to_crop="0.05" \
--normalize_per_image="0" \
--batch_size=1 \
--id_field_name=id \
--pool_features=avg \
--extract_features \
--output_format=pickle \
--add_scores_to_features=none \
--eval_replicas="1" \
--output_file="$TEST_FEATURES_PATH" \
--dataset_dir="$DATASET_DIR"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
if [[ "$EXPERIMENT_STATUS" != "0" || ! -e "$TEST_FEATURES_PATH" ]]; then
exit
fi
date -u >> "$SEMIFINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$SEMIFINISH_PATH"
else
echo Reloading features from "$TEST_FEATURES_PATH"
fi
#...performs prediction with SVM model
python \
"$SOURCES_GIT_DIR/predict_svm_layer.py" \
--output_file "$RESULTS_PATH" \
--input_test "$TEST_FEATURES_PATH" \
--input_model "$SVM_PATH"
# Tip: leave last the arguments that make the command fail if they're absent,
# so if there's a typo or forgotten \ the entire thing fails
EXPERIMENT_STATUS="$?"
#
#...starts training
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
if [[ "$FINISH_PATH" != "-" ]]; then
date -u >> "$FINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
fi
FINISH_STATUS="160"
fi
fi
|
<reponame>chj0911/gx-bos<filename>gx-bos-repository/src/main/java/org/gx/bos/base/core/bean/Replaceable.java
package org.gx.bos.base.core.bean;
/**
 * An entity whose sort position can be swapped with another entity's.
 * */
public interface Replaceable<T> {
/**
 * Swap the positions of the two given entities. Implementations must
 * provide this method.
 * */
public void replace(T t1, T t2);
}
|
# Start a local single-node Kubernetes cluster.
minikube start
# List pods across all namespaces to verify the cluster is up.
kubectl get po -A
# Pointer to the official quick-start documentation.
echo "https://minikube.sigs.k8s.io/docs/start/"
|
<gh_stars>1-10
#include "bstree.h"
/* Allocate a zero-initialized node holding (key, val).
   NOTE(review): the calloc result is dereferenced without a NULL check,
   so this crashes under allocation failure; callers never check for NULL
   either — confirm whether OOM is considered out of scope here. */
static
struct bstnode *node_alloc(int key, int val){
    struct bstnode *node = NULL;
    node = calloc(1, sizeof(struct bstnode));
    node->key = key;
    node->val = val;
    return node;
}
/* Free the whole subtree rooted at `start` via post-order traversal:
   both children are released before the node itself. Safe on NULL. */
static
void tree_purge(struct bstnode* start){
    if (start != NULL){
        tree_purge(start->left);
        tree_purge(start->right);
        free(start);
    }
}
/* TODO modify these functions */
/* Return the address of the link that points at the in-order predecessor
   of *start (the rightmost node of its left subtree), or NULL when there
   is no left subtree. Returning a pointer-to-link lets the caller splice
   the node out without tracking its parent separately. */
static
struct bstnode **find_precedence(struct bstnode **start){
    /* find the rightmost node of left subtree */
    struct bstnode **can;
    can = &(*start)->left;
    if (*can == NULL){
        return NULL;
    }
    for (;(*can)->right != NULL;){
        can = &(*can)->right;
    }
    return can;
}
/* Mirror of find_precedence: return the address of the link pointing at
   the in-order successor of *start (the leftmost node of its right
   subtree), or NULL when there is no right subtree. */
static
struct bstnode **find_successor(struct bstnode **start){
    /* find the leftmost node of right subtree */
    struct bstnode **can;
    can = &(*start)->right;
    if (*can == NULL){
        return NULL;
    }
    /* only modify parent if candidate exists */
    for (;(*can)->left != NULL;){
        can = &(*can)->left;
    }
    return can;
}
/* Allocate an empty tree descriptor; calloc zeroes root and count.
   Returns NULL on allocation failure, matching calloc's contract. */
struct bstree *bstree_alloc(void){
    return calloc(1, sizeof(struct bstree));
}
void bstree_purge(struct bstree *tree){
tree_purge(tree->root);
tree->count = 0;
return;
}
/* Release every node and then the tree descriptor itself. The pointer
   must not be used after this call. */
void bstree_free(struct bstree *tree){
    tree_purge(tree->root);
    free(tree);
}
/* Insert (key, val), or overwrite val when key already exists.
   `link` always holds the address of the pointer being examined (the
   root field or a node's left/right field), so insertion at the root and
   insertion below a node are the same operation: write through `link`.
   Always returns BST_OK. */
int bstree_set(struct bstree *tree, int key, int val){
    struct bstnode **link = &tree->root;

    /* Walk down, steering left or right by key comparison. */
    while (*link != NULL){
        if (key < (*link)->key){
            link = &(*link)->left;
        } else if (key > (*link)->key){
            link = &(*link)->right;
        } else {
            /* Key already present: update in place. */
            (*link)->val = val;
            return BST_OK;
        }
    }

    /* Reached an empty slot: the key is absent, attach a new node here. */
    *link = node_alloc(key, val);
    tree->count++;
    return BST_OK;
}
/* Look up `key`; on success write its value into *res and return BST_OK.
   Returns BST_NOELEM (with *res untouched) when the key is absent. */
int bstree_get(struct bstree *tree, int key, int *res){
    struct bstnode *node = tree->root;
    while (node != NULL){
        if (key < node->key){
            node = node->left;
        } else if (key > node->key){
            node = node->right;
        } else {
            *res = node->val;
            return BST_OK;
        }
    }
    return BST_NOELEM;
}
/* once the correct entry is found:
    find the predecessor and its parent
    if the predecessor is not NULL
        copy key and val from the predecessor
        parent "adopts" the left child of the predecessor
        free the predecessor
        set the parent's right to NULL
    else if the successor is not NULL
        copy key and val from the successor
        parent "adopts" the right child of the successor
        free the successor
        set the parent's left to NULL
    else
        (node is a leaf node)
        free the content of indirect
        (essentially `free(node->left)...`)
        set the content of indirect to NULL
*/
/*
    Inspired by "Linus' good taste of coding".
    The use of `indirect` eliminates the need to check
    whether a node is the root or not, since it
    **"directly" points to the address of the field**
    (the field's type is "pointer to struct"; indirect is a
    "pointer to a pointer to struct").
    An additional benefit is that `indirect` effectively "stays" on
    the node's parent.
    If we know where the thing (the pointer to struct) is stored, we can
    use it and even modify the field that holds it.
*/
/* Remove the entry with `key`. Returns BST_OK on success, BST_NOELEM when
   the key is absent. Deletion copies the predecessor's (or successor's)
   key/val into the found node, then splices that predecessor/successor
   out of the tree; leaf nodes are freed directly. `indirect` walks by
   holding the address of the link being examined, so the root needs no
   special case. */
int bstree_delete(struct bstree *tree, int key){
    struct bstnode **indirect = &tree->root, **victim, *next, *hold;
    for (;*indirect != NULL;){
        if ((*indirect)->key == key){
            /* do delete stuff */
            victim = find_precedence(indirect);
            // we can abuse pointer to make even shorter/cleaner code here.
            if (victim != NULL){
                /* the "adopt", now victim is alone */
                // abuse ver: *victim = (*victim)->left;
                // "unlinks" the node.
                next = (*victim)->left;
                break;
            }
            victim = find_successor(indirect);
            if (victim != NULL){
                next = (*victim)->right;
                break;
            }
            /* it is a leaf node */
            free(*indirect);
            /* set the branch to NULL */
            *indirect = NULL;
            /* bypass the after loop condition check and victim-killing */
            goto decr_count;
        }
        if (key < (*indirect)->key){
            indirect = &(*indirect)->left;
        } else {
            indirect = &(*indirect)->right;
        }
        /* then continue */
    }
    if (*indirect == NULL){
        /* can't find the entry */
        return BST_NOELEM;
    }
    /* we've completed the adopt, time to kill the victim */
    (*indirect)->key = (*victim)->key;
    (*indirect)->val = (*victim)->val;
    hold = *victim;
    *victim = next;
    free(hold);
decr_count:
    tree->count--;
    return BST_OK;
}
|
<filename>src/login/components/SignupInstead.tsx
import { h } from 'preact';
import { Link } from 'preact-router/match';
import routes from '@authenticator/app/routes';
/** Login-page footer linking users without an account to the signup route. */
const SignupInstead = (): JSX.Element => (
  <div class='login-signup'>
    <span>Don't have an account? </span>
    <Link activeClassName='active' href={routes.SIGNUP}>Create one</Link>
  </div>
);
export default SignupInstead;
|
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o pipefail
HELM=${HELM:-helm}
VVP_NAMESPACE=${VVP_NAMESPACE:-vvp}
JOBS_NAMESPACE=${JOBS_NAMESPACE:-"vvp-jobs"}
# Delete the given Helm release from $VVP_NAMESPACE. Errors from helm
# (e.g. release not installed) are suppressed so cleanup keeps going.
helm_uninstall() {
  local release="$1"

  if [ -z "$release" ]; then
    echo >&2 "error: release is required"
    return 1
  fi

  $HELM --namespace "$VVP_NAMESPACE" delete "$release" 2>/dev/null || :
}
# Delete both playground namespaces; each is removed only if it exists,
# and failures are ignored so the cleanup is idempotent.
delete_namespaces() {
  kubectl get namespace "$VVP_NAMESPACE" > /dev/null 2>&1 && kubectl delete namespace "$VVP_NAMESPACE" || :
  kubectl get namespace "$JOBS_NAMESPACE" > /dev/null 2>&1 && kubectl delete namespace "$JOBS_NAMESPACE" || :
}
# Interactive teardown: confirms with the user, uninstalls all playground
# Helm releases, then deletes both namespaces. Aborts on anything other
# than an explicit "y"/"Y".
main() {
  local kube_context
  kube_context="$(kubectl config current-context)"
  echo -n "This script will delete all playground components and the '$VVP_NAMESPACE' and "
  echo "'$JOBS_NAMESPACE' namespaces from Kubernetes."
  echo
  echo "The currently configured Kubernetes context is: ${kube_context}"
  echo
  read -r -p "Are you sure you want to continue? (y/N) " yn
  case $yn in
    "y")
      ;;
    "Y")
      ;;
    *)
      echo "Aborted."
      exit 1
      ;;
  esac
  echo "> Uninstalling Helm applications..."
  helm_uninstall minio
  helm_uninstall vvp
  helm_uninstall prometheus
  helm_uninstall grafana
  helm_uninstall elasticsearch
  helm_uninstall fluentd
  helm_uninstall kibana
  echo "> Deleting Kubernetes namespaces..."
  delete_namespaces
}
main
|
<reponame>DMGithinji/Scrappy-Dictionary
import { Injectable } from '@angular/core';
import { BehaviorSubject } from 'rxjs';
const DEFAULT_LANG = 'swahili';
const ACTIVE_LANG_KEY = 'ng_scrappy_active_lang';
@Injectable({
  providedIn: 'root',
})
export class ActiveLangService {
  // Seeded from localStorage so the user's choice survives page reloads;
  // falls back to the default language when nothing has been persisted.
  private _activeLang = localStorage.getItem(ACTIVE_LANG_KEY) ?? DEFAULT_LANG;
  activeLang$ = new BehaviorSubject<string>(this._activeLang);

  /** Publish a new active language and persist it for future sessions. */
  setActiveLang(lang: string) {
    this.activeLang$.next(lang);
    localStorage.setItem(ACTIVE_LANG_KEY, lang);
  }

  /** Current language stream (BehaviorSubject: replays latest value). */
  getActiveLang() {
    return this.activeLang$;
  }
}
|
#!/usr/bin/env bash
MASON_NAME=glfw
MASON_VERSION=2017-04-07-f40d085
MASON_LIB_FILE=lib/libglfw3.a
MASON_PKGCONFIG_FILE=lib/pkgconfig/glfw3.pc
. ${MASON_DIR}/mason.sh
# Download the pinned glfw source tarball (with checksum), unpack it, and
# point MASON_BUILD_PATH at the extracted directory.
function mason_load_source {
    mason_download \
        https://github.com/mapbox/glfw/archive/f40d085e3ca4cd8d834f8bc47d9a729ae7d737e2.tar.gz \
        ae6bcfe0620bd80f103139c24474b2a2bcda8222
    mason_extract_tar_gz
    export MASON_BUILD_PATH=${MASON_ROOT}/.build/glfw-f40d085e3ca4cd8d834f8bc47d9a729ae7d737e2
}
# Configure and build glfw as a static Release library into MASON_PREFIX,
# with docs/tests/examples disabled. Uses a fresh out-of-tree build dir.
function mason_compile {
    rm -rf build
    mkdir build
    cd build
    cmake .. \
        -DCMAKE_INSTALL_PREFIX=${MASON_PREFIX} \
        -DCMAKE_INCLUDE_PATH=${MASON_PREFIX}/include \
        -DCMAKE_LIBRARY_PATH=${MASON_PREFIX}/lib \
        -DBUILD_SHARED_LIBS=OFF \
        -DGLFW_BUILD_DOCS=OFF \
        -DGLFW_BUILD_TESTS=OFF \
        -DGLFW_BUILD_EXAMPLES=OFF \
        -DCMAKE_BUILD_TYPE=Release
    make install -j${MASON_CONCURRENCY}
}
# Emit static link flags from glfw3.pc, dropping -lglfw3 itself (the
# consumer links the .a directly and only needs glfw's dependencies).
function mason_ldflags {
    LIBS=$(`mason_pkgconfig` --static --libs-only-l --libs-only-other)
    echo ${LIBS//-lglfw3/}
}
mason_run "$@"
|
#!/bin/bash
set -eu
cur=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source $cur/../_utils/test_prepare
WORK_DIR=$TEST_DIR/$TEST_NAME
API_VERSION="v1alpha1"
function prepare_data() {
    # Recreate the http_apis schema upstream and seed table t with 200 rows
    # (two inserts per iteration, j = 1..100).
    run_sql 'DROP DATABASE if exists http_apis;' $MYSQL_PORT1 $MYSQL_PASSWORD1
    run_sql 'CREATE DATABASE http_apis;' $MYSQL_PORT1 $MYSQL_PASSWORD1
    run_sql "CREATE TABLE http_apis.t(i TINYINT, j INT UNIQUE KEY);" $MYSQL_PORT1 $MYSQL_PASSWORD1
    for ((j = 1; j <= 100; j++)); do
        run_sql "INSERT INTO http_apis.t VALUES ($j,${j}000$j),($j,${j}001$j);" $MYSQL_PORT1 $MYSQL_PASSWORD1
    done
}
function run() {
# Seed upstream MySQL, then bring up one DM master and one DM worker.
prepare_data
run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
# operate mysql config to worker
cp $cur/conf/source1.yaml $WORK_DIR/source1.yaml
sed -i "/relay-binlog-name/i\relay-dir: $WORK_DIR/worker1/relay_log" $WORK_DIR/source1.yaml
# JSON-escape the yaml (literal \n for newlines, escaped double quotes) so it
# can be embedded as a single string value in the request body below.
cat $WORK_DIR/source1.yaml | sed 's/$/\\n/' | sed 's/"/\\"/g' | tr -d '\n' > $WORK_DIR/source1.yaml.bak
source_data=`cat $WORK_DIR/source1.yaml.bak`
rm $WORK_DIR/source1.yaml.bak
echo $source_data
# Register the MySQL source via the HTTP API ("op": 1 means create).
curl -X PUT 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/sources -d '{"op": 1, "config": ["'"$source_data"'"]}' > $WORK_DIR/create-source.log
check_log_contains $WORK_DIR/create-source.log "\"result\": true" 2
check_log_contains $WORK_DIR/create-source.log "\"source\": \"$SOURCE_ID1\"" 1
echo "start task and check stage"
# Same escaping trick for the task config.
cat $cur/conf/dm-task.yaml | sed 's/$/\\n/' | sed 's/"/\\"/g' | tr -d '\n' > $WORK_DIR/task.yaml.bak
task_data=`cat $WORK_DIR/task.yaml.bak`
rm $WORK_DIR/task.yaml.bak
echo $task_data
# A task that was never started must report "no source or not exist".
check_http_alive 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/status/test-task "task test-task has no source or not exist" 3
curl -X POST 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/tasks -d '{"task": "'"$task_data"'"}' > $WORK_DIR/start-task.log
check_log_contains $WORK_DIR/start-task.log "\"result\": true" 2
sleep 1
# After start, the subtask stage must be Running.
curl -X GET 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/status/test > $WORK_DIR/status.log
check_log_contains $WORK_DIR/status.log "\"stage\": \"Running\"" 1
check_log_contains $WORK_DIR/status.log "\"name\": \"test\"" 1
echo "get sub task configs"
curl -X GET 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/subtasks/test > $WORK_DIR/subtask.log
check_log_contains $WORK_DIR/subtask.log "is-sharding = false" 1
echo "get task config"
curl -X GET 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/tasks/test > $WORK_DIR/task.log
check_log_contains $WORK_DIR/task.log "is-sharding: false" 1
echo "pause task and check stage"
# "op": 2 is Pause (confirmed by the "\"op\": \"Pause\"" assertion below).
curl -X PUT 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/tasks/test -d '{ "op": 2 }' > $WORK_DIR/pause.log
check_log_contains $WORK_DIR/pause.log "\"op\": \"Pause\"" 1
sleep 1
curl -X GET 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/status/test > $WORK_DIR/status.log
check_log_contains $WORK_DIR/status.log "\"stage\": \"Paused\"" 1
check_log_contains $WORK_DIR/status.log "\"name\": \"test\"" 1
echo "resume task and check stage"
# "op": 3 is Resume.
curl -X PUT 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/tasks/test -d '{ "op": 3 }' > $WORK_DIR/resume.log
check_log_contains $WORK_DIR/resume.log "\"op\": \"Resume\"" 1
sleep 1
curl -X GET 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/status/test > $WORK_DIR/status.log
check_log_contains $WORK_DIR/status.log "\"stage\": \"Running\"" 1
check_log_contains $WORK_DIR/status.log "\"name\": \"test\"" 1
sleep 1
# List cluster members and verify the expected leader/master/worker entries.
curl -X GET "127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/members?leader=true&master=true&worker=true" > $WORK_DIR/list-member.log
check_log_contains $WORK_DIR/list-member.log "leader" 1
check_log_contains $WORK_DIR/list-member.log "masters" 1
check_log_contains $WORK_DIR/list-member.log "workers" 1
check_log_contains $WORK_DIR/list-member.log "\"name\": \"master1\"" 2 # one in leader, one in masters
check_log_contains $WORK_DIR/list-member.log "\"name\": \"worker1\"" 1
check_log_contains $WORK_DIR/list-member.log "\"stage\": \"bound\"" 1
check_log_contains $WORK_DIR/list-member.log "\"source\": \"mysql-replica-01\"" 1
sleep 1
echo "kill dm-worker1"
# Kill the worker process, then offline it through the members API.
ps aux | grep dm-worker1 |awk '{print $2}'|xargs kill || true
check_port_offline $WORKER1_PORT 20
curl -X DELETE 127.0.0.1:$MASTER_PORT/apis/${API_VERSION}/members/worker/worker1 > $WORK_DIR/offline-worker.log
check_log_contains $WORK_DIR/offline-worker.log "\"result\": true" 1
echo "check data"
# Verify upstream and downstream data are identical.
check_sync_diff $WORK_DIR $cur/conf/diff_config.toml
}
# Drop leftover downstream data and kill stray DM processes before the run.
cleanup_data http_apis
cleanup_process
# NOTE(review): "$@" would preserve argument word boundaries; $* is fine here
# only while no argument contains whitespace — confirm against the harness.
run $*
cleanup_process
echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"
|
#!/bin/bash
# Build the "zkp" image using the directory containing this script as context.
script_dir="$(dirname "$0")"
cd "$script_dir"
docker build -t zkp .
|
<reponame>snaerth/straumverk
import React, { Component } from 'react';
class Favicons extends Component {
render() {
return [
<link rel="apple-touch-icon" sizes="180x180" href="/static/favicons/apple-touch-icon.png" />,
<link rel="icon" type="image/png" sizes="32x32" href="/static/favicons/favicon-32x32.png" />,
<link rel="icon" type="image/png" sizes="16x16" href="/static/favicons/favicon-16x16.png" />,
<link rel="manifest" href="/static/favicons/site.webmanifest" />,
<link rel="mask-icon" href="/static/favicons/safari-pinned-tab.svg" color="#5bbad5" />,
<meta name="msapplication-TileColor" content="#da532c" />,
<meta name="theme-color" content="#ffffff" />,
];
}
}
export default Favicons;
|
import {Injectable} from '@angular/core';
import {ActivatedRouteSnapshot, CanActivate, Router, RouterStateSnapshot} from '@angular/router';
import {AuthorizationService} from '../../services/authorization.service';
@Injectable({
  providedIn: 'root'
})
export class LoginGuard implements CanActivate {
  constructor(
    private router: Router,
    private authorizationService: AuthorizationService
  ) {
  }

  /**
   * Lets only anonymous users reach the guarded (login) route; users who are
   * already authorized are redirected to the welcome page instead.
   */
  canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): boolean {
    const authorized = this.authorizationService.isAuthorized();
    if (authorized) {
      // Already logged in — keep the user out of the login flow.
      this.router.navigate(['/main/welcome']);
      return false;
    }
    return true;
  }
}
|
#!/bin/sh
# CocoaPods-generated "Copy Pods Resources" build-phase script.
# NOTE(review): despite the #!/bin/sh shebang, this uses bash-only features
# (function keyword, [[ ]], arrays, trap ERR, set -o pipefail) — it relies on
# the invoking shell being bash-compatible; confirm before porting.
set -e
set -u
set -o pipefail
function on_error {
echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
# If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
# resources to, so exit 0 (signalling the script phase was successful).
exit 0
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain files to copy; truncated here, appended to by install_resource.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Map the Xcode device-family setting onto ibtool/actool --target-device flags.
case "${TARGETED_DEVICE_FAMILY:-}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
# Dispatch a single resource by extension: compile storyboards/xibs/Core Data
# models in place, rsync frameworks, queue xcassets, and list everything else
# in $RESOURCES_TO_COPY for the bulk rsync at the end of the script.
install_resource()
{
# Accept both absolute paths and paths relative to the Pods root.
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
case $RESOURCE_PATH in
# Storyboards and xibs are compiled with ibtool; the echo lines mirror the
# command in the build log (|| true so logging can never fail the build).
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
# Frameworks are rsynced into the frameworks folder (with the tmp-file filter
# declared above to tolerate concurrent copies of the same dependency).
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
# Core Data models and mapping models are compiled with momc/mapc.
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
# Asset catalogs are queued and compiled in one actool invocation later.
*.xcassets)
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
# Everything else: record for the bulk rsync copy.
*)
echo "$RESOURCE_PATH" || true
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
# Per-configuration resource list generated by CocoaPods (same bundle for both).
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "${PODS_ROOT}/eUploadLib/eUploadLib/Assets/EsignUploadBundle.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "${PODS_ROOT}/eUploadLib/eUploadLib/Assets/EsignUploadBundle.bundle"
fi
# Bulk-copy all queued plain resources into the product bundle.
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# For archive/install builds, mirror the copy into the install directory too.
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
# Compile all queued asset catalogs in a single actool run (if actool exists).
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}*" ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
# Pass --app-icon only when the app-icon setting exists for this target.
if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
else
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
fi
fi
|
<reponame>Joys-digital/explorer<gh_stars>0
'use strict';
module.exports = class BlockDTO {
constructor() {
this.blockHash = null;
this.blockNumber = null;
this.from = null;
this.gas = null;
this.gasPrice = null;
this.hash = null;
this.input = null;
this.nonce = null;
this.to = null;
this.transactionIndex = null;
this.value = null;
}
};
|
package tae.cosmetics.gui.util.packet.server;
import java.awt.Color;
import net.minecraft.network.play.server.SPacketHeldItemChange;
import tae.cosmetics.gui.util.packet.AbstractPacketModule;
/**
 * Packet-viewer module that renders a received SPacketHeldItemChange
 * (server-side held-item switch) in the GUI.
 */
public class SPacketHeldItemChangeModule extends AbstractPacketModule {

    /** Captured packet, kept so the hotbar index can be rendered on demand. */
    private final SPacketHeldItemChange packet;

    public SPacketHeldItemChangeModule(SPacketHeldItemChange packet, long timestamp) {
        super("Changes player's held item", timestamp, packet);
        this.packet = packet;
    }

    @Override
    public void drawText(int x, int y) {
        final int white = Color.WHITE.getRGB();
        fontRenderer.drawString("SPacketHeldItemChange", x, y, white);
        if (minimized) {
            return;
        }
        // Expanded view: also show which hotbar slot the server selected.
        fontRenderer.drawString("Hotbar Index: " + packet.getHeldItemHotbarIndex(), x, y + 14, white);
    }

    @Override
    public boolean type() {
        return false;
    }
}
|
<reponame>feserm/BioDWH2
package de.unibi.agbi.biodwh2.drugbank.model;
import com.fasterxml.jackson.annotation.JsonValue;
/** Origin registry of a drug product record; serialized via its wire value. */
public enum ProductSource {
    FDA_NDC("FDA NDC"),
    DPD("DPD"),
    EMA("EMA");

    /** Exact string used in the serialized form. */
    public final String value;

    ProductSource(String value) {
        this.value = value;
    }

    @JsonValue
    public String toValue() {
        return value;
    }
}
|
class TreeNode:
    """A binary-tree node holding a value and two child links."""

    def __init__(self, val):
        self.val = val
        # Children start empty; callers wire them up after construction.
        self.left = self.right = None
def traverseTree(root):
    """Return every root-to-leaf path of a binary tree.

    Each path is the concatenation of str(node.val) from the root down to a
    leaf. An empty tree yields an empty list.

    Bug fix: the original built ``res`` via the helper but never returned it,
    so every call evaluated to None.
    """
    res = []

    def _walk(node, path):
        # Depth-first accumulation; a node with no children is a leaf.
        if not node:
            return
        path += str(node.val)
        if not node.left and not node.right:
            res.append(path)
            return
        _walk(node.left, path)
        _walk(node.right, path)

    _walk(root, "")
    return res
def helper(node, path, res):
    """Append every root-to-leaf path under ``node`` (prefixed by ``path``) to ``res``."""
    if not node:
        return
    extended = path + str(node.val)
    # De Morgan: "no left and no right" == "not (left or right)".
    if not (node.left or node.right):
        res.append(extended)
    helper(node.left, extended, res)
    helper(node.right, extended, res)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.