text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Prints a development version string of the form
# "0.4.0-dev+<number of commits on $1 since the most recent tag>".
#   $1: ref whose history is counted (e.g. HEAD or a branch name).
last_tag=$(git describe --tags --abbrev=0)
echo -n "0.4.0-dev+"
# Quote "$1" and "^$last_tag" so an empty argument or a tag containing
# shell metacharacters cannot be word-split or glob-expanded.
git rev-list "$1" "^$last_tag" | wc -l | sed -e 's/[^[:digit:]]//g'
|
#! /bin/bash
#PBS -l nodes=1:gpus=2
#PBS -l walltime=12:00:00
#PBS -j oe
#PBS -N TonyGPU
#PBS -q isi
# PBS batch job: reserves one node with two GPUs on queue "isi" for 12 hours
# and merges stdout/stderr (-j oe). The job itself does no work — it just
# holds the allocation for the full walltime (43200 s = 12 h), presumably so
# the GPUs can be used interactively — TODO confirm with the job owner.
sleep 43200
<filename>src/main/js/apps/sample/nls/bundle.js<gh_stars>0
// AMD/Dojo i18n bundle for the sample app's service-layer labels.
// `root` holds the default (English) strings; `de: true` declares that a
// German translation exists (by AMD i18n convention, in a `de/` subfolder).
define({
root: {
service: {
streets: "Streets",
topo: "Topography"
}
},
de: true
});
|
#!/bin/sh
# Publishes the freshly generated book: replaces the docs/ folder with the
# _book/ build output, commits, and force-pushes to origin/master.
#
# Abort on the first failure: without this, a failed `cd` or a missing
# `_book` would still delete docs/ and force-push a broken state.
set -e
cd ~/git/compendium/
rm -rf docs
mv _book docs
git add .
# The commit may legitimately be empty (no content changes); don't fail then.
git commit -m "Update the website" || true
git push -f origin master
|
package org.springaop.chapter.three.autoproxy;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.log4j.Logger;
import org.springaop.chapter.three.util.Constants;

/**
 * Around advice that logs the target object, the invoked method and the
 * value it returned, then passes the return value through unchanged.
 */
public class AnimalAdvice implements MethodInterceptor {

    public Object invoke(MethodInvocation invocation) throws Throwable {
        final Logger logger = Logger.getLogger(Constants.LOG_NAME);
        final StringBuilder message = new StringBuilder();
        message.append("Target Class:").append(invocation.getThis()).append("\n");
        message.append(invocation.getMethod()).append("\n");
        // Run the intercepted method, then record what it returned.
        final Object result = invocation.proceed();
        message.append(" return value:").append(result).append("\n");
        logger.info(message.toString());
        return result;
    }
}
|
=SUMIF(A1:A3, "A", B1:B3) + SUMIF(A1:A3, "B", B1:B3) + SUMIF(A1:A3, "C", B1:B3) |
<filename>framework/src/main/java/com/javatest/framework/commons/domain/vo/ApiResponse.java
package com.javatest.framework.commons.domain.vo;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Generic API response value object carrying a status code and a
 * human-readable message. Lombok generates the getters/setters,
 * equals/hashCode/toString ({@code @Data}) and both the no-arg and
 * all-args constructors.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ApiResponse {
// Status code of the response (semantics defined by the callers; not visible here).
private String code;
// Human-readable message accompanying the code.
private String message;
}
|
#!/usr/bin/env bash
# Builds manylinux wheels for the dedupe package inside a manylinux docker
# image (the /opt/python/*/bin interpreters are provided by that image),
# then repairs them with auditwheel to bundle external shared libraries.
set -e -x
# Compile wheels
for PYBIN in /opt/python/*/bin; do
# Only build for the CPython versions this project targets (2.7, 3.5-3.7).
if [[ "${PYBIN}" == *"cp27"* ]] || [[ "${PYBIN}" == *"cp35"* ]] || [[ "${PYBIN}" == *"cp36"* ]] || [[ "${PYBIN}" == *"cp37"* ]]; then
"${PYBIN}/pip" install -r /io/requirements.txt
"${PYBIN}/cython" /io/src/*.pyx
"${PYBIN}/pip" install -e /io/
"${PYBIN}/pip" wheel /io/ -w wheelhouse/
# Remove build artifacts so the next interpreter starts from a clean tree.
rm -rf /io/build /io/*.egg-info
fi
done
# Bundle external shared libraries into the wheels
for whl in wheelhouse/dedupe*.whl; do
# Skip wheels named dedupe_* — presumably helper distributions that need no
# repair; verify against the project's packaging layout.
if [[ "${whl}" != *"dedupe_"* ]]; then
auditwheel repair "$whl" -w /io/wheelhouse/
fi
done
|
// Position anchor centred below the element. If the element sits too close
// to the bottom edge (within 40px) and `force` is not set, flip to the top.
export const getBottom = (bounds, force) => {
  const {left, top, width, height, clientWidth, clientHeight} = bounds
  if (!force && top + height + 40 >= clientHeight) {
    return getTop(bounds, true) // eslint-disable-line
  }
  const x = left + width / 2
  const y = top + height
  // Nudge the layout when the anchor is near the left or right viewport edge.
  let layoutClass = 'centred'
  if (x < 100) {
    layoutClass = 'left-offset'
  } else if (x + 100 > clientWidth) {
    layoutClass = 'right-offset'
  }
  return {x, y, positionClass: 'bottom', layoutClass}
}
// Position anchor above the element, reusing the bottom computation for the
// x coordinate and layout class. Falls back to the bottom placement when the
// element is within 40px of the top edge (unless `force` is set).
export const getTop = (bounds, force) => {
  const base = getBottom(bounds, true)
  if (!force && bounds.top - 40 <= 0) {
    return base
  }
  return {...base, y: bounds.top, positionClass: 'top'}
}
// Position anchor to the right of the element, vertically centred. If the
// element sits within 200px of the right edge and `force` is not set, flip
// to the left placement.
export const getRight = (bounds, force) => {
  const {left, top, width, height, clientWidth, clientHeight} = bounds
  if (!force && left + width + 200 >= clientWidth) {
    return getLeft(bounds, true) // eslint-disable-line
  }
  const x = left + width
  const y = top + height / 2
  // Nudge the layout when the anchor is near the top or bottom viewport edge.
  let layoutClass = 'mid'
  if (y < 50) {
    layoutClass = 'top-offset'
  } else if (y + 50 > clientHeight) {
    layoutClass = 'bottom-offset'
  }
  return {x, y, positionClass: 'right', layoutClass}
}
// Position anchor to the left of the element, reusing the right computation
// for the y coordinate and layout class. Note `force` is deliberately passed
// through to getRight: when neither side fits, the fallback chain
// (getRight -> getLeft(force=true)) still yields a left placement.
export const getLeft = (bounds, force) => {
  const fallback = getRight(bounds, force)
  if (!force && bounds.left - 200 <= 0) {
    return fallback
  }
  return {...fallback, x: bounds.left, positionClass: 'left'}
}
// Map a position name to its placement function; anything unrecognised
// (including undefined) falls back to the bottom placement, matching the
// original switch's default branch.
export const getPosition = position => {
  const placements = {
    top: getTop,
    right: getRight,
    left: getLeft,
    bottom: getBottom,
  }
  return placements[position] || getBottom
}
// Compute the event target's bounding box in document coordinates plus the
// body's client size. Uses max(pageOffset, scrollTop/Left) so it works
// whether the browser reports scrolling on window or on document.body.
export const getBounds = e => {
  const rect = e.currentTarget.getBoundingClientRect()
  const body = document.body
  const absTop = rect.top + Math.max(window.pageYOffset, body.scrollTop) - body.clientTop
  const absLeft = rect.left + Math.max(window.pageXOffset, body.scrollLeft) - body.clientLeft
  return {
    top: absTop,
    left: absLeft,
    width: rect.width,
    height: rect.height,
    clientWidth: body.clientWidth,
    clientHeight: body.clientHeight,
  }
}
// Classify the `children` prop: strings/undefined are wrapped, a function is
// treated as a render-prop, array-like children (truthy .length) are wrapped,
// and a single element (no/zero length) is cloned.
export function getRenderType(children) {
  const kind = typeof children
  if (kind === 'undefined' || kind === 'string') {
    return 'WRAP'
  }
  if (kind === 'function') {
    return 'RENDER_PROPS'
  }
  return children.length ? 'WRAP' : 'CLONE'
}
// Default export: the two helpers consumers call directly; the individual
// placement functions remain available as named exports.
export default {
getBounds,
getPosition,
}
|
<reponame>naga-project/webfx<gh_stars>100-1000
/*
* Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.javafx.binding;
import javafx.beans.InvalidationListener;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import java.util.Arrays;
/**
* A convenience class for creating implementations of {@link javafx.beans.value.ObservableValue}.
* It contains all of the infrastructure support for value invalidation- and
* change event notification.
*
* This implementation can handle adding and removing listeners while the
* observers are being notified, but it is not thread-safe.
*
*
*/
public abstract class ExpressionHelper<T> extends ExpressionHelperBase {
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Static methods
// Attaches an invalidation listener, growing the helper as needed:
// null -> single-listener helper; single -> array-backed Generic helper.
public static <T> ExpressionHelper<T> addListener(ExpressionHelper<T> helper, ObservableValue<T> observable, InvalidationListener listener) {
if ((observable == null) || (listener == null)) {
throw new NullPointerException();
}
observable.getValue(); // validate observable
return (helper == null)? new SingleInvalidation<T>(observable, listener) : helper.addListener(listener);
}
// Detaches an invalidation listener; may return a smaller helper or null.
public static <T> ExpressionHelper<T> removeListener(ExpressionHelper<T> helper, InvalidationListener listener) {
if (listener == null) {
throw new NullPointerException();
}
return (helper == null)? null : helper.removeListener(listener);
}
// Attaches a change listener; same growth scheme as for invalidation listeners.
public static <T> ExpressionHelper<T> addListener(ExpressionHelper<T> helper, ObservableValue<T> observable, ChangeListener<? super T> listener) {
if ((observable == null) || (listener == null)) {
throw new NullPointerException();
}
return (helper == null)? new SingleChange<T>(observable, listener) : helper.addListener(listener);
}
// Detaches a change listener; may return a smaller helper or null.
public static <T> ExpressionHelper<T> removeListener(ExpressionHelper<T> helper, ChangeListener<? super T> listener) {
if (listener == null) {
throw new NullPointerException();
}
return (helper == null)? null : helper.removeListener(listener);
}
// Notifies all listeners held by the (possibly null) helper.
public static <T> void fireValueChangedEvent(ExpressionHelper<T> helper) {
if (helper != null) {
helper.fireValueChangedEvent();
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Common implementations
protected final ObservableValue<T> observable;
private ExpressionHelper(ObservableValue<T> observable) {
this.observable = observable;
}
protected abstract ExpressionHelper<T> addListener(InvalidationListener listener);
protected abstract ExpressionHelper<T> removeListener(InvalidationListener listener);
protected abstract ExpressionHelper<T> addListener(ChangeListener<? super T> listener);
protected abstract ExpressionHelper<T> removeListener(ChangeListener<? super T> listener);
protected abstract void fireValueChangedEvent();
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Implementations
// Specialization holding exactly one InvalidationListener; adding a second
// listener of either kind upgrades to the Generic implementation.
private static class SingleInvalidation<T> extends ExpressionHelper<T> {
private final InvalidationListener listener;
private SingleInvalidation(ObservableValue<T> expression, InvalidationListener listener) {
super(expression);
this.listener = listener;
}
@Override
protected ExpressionHelper<T> addListener(InvalidationListener listener) {
return new Generic<T>(observable, this.listener, listener);
}
@Override
protected ExpressionHelper<T> removeListener(InvalidationListener listener) {
return (listener.equals(this.listener))? null : this;
}
@Override
protected ExpressionHelper<T> addListener(ChangeListener<? super T> listener) {
return new Generic<T>(observable, this.listener, listener);
}
@Override
protected ExpressionHelper<T> removeListener(ChangeListener<? super T> listener) {
return this;
}
@Override
protected void fireValueChangedEvent() {
// A listener exception must not abort notification; it is reported and swallowed.
try {
listener.invalidated(observable);
} catch (Exception e) {
e.printStackTrace(); //Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), e);
}
}
}
// Specialization holding exactly one ChangeListener. Caches the current
// value so a change event can report oldValue -> newValue.
private static class SingleChange<T> extends ExpressionHelper<T> {
private final ChangeListener<? super T> listener;
private T currentValue;
private SingleChange(ObservableValue<T> observable, ChangeListener<? super T> listener) {
super(observable);
this.listener = listener;
this.currentValue = observable.getValue();
}
@Override
protected ExpressionHelper<T> addListener(InvalidationListener listener) {
return new Generic<T>(observable, listener, this.listener);
}
@Override
protected ExpressionHelper<T> removeListener(InvalidationListener listener) {
return this;
}
@Override
protected ExpressionHelper<T> addListener(ChangeListener<? super T> listener) {
return new Generic<T>(observable, this.listener, listener);
}
@Override
protected ExpressionHelper<T> removeListener(ChangeListener<? super T> listener) {
return (listener.equals(this.listener))? null : this;
}
@Override
protected void fireValueChangedEvent() {
final T oldValue = currentValue;
currentValue = observable.getValue();
// Null-safe equality check; only genuinely different values fire the listener.
final boolean changed = (currentValue == null)? (oldValue != null) : !currentValue.equals(oldValue);
if (changed) {
try {
listener.changed(observable, oldValue, currentValue);
} catch (Exception e) {
e.printStackTrace(); //Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), e);
}
}
}
}
// General implementation holding any number of listeners of both kinds in
// manually-sized arrays. `locked` is set while listeners are being notified;
// mutations during notification copy the arrays first (copy-on-write) so the
// in-flight iteration keeps seeing a stable snapshot. Not thread-safe.
private static class Generic<T> extends ExpressionHelper<T> {
private InvalidationListener[] invalidationListeners;
private ChangeListener<? super T>[] changeListeners;
private int invalidationSize;
private int changeSize;
private boolean locked;
private T currentValue;
private Generic(ObservableValue<T> observable, InvalidationListener listener0, InvalidationListener listener1) {
super(observable);
this.invalidationListeners = new InvalidationListener[] {listener0, listener1};
this.invalidationSize = 2;
}
private Generic(ObservableValue<T> observable, ChangeListener<? super T> listener0, ChangeListener<? super T> listener1) {
super(observable);
this.changeListeners = new ChangeListener[] {listener0, listener1};
this.changeSize = 2;
this.currentValue = observable.getValue();
}
private Generic(ObservableValue<T> observable, InvalidationListener invalidationListener, ChangeListener<? super T> changeListener) {
super(observable);
this.invalidationListeners = new InvalidationListener[] {invalidationListener};
this.invalidationSize = 1;
this.changeListeners = new ChangeListener[] {changeListener};
this.changeSize = 1;
this.currentValue = observable.getValue();
}
@Override
protected Generic<T> addListener(InvalidationListener listener) {
if (invalidationListeners == null) {
invalidationListeners = new InvalidationListener[] {listener};
invalidationSize = 1;
} else {
final int oldCapacity = invalidationListeners.length;
if (locked) {
// Notification in progress: always copy (and grow ~1.5x if full) so the
// iterating snapshot is never mutated.
final int newCapacity = (invalidationSize < oldCapacity)? oldCapacity : (oldCapacity * 3)/2 + 1;
invalidationListeners = Arrays.copyOf(invalidationListeners, newCapacity);
} else if (invalidationSize == oldCapacity) {
// Array full: first let trim() (inherited; drops GC'd weak listeners,
// presumably — see ExpressionHelperBase) reclaim space, grow only if still full.
invalidationSize = trim(invalidationSize, invalidationListeners);
if (invalidationSize == oldCapacity) {
final int newCapacity = (oldCapacity * 3)/2 + 1;
invalidationListeners = Arrays.copyOf(invalidationListeners, newCapacity);
}
}
invalidationListeners[invalidationSize++] = listener;
}
return this;
}
@Override
protected ExpressionHelper<T> removeListener(InvalidationListener listener) {
if (invalidationListeners != null) {
for (int index = 0; index < invalidationSize; index++) {
if (listener.equals(invalidationListeners[index])) {
// Downgrade to a cheaper helper when only one listener remains overall.
if (invalidationSize == 1) {
if (changeSize == 1) {
return new SingleChange<T>(observable, changeListeners[0]);
}
invalidationListeners = null;
invalidationSize = 0;
} else if ((invalidationSize == 2) && (changeSize == 0)) {
return new SingleInvalidation<T>(observable, invalidationListeners[1-index]);
} else {
final int numMoved = invalidationSize - index - 1;
final InvalidationListener[] oldListeners = invalidationListeners;
if (locked) {
// Copy-on-write while notifying: rebuild the array up to `index`,
// the tail is moved below from the old snapshot.
invalidationListeners = new InvalidationListener[invalidationListeners.length];
System.arraycopy(oldListeners, 0, invalidationListeners, 0, index);
}
if (numMoved > 0) {
System.arraycopy(oldListeners, index+1, invalidationListeners, index, numMoved);
}
invalidationSize--;
if (!locked) {
invalidationListeners[invalidationSize] = null; // Let gc do its work
}
}
break;
}
}
}
return this;
}
@Override
protected ExpressionHelper<T> addListener(ChangeListener<? super T> listener) {
if (changeListeners == null) {
changeListeners = new ChangeListener[] {listener};
changeSize = 1;
} else {
final int oldCapacity = changeListeners.length;
if (locked) {
// Same copy-on-write / growth policy as for invalidation listeners.
final int newCapacity = (changeSize < oldCapacity)? oldCapacity : (oldCapacity * 3)/2 + 1;
changeListeners = Arrays.copyOf(changeListeners, newCapacity);
} else if (changeSize == oldCapacity) {
changeSize = trim(changeSize, changeListeners);
if (changeSize == oldCapacity) {
final int newCapacity = (oldCapacity * 3)/2 + 1;
changeListeners = Arrays.copyOf(changeListeners, newCapacity);
}
}
changeListeners[changeSize++] = listener;
}
// First change listener: start tracking the current value for old/new reporting.
if (changeSize == 1) {
currentValue = observable.getValue();
}
return this;
}
@Override
protected ExpressionHelper<T> removeListener(ChangeListener<? super T> listener) {
if (changeListeners != null) {
for (int index = 0; index < changeSize; index++) {
if (listener.equals(changeListeners[index])) {
// Downgrade to a cheaper helper when only one listener remains overall.
if (changeSize == 1) {
if (invalidationSize == 1) {
return new SingleInvalidation<T>(observable, invalidationListeners[0]);
}
changeListeners = null;
changeSize = 0;
} else if ((changeSize == 2) && (invalidationSize == 0)) {
return new SingleChange<T>(observable, changeListeners[1-index]);
} else {
final int numMoved = changeSize - index - 1;
final ChangeListener<? super T>[] oldListeners = changeListeners;
if (locked) {
// Copy-on-write while notifying, mirroring removeListener(InvalidationListener).
changeListeners = new ChangeListener[changeListeners.length];
System.arraycopy(oldListeners, 0, changeListeners, 0, index);
}
if (numMoved > 0) {
System.arraycopy(oldListeners, index+1, changeListeners, index, numMoved);
}
changeSize--;
if (!locked) {
changeListeners[changeSize] = null; // Let gc do its work
}
}
break;
}
}
}
return this;
}
@Override
protected void fireValueChangedEvent() {
// Snapshot the arrays/sizes; combined with `locked` and the copy-on-write
// mutations above, listeners added/removed during notification take effect
// only on the next event.
final InvalidationListener[] curInvalidationList = invalidationListeners;
final int curInvalidationSize = invalidationSize;
final ChangeListener<? super T>[] curChangeList = changeListeners;
final int curChangeSize = changeSize;
try {
locked = true;
for (int i = 0; i < curInvalidationSize; i++) {
try {
curInvalidationList[i].invalidated(observable);
} catch (Exception e) {
e.printStackTrace(); //Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), e);
}
}
if (curChangeSize > 0) {
final T oldValue = currentValue;
currentValue = observable.getValue();
final boolean changed = (currentValue == null)? (oldValue != null) : !currentValue.equals(oldValue);
if (changed) {
for (int i = 0; i < curChangeSize; i++) {
try {
curChangeList[i].changed(observable, oldValue, currentValue);
} catch (Exception e) {
e.printStackTrace(); //Thread.currentThread().getUncaughtExceptionHandler().uncaughtException(Thread.currentThread(), e);
}
}
}
}
} finally {
locked = false;
}
}
}
}
|
#!/bin/bash
# Builds the latest released version
# Determines the newest (pre)release tag of $GITHUB_REPO via the GitHub API,
# skips it if it is already published on Docker Hub, otherwise delegates to
# ./build.sh with the resolved version and any extra arguments.
echo "▶️ $0 $*"
ORIGINAL_GITHUB_REPO="digitalocean/netbox"
GITHUB_REPO="${GITHUB_REPO-$ORIGINAL_GITHUB_REPO}"
URL_RELEASES="https://api.github.com/repos/${GITHUB_REPO}/releases"
# Pick the newest release of the requested kind (PRERELEASE=true/false).
JQ_LATEST="group_by(.prerelease) | .[] | sort_by(.published_at) | reverse | .[0] | select(.prerelease==${PRERELEASE-false}) | .tag_name"
CURL="curl -sS"
VERSION=$($CURL "${URL_RELEASES}" | jq -r "${JQ_LATEST}")
# Check if the prerelease version is actually higher than stable version
if [ "${PRERELEASE}" == "true" ]; then
JQ_STABLE="group_by(.prerelease) | .[] | sort_by(.published_at) | reverse | .[0] | select(.prerelease==false) | .tag_name"
STABLE_VERSION=$($CURL "${URL_RELEASES}" | jq -r "${JQ_STABLE}")
# Extract major/minor from tags of the form v<major>.<minor>...
# shellcheck disable=SC2003
MAJOR_STABLE=$(expr match "${STABLE_VERSION}" 'v\([0-9]\+\)')
# shellcheck disable=SC2003
MINOR_STABLE=$(expr match "${STABLE_VERSION}" 'v[0-9]\+\.\([0-9]\+\)')
# shellcheck disable=SC2003
MAJOR_UNSTABLE=$(expr match "${VERSION}" 'v\([0-9]\+\)')
# shellcheck disable=SC2003
MINOR_UNSTABLE=$(expr match "${VERSION}" 'v[0-9]\+\.\([0-9]\+\)')
if ( [ "$MAJOR_STABLE" -eq "$MAJOR_UNSTABLE" ] && [ "$MINOR_STABLE" -ge "$MINOR_UNSTABLE" ] ) \
|| [ "$MAJOR_STABLE" -gt "$MAJOR_UNSTABLE" ]; then
echo "❎ Latest unstable version ('$VERSION') is not higher than the latest stable version ('$STABLE_VERSION')."
if [ -z "$DEBUG" ]; then
exit 0
else
echo "⚠️ Would exit here with code '0', but DEBUG is enabled."
fi
fi
fi
# Check if that version is not already available on docker hub:
ORIGINAL_DOCKERHUB_REPO="ninech/netbox"
DOCKERHUB_REPO="${DOCKERHUB_REPO-$ORIGINAL_DOCKERHUB_REPO}"
# Anonymous pull-scope token is enough to list tags on the registry API.
URL_DOCKERHUB_TOKEN="https://auth.docker.io/token?service=registry.docker.io&scope=repository:${DOCKERHUB_REPO}:pull"
BEARER_TOKEN="$($CURL "${URL_DOCKERHUB_TOKEN}" | jq -r .token)"
URL_DOCKERHUB_TAG="https://registry.hub.docker.com/v2/${DOCKERHUB_REPO}/tags/list"
AUTHORIZATION_HEADER="Authorization: Bearer ${BEARER_TOKEN}"
if [ -z "$VARIANT" ]; then
DOCKER_TAG="${VERSION}"
else
DOCKER_TAG="${VERSION}-${VARIANT}"
fi
ALREADY_BUILT="$($CURL -H "${AUTHORIZATION_HEADER}" "${URL_DOCKERHUB_TAG}" | jq -e ".tags | any(.==\"${DOCKER_TAG}\")")"
if [ "$ALREADY_BUILT" == "false" ]; then
# $@ is deliberately left unquoted/forwarded to build.sh (see shellcheck disable).
# shellcheck disable=SC2068
./build.sh "${VERSION}" $@
exit $?
else
echo "✅ ${DOCKER_TAG} already exists on https://hub.docker.com/r/${DOCKERHUB_REPO}"
exit 0
fi
|
#!/bin/sh
# Attach to an existing screen session, or create one if none exists (-xR).
# Bug fix: the original line was the single quoted word "screen -xR", which
# makes the shell search for a program literally named "screen -xR" (including
# the space) and fail; the command and its flag must be separate words.
screen -xR
|
# Installs the extra .NET shared frameworks needed by the test matrix.
# Relies on $restore, InitializeDotNetCli and InstallDotNetSharedFramework
# being defined by the surrounding build infrastructure.
function InitializeCustomSDKToolset {
# Nothing to do unless this invocation is restoring packages.
if [[ "$restore" != true ]]; then
return
fi
# The following frameworks and tools are used only for testing.
# Do not attempt to install them in source build.
if [[ "${DotNetBuildFromSource:-}" == "true" ]]; then
return
fi
InitializeDotNetCli true
InstallDotNetSharedFramework "1.0.5"
InstallDotNetSharedFramework "1.1.2"
InstallDotNetSharedFramework "2.1.0"
InstallDotNetSharedFramework "2.2.7"
}
# Installs additional shared frameworks for testing purposes
# $1: version of the Microsoft.NETCore.App shared framework to install.
# Skips the download when that version is already present under
# $DOTNET_INSTALL_DIR; on installer failure, reports and exits via
# ExitWithExitCode with the installer's exit code.
function InstallDotNetSharedFramework {
  local version=$1
  local dotnet_root=$DOTNET_INSTALL_DIR
  local fx_dir="$dotnet_root/shared/Microsoft.NETCore.App/$version"
  if [[ ! -d "$fx_dir" ]]; then
    GetDotNetInstallScript "$dotnet_root"
    local install_script=$_GetDotNetInstallScript
    # Fix: quote "$version" so an empty or whitespace-containing value cannot
    # be word-split into stray arguments for the install script.
    bash "$install_script" --version "$version" --install-dir "$dotnet_root" --runtime "dotnet"
    local lastexitcode=$?
    if [[ $lastexitcode != 0 ]]; then
      echo "Failed to install Shared Framework $version to '$dotnet_root' (exit code '$lastexitcode')."
      ExitWithExitCode $lastexitcode
    fi
  fi
}
# Entry point: run the toolset initialization when this script is sourced/executed.
InitializeCustomSDKToolset
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
# This command builds and runs a local kubernetes cluster.
# You may need to run this as root to allow kubelet to open docker's socket,
# and to write the test CA in /var/run/kubernetes.
# All settings below are environment-variable overridable with the shown defaults.
DOCKER_OPTS=${DOCKER_OPTS:-""}
DOCKER=(docker ${DOCKER_OPTS})
DOCKERIZE_KUBELET=${DOCKERIZE_KUBELET:-""}
ALLOW_PRIVILEGED=${ALLOW_PRIVILEGED:-""}
ALLOW_SECURITY_CONTEXT=${ALLOW_SECURITY_CONTEXT:-""}
PSP_ADMISSION=${PSP_ADMISSION:-""}
RUNTIME_CONFIG=${RUNTIME_CONFIG:-""}
KUBELET_AUTHORIZATION_WEBHOOK=${KUBELET_AUTHORIZATION_WEBHOOK:-""}
KUBELET_AUTHENTICATION_WEBHOOK=${KUBELET_AUTHENTICATION_WEBHOOK:-""}
POD_MANIFEST_PATH=${POD_MANIFEST_PATH:-"/var/run/kubernetes/static-pods"}
KUBELET_FLAGS=${KUBELET_FLAGS:-""}
# Name of the network plugin, eg: "kubenet"
NET_PLUGIN=${NET_PLUGIN:-""}
# Place the binaries required by NET_PLUGIN in this directory, eg: "/home/kubernetes/bin".
NET_PLUGIN_DIR=${NET_PLUGIN_DIR:-""}
SERVICE_CLUSTER_IP_RANGE=${SERVICE_CLUSTER_IP_RANGE:-10.0.0.0/24}
FIRST_SERVICE_CLUSTER_IP=${FIRST_SERVICE_CLUSTER_IP:-10.0.0.1}
# if enabled, must set CGROUP_ROOT
CGROUPS_PER_QOS=${CGROUPS_PER_QOS:-true}
# name of the cgroup driver, i.e. cgroupfs or systemd
CGROUP_DRIVER=${CGROUP_DRIVER:-""}
# owner of client certs, default to current user if not specified
USER=${USER:-$(whoami)}
# enables testing eviction scenarios locally.
EVICTION_HARD=${EVICTION_HARD:-"memory.available<100Mi"}
EVICTION_SOFT=${EVICTION_SOFT:-""}
EVICTION_PRESSURE_TRANSITION_PERIOD=${EVICTION_PRESSURE_TRANSITION_PERIOD:-"1m"}
# This script uses docker0 (or whatever container bridge docker is currently using)
# and we don't know the IP of the DNS pod to pass in as --cluster-dns.
# To set this up by hand, set this flag and change DNS_SERVER_IP.
# Note also that you need API_HOST (defined above) for correct DNS.
ENABLE_CLUSTER_DNS=${KUBE_ENABLE_CLUSTER_DNS:-true}
DNS_SERVER_IP=${KUBE_DNS_SERVER_IP:-10.0.0.10}
DNS_DOMAIN=${KUBE_DNS_NAME:-"cluster.local"}
KUBECTL=${KUBECTL:-cluster/kubectl.sh}
WAIT_FOR_URL_API_SERVER=${WAIT_FOR_URL_API_SERVER:-10}
ENABLE_DAEMON=${ENABLE_DAEMON:-false}
HOSTNAME_OVERRIDE=${HOSTNAME_OVERRIDE:-"127.0.0.1"}
CLOUD_PROVIDER=${CLOUD_PROVIDER:-""}
CLOUD_CONFIG=${CLOUD_CONFIG:-""}
FEATURE_GATES=${FEATURE_GATES:-"AllAlpha=true"}
STORAGE_BACKEND=${STORAGE_BACKEND:-"etcd3"}
# enable swagger ui
ENABLE_SWAGGER_UI=${ENABLE_SWAGGER_UI:-false}
# enable kubernetes dashboard
ENABLE_CLUSTER_DASHBOARD=${KUBE_ENABLE_CLUSTER_DASHBOARD:-false}
# enable audit log
ENABLE_APISERVER_BASIC_AUDIT=${ENABLE_APISERVER_BASIC_AUDIT:-false}
# RBAC Mode options
ALLOW_ANY_TOKEN=${ALLOW_ANY_TOKEN:-false}
ENABLE_RBAC=${ENABLE_RBAC:-false}
KUBECONFIG_TOKEN=${KUBECONFIG_TOKEN:-""}
AUTH_ARGS=${AUTH_ARGS:-""}
# Install a default storage class (enabled by default)
DEFAULT_STORAGE_CLASS=${KUBE_DEFAULT_STORAGE_CLASS:-true}
# start the cache mutation detector by default so that cache mutators will be found
KUBE_CACHE_MUTATION_DETECTOR="${KUBE_CACHE_MUTATION_DETECTOR:-true}"
export KUBE_CACHE_MUTATION_DETECTOR
ADMISSION_CONTROL_CONFIG_FILE=${ADMISSION_CONTROL_CONFIG_FILE:-""}
# START_MODE can be 'all', 'kubeletonly', or 'nokubelet'
START_MODE=${START_MODE:-"all"}
# sanity check for OpenStack provider
if [ "${CLOUD_PROVIDER}" == "openstack" ]; then
if [ "${CLOUD_CONFIG}" == "" ]; then
echo "Missing CLOUD_CONFIG env for OpenStack provider!"
exit 1
fi
if [ ! -f "${CLOUD_CONFIG}" ]; then
echo "Cloud config ${CLOUD_CONFIG} doesn't exist"
exit 1
fi
fi
# Non-root is tolerated but may lack docker-socket / iptables permissions.
if [ "$(id -u)" != "0" ]; then
echo "WARNING : This script MAY be run as root for docker socket / iptables functionality; if failures occur, retry as root." 2>&1
fi
# Stop right away if the build fails
set -e
source "${KUBE_ROOT}/hack/lib/init.sh"
# Print usage examples for this script.
function usage {
  cat <<'EOF'
This script starts a local kube cluster. 
Example 0: hack/local-up-cluster.sh -h (this 'help' usage description)
Example 1: hack/local-up-cluster.sh -o _output/dockerized/bin/linux/amd64/ (run from docker output)
Example 2: hack/local-up-cluster.sh -O (auto-guess the bin path for your platform)
Example 3: hack/local-up-cluster.sh (build a local copy of the source)
EOF
}
# This function guesses where the existing cached binary build is for the `-O`
# flag
# Prints the directory containing the hyperkube binary, or nothing if
# kube::util::find-binary (from hack/lib) cannot locate it.
function guess_built_binary_path {
local hyperkube_path=$(kube::util::find-binary "hyperkube")
if [[ -z "${hyperkube_path}" ]]; then
return
fi
echo -n "$(dirname "${hyperkube_path}")"
}
### Allow user to supply the source directory.
GO_OUT=${GO_OUT:-}
while getopts "ho:O" OPTION
do
  case $OPTION in
    o)
      echo "skipping build"
      GO_OUT="$OPTARG"
      echo "using source $GO_OUT"
      ;;
    O)
      GO_OUT=$(guess_built_binary_path)
      # Fix: quote the expansion. The original unquoted `[ $GO_OUT == "" ]`
      # collapses to `[ == "" ]` when the guess fails (empty GO_OUT), so the
      # test itself errors instead of reporting the failed guess.
      if [ "$GO_OUT" == "" ]; then
        echo "Could not guess the correct output directory to use."
        exit 1
      fi
      ;;
    h)
      usage
      exit
      ;;
    ?)
      usage
      exit
      ;;
  esac
done
# Build only when no prebuilt output directory was supplied.
if [ "x$GO_OUT" == "x" ]; then
  make -C "${KUBE_ROOT}" WHAT="cmd/kubectl cmd/hyperkube"
else
  echo "skipped the build."
fi
# Verifies the rkt runtime is usable: runs `rkt list` via $RKT_PATH when set,
# otherwise via $PATH, and exits the script if the probe fails.
function test_rkt {
if [[ -n "${RKT_PATH}" ]]; then
${RKT_PATH} list 2> /dev/null 1> /dev/null
if [ "$?" != "0" ]; then
echo "Failed to successfully run 'rkt list', please verify that ${RKT_PATH} is the path of rkt binary."
exit 1
fi
else
rkt list 2> /dev/null 1> /dev/null
if [ "$?" != "0" ]; then
echo "Failed to successfully run 'rkt list', please verify that rkt is in \$PATH."
exit 1
fi
fi
}
# Shut down anyway if there's an error.
set +e
# API server / kubelet endpoint settings (env-overridable defaults).
API_PORT=${API_PORT:-8080}
API_SECURE_PORT=${API_SECURE_PORT:-6443}
# WARNING: For DNS to work on most setups you should export API_HOST as the docker0 ip address,
API_HOST=${API_HOST:-localhost}
API_HOST_IP=${API_HOST_IP:-"127.0.0.1"}
API_BIND_ADDR=${API_BIND_ADDR:-"0.0.0.0"}
KUBELET_HOST=${KUBELET_HOST:-"127.0.0.1"}
# By default only allow CORS for requests on localhost
API_CORS_ALLOWED_ORIGINS=${API_CORS_ALLOWED_ORIGINS:-/127.0.0.1(:[0-9]+)?$,/localhost(:[0-9]+)?$}
KUBELET_PORT=${KUBELET_PORT:-10250}
LOG_LEVEL=${LOG_LEVEL:-3}
LOG_DIR=${LOG_DIR:-"/tmp"}
# Container runtime selection (docker or rkt) and CRI endpoints.
CONTAINER_RUNTIME=${CONTAINER_RUNTIME:-"docker"}
CONTAINER_RUNTIME_ENDPOINT=${CONTAINER_RUNTIME_ENDPOINT:-""}
IMAGE_SERVICE_ENDPOINT=${IMAGE_SERVICE_ENDPOINT:-""}
ENABLE_CRI=${ENABLE_CRI:-"true"}
RKT_PATH=${RKT_PATH:-""}
RKT_STAGE1_IMAGE=${RKT_STAGE1_IMAGE:-""}
CHAOS_CHANCE=${CHAOS_CHANCE:-0.0}
CPU_CFS_QUOTA=${CPU_CFS_QUOTA:-true}
ENABLE_HOSTPATH_PROVISIONER=${ENABLE_HOSTPATH_PROVISIONER:-"false"}
CLAIM_BINDER_SYNC_PERIOD=${CLAIM_BINDER_SYNC_PERIOD:-"15s"} # current k8s default
ENABLE_CONTROLLER_ATTACH_DETACH=${ENABLE_CONTROLLER_ATTACH_DETACH:-"true"} # current default
KEEP_TERMINATED_POD_VOLUMES=${KEEP_TERMINATED_POD_VOLUMES:-"true"}
# This is the default dir and filename where the apiserver will generate a self-signed cert
# which should be able to be used as the CA to verify itself
CERT_DIR=${CERT_DIR:-"/var/run/kubernetes"}
ROOT_CA_FILE=${CERT_DIR}/server-ca.crt
ROOT_CA_KEY=${CERT_DIR}/server-ca.key
CLUSTER_SIGNING_CERT_FILE=${CLUSTER_SIGNING_CERT_FILE:-"${ROOT_CA_FILE}"}
CLUSTER_SIGNING_KEY_FILE=${CLUSTER_SIGNING_KEY_FILE:-"${ROOT_CA_KEY}"}
# name of the cgroup driver, i.e. cgroupfs or systemd
if [[ ${CONTAINER_RUNTIME} == "docker" ]]; then
# default cgroup driver to match what is reported by docker to simplify local development
if [[ -z ${CGROUP_DRIVER} ]]; then
# match driver with docker runtime reported value (they must match)
CGROUP_DRIVER=$(docker info | grep "Cgroup Driver:" | cut -f3- -d' ')
echo "Kubelet cgroup driver defaulted to use: ${CGROUP_DRIVER}"
fi
fi
# Ensure CERT_DIR is created for auto-generated crt/key and kubeconfig
mkdir -p "${CERT_DIR}" &>/dev/null || sudo mkdir -p "${CERT_DIR}"
# Use sudo -E for control-plane commands only when CERT_DIR is not writable by us.
CONTROLPLANE_SUDO=$(test -w "${CERT_DIR}" || echo "sudo -E")
# Fails fast if something is already listening on the API server's insecure
# or secure port (curl succeeding means the port is occupied).
function test_apiserver_off {
# For the common local scenario, fail fast if server is already running.
# this can happen if you run local-up-cluster.sh twice and kill etcd in between.
if [[ "${API_PORT}" -gt "0" ]]; then
curl --silent -g $API_HOST:$API_PORT
if [ ! $? -eq 0 ]; then
echo "API SERVER insecure port is free, proceeding..."
else
echo "ERROR starting API SERVER, exiting. Some process on $API_HOST is serving already on $API_PORT"
exit 1
fi
fi
curl --silent -k -g $API_HOST:$API_SECURE_PORT
if [ ! $? -eq 0 ]; then
echo "API SERVER secure port is free, proceeding..."
else
echo "ERROR starting API SERVER, exiting. Some process on $API_HOST is serving already on $API_SECURE_PORT"
exit 1
fi
}
# Maps `uname` output to the os/arch directory layout used by the build and
# sets GO_OUT to the matching _output/local/bin directory; exits on
# unsupported platforms.
function detect_binary {
# Detect the OS name/arch so that we can find our binary
case "$(uname -s)" in
Darwin)
host_os=darwin
;;
Linux)
host_os=linux
;;
*)
echo "Unsupported host OS. Must be Linux or Mac OS X." >&2
exit 1
;;
esac
case "$(uname -m)" in
x86_64*)
host_arch=amd64
;;
i?86_64*)
host_arch=amd64
;;
amd64*)
host_arch=amd64
;;
aarch64*)
host_arch=arm64
;;
arm64*)
host_arch=arm64
;;
arm*)
host_arch=arm
;;
i?86*)
host_arch=x86
;;
s390x*)
host_arch=s390x
;;
ppc64le*)
host_arch=ppc64le
;;
*)
echo "Unsupported host arch. Must be x86_64, 386, arm, arm64, s390x or ppc64le." >&2
exit 1
;;
esac
GO_OUT="${KUBE_ROOT}/_output/local/bin/${host_os}/${host_arch}"
}
# Kills the dockerized kubelet container recorded in $KUBELET_CIDFILE and
# removes the cid file.
cleanup_dockerized_kubelet()
{
if [[ -e $KUBELET_CIDFILE ]]; then
docker kill $(<$KUBELET_CIDFILE) > /dev/null
rm -f $KUBELET_CIDFILE
fi
}
# Tears down everything this script started: apiserver, controller-manager,
# kubelet (native or dockerized), proxy, scheduler, and etcd. For each
# component it kills the recorded PID and any of its children (pgrep -P),
# guarded so unset PID variables are skipped. Always exits 0.
cleanup()
{
echo "Cleaning up..."
# delete running images
# if [[ "${ENABLE_CLUSTER_DNS}" == true ]]; then
# Still need to figure why this commands throw an error: Error from server: client: etcd cluster is unavailable or misconfigured
# ${KUBECTL} --namespace=kube-system delete service kube-dns
# And this one hang forever:
# ${KUBECTL} --namespace=kube-system delete rc kube-dns-v10
# fi
# Check if the API server is still running
[[ -n "${APISERVER_PID-}" ]] && APISERVER_PIDS=$(pgrep -P ${APISERVER_PID} ; ps -o pid= -p ${APISERVER_PID})
[[ -n "${APISERVER_PIDS-}" ]] && sudo kill ${APISERVER_PIDS}
# Check if the controller-manager is still running
[[ -n "${CTLRMGR_PID-}" ]] && CTLRMGR_PIDS=$(pgrep -P ${CTLRMGR_PID} ; ps -o pid= -p ${CTLRMGR_PID})
[[ -n "${CTLRMGR_PIDS-}" ]] && sudo kill ${CTLRMGR_PIDS}
if [[ -n "$DOCKERIZE_KUBELET" ]]; then
cleanup_dockerized_kubelet
else
# Check if the kubelet is still running
[[ -n "${KUBELET_PID-}" ]] && KUBELET_PIDS=$(pgrep -P ${KUBELET_PID} ; ps -o pid= -p ${KUBELET_PID})
[[ -n "${KUBELET_PIDS-}" ]] && sudo kill ${KUBELET_PIDS}
fi
# Check if the proxy is still running
[[ -n "${PROXY_PID-}" ]] && PROXY_PIDS=$(pgrep -P ${PROXY_PID} ; ps -o pid= -p ${PROXY_PID})
[[ -n "${PROXY_PIDS-}" ]] && sudo kill ${PROXY_PIDS}
# Check if the scheduler is still running
[[ -n "${SCHEDULER_PID-}" ]] && SCHEDULER_PIDS=$(pgrep -P ${SCHEDULER_PID} ; ps -o pid= -p ${SCHEDULER_PID})
[[ -n "${SCHEDULER_PIDS-}" ]] && sudo kill ${SCHEDULER_PIDS}
# Check if the etcd is still running
[[ -n "${ETCD_PID-}" ]] && kube::etcd::stop
[[ -n "${ETCD_DIR-}" ]] && kube::etcd::clean_etcd_dir
exit 0
}
# Prints $1 as a bold red "WARNING:" message, resetting terminal attributes after.
function warning {
message=$1
echo $(tput bold)$(tput setaf 1)
echo "WARNING: ${message}"
echo $(tput sgr0)
}
# Starts the local etcd instance via the helper sourced from hack/lib/init.sh.
function start_etcd {
echo "Starting etcd"
kube::etcd::start
}
# Configures ServiceAccount settings and generates the signing key (2048-bit
# RSA at /tmp/kube-serviceaccount.key by default) if it does not exist yet.
function set_service_accounts {
SERVICE_ACCOUNT_LOOKUP=${SERVICE_ACCOUNT_LOOKUP:-true}
SERVICE_ACCOUNT_KEY=${SERVICE_ACCOUNT_KEY:-/tmp/kube-serviceaccount.key}
# Generate ServiceAccount key if needed
if [[ ! -f "${SERVICE_ACCOUNT_KEY}" ]]; then
mkdir -p "$(dirname ${SERVICE_ACCOUNT_KEY})"
openssl genrsa -out "${SERVICE_ACCOUNT_KEY}" 2048 2>/dev/null
fi
}
# Generate the cluster's CAs, serving/client certificates and component
# kubeconfigs, then launch kube-apiserver in the background.
# Sets APISERVER_PID and blocks until /healthz answers (or exits 1).
function start_apiserver {
  # Build the optional security-related suffix of the admission chain.
  security_admission=""
  if [[ -z "${ALLOW_SECURITY_CONTEXT}" ]]; then
    security_admission=",SecurityContextDeny"
  fi
  if [[ -n "${PSP_ADMISSION}" ]]; then
    # NOTE(review): this overwrites (does not append to) the
    # SecurityContextDeny value chosen above — confirm intended.
    security_admission=",PodSecurityPolicy"
  fi
  # Admission Controllers to invoke prior to persisting objects in cluster
  ADMISSION_CONTROL=NamespaceLifecycle,LimitRanger,ServiceAccount${security_admission},ResourceQuota,DefaultStorageClass,DefaultTolerationSeconds
  # This is the default dir and filename where the apiserver will generate a self-signed cert
  # which should be able to be used as the CA to verify itself
  audit_arg=""
  APISERVER_BASIC_AUDIT_LOG=""
  if [[ "${ENABLE_APISERVER_BASIC_AUDIT:-}" = true ]]; then
    # We currently only support enabling with a fixed path and with built-in log
    # rotation "disabled" (large value) so it behaves like kube-apiserver.log.
    # External log rotation should be set up the same as for kube-apiserver.log.
    APISERVER_BASIC_AUDIT_LOG=/tmp/kube-apiserver-audit.log
    audit_arg=" --audit-log-path=${APISERVER_BASIC_AUDIT_LOG}"
    audit_arg+=" --audit-log-maxage=0"
    audit_arg+=" --audit-log-maxbackup=0"
    # Lumberjack doesn't offer any way to disable size-based rotation. It also
    # has an in-memory counter that doesn't notice if you truncate the file.
    # 2000000000 (in MiB) is a large number that fits in 31 bits. If the log
    # grows at 10MiB/s (~30K QPS), it will rotate after ~6 years if apiserver
    # never restarts. Please manually restart apiserver before this time.
    audit_arg+=" --audit-log-maxsize=2000000000"
  fi
  swagger_arg=""
  if [[ "${ENABLE_SWAGGER_UI}" = true ]]; then
    swagger_arg="--enable-swagger-ui=true "
  fi
  anytoken_arg=""
  if [[ "${ALLOW_ANY_TOKEN}" = true ]]; then
    # Dev-only: accept any bearer token of the form "user/group,...".
    anytoken_arg="--insecure-allow-any-token "
    KUBECONFIG_TOKEN="${KUBECONFIG_TOKEN:-system:admin/system:masters}"
  fi
  authorizer_arg=""
  if [[ "${ENABLE_RBAC}" = true ]]; then
    authorizer_arg="--authorization-mode=RBAC "
  fi
  priv_arg=""
  if [[ -n "${ALLOW_PRIVILEGED}" ]]; then
    priv_arg="--allow-privileged "
  fi
  runtime_config=""
  if [[ -n "${RUNTIME_CONFIG}" ]]; then
    runtime_config="--runtime-config=${RUNTIME_CONFIG}"
  fi
  # Let the API server pick a default address when API_HOST_IP
  # is set to 127.0.0.1
  advertise_address=""
  if [[ "${API_HOST_IP}" != "127.0.0.1" ]]; then
    # NOTE(review): underscore spelling relies on the component's flag-name
    # normalization; the canonical flag is --advertise-address — confirm.
    advertise_address="--advertise_address=${API_HOST_IP}"
  fi
  # Create CA signers
  if [[ "${ENABLE_SINGLE_CA_SIGNER:-}" = true ]]; then
    # One CA signs both serving and client certs; copy it so the client-ca
    # file names exist as well.
    kube::util::create_signing_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" server '"client auth","server auth"'
    sudo cp "${CERT_DIR}/server-ca.key" "${CERT_DIR}/client-ca.key"
    sudo cp "${CERT_DIR}/server-ca.crt" "${CERT_DIR}/client-ca.crt"
    sudo cp "${CERT_DIR}/server-ca-config.json" "${CERT_DIR}/client-ca-config.json"
  else
    kube::util::create_signing_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" server '"server auth"'
    kube::util::create_signing_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" client '"client auth"'
  fi
  # Create auth proxy client ca
  kube::util::create_signing_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" request-header '"client auth"'
  # serving cert for kube-apiserver
  kube::util::create_serving_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "server-ca" kube-apiserver kubernetes.default kubernetes.default.svc "localhost" ${API_HOST_IP} ${API_HOST} ${FIRST_SERVICE_CLUSTER_IP}
  # Create client certs signed with client-ca, given id, given CN and a number of groups
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" 'client-ca' kubelet system:node:${HOSTNAME_OVERRIDE} system:nodes
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" 'client-ca' kube-proxy system:kube-proxy system:nodes
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" 'client-ca' controller system:kube-controller-manager
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" 'client-ca' scheduler system:kube-scheduler
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" 'client-ca' admin system:admin system:masters
  # Create matching certificates for kube-aggregator
  kube::util::create_serving_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "server-ca" kube-aggregator api.kube-public.svc "localhost" ${API_HOST_IP}
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" request-header-ca auth-proxy system:auth-proxy
  # TODO remove masters and add rolebinding
  kube::util::create_client_certkey "${CONTROLPLANE_SUDO}" "${CERT_DIR}" 'client-ca' kube-aggregator system:kube-aggregator system:masters
  kube::util::write_client_kubeconfig "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "${ROOT_CA_FILE}" "${API_HOST}" "${API_SECURE_PORT}" kube-aggregator
  APISERVER_LOG=${LOG_DIR}/kube-apiserver.log
  # Launch the apiserver in the background; stdout and stderr go to APISERVER_LOG.
  ${CONTROLPLANE_SUDO} "${GO_OUT}/hyperkube" apiserver ${swagger_arg} ${audit_arg} ${anytoken_arg} ${authorizer_arg} ${priv_arg} ${runtime_config}\
    ${advertise_address} \
    --v=${LOG_LEVEL} \
    --cert-dir="${CERT_DIR}" \
    --client-ca-file="${CERT_DIR}/client-ca.crt" \
    --service-account-key-file="${SERVICE_ACCOUNT_KEY}" \
    --service-account-lookup="${SERVICE_ACCOUNT_LOOKUP}" \
    --admission-control="${ADMISSION_CONTROL}" \
    --admission-control-config-file="${ADMISSION_CONTROL_CONFIG_FILE}" \
    --bind-address="${API_BIND_ADDR}" \
    --secure-port="${API_SECURE_PORT}" \
    --tls-cert-file="${CERT_DIR}/serving-kube-apiserver.crt" \
    --tls-private-key-file="${CERT_DIR}/serving-kube-apiserver.key" \
    --tls-ca-file="${CERT_DIR}/server-ca.crt" \
    --insecure-bind-address="${API_HOST_IP}" \
    --insecure-port="${API_PORT}" \
    --storage-backend=${STORAGE_BACKEND} \
    --etcd-servers="http://${ETCD_HOST}:${ETCD_PORT}" \
    --service-cluster-ip-range="${SERVICE_CLUSTER_IP_RANGE}" \
    --feature-gates="${FEATURE_GATES}" \
    --cloud-provider="${CLOUD_PROVIDER}" \
    --cloud-config="${CLOUD_CONFIG}" \
    --requestheader-username-headers=X-Remote-User \
    --requestheader-group-headers=X-Remote-Group \
    --requestheader-extra-headers-prefix=X-Remote-Extra- \
    --requestheader-client-ca-file="${CERT_DIR}/request-header-ca.crt" \
    --requestheader-allowed-names=system:auth-proxy \
    --proxy-client-cert-file="${CERT_DIR}/client-auth-proxy.crt" \
    --proxy-client-key-file="${CERT_DIR}/client-auth-proxy.key" \
    --cors-allowed-origins="${API_CORS_ALLOWED_ORIGINS}" >"${APISERVER_LOG}" 2>&1 &
  APISERVER_PID=$!
  # Wait for kube-apiserver to come up before launching the rest of the components.
  echo "Waiting for apiserver to come up"
  # this uses the API port because if you don't have any authenticator, you can't seem to use the secure port at all.
  # this matches what happened with the combination in 1.4.
  # TODO change this conditionally based on whether API_PORT is on or off
  # NOTE(review): despite the comment above, the URL actually uses
  # API_SECURE_PORT over plain http — confirm which port is intended.
  kube::util::wait_for_url "http://${API_HOST_IP}:${API_SECURE_PORT}/healthz" "apiserver: " 1 ${WAIT_FOR_URL_API_SERVER} \
    || { echo "check apiserver logs: ${APISERVER_LOG}" ; exit 1 ; }
  # Create kubeconfigs for all components, using client certs
  kube::util::write_client_kubeconfig "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "${ROOT_CA_FILE}" "${API_HOST}" "${API_SECURE_PORT}" admin
  ${CONTROLPLANE_SUDO} chown "${USER}" "${CERT_DIR}/client-admin.key" # make readable for kubectl
  kube::util::write_client_kubeconfig "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "${ROOT_CA_FILE}" "${API_HOST}" "${API_SECURE_PORT}" kubelet
  kube::util::write_client_kubeconfig "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "${ROOT_CA_FILE}" "${API_HOST}" "${API_SECURE_PORT}" kube-proxy
  kube::util::write_client_kubeconfig "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "${ROOT_CA_FILE}" "${API_HOST}" "${API_SECURE_PORT}" controller
  kube::util::write_client_kubeconfig "${CONTROLPLANE_SUDO}" "${CERT_DIR}" "${ROOT_CA_FILE}" "${API_HOST}" "${API_SECURE_PORT}" scheduler
  # Pick the auth mechanism kubectl should use unless the caller chose one.
  if [[ -z "${AUTH_ARGS}" ]]; then
    if [[ "${ALLOW_ANY_TOKEN}" = true ]]; then
      # use token authentication
      if [[ -n "${KUBECONFIG_TOKEN}" ]]; then
        AUTH_ARGS="--token=${KUBECONFIG_TOKEN}"
      else
        AUTH_ARGS="--token=system:admin/system:masters"
      fi
    else
      # default to the admin client cert/key
      AUTH_ARGS="--client-key=${CERT_DIR}/client-admin.key --client-certificate=${CERT_DIR}/client-admin.crt"
    fi
  fi
  # Publish an admin kubeconfig that points at the aggregated API server
  # endpoint (hard-coded port 31090).
  ${CONTROLPLANE_SUDO} cp "${CERT_DIR}/admin.kubeconfig" "${CERT_DIR}/admin-kube-aggregator.kubeconfig"
  ${CONTROLPLANE_SUDO} chown $(whoami) "${CERT_DIR}/admin-kube-aggregator.kubeconfig"
  ${KUBECTL} config set-cluster local-up-cluster --kubeconfig="${CERT_DIR}/admin-kube-aggregator.kubeconfig" --server="https://${API_HOST_IP}:31090"
  echo "use 'kubectl --kubeconfig=${CERT_DIR}/admin-kube-aggregator.kubeconfig' to use the aggregated API server"
}
# Launch kube-controller-manager in the background against the secure
# apiserver endpoint. Sets CTLRMGR_PID; output goes to CTLRMGR_LOG.
function start_controller_manager {
  node_cidr_args=""
  if [[ "${NET_PLUGIN}" == "kubenet" ]]; then
    # kubenet requires the controller-manager to allocate per-node pod CIDRs.
    node_cidr_args="--allocate-node-cidrs=true --cluster-cidr=10.1.0.0/16 "
  fi
  CTLRMGR_LOG=${LOG_DIR}/kube-controller-manager.log
  ${CONTROLPLANE_SUDO} "${GO_OUT}/hyperkube" controller-manager \
    --v=${LOG_LEVEL} \
    --service-account-private-key-file="${SERVICE_ACCOUNT_KEY}" \
    --root-ca-file="${ROOT_CA_FILE}" \
    --cluster-signing-cert-file="${CLUSTER_SIGNING_CERT_FILE}" \
    --cluster-signing-key-file="${CLUSTER_SIGNING_KEY_FILE}" \
    --enable-hostpath-provisioner="${ENABLE_HOSTPATH_PROVISIONER}" \
    ${node_cidr_args} \
    --pvclaimbinder-sync-period="${CLAIM_BINDER_SYNC_PERIOD}" \
    --feature-gates="${FEATURE_GATES}" \
    --cloud-provider="${CLOUD_PROVIDER}" \
    --cloud-config="${CLOUD_CONFIG}" \
    --kubeconfig "$CERT_DIR"/controller.kubeconfig \
    --use-service-account-credentials \
    --master="https://${API_HOST}:${API_SECURE_PORT}" >"${CTLRMGR_LOG}" 2>&1 &
  CTLRMGR_PID=$!
}
# Launch the kubelet, either directly on the host or inside a Docker
# container (when DOCKERIZE_KUBELET is set). In the host case, sets
# KUBELET_PID and verifies the process survived startup.
#
# Fix: the single-line dockerized invocation contained a stray "\ "
# (escaped space) between --cloud-config and --address, which made the shell
# pass kubelet a single argument with a leading space (" --address=...");
# the stray escape is removed so --address is a proper flag again.
function start_kubelet {
  KUBELET_LOG=${LOG_DIR}/kubelet.log
  mkdir -p ${POD_MANIFEST_PATH} || true
  priv_arg=""
  if [[ -n "${ALLOW_PRIVILEGED}" ]]; then
    priv_arg="--allow-privileged "
  fi
  mkdir -p /var/lib/kubelet
  if [[ -z "${DOCKERIZE_KUBELET}" ]]; then
    # Enable dns
    if [[ "${ENABLE_CLUSTER_DNS}" = true ]]; then
      dns_args="--cluster-dns=${DNS_SERVER_IP} --cluster-domain=${DNS_DOMAIN}"
    else
      # To start a private DNS server set ENABLE_CLUSTER_DNS and
      # DNS_SERVER_IP/DOMAIN. This will at least provide a working
      # DNS server for real world hostnames.
      dns_args="--cluster-dns=8.8.8.8"
    fi
    net_plugin_args=""
    if [[ -n "${NET_PLUGIN}" ]]; then
      net_plugin_args="--network-plugin=${NET_PLUGIN}"
    fi
    # Optional kubelet authn/authz wiring.
    auth_args=""
    if [[ -n "${KUBELET_AUTHORIZATION_WEBHOOK:-}" ]]; then
      auth_args="${auth_args} --authorization-mode=Webhook"
    fi
    if [[ -n "${KUBELET_AUTHENTICATION_WEBHOOK:-}" ]]; then
      auth_args="${auth_args} --authentication-token-webhook"
    fi
    if [[ -n "${CLIENT_CA_FILE:-}" ]]; then
      auth_args="${auth_args} --client-ca-file=${CLIENT_CA_FILE}"
    fi
    net_plugin_dir_args=""
    if [[ -n "${NET_PLUGIN_DIR}" ]]; then
      net_plugin_dir_args="--network-plugin-dir=${NET_PLUGIN_DIR}"
    fi
    container_runtime_endpoint_args=""
    if [[ -n "${CONTAINER_RUNTIME_ENDPOINT}" ]]; then
      container_runtime_endpoint_args="--container-runtime-endpoint=${CONTAINER_RUNTIME_ENDPOINT}"
    fi
    image_service_endpoint_args=""
    if [[ -n "${IMAGE_SERVICE_ENDPOINT}" ]]; then
      image_service_endpoint_args="--image-service-endpoint=${IMAGE_SERVICE_ENDPOINT}"
    fi
    # -E preserves the caller's environment under sudo.
    sudo -E "${GO_OUT}/hyperkube" kubelet ${priv_arg}\
      --v=${LOG_LEVEL} \
      --chaos-chance="${CHAOS_CHANCE}" \
      --container-runtime="${CONTAINER_RUNTIME}" \
      --rkt-path="${RKT_PATH}" \
      --rkt-stage1-image="${RKT_STAGE1_IMAGE}" \
      --hostname-override="${HOSTNAME_OVERRIDE}" \
      --cloud-provider="${CLOUD_PROVIDER}" \
      --cloud-config="${CLOUD_CONFIG}" \
      --address="${KUBELET_HOST}" \
      --require-kubeconfig \
      --kubeconfig "$CERT_DIR"/kubelet.kubeconfig \
      --feature-gates="${FEATURE_GATES}" \
      --cpu-cfs-quota=${CPU_CFS_QUOTA} \
      --enable-controller-attach-detach="${ENABLE_CONTROLLER_ATTACH_DETACH}" \
      --cgroups-per-qos=${CGROUPS_PER_QOS} \
      --cgroup-driver=${CGROUP_DRIVER} \
      --keep-terminated-pod-volumes=${KEEP_TERMINATED_POD_VOLUMES} \
      --eviction-hard=${EVICTION_HARD} \
      --eviction-soft=${EVICTION_SOFT} \
      --eviction-pressure-transition-period=${EVICTION_PRESSURE_TRANSITION_PERIOD} \
      --pod-manifest-path="${POD_MANIFEST_PATH}" \
      ${auth_args} \
      ${dns_args} \
      ${net_plugin_dir_args} \
      ${net_plugin_args} \
      ${container_runtime_endpoint_args} \
      ${image_service_endpoint_args} \
      --port="$KUBELET_PORT" \
      ${KUBELET_FLAGS} >"${KUBELET_LOG}" 2>&1 &
    KUBELET_PID=$!
    # Quick check that kubelet is running.
    if ps -p $KUBELET_PID > /dev/null ; then
      echo "kubelet ( $KUBELET_PID ) is running."
    else
      cat ${KUBELET_LOG} ; exit 1
    fi
  else
    # Docker won't run a container with a cidfile (container id file)
    # unless that file does not already exist; clean up an existing
    # dockerized kubelet that might be running.
    cleanup_dockerized_kubelet
    cred_bind=""
    # path to cloud credentials.
    cloud_cred=""
    if [ "${CLOUD_PROVIDER}" == "aws" ]; then
      cloud_cred="${HOME}/.aws/credentials"
    fi
    if [ "${CLOUD_PROVIDER}" == "gce" ]; then
      cloud_cred="${HOME}/.config/gcloud"
    fi
    if [ "${CLOUD_PROVIDER}" == "openstack" ]; then
      cloud_cred="${CLOUD_CONFIG}"
    fi
    if [[ -n "${cloud_cred}" ]]; then
      cred_bind="--volume=${cloud_cred}:${cloud_cred}:ro"
    fi
    docker run \
      --volume=/:/rootfs:ro \
      --volume=/var/run:/var/run:rw \
      --volume=/sys:/sys:ro \
      --volume=/var/lib/docker/:/var/lib/docker:ro \
      --volume=/var/lib/kubelet/:/var/lib/kubelet:rw \
      --volume=/dev:/dev \
      --volume=/run/xtables.lock:/run/xtables.lock:rw \
      ${cred_bind} \
      --net=host \
      --privileged=true \
      -i \
      --cidfile=$KUBELET_CIDFILE \
      gcr.io/google_containers/kubelet \
      /kubelet --v=${LOG_LEVEL} --containerized ${priv_arg}--chaos-chance="${CHAOS_CHANCE}" --pod-manifest-path="${POD_MANIFEST_PATH}" --hostname-override="${HOSTNAME_OVERRIDE}" --cloud-provider="${CLOUD_PROVIDER}" --cloud-config="${CLOUD_CONFIG}" --address="127.0.0.1" --require-kubeconfig --kubeconfig "$CERT_DIR"/kubelet.kubeconfig --api-servers="https://${API_HOST}:${API_SECURE_PORT}" --port="$KUBELET_PORT" --enable-controller-attach-detach="${ENABLE_CONTROLLER_ATTACH_DETACH}" &> $KUBELET_LOG &
  fi
}
# Start kube-proxy and (despite the function's name) kube-scheduler, both in
# the background. Sets PROXY_PID and SCHEDULER_PID; output goes to PROXY_LOG
# and SCHEDULER_LOG respectively.
#
# Fixes: (1) the generated KubeProxyConfiguration now nests `kubeconfig`
# under `clientConnection:` as the component-config schema requires (the
# flat form made it an unrelated top-level key); (2) the stdout/stderr
# redirection is grouped at the end of the proxy command instead of being
# split confusingly across continuation lines (same effect, clearer).
function start_kubeproxy {
  PROXY_LOG=${LOG_DIR}/kube-proxy.log
  # kube-proxy consumes a versioned config file rather than flags.
  cat <<EOF > /tmp/kube-proxy.yaml
apiVersion: componentconfig/v1alpha1
kind: KubeProxyConfiguration
clientConnection:
  kubeconfig: ${CERT_DIR}/kube-proxy.kubeconfig
hostnameOverride: ${HOSTNAME_OVERRIDE}
featureGates: ${FEATURE_GATES}
EOF
  sudo "${GO_OUT}/hyperkube" proxy \
    --config=/tmp/kube-proxy.yaml \
    --v=${LOG_LEVEL} \
    --master="https://${API_HOST}:${API_SECURE_PORT}" >"${PROXY_LOG}" 2>&1 &
  PROXY_PID=$!
  SCHEDULER_LOG=${LOG_DIR}/kube-scheduler.log
  ${CONTROLPLANE_SUDO} "${GO_OUT}/hyperkube" scheduler \
    --v=${LOG_LEVEL} \
    --kubeconfig "$CERT_DIR"/scheduler.kubeconfig \
    --master="https://${API_HOST}:${API_SECURE_PORT}" >"${SCHEDULER_LOG}" 2>&1 &
  SCHEDULER_PID=$!
}
# Deploy the kube-dns addon (service account, configmap, deployment, service)
# into the kube-system namespace when ENABLE_CLUSTER_DNS is true.
function start_kubedns {
  if [[ "${ENABLE_CLUSTER_DNS}" = true ]]; then
    echo "Creating kube-system namespace"
    # Render the addon templates, substituting the DNS domain and server IP.
    # ">|" clobbers any existing rendered file even under `set -C`.
    sed -e "s/{{ pillar\['dns_domain'\] }}/${DNS_DOMAIN}/g" "${KUBE_ROOT}/cluster/addons/dns/kubedns-controller.yaml.in" >| kubedns-deployment.yaml
    sed -e "s/{{ pillar\['dns_server'\] }}/${DNS_SERVER_IP}/g" "${KUBE_ROOT}/cluster/addons/dns/kubedns-svc.yaml.in" >| kubedns-svc.yaml
    # TODO update to dns role once we have one.
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" create clusterrolebinding system:kube-dns --clusterrole=cluster-admin --serviceaccount=kube-system:default
    # use kubectl to create kubedns deployment and service
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" --namespace=kube-system create -f ${KUBE_ROOT}/cluster/addons/dns/kubedns-sa.yaml
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" --namespace=kube-system create -f ${KUBE_ROOT}/cluster/addons/dns/kubedns-cm.yaml
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" --namespace=kube-system create -f kubedns-deployment.yaml
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" --namespace=kube-system create -f kubedns-svc.yaml
    echo "Kube-dns deployment and service successfully deployed."
    # Clean up the rendered temporary manifests.
    rm kubedns-deployment.yaml kubedns-svc.yaml
  fi
}
# Deploy the kubernetes-dashboard addon (controller + service) when
# ENABLE_CLUSTER_DASHBOARD is true.
# Fixes: manifest paths are quoted (KUBE_ROOT may contain spaces) and the
# duplicated kubectl invocation is collapsed into a loop.
function start_kubedashboard {
  if [[ "${ENABLE_CLUSTER_DASHBOARD}" = true ]]; then
    echo "Creating kubernetes-dashboard"
    # use kubectl to create the dashboard
    local manifest
    for manifest in dashboard-controller.yaml dashboard-service.yaml; do
      ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" create -f "${KUBE_ROOT}/cluster/addons/dashboard/${manifest}"
    done
    echo "kubernetes-dashboard deployment and service successfully deployed."
  fi
}
# Install the example PodSecurityPolicy objects plus the RBAC roles and
# bindings that grant access to them.
# Fixes: manifest paths are quoted and the three identical kubectl calls are
# collapsed into a loop.
function create_psp_policy {
  echo "Create podsecuritypolicy policies for RBAC."
  local manifest
  for manifest in policies.yaml roles.yaml bindings.yaml; do
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" create -f "${KUBE_ROOT}/examples/podsecuritypolicy/rbac/${manifest}"
  done
}
# Install the default StorageClass shipped for the configured cloud provider,
# if one exists. No-op when no cloud provider is set.
# Fix: $CLASS_FILE is quoted in the existence test and the kubectl call so
# paths containing whitespace or glob characters work.
function create_storage_class {
  if [ -z "$CLOUD_PROVIDER" ]; then
    # No cloud provider -> no default storage class
    return
  fi
  CLASS_FILE=${KUBE_ROOT}/cluster/addons/storage-class/${CLOUD_PROVIDER}/default.yaml
  if [ -e "${CLASS_FILE}" ]; then
    echo "Create default storage class for $CLOUD_PROVIDER"
    ${KUBECTL} --kubeconfig="${CERT_DIR}/admin.kubeconfig" create -f "${CLASS_FILE}"
  else
    echo "No storage class available for $CLOUD_PROVIDER."
  fi
}
# Print a startup summary: which components are running, where their logs
# live, and how to point kubectl at the new cluster. Output varies with
# START_MODE (all / nokubelet / kubeletonly).
function print_success {
  if [[ "${START_MODE}" != "kubeletonly" ]]; then
    cat <<EOF
Local Kubernetes cluster is running. Press Ctrl-C to shut it down.
Logs:
${APISERVER_LOG:-}
${CTLRMGR_LOG:-}
${PROXY_LOG:-}
${SCHEDULER_LOG:-}
EOF
  fi
  if [[ "${ENABLE_APISERVER_BASIC_AUDIT:-}" = true ]]; then
    echo " ${APISERVER_BASIC_AUDIT_LOG}"
  fi
  if [[ "${START_MODE}" == "all" ]]; then
    echo " ${KUBELET_LOG}"
  elif [[ "${START_MODE}" == "nokubelet" ]]; then
    echo
    echo "No kubelet was started because you set START_MODE=nokubelet"
    echo "Run this script again with START_MODE=kubeletonly to run a kubelet"
  fi
  if [[ "${START_MODE}" != "kubeletonly" ]]; then
    echo
    cat <<EOF
To start using your cluster, you can open up another terminal/tab and run:
export KUBECONFIG=${CERT_DIR}/admin.kubeconfig
cluster/kubectl.sh
Alternatively, you can write to the default kubeconfig:
export KUBERNETES_PROVIDER=local
cluster/kubectl.sh config set-cluster local --server=https://${API_HOST}:${API_SECURE_PORT} --certificate-authority=${ROOT_CA_FILE}
cluster/kubectl.sh config set-credentials myself ${AUTH_ARGS}
cluster/kubectl.sh config set-context local --cluster=local --user=myself
cluster/kubectl.sh config use-context local
cluster/kubectl.sh
EOF
  else
    cat <<EOF
The kubelet was started.
Logs:
${KUBELET_LOG}
EOF
  fi
}
# ---------------------------------------------------------------------------
# Main: preflight checks, then start the requested components.
# ---------------------------------------------------------------------------
# validate that etcd is: not running, in path, and has minimum required version.
if [[ "${START_MODE}" != "kubeletonly" ]]; then
  kube::etcd::validate
fi
# Verify the chosen container runtime is actually reachable.
if [ "${CONTAINER_RUNTIME}" == "docker" ] && ! kube::util::ensure_docker_daemon_connectivity; then
  exit 1
fi
if [[ "${CONTAINER_RUNTIME}" == "rkt" ]]; then
  test_rkt
fi
# Ensure nothing is already bound to the apiserver ports.
if [[ "${START_MODE}" != "kubeletonly" ]]; then
  test_apiserver_off
fi
kube::util::test_openssl_installed
kube::util::ensure-cfssl
### IF the user didn't supply an output/ for the build... Then we detect.
if [ "$GO_OUT" == "" ]; then
  detect_binary
fi
echo "Detected host and ready to start services.  Doing some housekeeping first..."
echo "Using GO_OUT $GO_OUT"
KUBELET_CIDFILE=/tmp/kubelet.cid
if [[ "${ENABLE_DAEMON}" = false ]]; then
  # Tear everything down when the script exits (including Ctrl-C).
  trap cleanup EXIT
fi
echo "Starting services now!"
if [[ "${START_MODE}" != "kubeletonly" ]]; then
  start_etcd
  set_service_accounts
  start_apiserver
  start_controller_manager
  start_kubeproxy
  start_kubedns
  start_kubedashboard
fi
if [[ "${START_MODE}" != "nokubelet" ]]; then
  ## TODO remove this check if/when kubelet is supported on darwin
  # Detect the OS name/arch and display appropriate error.
  case "$(uname -s)" in
    Darwin)
      warning "kubelet is not currently supported in darwin, kubelet aborted."
      KUBELET_LOG=""
      ;;
    Linux)
      start_kubelet
      ;;
    *)
      warning "Unsupported host OS.  Must be Linux or Mac OS X, kubelet aborted."
      ;;
  esac
fi
# PSP policies only make sense when both the admission plugin and RBAC are on.
if [[ -n "${PSP_ADMISSION}" && "${ENABLE_RBAC}" = true ]]; then
  create_psp_policy
fi
if [[ "$DEFAULT_STORAGE_CLASS" = "true" ]]; then
  create_storage_class
fi
print_success
if [[ "${ENABLE_DAEMON}" = false ]]; then
  # Keep the foreground process alive so the EXIT trap fires on Ctrl-C.
  while true; do sleep 1; done
fi
|
from collections import Counter
def replace_most_common_word(sentence):
    """Replace every occurrence of the most common word in *sentence* with '*'.

    Words are whitespace-delimited. Ties are broken by first occurrence
    (Counter preserves insertion order on CPython 3.7+).

    Fix: an empty or whitespace-only sentence previously raised IndexError
    (Counter([]).most_common(1) is empty); it is now returned unchanged.

    :param sentence: input string
    :return: the sentence with the most common word masked by '*'
    """
    words = sentence.split()
    if not words:  # guard: nothing to count, avoid most_common(1)[0] IndexError
        return sentence
    most_common_word = Counter(words).most_common(1)[0][0]
    return ' '.join('*' if w == most_common_word else w for w in words)
# Demo: all words are unique, so the tie-break picks one word (the first
# inserted on CPython 3.7+ — presumably "Random") and masks it with '*'.
sentence = "Random acts of kindness"
output_sentence = replace_most_common_word(sentence)
print(output_sentence)
# randomsay [args...]
# Run cowsay with a randomly chosen cow figure (excluding a couple of
# unwanted ones), forwarding all arguments as the message.
# Fixes: dropped the pointless array wrapper (only element 0 was ever used),
# replaced deprecated backticks with $(...), made the variable local, and
# quoted the expansion passed to -f.
randomsay() {
  local cow
  cow=$(cowsay -l | tail -n +2 | tr " " "\n" | egrep -v 'telebears|sodomized' | sort -R | head -n 1)
  cowsay -f "$cow" "$@"
}
|
import React from 'react';
const ArticleTeaser = ({ article }) => {
return(
<div>
<h3>{ article.title }</h3>
</div>
);
};
export default ArticleTeaser; |
#!/usr/bin/env bash
# Container entrypoint: resolve the MySQL root password (optionally from
# Google Secret Manager), mount a Cloud Filestore share as the MySQL data
# directory, then start mysqld in the background and run the webserver in
# the foreground.
#
# If ROOT_PASSWORD starts with sm://, it is treated as a Secret Manager
# resource name of the form:
#   projects/<PROJECT_ID>/secrets/<SECRET_NAME>/versions/<VERSION>
# and the actual password is fetched with gcloud.
if [[ -v ROOT_PASSWORD ]]; then
  if [[ ${ROOT_PASSWORD} == sm://* ]]; then
    # Split the resource name on "/": fields 4/6/8 are project, secret, version.
    PROJECT_ID=$(echo ${ROOT_PASSWORD} | cut -d "/" -f 4)
    SECRET_NAME=$(echo ${ROOT_PASSWORD} | cut -d "/" -f 6)
    VERSION=$(echo ${ROOT_PASSWORD} | cut -d "/" -f 8)
    MYSQL_ROOT_PASSWORD=$(gcloud secrets versions access ${VERSION} --secret=${SECRET_NAME} --project=${PROJECT_ID})
    # $? here is the exit status of the gcloud command substitution above.
    if [[ $? != 0 ]]; then
      echo "Invalid secret (bad formatted or not exist): ${ROOT_PASSWORD}"
      exit 2
    fi
  else
    # Plain-text password supplied directly in the environment.
    MYSQL_ROOT_PASSWORD=${ROOT_PASSWORD}
  fi
else
  echo "ROOT_PASSWORD environment variable must be set to define the database root password."
  exit 1
fi
mkdir -p /var/lib/mysql
echo "Mounting Cloud Filestore."
# NFS mount of the Filestore share over the MySQL data dir.
# NOTE(review): -o nolock disables NFS file locking — confirm this is safe
# for the mysqld workload on Filestore.
mount --verbose -o nolock ${FILESTORE_IP_ADDRESS}:/${FILE_SHARE_NAME} /var/lib/mysql
echo "Mounting completed."
# Run MySQL service
# Reuse the stock image's entrypoint helpers; _main starts mysqld (backgrounded).
source docker-entrypoint.sh
_main "mysqld" &
# Run the Webserver
/main
package vectorwing.farmersdelight.common.mixin;
import net.minecraft.advancements.CriteriaTriggers;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.stats.Stats;
import net.minecraft.world.entity.LivingEntity;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.item.BowlFoodItem;
import net.minecraft.world.item.Item;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.Items;
import net.minecraft.world.level.Level;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import vectorwing.farmersdelight.common.Configuration;
/**
 * Mixin into vanilla {@link BowlFoodItem} that (when enabled in the mod
 * config) raises soup/stew stack limits to 16 and replicates the mod's
 * ConsumableItem finish-eating behaviour so the empty bowl is handed back
 * correctly even when the item was eaten from a stack.
 */
@Mixin(BowlFoodItem.class)
public abstract class SoupItemMixin extends Item
{
    public SoupItemMixin(Properties properties) {
        super(properties);
    }

    /**
     * Returns 16 for configured soup items, otherwise defers to vanilla.
     * The configured item list acts as a blacklist when
     * OVERRIDE_ALL_SOUP_ITEMS is true and as a whitelist when it is false.
     */
    @Override
    public int getItemStackLimit(ItemStack stack) {
        if (Configuration.ENABLE_STACKABLE_SOUP_ITEMS.get()) {
            ResourceLocation stackable = stack.getItem().getRegistryName();
            String stackableKey = "";
            if (stackable != null) {
                stackableKey = stackable.toString();
            }
            if (Configuration.OVERRIDE_ALL_SOUP_ITEMS.get() && !Configuration.SOUP_ITEM_LIST.get().contains(stackableKey)
                    || !Configuration.OVERRIDE_ALL_SOUP_ITEMS.get() && Configuration.SOUP_ITEM_LIST.get().contains(stackableKey)) {
                return 16;
            }
        }
        return super.getItemStackLimit(stack);
    }

    /**
     * Replication of ConsumableItem but in Mixin form, to allow SoupItems to stack.
     * Injected at the head of {@code finishUsingItem}; when the stackable
     * feature is on it takes over the whole method via the cancellable CIR.
     */
    @Inject(at = @At(value = "HEAD"), method = "finishUsingItem", cancellable = true)
    private void onItemUseFinish(ItemStack stack, Level worldIn, LivingEntity subject, CallbackInfoReturnable<ItemStack> cir) {
        if (Configuration.ENABLE_STACKABLE_SOUP_ITEMS.get()) {
            // Determine what remains after eating; default to an empty bowl.
            ItemStack container = stack.getContainerItem();
            if (container.isEmpty())
                container = new ItemStack(Items.BOWL);
            if (stack.isEdible()) {
                // Edible path: vanilla applies food effects and shrinks the stack.
                super.finishUsingItem(stack, worldIn, subject);
            } else {
                // Non-edible path: fire triggers/stats and shrink manually.
                Player player = subject instanceof Player ? (Player) subject : null;
                if (player instanceof ServerPlayer) {
                    CriteriaTriggers.CONSUME_ITEM.trigger((ServerPlayer) player, stack);
                }
                if (player != null) {
                    player.awardStat(Stats.ITEM_USED.get(this));
                    if (!player.getAbilities().instabuild) {
                        stack.shrink(1);
                    }
                }
            }
            if (stack.isEmpty()) {
                // Last one eaten: the container replaces the stack in hand.
                cir.setReturnValue(container);
            } else {
                // Stack remains: give (or drop) the container separately,
                // except in creative mode.
                if (subject instanceof Player && !((Player) subject).getAbilities().instabuild) {
                    Player player = (Player) subject;
                    if (!player.getInventory().add(container)) {
                        player.drop(container, false);
                    }
                }
                cir.setReturnValue(stack);
            }
        }
    }
}
|
from __future__ import absolute_import
import base64
import pycurl
import json
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA
from Crypto.Signature import PKCS1_v1_5
from io import BytesIO
# HTTP verbs understood by perform_https_request().
REQUEST_TYPE_HEAD = 0
REQUEST_TYPE_GET = 1
REQUEST_TYPE_POST = 2
def perform_https_request(config, config_dir, path, request_type, verify=True, post_data=None, file_descriptor=None):
    """Perform an HTTPS request against the configured server using pycurl.

    :param config: parsed config with 'general', 'server' and 'proxy' sections
    :param config_dir: directory holding the certificate/key files
    :param path: URL path appended verbatim after the server host:port
    :param request_type: REQUEST_TYPE_HEAD, REQUEST_TYPE_GET or REQUEST_TYPE_POST
    :param verify: when True, verify the server TLS cert and present the client cert
    :param post_data: object JSON-encoded as the POST body (POST only)
    :param file_descriptor: optional file-like object; when given the response
        body is streamed to it instead of being buffered and returned
    :return: dict with 'status' (int), 'headers' (lower-cased names) and
        'content' (bytes; empty when file_descriptor was used)
    """
    content = BytesIO()
    headers = {}
    c = pycurl.Curl()

    def parse_headers(header_line):
        # Header callback: collect "Name: value" pairs, lower-casing names.
        # NOTE(review): under Python 3 pycurl hands bytes to this callback,
        # so the ':' membership test would raise — confirm Python 2 usage
        # or decode here.
        if ':' not in header_line:
            return
        name, value = header_line.split(':', 1)
        headers[name.strip().lower()] = value.strip()
    # Request type
    if request_type == REQUEST_TYPE_HEAD:
        # HEAD is issued as a body-less GET.
        c.setopt(pycurl.HTTPGET, 1)
        c.setopt(pycurl.NOBODY, 1)
    elif request_type == REQUEST_TYPE_GET:
        c.setopt(pycurl.HTTPGET, 1)
    elif request_type == REQUEST_TYPE_POST:
        c.setopt(pycurl.POST, 1)
        c.setopt(pycurl.POSTFIELDS, json.dumps(post_data))
    # TLS certificate verification
    if verify:
        c.setopt(pycurl.SSL_VERIFYPEER, 1)
        c.setopt(pycurl.SSL_VERIFYHOST, 2)
    else:
        c.setopt(pycurl.SSL_VERIFYPEER, 0)
        c.setopt(pycurl.SSL_VERIFYHOST, 0)
    # Client certificate
    if verify:
        c.setopt(pycurl.SSLCERT, '{}/{}'.format(config_dir, config.get('general', 'certfile')))
        c.setopt(pycurl.SSLKEY, '{}/{}'.format(config_dir, config.get('general', 'keyfile')))
    # Proxy configuration
    # Currently we only support NTLM through cntlm
    if config.get('proxy', 'mode') == '1':
        c.setopt(pycurl.PROXY, '127.0.0.1')
        c.setopt(pycurl.PROXYPORT, 3128)
    # Target output
    if file_descriptor is not None:
        c.setopt(pycurl.WRITEFUNCTION, file_descriptor.write)
    else:
        c.setopt(pycurl.WRITEFUNCTION, content.write)
    c.setopt(pycurl.URL, 'https://{}:{}/{}'.format(config.get('server', 'name'), config.get('server', 'port_https'), path))
    c.setopt(pycurl.CAINFO, '{}/{}'.format(config_dir, config.get('server', 'certfile')))
    c.setopt(pycurl.HEADERFUNCTION, parse_headers)
    c.perform()
    status_code = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    return {'status': status_code, 'headers': headers, 'content': content.getvalue()}
def sign_data(key, data):
    """Sign the JSON serialization of *data* with the given RSA private key
    (PKCS#1 v1.5 over SHA-1) and return the signature base64-encoded.

    :param key: RSA private key material accepted by RSA.importKey
    :param data: JSON-serializable object to sign
    :return: base64-encoded signature string
    """
    signer = PKCS1_v1_5.new(RSA.importKey(key))
    digest = SHA.new()
    digest.update(json.dumps(data).encode('utf-8'))
    return encode_data(signer.sign(digest))
def encode_data(data):
    """Return *data* (bytes) encoded as a base64 ASCII string."""
    encoded = base64.b64encode(data)
    return encoded.decode('utf-8')
|
// (removed stray "<gh_stars>0" scaffolding artifact left by the code exporter)
/** The three primitive Dubins segment kinds: left turn, straight, right turn. */
export enum SegmentType {
  L_SEG = 0,
  S_SEG = 1,
  R_SEG = 2
}
/* The segment types for each of the Path types */
// Indexed by DubinsPathType (LSL, LSR, RSL, RSR, RLR, LRL); every Dubins
// path is a sequence of exactly three segments.
export const DIRDATA: SegmentType[][] = [
  [SegmentType.L_SEG, SegmentType.S_SEG, SegmentType.L_SEG],
  [SegmentType.L_SEG, SegmentType.S_SEG, SegmentType.R_SEG],
  [SegmentType.R_SEG, SegmentType.S_SEG, SegmentType.L_SEG],
  [SegmentType.R_SEG, SegmentType.S_SEG, SegmentType.R_SEG],
  [SegmentType.R_SEG, SegmentType.L_SEG, SegmentType.R_SEG],
  [SegmentType.L_SEG, SegmentType.R_SEG, SegmentType.L_SEG]
]
/**
 * Scratch values shared by the per-word computations.
 * alpha/beta: start/goal headings relative to the chord joining the
 * endpoints; d: chord length normalized by rho; sa/sb, ca/cb: sin/cos of
 * alpha/beta; c_ab: cos(alpha - beta); d_sq: d squared.
 */
export interface DubinsIntermediateResults {
  alpha: number
  beta: number
  d: number
  sa: number
  sb: number
  ca: number
  cb: number
  c_ab: number
  d_sq: number
}
/** The six Dubins words; numeric values index into DIRDATA. */
export enum DubinsPathType {
  LSL = 0,
  LSR = 1,
  RSL = 2,
  RSR = 3,
  RLR = 4,
  LRL = 5
}
/** A configuration triple: [x, y, heading in radians]. */
export type config = [number, number, number]

export interface DubinsPath {
  /* the initial configuration */
  qi: config
  /* the lengths of the three segments (normalized; scaled by rho for real length) */
  segLength: config
  /* model forward velocity / model angular velocity */
  rho: number
  /* the path type described */
  type: DubinsPathType
}
/** Result codes returned by the Dubins routines. */
export enum ERROR_CODE {
  EDUBOK = 0, /* No error */
  EDUBCOCONFIGS = 1, /* Colocated configurations */
  EDUBPARAM = 2, /* Path parameterisation error */
  EDUBBADRHO = 3, /* the rho value is invalid */
  EDUBNOPATH = 4 /* no connection between configurations with this word */
}
// Pi written out as in the original C implementation (rounds to Math.PI).
const M_PI = 3.14159265358979323846264338327950288
/** Floored modulo: the result carries the sign of `y`, unlike the `%` operator. */
function fmodr(x: number, y: number) {
  const quotient = Math.floor(x / y)
  return x - y * quotient
}
/** Normalize an angle into the range [0, 2*pi). */
function mod2pi(theta: number) {
  const fullTurn = 2 * M_PI
  return fmodr(theta, fullTurn)
}
class Dubins {
// Shared scratch record: written by dubins_intermediate_results and read by
// the per-word computations (e.g. dubins_LSL).
_NS_IN: DubinsIntermediateResults
// _Q: config
readonly _C_DEFAULT_DOUBLE = 0.0 // I'm pretty sure JS just reverts this to `0` right away, but this is the default val of a double in C
// Zero-filled configuration used as the default/initial value everywhere.
readonly _DEFAULT_CONFIG: config = [this._C_DEFAULT_DOUBLE, this._C_DEFAULT_DOUBLE, this._C_DEFAULT_DOUBLE]
// Debug helper: dump a configuration triple plus its arc-length parameter.
// Always reports 0 (the EDUBOK convention of sampling callbacks).
_printConfiguration(q: config, x: number) {
  const [qx, qy, qTheta] = q
  console.log(qx, qy, qTheta, x)
  return 0
}
constructor() {
  // Pre-populate the shared intermediate-results record with the C-style
  // zero default for every field.
  const zero = this._C_DEFAULT_DOUBLE
  this._NS_IN = {
    alpha: zero,
    beta: zero,
    d: zero,
    sa: zero,
    sb: zero,
    ca: zero,
    cb: zero,
    c_ab: zero,
    d_sq: zero
  }
}
/**
 * Convenience wrapper: compute the shortest Dubins path from `start` to
 * `end` with the given turning radius, then sample it every `step_size`
 * units, invoking `callback` with each sampled configuration.
 * NOTE(review): the error code from dubins_shortest_path is discarded, so
 * sampling proceeds on the default path even if no path was found — confirm
 * this is intended.
 */
public shortestAndSample(start: config, end: config, turning_radius: number, step_size: number, callback: (q: [number, number, number], x: number) => ERROR_CODE) {
  const ret = this.dubins_shortest_path(start, end, turning_radius)
  return this.dubins_path_sample_many(ret[1], step_size, callback)
}
/**
 * Compute the shortest Dubins path between two configurations.
 *
 * Tries all six words and keeps the one with the smallest total (normalized)
 * segment length. Returns [errcode, path]; on success errcode is EDUBOK.
 *
 * NOTE(review): this.dubins_word is defined outside this excerpt.
 * NOTE(review): path.type starts as the sentinel -415393080 (not a valid
 * DubinsPathType) until a word wins — callers must check the error code.
 * NOTE(review): path.segLength is assigned the array returned by
 * dubins_word; whether later iterations alias it depends on dubins_word
 * returning fresh arrays — confirm.
 */
public dubins_shortest_path(user_FIRST: config, user_LAST: config, turning_radius: number): [ERROR_CODE, DubinsPath] {
  let errcode: ERROR_CODE
  let segLength: config = this._DEFAULT_CONFIG
  let cost: number
  let best_cost: number = Number.POSITIVE_INFINITY
  let best_word: number = -1
  // Fills this._NS_IN with the alpha/beta/d intermediates.
  errcode = this.dubins_intermediate_results(user_FIRST, user_LAST, turning_radius) // tested, works
  let path = {
    qi: user_FIRST,
    segLength: this._DEFAULT_CONFIG,
    rho: turning_radius,
    type: -415393080
  }
  if (errcode != ERROR_CODE.EDUBOK) {
    return [errcode, path]
  }
  // Evaluate every word (LSL..LRL) and keep the cheapest feasible one.
  for (let i = 0; i < 6; i++) {
    let pathType: DubinsPathType = i
    let ret = this.dubins_word(pathType, segLength)
    errcode = ret[0]
    segLength = ret[1]
    if (errcode == ERROR_CODE.EDUBOK) {
      cost = segLength[0] + segLength[1] + segLength[2]
      // console.log("best_cost, cost, pathType: ", best_cost, cost, pathType)
      if (cost < best_cost) {
        best_word = i
        best_cost = cost
        path.segLength = segLength
        path.type = pathType
      }
    }
  }
  if (best_word == -1) {
    return [ERROR_CODE.EDUBNOPATH, path]
  }
  return [ERROR_CODE.EDUBOK, path]
}
/**
 * Populate this._NS_IN with the intermediates shared by all word
 * computations: the rho-normalized endpoint distance `d`, the headings
 * alpha/beta relative to the chord q0->q1, their sines/cosines,
 * cos(alpha - beta) and d squared.
 * Returns EDUBBADRHO for non-positive rho, otherwise EDUBOK.
 */
public dubins_intermediate_results(q0: config, q1: config, rho: number) {
  // used to take `in` but now access class attr _NS_IN
  if (rho <= 0.0) {
    return ERROR_CODE.EDUBBADRHO
  }
  const dx = q1[0] - q0[0]
  const dy = q1[1] - q0[1]
  const d = Math.sqrt(dx * dx + dy * dy) / rho
  /* test required to prevent domain errors if dx=0 and dy=0 */
  let theta = 0
  if (d > 0) {
    theta = mod2pi(Math.atan2(dy, dx))
  }
  const alpha = mod2pi(q0[2] - theta)
  const beta = mod2pi(q1[2] - theta)
  const out = this._NS_IN
  out.alpha = alpha
  out.beta = beta
  out.d = d
  out.sa = Math.sin(alpha)
  out.sb = Math.sin(beta)
  out.ca = Math.cos(alpha)
  out.cb = Math.cos(beta)
  out.c_ab = Math.cos(alpha - beta)
  out.d_sq = d * d
  return ERROR_CODE.EDUBOK
}
/**
 * Walk the path from 0 to its total length in increments of stepSize,
 * sampling the configuration at each arc-length x and passing (q, x) to
 * `cb`. Stops early and returns cb's code if cb returns anything other
 * than EDUBOK; otherwise returns EDUBOK.
 */
public dubins_path_sample_many(path: DubinsPath, stepSize: number, cb: (q: config, x: number) => ERROR_CODE): ERROR_CODE {
  let retcode: ERROR_CODE
  let ret: [ERROR_CODE, config?]
  let q: config = this._DEFAULT_CONFIG
  let x: number = 0.0
  let length: number = this.dubins_path_length(path)
  while (x < length) {
    ret = this.dubins_path_sample(path, x, q)
    // On failure the previous q is passed through to the callback unchanged.
    if (ret[0] === ERROR_CODE.EDUBOK) {
      q = ret[1] as config
    }
    retcode = cb(q, x)
    if (retcode != ERROR_CODE.EDUBOK) {
      return retcode
    }
    x += stepSize
  }
  return ERROR_CODE.EDUBOK
}
/**
 * Total real-world length of the path: the sum of the three normalized
 * segment lengths scaled by rho. Throws on non-positive rho.
 */
public dubins_path_length(path: DubinsPath) {
  if (path.rho <= 0) throw new Error(`Rho: ${path.rho}`)
  const [s0, s1, s2] = path.segLength
  return (s0 + s1 + s2) * path.rho
}
/**
 * Advance the configuration `qi` by parameter `t` along a single segment of
 * the given type (left arc / right arc / straight), in normalized units.
 * Returns a fresh config; `qt_i` only supplies the array to copy from and
 * is not mutated. Throws if `qt_i` is not a full triple.
 */
public dubins_segment(t: number, qi: config, qt_i: config, type: SegmentType): config {
  if (qt_i.length < 3) throw new Error(`QT ${qt_i}`)
  const qt: config = qt_i.slice() as config
  const st = Math.sin(qi[2])
  const ct = Math.cos(qi[2])
  switch (type) {
    case SegmentType.L_SEG:
      qt[0] = Math.sin(qi[2] + t) - st
      qt[1] = -Math.cos(qi[2] + t) + ct
      qt[2] = t
      break
    case SegmentType.R_SEG:
      qt[0] = -Math.sin(qi[2] - t) + st
      qt[1] = Math.cos(qi[2] - t) - ct
      qt[2] = -t
      break
    case SegmentType.S_SEG:
      qt[0] = ct * t
      qt[1] = st * t
      qt[2] = 0.0
      break
  }
  qt[0] += qi[0]
  qt[1] += qi[1]
  qt[2] += qi[2]
  return qt
}
public dubins_path_sample(path: DubinsPath, t: number, q_i: config): [ERROR_CODE, config] {
    /**
     * Sample the configuration at arc-length t along `path`.
     * Returns [EDUBOK, q] with the world-frame [x, y, theta] on success, or
     * [EDUBPARAM, default] when t is outside [0, length].
     */
    if (q_i.length < 3) throw new Error(`Q ${q_i}`)
    let q: config = q_i.slice() as config
    /* tprime is the normalised variant of the parameter t */
    const tprime = t / path.rho
    if (t < 0 || t > this.dubins_path_length(path)) {
        // Bug fix: return a copy — the original returned the shared
        // _DEFAULT_CONFIG array, exposing mutable internal state.
        return [ERROR_CODE.EDUBPARAM, this._DEFAULT_CONFIG.slice() as config]
    }
    const types: SegmentType[] = DIRDATA[path.type]
    /* initial configuration: origin-relative, keeping the original heading.
       Bug fix: the original aliased qi/q1/q2 to this._DEFAULT_CONFIG and then
       mutated qi in place, corrupting the shared default for later calls.
       Use fresh arrays instead. */
    const qi: config = [0.0, 0.0, path.qi[2]]
    /* generate the target configuration */
    const p1 = path.segLength[0]
    const p2 = path.segLength[1]
    const q1 = this.dubins_segment(p1, qi, qi.slice() as config, types[0]) /* end of segment 1 */
    const q2 = this.dubins_segment(p2, q1, q1.slice() as config, types[1]) /* end of segment 2 */
    if (tprime < p1) {
        q = this.dubins_segment(tprime, qi, q, types[0])
    }
    else if (tprime < (p1 + p2)) {
        q = this.dubins_segment(tprime - p1, q1, q, types[1])
    }
    else {
        q = this.dubins_segment(tprime - p1 - p2, q2, q, types[2])
    }
    /* scale the target configuration, translate back to the original starting point */
    q[0] = q[0] * path.rho + path.qi[0]
    q[1] = q[1] * path.rho + path.qi[1]
    q[2] = mod2pi(q[2])
    return [ERROR_CODE.EDUBOK, q]
}
public dubins_LSL(out_arg: config): [ERROR_CODE, config] {
    // Left-Straight-Left word. On success `out` holds the three normalised
    // segment lengths; EDUBNOPATH when the word is infeasible (p_sq < 0).
    const out = out_arg.slice() as config
    const ns = this._NS_IN
    const tmp0 = ns.d + ns.sa - ns.sb
    const p_sq = 2 + ns.d_sq - (2 * ns.c_ab) + (2 * ns.d * (ns.sa - ns.sb))
    if (p_sq >= 0) {
        const tmp1 = Math.atan2(ns.cb - ns.ca, tmp0)
        out[0] = mod2pi(tmp1 - ns.alpha)
        out[1] = Math.sqrt(p_sq)
        out[2] = mod2pi(ns.beta - tmp1)
        return [ERROR_CODE.EDUBOK, out]
    }
    return [ERROR_CODE.EDUBNOPATH, out]
}
public dubins_RSR(out_arg: config): [ERROR_CODE, config] {
    // Right-Straight-Right word. On success `out` holds the three normalised
    // segment lengths; EDUBNOPATH when the word is infeasible (p_sq < 0).
    const out = out_arg.slice() as config
    const ns = this._NS_IN
    const tmp0 = ns.d - ns.sa + ns.sb
    const p_sq = 2 + ns.d_sq - (2 * ns.c_ab) + (2 * ns.d * (ns.sb - ns.sa))
    if (p_sq >= 0) {
        const tmp1 = Math.atan2(ns.ca - ns.cb, tmp0)
        out[0] = mod2pi(ns.alpha - tmp1)
        out[1] = Math.sqrt(p_sq)
        out[2] = mod2pi(tmp1 - ns.beta)
        return [ERROR_CODE.EDUBOK, out]
    }
    return [ERROR_CODE.EDUBNOPATH, out]
}
public dubins_LSR(out_arg: config): [ERROR_CODE, config] {
    // Left-Straight-Right word. On success `out` holds the three normalised
    // segment lengths; EDUBNOPATH when the word is infeasible (p_sq < 0).
    const out = out_arg.slice() as config
    const ns = this._NS_IN
    const p_sq = -2 + ns.d_sq + (2 * ns.c_ab) + (2 * ns.d * (ns.sa + ns.sb))
    if (p_sq >= 0) {
        const p = Math.sqrt(p_sq)
        const tmp0 = Math.atan2(-ns.ca - ns.cb, ns.d + ns.sa + ns.sb) - Math.atan2(-2.0, p)
        out[0] = mod2pi(tmp0 - ns.alpha)
        out[1] = p
        out[2] = mod2pi(tmp0 - mod2pi(ns.beta))
        return [ERROR_CODE.EDUBOK, out]
    }
    return [ERROR_CODE.EDUBNOPATH, out]
}
public dubins_RSL(out_arg: config): [ERROR_CODE, config] {
    // Right-Straight-Left word. On success `out` holds the three normalised
    // segment lengths; EDUBNOPATH when the word is infeasible (p_sq < 0).
    const out = out_arg.slice() as config
    const ns = this._NS_IN
    const p_sq = -2 + ns.d_sq + (2 * ns.c_ab) - (2 * ns.d * (ns.sa + ns.sb))
    if (p_sq >= 0) {
        const p = Math.sqrt(p_sq)
        const tmp0 = Math.atan2(ns.ca + ns.cb, ns.d - ns.sa - ns.sb) - Math.atan2(2.0, p)
        out[0] = mod2pi(ns.alpha - tmp0)
        out[1] = p
        out[2] = mod2pi(ns.beta - tmp0)
        return [ERROR_CODE.EDUBOK, out]
    }
    return [ERROR_CODE.EDUBNOPATH, out]
}
public dubins_RLR(out_arg: config): [ERROR_CODE, config] {
    // Right-Left-Right word. Feasible only when |tmp0| <= 1 (acos domain);
    // otherwise EDUBNOPATH.
    const out = out_arg.slice() as config
    const ns = this._NS_IN
    const tmp0 = (6 - ns.d_sq + 2 * ns.c_ab + 2 * ns.d * (ns.sa - ns.sb)) / 8
    const phi = Math.atan2(ns.ca - ns.cb, ns.d - ns.sa + ns.sb)
    if (Math.abs(tmp0) <= 1) {
        const p = mod2pi((2 * M_PI) - Math.acos(tmp0))
        const t = mod2pi(ns.alpha - phi + mod2pi(p / 2))
        out[0] = t
        out[1] = p
        out[2] = mod2pi(ns.alpha - ns.beta - t + mod2pi(p))
        return [ERROR_CODE.EDUBOK, out]
    }
    return [ERROR_CODE.EDUBNOPATH, out]
}
public dubins_LRL(out_arg: config): [ERROR_CODE, config] {
    // Left-Right-Left word. Feasible only when |tmp0| <= 1 (acos domain);
    // otherwise EDUBNOPATH.
    const out = out_arg.slice() as config
    const ns = this._NS_IN
    const tmp0 = (6 - ns.d_sq + 2 * ns.c_ab + 2 * ns.d * (ns.sb - ns.sa)) / 8
    const phi = Math.atan2(ns.ca - ns.cb, ns.d + ns.sa - ns.sb)
    if (Math.abs(tmp0) <= 1) {
        const p = mod2pi(2 * M_PI - Math.acos(tmp0))
        const t = mod2pi(-ns.alpha - phi + p / 2)
        out[0] = t
        out[1] = p
        out[2] = mod2pi(mod2pi(ns.beta) - ns.alpha - t + mod2pi(p))
        return [ERROR_CODE.EDUBOK, out]
    }
    return [ERROR_CODE.EDUBNOPATH, out]
}
public dubins_word(pathType: DubinsPathType, out: config): [ERROR_CODE, config] {
    // Dispatch to the solver for one of the six Dubins words; unknown path
    // types yield EDUBNOPATH with the untouched output buffer.
    const solvers = new Map<DubinsPathType, (o: config) => [ERROR_CODE, config]>([
        [DubinsPathType.LSL, (o) => this.dubins_LSL(o)],
        [DubinsPathType.RSL, (o) => this.dubins_RSL(o)],
        [DubinsPathType.LSR, (o) => this.dubins_LSR(o)],
        [DubinsPathType.RSR, (o) => this.dubins_RSR(o)],
        [DubinsPathType.LRL, (o) => this.dubins_LRL(o)],
        [DubinsPathType.RLR, (o) => this.dubins_RLR(o)],
    ])
    const solve = solvers.get(pathType)
    return solve ? solve(out) : [ERROR_CODE.EDUBNOPATH, out]
}
}
export default Dubins
|
echo
echo "Installing most recent version of OpenJDK"
brew install openjdk

# Configure openjdk as instructed by 'brew info openjdk'.
# Cache the prefix once instead of spawning four `brew --prefix` subshells.
BREW_PREFIX="$(brew --prefix)"
sudo ln -sfn "${BREW_PREFIX}/opt/openjdk/libexec/openjdk.jdk" /Library/Java/JavaVirtualMachines/openjdk.jdk
echo "export PATH=\"${BREW_PREFIX}/opt/openjdk/bin:\$PATH\"" >> ~/.zshenv
echo "export CPPFLAGS=\"-I${BREW_PREFIX}/opt/openjdk/include\"" >> ~/.zshenv

# more java tools
# Bug fix: quote the path so a MY_DIR containing spaces does not word-split.
source "${MY_DIR}/scripts/opt-in/java-tools.sh"
package cn.finalteam.rxgalleryfinalprovider.ui.fragment;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.content.FileProvider;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.yalantis.ucrop.UCrop;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import javax.inject.Inject;
import cn.finalteam.rxgalleryfinalprovider.BuildConfig;
import cn.finalteam.rxgalleryfinalprovider.Configuration;
import cn.finalteam.rxgalleryfinalprovider.R;
import cn.finalteam.rxgalleryfinalprovider.RxGalleryFinal;
import cn.finalteam.rxgalleryfinalprovider.anim.Animation;
import cn.finalteam.rxgalleryfinalprovider.anim.AnimationListener;
import cn.finalteam.rxgalleryfinalprovider.anim.SlideInUnderneathAnimation;
import cn.finalteam.rxgalleryfinalprovider.anim.SlideOutUnderneathAnimation;
import cn.finalteam.rxgalleryfinalprovider.bean.BucketBean;
import cn.finalteam.rxgalleryfinalprovider.bean.ImageCropBean;
import cn.finalteam.rxgalleryfinalprovider.bean.MediaBean;
import cn.finalteam.rxgalleryfinalprovider.di.component.DaggerMediaGridComponent;
import cn.finalteam.rxgalleryfinalprovider.di.component.MediaGridComponent;
import cn.finalteam.rxgalleryfinalprovider.di.component.RxGalleryFinalComponent;
import cn.finalteam.rxgalleryfinalprovider.di.module.MediaGridModule;
import cn.finalteam.rxgalleryfinalprovider.presenter.impl.MediaGridPresenterImpl;
import cn.finalteam.rxgalleryfinalprovider.rxbus.RxBus;
import cn.finalteam.rxgalleryfinalprovider.rxbus.RxBusSubscriber;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.CloseMediaViewPageFragmentEvent;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.ImageRadioResultEvent;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.MediaCheckChangeEvent;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.OpenMediaPageFragmentEvent;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.OpenMediaPreviewFragmentEvent;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.RequestStorageReadAccessPermissionEvent;
import cn.finalteam.rxgalleryfinalprovider.rxbus.event.SendMediaPageFragmentDataEvent;
import cn.finalteam.rxgalleryfinalprovider.ui.activity.MediaActivity;
import cn.finalteam.rxgalleryfinalprovider.ui.adapter.BucketAdapter;
import cn.finalteam.rxgalleryfinalprovider.ui.adapter.MediaGridAdapter;
import cn.finalteam.rxgalleryfinalprovider.ui.widget.FooterAdapter;
import cn.finalteam.rxgalleryfinalprovider.ui.widget.HorizontalDividerItemDecoration;
import cn.finalteam.rxgalleryfinalprovider.ui.widget.MarginDecoration;
import cn.finalteam.rxgalleryfinalprovider.ui.widget.RecyclerViewFinal;
import cn.finalteam.rxgalleryfinalprovider.utils.CameraUtils;
import cn.finalteam.rxgalleryfinalprovider.utils.EmptyViewUtils;
import cn.finalteam.rxgalleryfinalprovider.utils.FilenameUtils;
import cn.finalteam.rxgalleryfinalprovider.utils.Logger;
import cn.finalteam.rxgalleryfinalprovider.utils.MediaScanner;
import cn.finalteam.rxgalleryfinalprovider.utils.MediaUtils;
import cn.finalteam.rxgalleryfinalprovider.utils.PermissionCheckUtils;
import cn.finalteam.rxgalleryfinalprovider.utils.ThemeUtils;
import cn.finalteam.rxgalleryfinalprovider.view.MediaGridView;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;
import rx.Subscription;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
/**
* Desction:
* Author:pengjianbo
* Date:16/5/7 上午10:02
*/
/**
 * Fragment that renders the photo-picker grid: shows the images of the
 * currently selected bucket in a 3-column paged grid, supports switching
 * buckets, taking a new photo with the camera (via FileProvider on N+),
 * optional cropping through uCrop, and preview/multi-select flows driven by
 * RxBus events. Collaborates with {@code MediaActivity} for the shared
 * checked-media list.
 */
public class MediaGridFragment extends BaseFragment implements MediaGridView, RecyclerViewFinal.OnLoadMoreListener,
FooterAdapter.OnItemClickListener, View.OnClickListener, MediaScanner.ScanCallback, BucketAdapter.OnRecyclerViewItemClickListener {
// Template for camera capture file names, e.g. IMG_20160507100200.jpg.
private final String IMAGE_STORE_FILE_NAME = "IMG_%s.jpg";
// Request code for the camera capture intent.
private final int TAKE_IMAGE_REQUEST_CODE = 1001;
// Runtime-permission request codes.
private final int MY_PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE = 1002;
public static final int MY_PERMISSIONS_REQUEST_CAMERA = 106;
public static final int MY_PERMISSIONS_REQUEST_READ_STORAGE = 104;
// Keys used to save/restore state across configuration changes.
private final String TAKE_URL_STORAGE_KEY = "take_url_storage_key";
private final String BUCKET_ID_KEY = "bucket_id_key";
// Page size for incremental media loading.
private final int LIMIT = 23;
@Inject
MediaGridPresenterImpl mMediaGridPresenter;
@Inject
Configuration mConfiguration;
@Inject
DisplayMetrics mScreenSize;
// Backing data for the grid; index 0 may be a synthetic "take photo" entry.
private List<MediaBean> mMediaBeanList;
private MediaGridAdapter mMediaGridAdapter;
private RecyclerViewFinal mRvMedia;
private LinearLayout mLlEmptyView;
private RecyclerView mRvBucket;
private BucketAdapter mBucketAdapter;
private RelativeLayout mRlBucektOverview;
private List<BucketBean> mBucketBeanList;
private TextView mTvFolderName;
private TextView mTvPreview;
private RelativeLayout mRlRootView;
private MediaScanner mMediaScanner;
// Next page to request; reset to 1 when the bucket changes.
private int mPage = 1;
// Directory for captured photos and its "crop" sub-directory for uCrop output.
private File mImageStoreDir;
private File mImageStoreCropDir;
// Absolute path of the most recently captured photo (restored on rotation).
private String mImagePath;
// Integer.MIN_VALUE acts as the sentinel id for the "all media" bucket.
private String mBucketId = String.valueOf(Integer.MIN_VALUE);
private MediaActivity mMediaActivity;
// RxBus subscriptions; removed again in onDestroyView().
private Subscription mSubscrMediaCheckChangeEvent;
private Subscription mSubscrCloseMediaViewPageFragmentEvent;
private Subscription mSubscrRequestStorageReadAccessPermissionEvent;
/** Factory method; this fragment takes no arguments. */
public static MediaGridFragment newInstance() {
return new MediaGridFragment();
}
/**
 * Caches the hosting MediaActivity, prepares the capture/crop directories
 * under DCIM/RxGalleryFinal, and creates the media scanner.
 */
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof MediaActivity) {
mMediaActivity = (MediaActivity) context;
}
//mImageStoreDir = context.getExternalFilesDir(Environment.DIRECTORY_PICTURES);
mImageStoreDir = new File(Environment.getExternalStorageDirectory(), "/DCIM/RxGalleryFinal/");
mImageStoreCropDir = new File(mImageStoreDir, "crop");
if (!mImageStoreCropDir.exists()) {
mImageStoreCropDir.mkdirs();
}
mMediaScanner = new MediaScanner(context);
}
@Override
public int getContentView() {
return R.layout.gallery_fragment_media_grid;
}
/**
 * Wires up the media grid, the bucket list overlay, preview controls and
 * RxBus subscriptions, then kicks off bucket loading and — once the storage
 * read permission is granted — the first media page.
 */
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mRvMedia = (RecyclerViewFinal) view.findViewById(R.id.rv_media);
mLlEmptyView = (LinearLayout) view.findViewById(R.id.ll_empty_view);
mRvBucket = (RecyclerView) view.findViewById(R.id.rv_bucket);
mRlBucektOverview = (RelativeLayout) view.findViewById(R.id.rl_bucket_overview);
mRlRootView = (RelativeLayout) view.findViewById(R.id.rl_root_view);
mRvMedia.setEmptyView(mLlEmptyView);
GridLayoutManager gridLayoutManager = new GridLayoutManager(getContext(), 3);
gridLayoutManager.setOrientation(GridLayoutManager.VERTICAL);
mRvMedia.addItemDecoration(new MarginDecoration(getContext()));
mRvMedia.setLayoutManager(gridLayoutManager);
mRvMedia.setOnLoadMoreListener(this);
mRvMedia.setFooterViewHide(true);
// Pause image loading while scrolling, if the host configured a listener.
if (mConfiguration.getPauseOnScrollListener() != null) {
mRvMedia.addOnScrollListener(mConfiguration.getPauseOnScrollListener());
mRvBucket.addOnScrollListener(mConfiguration.getPauseOnScrollListener());
}
mTvFolderName = (TextView) view.findViewById(R.id.tv_folder_name);
mTvFolderName.setOnClickListener(this);
mTvPreview = (TextView) view.findViewById(R.id.tv_preview);
mTvPreview.setOnClickListener(this);
mTvPreview.setEnabled(false);
// Single-select mode has no multi-preview UI.
if (mConfiguration.isRadio()) {
view.findViewById(R.id.tv_preview_vr).setVisibility(View.GONE);
mTvPreview.setVisibility(View.GONE);
}
mMediaBeanList = new ArrayList<>();
mMediaGridAdapter = new MediaGridAdapter(getContext(), mMediaBeanList, mMediaActivity.getCheckedList(),
mScreenSize.widthPixels, mConfiguration);
mRvMedia.setAdapter(mMediaGridAdapter);
mMediaGridPresenter.setMediaGridView(this);
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(getContext());
linearLayoutManager.setOrientation(GridLayoutManager.VERTICAL);
mRvBucket.addItemDecoration(new HorizontalDividerItemDecoration.Builder(getContext())
.color(getResources().getColor(R.color.gallery_bucket_list_item_normal_color))
.size(getResources().getDimensionPixelSize(R.dimen.gallery_divider_decoration_height))
.margin(getResources().getDimensionPixelSize(R.dimen.gallery_bucket_margin),
getResources().getDimensionPixelSize(R.dimen.gallery_bucket_margin))
.build());
mRvBucket.setLayoutManager(linearLayoutManager);
mBucketBeanList = new ArrayList<>();
mBucketAdapter = new BucketAdapter(getContext(), mBucketBeanList, mConfiguration);
mRvBucket.setAdapter(mBucketAdapter);
mRvMedia.setOnItemClickListener(this);
mMediaGridPresenter.getBucketList();
mBucketAdapter.setOnRecyclerViewItemClickListener(this);
mRlBucektOverview.setVisibility(View.INVISIBLE);
new SlideInUnderneathAnimation(mRvBucket)
.setDirection(Animation.DIRECTION_DOWN)
.animate();
subscribeEvent();
Activity activity = mMediaActivity;
if (activity == null) {
activity = getActivity();
}
String requestStorageAccessPermissionTips = ThemeUtils.resolveString(getContext(),
R.attr.gallery_request_storage_access_permission_tips,
R.string.gallery_default_request_storage_access_permission_tips);
// Only load media immediately when the permission is already granted;
// otherwise wait for RequestStorageReadAccessPermissionEvent (see below).
boolean success = PermissionCheckUtils.checkReadExternalPermission(activity, requestStorageAccessPermissionTips,
MediaActivity.REQUEST_STORAGE_READ_ACCESS_PERMISSION);
if (success) {
mMediaGridPresenter.getMediaList(mBucketId, mPage, LIMIT);
}
}
/**
 * Registers RxBus subscriptions: preview-button enablement on selection
 * changes, grid refresh when the pager closes, and deferred media loading
 * once the storage read permission is resolved.
 */
private void subscribeEvent() {
mSubscrMediaCheckChangeEvent = RxBus.getDefault().toObservable(MediaCheckChangeEvent.class)
.subscribe(new RxBusSubscriber<MediaCheckChangeEvent>() {
@Override
protected void onEvent(MediaCheckChangeEvent mediaCheckChangeEvent) {
if (mMediaActivity.getCheckedList().size() == 0) {
mTvPreview.setEnabled(false);
} else {
mTvPreview.setEnabled(true);
}
}
});
RxBus.getDefault().add(mSubscrMediaCheckChangeEvent);
mSubscrCloseMediaViewPageFragmentEvent = RxBus.getDefault().toObservable(CloseMediaViewPageFragmentEvent.class)
.subscribe(new RxBusSubscriber<CloseMediaViewPageFragmentEvent>() {
@Override
protected void onEvent(CloseMediaViewPageFragmentEvent closeMediaViewPageFragmentEvent) throws Exception {
mMediaGridAdapter.notifyDataSetChanged();
}
});
RxBus.getDefault().add(mSubscrCloseMediaViewPageFragmentEvent);
mSubscrRequestStorageReadAccessPermissionEvent = RxBus.getDefault().toObservable(RequestStorageReadAccessPermissionEvent.class)
.subscribe(new RxBusSubscriber<RequestStorageReadAccessPermissionEvent>() {
@Override
protected void onEvent(RequestStorageReadAccessPermissionEvent requestStorageReadAccessPermissionEvent) throws Exception {
if (requestStorageReadAccessPermissionEvent.isSuccess()) {
mMediaGridPresenter.getMediaList(mBucketId, mPage, LIMIT);
} else {
// No storage access means nothing to show; close the picker.
getActivity().finish();
}
}
});
RxBus.getDefault().add(mSubscrRequestStorageReadAccessPermissionEvent);
}
/** Applies the theme's page background colour to the root view. */
@Override
public void setTheme() {
super.setTheme();
int pageColor = ThemeUtils.resolveColor(getContext(), R.attr.gallery_page_bg, R.color.gallery_default_page_bg);
mRlRootView.setBackgroundColor(pageColor);
}
/** Dagger injection for the presenter, configuration and screen metrics. */
@Override
protected void setupComponent(RxGalleryFinalComponent rxGalleryFinalComponent) {
MediaGridComponent mediaGridComponent = DaggerMediaGridComponent.builder()
.rxGalleryFinalComponent(RxGalleryFinal.getRxGalleryFinalComponent())
.mediaGridModule(new MediaGridModule(getContext(), true))
.build();
mediaGridComponent.inject(this);
}
/** Load-more callback from the RecyclerView: fetch the next page. */
@Override
public void loadMore() {
mMediaGridPresenter.getMediaList(mBucketId, mPage, LIMIT);
}
/**
 * Receives one page of media. On the first page of the "all" bucket a
 * synthetic take-photo entry (id == Integer.MIN_VALUE) is prepended. A page
 * shorter than LIMIT disables further load-more requests.
 */
@Override
public void onRequestMediaCallback(List<MediaBean> list) {
if (mPage == 1 && TextUtils.equals(mBucketId, String.valueOf(Integer.MIN_VALUE))) {
MediaBean takePhotoBean = new MediaBean();
takePhotoBean.setId(Integer.MIN_VALUE);
takePhotoBean.setBucketId(String.valueOf(Integer.MIN_VALUE));
mMediaBeanList.add(takePhotoBean);
}
if (list != null && list.size() > 0) {
mMediaBeanList.addAll(list);
Logger.i(String.format("得到:%s张图片", list.size()));
} else {
Logger.i("没有更多图片");
}
mMediaGridAdapter.notifyDataSetChanged();
mPage++;
if (list == null || list.size() < LIMIT) {
mRvMedia.setFooterViewHide(true);
mRvMedia.setHasLoadMore(false);
} else {
mRvMedia.setFooterViewHide(false);
mRvMedia.setHasLoadMore(true);
}
if (mMediaBeanList.size() == 0) {
String mediaEmptyTils = ThemeUtils.resolveString(getContext(), R.attr.gallery_media_empty_tips, R.string.gallery_default_media_empty_tips);
EmptyViewUtils.showMessage(mLlEmptyView, mediaEmptyTils);
}
mRvMedia.onLoadMoreComplete();
}
/** Receives the bucket list; the first bucket becomes the selection. */
@Override
public void onRequestBucketCallback(List<BucketBean> list) {
if (list == null || list.size() == 0) {
return;
}
mBucketBeanList.addAll(list);
mBucketAdapter.setSelectedBucket(list.get(0));
mBucketAdapter.notifyDataSetChanged();
}
/**
 * Bucket-list click: switch to the chosen bucket (no-op when unchanged),
 * clear the grid and reload from page 1.
 */
@Override
public void onItemClick(View view, int position) {
BucketBean bucketBean = mBucketBeanList.get(position);
String bucketId = bucketBean.getBucketId();
mRlBucektOverview.setVisibility(View.GONE);
if (TextUtils.equals(mBucketId, bucketId)) {
return;
}
mBucketId = bucketId;
EmptyViewUtils.showLoading(mLlEmptyView);
mRvMedia.setHasLoadMore(false);
mMediaBeanList.clear();
mMediaGridAdapter.notifyDataSetChanged();
mBucketAdapter.setSelectedBucket(bucketBean);
mRvMedia.setFooterViewHide(true);
mPage = 1;
mMediaGridPresenter.getMediaList(mBucketId, mPage, LIMIT);
}
/**
 * Grid click: the synthetic entry opens the camera; otherwise, in
 * single-select mode the item is either returned directly or routed through
 * uCrop, and in multi-select mode the pager is opened at the tapped index
 * (shifted by one when the take-photo entry is present).
 */
@Override
public void onItemClick(RecyclerView.ViewHolder holder, int position) {
MediaBean mediaBean = mMediaBeanList.get(position);
if (mediaBean.getId() == Integer.MIN_VALUE) {
if (!CameraUtils.hasCamera(getContext())) {
Toast.makeText(getContext(), R.string.gallery_device_no_camera_tips, Toast.LENGTH_SHORT).show();
return;
}
openCamera();
} else {
if (mConfiguration.isRadio()) {
if (!mConfiguration.isCrop()) {
ImageCropBean bean = new ImageCropBean();
bean.copyMediaBean(mediaBean);
RxBus.getDefault().post(new ImageRadioResultEvent(bean));
getActivity().finish();
} else {
// Keep the source format for the crop output when possible.
String ext = FilenameUtils.getExtension(mediaBean.getOriginalPath());
Bitmap.CompressFormat format = Bitmap.CompressFormat.JPEG;
if (ext != null && TextUtils.equals(ext.toLowerCase(), "png")) {
format = Bitmap.CompressFormat.PNG;
} else if (ext != null && TextUtils.equals(ext.toLowerCase(), "webp")) {
format = Bitmap.CompressFormat.WEBP;
}
try {
String originalPath = mediaBean.getOriginalPath();
File file = new File(originalPath);
UCrop uCrop = UCrop.of(mediaBean, Uri.fromFile(new File(mImageStoreCropDir, file.getName())));
uCrop = uCrop.useSourceImageAspectRatio();
UCrop.Options options = new UCrop.Options();
options.setHideBottomControls(mConfiguration.isHideBottomControls());
options.setCompressionFormat(format);
if (mConfiguration.getCompressionQuality() != 0) {
options.setCompressionQuality(mConfiguration.getCompressionQuality());
}
if (mConfiguration.getMaxBitmapSize() != 0) {
options.setMaxBitmapSize(mConfiguration.getMaxBitmapSize());
}
int[] gestures = mConfiguration.getAllowedGestures();
if (gestures != null && gestures.length == 3) {
options.setAllowedGestures(gestures[0], gestures[1], gestures[2]);
}
if (mConfiguration.getMaxScaleMultiplier() != 0) {
options.setMaxScaleMultiplier(mConfiguration.getMaxScaleMultiplier());
}
// Apply a fixed aspect ratio (proportional scaling).
if (mConfiguration.getAspectRatioX() != 0 && mConfiguration.getAspectRatioY() != 0) {
options.withAspectRatio(mConfiguration.getAspectRatioX(), mConfiguration.getAspectRatioY());
}
// Default aspect-ratio option index and the option list.
// NOTE(review): the `>` comparison looks inverted — an index larger
// than the array length is out of range; presumably `<` was intended.
// Confirm against the original rxgalleryfinal sources.
if (mConfiguration.getAspectRatio() != null && mConfiguration.getSelectedByDefault() > mConfiguration.getAspectRatio().length) {
options.setAspectRatioOptions(mConfiguration.getSelectedByDefault(), mConfiguration.getAspectRatio());
}
options.setFreeStyleCropEnabled(mConfiguration.isFreestyleCropEnabled());
options.setOvalDimmedLayer(mConfiguration.isOvalDimmedLayer());
uCrop = uCrop.withOptions(options);
uCrop.start(getActivity());
} catch (Exception e) {
Logger.e(e);
}
}
} else {
RxBus.getDefault().post(new OpenMediaPageFragmentEvent());
MediaBean firstBean = mMediaBeanList.get(0);
List<MediaBean> gridMediaList = mMediaBeanList;
int pos = position;
// Skip the synthetic take-photo entry when handing data to the pager.
if (firstBean.getId() == Integer.MIN_VALUE) {
pos = position - 1;
gridMediaList = mMediaBeanList.subList(1, mMediaBeanList.size());
}
RxBus.getDefault().postSticky(new SendMediaPageFragmentDataEvent(gridMediaList, pos));
}
}
}
/**
 * Requests any missing storage/camera permission (one at a time), then
 * launches the system camera with a FileProvider URI on Android N+ (plain
 * file URI up to M), remembering the target path in {@link #mImagePath}.
 */
private void openCamera() {
if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(getActivity(),
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
MY_PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE);
} else if (ContextCompat.checkSelfPermission(getActivity(),
Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.CAMERA},
MY_PERMISSIONS_REQUEST_CAMERA);
} else if (ContextCompat.checkSelfPermission(getActivity(),
Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(getActivity(), new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
MY_PERMISSIONS_REQUEST_READ_STORAGE);
} else {
Log.e("else", "else");
Intent captureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
if (captureIntent.resolveActivity(getActivity().getPackageManager()) != null) {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ENGLISH);
String filename = String.format(IMAGE_STORE_FILE_NAME, dateFormat.format(new Date()));
//File photoFile = new File(mImageStoreDir, filename);
Log.e("filename", filename);
Log.e("mImageStoreDir", mImageStoreDir.getAbsolutePath());
File photoFile = null;
try {
photoFile = File.createTempFile(
filename, /* prefix */
".jpg", /* suffix */
mImageStoreDir /* directory */
);
} catch (IOException e) {
e.printStackTrace();
}
if (photoFile != null) {
mImagePath = photoFile.getAbsolutePath();
Log.e("photoFile", photoFile.getAbsolutePath());
if (mImagePath != null) {
Uri photoURI = null;
// file:// URIs trigger FileUriExposedException from N onward,
// so route through the app's FileProvider there.
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.M) {
photoURI = Uri.fromFile(photoFile);
} else {
Log.e("photoUri", BuildConfig.APPLICATION_ID);
photoURI = FileProvider.getUriForFile(getActivity(),
BuildConfig.APPLICATION_ID + ".provider",
photoFile);
Log.e("photoUri", photoURI.toString());
}
//captureIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(new File(mImagePath)));
captureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);
startActivityForResult(captureIntent, TAKE_IMAGE_REQUEST_CODE);
} else {
Log.e("photoFile", "is null");
}
}
} else {
Toast.makeText(getContext(), R.string.gallery_device_camera_unable, Toast.LENGTH_SHORT).show();
}
}
}
/**
 * After a successful capture, asks the media scanner to index the new file
 * so it appears in the grid (see {@link #onScanCompleted}).
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
Logger.i("onActivityResult: requestCode=" + requestCode + ", resultCode=" + resultCode);
if (requestCode == TAKE_IMAGE_REQUEST_CODE && resultCode == Activity.RESULT_OK) {
Logger.i(String.format("拍照成功,图片存储路径:%s", mImagePath));
// Trigger a media-store rescan so the new photo shows up in the grid.
mMediaScanner.scanFile(mImagePath, "image/jpeg", this);
}
// NOTE(review): runtime-permission results are normally delivered via
// onRequestPermissionsResult, not onActivityResult — confirm this branch
// is actually reachable.
if (requestCode == MY_PERMISSIONS_REQUEST_WRITE_EXTERNAL_STORAGE && resultCode == Activity.RESULT_OK) {
openCamera();
}
}
/** Persists the pending capture path and selected bucket across rotation. */
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (!TextUtils.isEmpty(mImagePath)) {
outState.putString(TAKE_URL_STORAGE_KEY, mImagePath);
}
if (!TextUtils.isEmpty(mBucketId)) {
outState.putString(BUCKET_ID_KEY, mBucketId);
}
}
/** Restores the pending capture path and selected bucket after rotation. */
@Override
public void onViewStateRestored(@Nullable Bundle savedInstanceState) {
super.onViewStateRestored(savedInstanceState);
if (savedInstanceState == null) {
return;
}
mImagePath = savedInstanceState.getString(TAKE_URL_STORAGE_KEY);
mBucketId = savedInstanceState.getString(BUCKET_ID_KEY);
}
@Override
public void onDestroy() {
super.onDestroy();
// Disconnect the media scanner to avoid leaking its connection.
mMediaScanner.unScanFile();
}
/**
 * Handles the preview button (opens the preview fragment) and the folder
 * name button (toggles the bucket overlay with a slide animation; the
 * button is disabled while the animation runs).
 */
@Override
public void onClick(View v) {
int id = v.getId();
if (id == R.id.tv_preview) {
RxBus.getDefault().post(new OpenMediaPreviewFragmentEvent());
} else if (id == R.id.tv_folder_name) {
v.setEnabled(false);
int visibility = mRlBucektOverview.getVisibility();
if (visibility == View.VISIBLE) {
new SlideOutUnderneathAnimation(mRvBucket)
.setDirection(Animation.DIRECTION_DOWN)
.setDuration(Animation.DURATION_DEFAULT)
.setListener(new AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
v.setEnabled(true);
mRlBucektOverview.setVisibility(View.GONE);
}
})
.animate();
} else {
mRlBucektOverview.setVisibility(View.VISIBLE);
new SlideInUnderneathAnimation(mRvBucket)
.setDirection(Animation.DIRECTION_DOWN)
.setDuration(Animation.DURATION_DEFAULT)
.setListener(new AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
v.setEnabled(true);
}
})
.animate();
}
}
}
/**
 * Media-scanner callback for the captured photo: loads its MediaBean off
 * the main thread and inserts it at index 1, right after the synthetic
 * take-photo entry (assumes that entry occupies index 0).
 */
@Override
public void onScanCompleted(String[] images) {
if (images == null || images.length == 0) {
Logger.i("images empty");
return;
}
Observable.create((Observable.OnSubscribe<MediaBean>) new Observable.OnSubscribe<MediaBean>() {
@Override
public void call(Subscriber<? super MediaBean> subscriber) {
MediaBean mediaBean = MediaUtils.getMediaBeanWithImage(MediaGridFragment.this.getContext(), images[0]);
subscriber.onNext(mediaBean);
subscriber.onCompleted();
}
})
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Observer<MediaBean>() {
@Override
public void onCompleted() {
}
@Override
public void onError(Throwable e) {
Logger.i("获取MediaBean异常");
}
@Override
public void onNext(MediaBean mediaBean) {
if (!isDetached() && mediaBean != null) {
mMediaBeanList.add(1, mediaBean);
mMediaGridAdapter.notifyDataSetChanged();
}
}
});
}
@Override
public void onDestroyView() {
super.onDestroyView();
// Unsubscribe RxBus listeners registered in subscribeEvent().
RxBus.getDefault().remove(mSubscrMediaCheckChangeEvent);
RxBus.getDefault().remove(mSubscrCloseMediaViewPageFragmentEvent);
}
}
|
<gh_stars>1-10
import React, { Suspense } from 'react';
import { GridContent } from '@ant-design/pro-layout';
import { Row, Col } from 'antd';
import { createUseRestful, useTable } from '@/hooks';
import {
BIZ_ANALYSE_OVERVIEW,
USER_LOGS_API,
MODULES_LOGS_API,
MONITOR_OS_RUNNING_24H_API,
MONITOR_EMQX_ALL_API,
} from '@/services/resources';
import ModuleLoginLogCard from './components/ModuleLogCard';
import Running24HRow from './components/Running24HRow';
import EMQChartListRow from './components/EMQChartListRow';
const IntroduceRow = React.lazy(() => import('./components/IntroduceRow'));
const UserLoginLogCard = React.lazy(() => import('./components/UserLoginLogCard'));
const Overview = () => {
  // Dashboard data sources. These are all hooks, so the call order below must
  // stay stable across renders.
  const homeQuery = createUseRestful(BIZ_ANALYSE_OVERVIEW).useSWRQuery();
  const running24HQuery = createUseRestful(MONITOR_OS_RUNNING_24H_API).useSWRQuery();
  const emqQuery = createUseRestful(MONITOR_EMQX_ALL_API).useSWRQuery();
  const userLogsTable = useTable(createUseRestful(USER_LOGS_API));
  const moduleLogsTable = useTable(createUseRestful(MODULES_LOGS_API));

  const homeData = homeQuery.data;
  const running24HData = running24HQuery.data;
  const emqListData = emqQuery.data;

  // Shared placeholder while the lazily loaded cards are being fetched.
  const lazyFallback = <div />;

  return (
    <GridContent>
      <React.Fragment>
        <Suspense fallback={lazyFallback}>
          <IntroduceRow loading={!homeData} data={homeData} />
        </Suspense>
        <Suspense fallback={lazyFallback}>
          <Running24HRow loading={!running24HData} data={running24HData as any} />
        </Suspense>
        <Suspense fallback={lazyFallback}>
          <EMQChartListRow loading={!emqListData} data={emqListData} />
        </Suspense>
        <Row gutter={24}>
          <Col style={{ marginBottom: 20 }} xl={12} lg={24} md={24} sm={24} xs={24}>
            <Suspense fallback={lazyFallback}>
              <ModuleLoginLogCard tableProps={moduleLogsTable.tableProps as any} />
            </Suspense>
          </Col>
          <Col style={{ marginBottom: 20 }} xl={12} lg={24} md={24} sm={24} xs={24}>
            <Suspense fallback={lazyFallback}>
              <UserLoginLogCard tableProps={userLogsTable.tableProps as any} />
            </Suspense>
          </Col>
        </Row>
      </React.Fragment>
    </GridContent>
  );
};
export default Overview;
|
import reducer from '../../reducers';
import ProductMock from '../../__mocks__/ProductMock';
describe('Reducers', () => {
  // Unknown action types must leave the state untouched.
  test('Return initialState', () => {
    expect(reducer({}, '')).toEqual({});
  });

  // ADD_TO_CART appends the payload product to the cart array.
  test('Add to Cart reducer', () => {
    const product = ProductMock;
    const result = reducer({ cart: [] }, { type: 'ADD_TO_CART', payload: product });
    expect(result).toEqual({ cart: [product] });
  });

  // REMOVE_FROM_CART drops the matching product from the cart array.
  test('Remove to Cart reducer', () => {
    const product = ProductMock;
    const result = reducer({ cart: [product] }, { type: 'REMOVE_FROM_CART', payload: product });
    expect(result).toEqual({ cart: [] });
  });
});
|
#!/bin/bash
#
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Downloads the UFF Faster R-CNN model plus sample .ppm images and writes
# their basenames (without extension) to list.txt.
set -eo pipefail

# check for wget (command -v is POSIX; `which` is non-standard and noisy)
command -v wget >/dev/null || { echo 'wget not found, please install.' && exit 1; }

# download
mkdir -p uff_faster_rcnn && \
cd uff_faster_rcnn && \
wget 'https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_4.x_apps/master/models/frcnn/faster_rcnn.pb' && \
wget 'https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_4.x_apps/master/models/frcnn/2015_0502_034830_005_00001_rain_000179.ppm' && \
wget 'https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_4.x_apps/master/models/frcnn/2016_1111_185016_003_00001_night_000441.ppm' && \
wget 'https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_4.x_apps/master/models/frcnn/57ea04a57823530017bf15bf_000000.ppm' && \
wget 'https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_4.x_apps/master/models/frcnn/57ea04a57823530017bf15bf_001008.ppm' && \
{ for f in *.ppm; do echo "${f%%.*}"; done > list.txt; } && \
echo 'Model downloading finished !' && \
cd ..
# Fixes vs. original: basenames come from a glob (not parsed `ls` output),
# and list.txt is truncated (`>`) instead of appended (`>>`) so re-running
# the script no longer accumulates duplicate entries.
/**
 * Initialise the demo Google Map centred near Byron Bay (-28.64, 153.61),
 * with every standard UI control enabled and explicitly positioned.
 */
function initMap() {
  // Build the full options object up front, then hand it to the Map constructor.
  const mapOptions = {
    zoom: 12,
    center: { lat: -28.643387, lng: 153.612224 },
    mapTypeControl: true,
    mapTypeControlOptions: {
      style: google.maps.MapTypeControlStyle.HORIZONTAL_BAR,
      position: google.maps.ControlPosition.TOP_CENTER,
    },
    zoomControl: true,
    zoomControlOptions: {
      position: google.maps.ControlPosition.LEFT_CENTER,
    },
    scaleControl: true,
    streetViewControl: true,
    streetViewControlOptions: {
      position: google.maps.ControlPosition.LEFT_TOP,
    },
    fullscreenControl: true,
  };
  const map = new google.maps.Map(document.getElementById("map"), mapOptions);
}
|
package com.twu.biblioteca.handlers.operation;
import com.twu.biblioteca.components.Library;
import com.twu.biblioteca.components.User;
import com.twu.biblioteca.handlers.InputHandler;
public class UserProfileHandler extends InputHandler {

    public UserProfileHandler(Library library) {
        super(library);
    }

    /** Prints the heading shown above the profile details. */
    @Override
    protected void printHeading() {
        System.out.println("Please find your profile information below:");
    }

    /**
     * Builds the display lines for the signed-in user's profile.
     *
     * @param input user input (not used by this handler)
     * @return profile fields rendered as "Label: value" strings
     */
    @Override
    protected String[] retrieveOptions(String... input) {
        // Profile rows are plain text, so suppress numeric option indices.
        setOptionWithIndex(false);
        User user = SIGNED_IN_AS;
        String[] profileRows = {
            "Name: " + user.getName(),
            "Email: " + user.getEmail(),
            "Library ID: " + user.getIdentifier(),
        };
        return profileRows;
    }
}
|
#!/usr/bin/env bash
# Update macOS, Homebrew and RubyGems, selected via CLI flags.

CYAN='\033[1;36m'
NC='\033[0m' # No Color

OPTIND=1

# Which subsystems to update; toggled by the flags parsed below.
should_update_mac=false
should_update_brew=false
should_update_gem=false

# Upgrade Mac App Store apps and macOS itself.
mac_action() {
    echo -e "${CYAN}🖥 MAS upgrade applications from AppStore 🖥${NC}"
    mas upgrade
    echo -e "${CYAN}🖥 Mac OS upgrade 🖥${NC}"
    softwareupdate --install --all
}

# Update, upgrade (formulae and casks) and clean up Homebrew.
brew_action() {
    echo -e "${CYAN}🍺 BREW update 🍺${NC}"
    brew update
    echo -e "${CYAN}🍺 BREW upgrade 🍺${NC}"
    brew upgrade
    echo -e "${CYAN}🍺 BREW CASK upgrade 🍺${NC}"
    brew upgrade --cask --greedy
    echo -e "${CYAN}🍺 BREW cleanup 🍺${NC}"
    brew cleanup
}

# Update RubyGems itself, all gems, then clean old versions.
gem_action() {
    echo -e "${CYAN}💎 GEM update system 💎${NC}"
    gem update --system
    echo -e "${CYAN}💎 GEM update 💎${NC}"
    gem update
    echo -e "${CYAN}💎 GEM cleanup 💎${NC}"
    gem cleanup
}

show_help() {
    cat <<EOF
Usage: $0 [options]
EXAMPLE:
    $0 -a
OPTIONS:
   -a   Update all
   -b   Brew update
   -g   Gem update
   -h   Help
   -m   Mac OS and AppStore update
EOF
}

# No flags at all -> just print usage.
if [[ ! $@ =~ ^\-.+ ]]; then
    show_help
    exit 0
fi

# BUGFIX: the option string was "habgm:" — the trailing ':' made -m require
# an argument, so a bare `-m` failed (or swallowed the next flag). None of
# these options take arguments.
while getopts "habgm" opt; do
    case "$opt" in
    h)
        show_help
        exit 0
        ;;
    a)
        should_update_mac=true
        should_update_brew=true
        should_update_gem=true
        ;;
    b)
        should_update_brew=true
        ;;
    g)
        should_update_gem=true
        ;;
    m)
        should_update_mac=true
        ;;
    *) # unknown flag: show usage and fail
        show_help
        exit 1
        ;;
    esac
done

if $should_update_mac; then
    mac_action
fi
if $should_update_brew; then
    brew_action
fi
if $should_update_gem; then
    gem_action
fi
|
# Sort a dict's values in ascending order.
# BUGFIX: the dict was defined as `my_Dict` but read as `my_dict` (NameError),
# and the trailing `// Output ...` was JS-style, which is not a Python comment.
my_dict = {'a': 7, 'b': 8, 'c': 11, 'd': 5}
sorted_values = sorted(my_dict.values())
print(sorted_values)  # Output: [5, 7, 8, 11]
<reponame>Koltak/mfgtools
/*
* Copyright 2009-2013, 2016 Freescale Semiconductor, Inc.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of the Freescale Semiconductor nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
// PropertyT.h
//
//////////////////////////////////////////////////////////////////////
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
#ifndef __PARAMETER_XXX__
#define __PARAMETER_XXX__
#include "stdafx.h"
#include <map>
#include <vector>
typedef std::vector<CString> StdStringArray;
namespace property
{
// Abstract base class for a named device parameter.
// A Parameter knows how to render itself as text (ToString), parse itself
// from text (Parse), and enumerate its legal value strings. It can also act
// as a container of named child parameters via `map` (see IsContainer()).
class Parameter
{
public:
	// Child parameters keyed by name. Pointers are not owned here —
	// NOTE(review): ownership/lifetime of mapped Parameter* is managed by the
	// caller; confirm against ParseParameterString() users.
	typedef std::map<CString, Parameter*> ParamMap;

	// desc: optional human-readable description (may be NULL).
	Parameter(LPCTSTR desc = NULL)
		: Desc(desc)
	{};

	// Renders the current value as a display string.
	virtual const CString ToString() const = 0;
	// Parses `str` into the value; returns TRUE/FALSE style int (see subclasses).
	virtual int Parse(CString str) = 0;
	// Returns the list of legal value strings (empty for free-form values).
	virtual StdStringArray GetValueStrings() = 0;

	CString Desc;   // human-readable description
	ParamMap map;   // optional named children
	// A parameter with children is a "container" rather than a leaf value.
	bool IsContainer() { return map.size() > 0; };
};
// Typed leaf parameter holding a value of integral type T.
// If ValueList is non-empty the parameter is an enumeration: ToString/Parse
// translate between T values and their display names; otherwise the value is
// formatted as zero-padded hex and parsed with %i (accepts dec/hex/oct).
template <typename T>
class ParameterT : public Parameter
{
public:
	ParameterT(T value = (T)0, T valDefault = (T)0, LPCTSTR desc = NULL)
		: Parameter(desc)
		, Value(value)
		, Default(valDefault)
	{};

	// Format as "0x%0NX" (N = 2*sizeof(T) hex digits) for free-form values,
	// or look up the display name when ValueList is populated.
	const CString ToString() const
	{
		CString str, fmt;
		fmt.Format(_T("0x%%0%dX"), 2*sizeof(T));
		if ( ValueList.empty() )
		{
			str.Format(fmt, Value);
		}
		else
		{
			typename std::map<T, CString>::const_iterator key;
			key = ValueList.find(Value);
			if ( key == ValueList.end() )
				str = _T("Not found.");
			else
				str = key->second;
		}
		return str;
	}

	// Parse `str` into Value. For enumerations, matches against the display
	// names and returns TRUE on success.
	// NOTE(review): in the free-form branch the scan result is ignored and
	// `ret` stays FALSE even when parsing succeeds — callers appear to rely
	// only on the side effect on Value; confirm before changing.
	// NOTE(review): _stscanf_s with %i writes an int regardless of sizeof(T);
	// verify T is int-sized at instantiation sites.
	int Parse(CString str)
	{
		int ret = FALSE;
		if ( ValueList.empty() )
		{
			_stscanf_s(str.GetBuffer(), _T("%i"), &Value);
		}
		else
		{
			typename std::map<T, CString>::iterator pair;
			for ( pair = ValueList.begin(); pair != ValueList.end(); ++pair )
			{
				if ( str.Compare((*pair).second) == 0 )
				{
					Value = (*pair).first;
					ret = TRUE;
					break;
				}
			}
		}
		return ret;
	}

	// Returns all display names from ValueList (empty for free-form values).
	StdStringArray GetValueStrings()
	{
		StdStringArray strArray;
		typename std::map<T, CString>::iterator pair;
		for ( pair = ValueList.begin(); pair != ValueList.end(); ++pair )
		{
			strArray.push_back((*pair).second);
		}
		return strArray;
	}

	T Value;    // current value
	T Default;  // default value
	// Optional enumeration: value -> display name. Empty means free-form.
	typename std::map<T, CString> ValueList;
};
int ParseParameterString(LPCTSTR stringToParse, Parameter::ParamMap& paramMap);
} // namespace property
using namespace property;
#endif
|
<reponame>coffeeandhops/spree_wholesale
require 'test_helper'
# Regression test: the "wholesaler" role must be retrievable, creating it on
# demand if it does not exist yet.
class Spree::RolesTest < ActiveSupport::TestCase
  should "find or create wholesale role" do
    # find_or_create_by_name returns the existing role or persists a new one.
    role = Spree::Role.find_or_create_by_name("wholesaler")
    assert !role.nil?, "Wholesale role does not exist."
  end
end
|
#!/bin/bash
# Install scala of version $1 into /opt/.
# Author: Zhiqiang Ma (http://www.ericzma.com)

# BUGFIX: the original test was `[ $# < 2 ]`; inside single brackets `<` is a
# redirection (from a file literally named "2"), not a comparison. The script
# takes exactly one argument — the version to install.
if [ "$#" -ne 1 ]
then
    echo "Usage: $0 version"
    exit 1
fi

ver=$1

echo "You are to install scala $ver to /opt/"
echo ""
echo "You need to run this as root or by sudo.
Enter to continue. Ctrl-C to abort."
read input

# Fetch, unpack into /opt, publish PATH for login shells, then clean up.
wget "http://www.scala-lang.org/files/archive/scala-$ver.tgz"
tar xf "scala-$ver.tgz" -C /opt/
echo "export PATH=/opt/scala-$ver/bin/:\$PATH" > /etc/profile.d/scala.sh
rm -f "scala-$ver.tgz"

echo "Scala $ver is installed."
echo "Please login again and try to run \`scala\` and test it"
echo "Enjoy!"
|
<filename>kernel-d-security/security-api/src/main/java/cn/stylefeng/roses/kernel/security/api/pojo/DragCaptchaImageDTO.java
package cn.stylefeng.roses.kernel.security.api.pojo;
import cn.stylefeng.roses.kernel.rule.annotation.ChineseDescription;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * DTO for a drag/slide image captcha: the background image with a piece cut
 * out, the cut-out puzzle piece itself, and the coordinates the piece was
 * taken from. Images are transported as base64 strings.
 *
 * @author fengshuonan
 * @date 2021/7/5 14:10
 */
@Data
@AllArgsConstructor
public class DragCaptchaImageDTO {

    /**
     * Cache key identifying this captcha challenge.
     */
    @ChineseDescription("本次验证码缓存的key")
    private String key;

    /**
     * Background image after the piece was cut out (base64-encoded).
     */
    @ChineseDescription("剪裁后的源图片(base64编码)")
    private String srcImage;

    /**
     * The small cut-out puzzle piece (base64-encoded).
     */
    @ChineseDescription("剪裁的小拼图图片(base64编码)")
    private String cutImage;

    /**
     * X coordinate of the cut position.
     */
    @ChineseDescription("x轴坐标")
    private Integer locationX;

    /**
     * Y coordinate of the cut position.
     */
    @ChineseDescription("y轴坐标")
    private Integer locationY;

    // Convenience constructor for when the cache key is assigned later.
    public DragCaptchaImageDTO(String srcImage, String cutImage, int locationX, int locationY) {
        this.srcImage = srcImage;
        this.cutImage = cutImage;
        this.locationX = locationX;
        this.locationY = locationY;
    }
}
<reponame>RainPoetry/mybatis-3
/**
* Copyright 2009-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cc.mybatis.common;
/**
* @author chenchong
* @create 2021/2/3 5:25 下午
* @description
*/
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by FernFlower decompiler)
//
import java.security.InvalidParameterException;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
import com.fasterxml.jackson.databind.util.BeanUtil;
import org.apache.commons.lang3.StringUtils;
/**
 * Static validators for Chinese vehicle/owner data: licence plates, resident
 * identity ("certificate") numbers, VINs, unified social credit codes, bank
 * cards and phone numbers. All string validators treat blank input as invalid.
 */
public class ValidateUtil {
    // Weights applied to the first 17 digits of an 18-digit resident ID.
    private static final int[] POWER_LIST = new int[]{7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2};
    // Resident-ID check characters stored as ASCII codes ('1','0','X','9',... indexed by weighted-sum mod 11).
    private static final int[] PARITY_BIT = new int[]{49, 48, 88, 57, 56, 55, 54, 53, 52, 51, 50};
    // Century prefix used to expand 2-digit years in legacy 15-digit IDs.
    private static final String YEAR_PREFIX = "19";
    // Valid 2-digit administrative region codes -> region name (populated in the static block).
    private static final Map<Integer, String> ZONE_NUM = new HashMap(100);
    // 17 chars from the VIN alphabet (digits and letters except I, O, Q).
    private static final Pattern VIN_PATTERN = Pattern.compile("[A-HJ-NPR-Z0-9]{17}");
    // Province character + letter + 4-5 alphanumerics + final char (incl. special suffixes).
    private static final Pattern PLATE_NO_PATTERN = Pattern.compile("^[京津沪渝冀豫云辽黑湘皖鲁新苏浙赣鄂桂甘晋蒙陕吉闽贵粤青藏川宁琼使领A-Z][A-Z][A-Z0-9]{4,5}[A-Z0-9挂学警港澳]$");
    // Personal name: 2-5 Chinese characters.
    private static final Pattern OWNER_NAME = Pattern.compile("^[\\u4e00-\\u9fa5]{2,5}$");
    // Company name: at least 3 Chinese characters.
    private static final Pattern COMPANY_NAME = Pattern.compile("^[\\u4e00-\\u9fa5]{3,}$");
    // Landline: area code (0 + 2-3 digits) followed by a 7-8 digit number.
    private static final Pattern COMPANY_PHONE = Pattern.compile("^0[0-9]{2,3}[1-9][0-9]{6,7}$");
    // Bank card: 13-19 digits.
    private static final Pattern BANK_CARD = Pattern.compile("^[0-9]{13,19}$");
    // Mobile: 11 digits starting with 1.
    private static final Pattern MOBILE = Pattern.compile("^1[0-9]{10}$");
    // VIN check-digit weight per 1-based position (populated in the static block).
    private static final Map<Integer, Integer> VIN_MAP_WEIGHTING = new HashMap(20);
    // VIN character -> numeric value for the check-digit computation.
    private static final Map<Character, Integer> VIN_MAP_VALUE = new HashMap(20);
    // Weights for the 17 payload characters of a unified social credit code.
    private static final int[] POWER = new int[]{1, 3, 9, 27, 19, 26, 16, 17, 20, 29, 25, 13, 8, 24, 10, 30, 28};
    // 31-character alphabet of the unified social credit code (no I, O, S, V, Z).
    private static final char[] SOCIAL_CREDIT_CODE = new char[]{'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'T', 'U', 'W', 'X', 'Y'};

    // Utility class: not instantiable.
    private ValidateUtil() {
    }

    /** Validates a vehicle licence plate against PLATE_NO_PATTERN. */
    public static boolean isValidPlateNo(String plateNo) {
        return StringUtils.isNotBlank(plateNo) && PLATE_NO_PATTERN.matcher(plateNo).matches();
    }

    /**
     * Validates a 15- or 18-digit resident identity number: check digit
     * (18-digit form only), region code, and a plausible birth date.
     */
    public static boolean isValidCertificateNo(String certNo) {
        boolean isInvalid = certNo == null || certNo.length() != 15 && certNo.length() != 18;
        if (isInvalid) {
            return false;
        } else {
            char[] cs = certNo.toUpperCase().toCharArray();
            int power = checksum(cs);
            // 18-digit form: last char must equal the parity character for the weighted sum.
            if (certNo.length() == 18 && cs[cs.length - 1] != PARITY_BIT[power % 11]) {
                return false;
            } else if (!ZONE_NUM.containsKey(Integer.valueOf(certNo.substring(0, 2)))) {
                return false;
            } else {
                // 15-digit IDs carry a 2-digit year, assumed to be 19xx.
                String year = certNo.length() == 15 ? "19" + certNo.substring(6, 8) : certNo.substring(6, 10);
                int iYear = Integer.parseInt(year);
                if (iYear >= 1900 && iYear <= Calendar.getInstance().get(1)) {
                    String month = certNo.length() == 15 ? certNo.substring(8, 10) : certNo.substring(10, 12);
                    int iMonth = Integer.parseInt(month);
                    if (iMonth >= 1 && iMonth <= 12) {
                        String day = certNo.length() == 15 ? certNo.substring(10, 12) : certNo.substring(12, 14);
                        int iDay = Integer.parseInt(day);
                        // Day is only range-checked 1..31, not against the month's length.
                        return iDay >= 1 && iDay <= 31;
                    } else {
                        return false;
                    }
                } else {
                    return false;
                }
            }
        }
    }

    // Weighted digit sum over all characters except the final check digit.
    // NOTE(review): the inner `if` duplicates the loop bound and can never be
    // false — it is dead code, kept as-is here.
    private static int checksum(char[] cs) {
        int power = 0;

        for(int i = 0; i < cs.length - 1; ++i) {
            if (i < cs.length - 1) {
                power += (cs[i] - 48) * POWER_LIST[i];
            }
        }

        return power;
    }

    /** Engine number: 2-49 chars and must not contain the '*' placeholder. */
    public static boolean isValidEngineNo(String engineNo) {
        return StringUtils.isNotBlank(engineNo) && engineNo.length() > 1 && engineNo.length() < 50 && !engineNo.contains("*");
    }

    /**
     * Validates a 17-character VIN: alphabet check plus the ISO 3779-style
     * check digit at position 9 (weighted sum mod 11; 10 maps to 'X').
     */
    public static boolean isValidVin(String vin) {
        if (StringUtils.isBlank(vin)) {
            return false;
        } else if (!VIN_PATTERN.matcher(vin).matches()) {
            return false;
        } else {
            char[] vinArr = vin.toCharArray();
            int amount = 0;

            int result;
            // `result` doubles as the loop index here, then holds the mod-11 remainder.
            for(result = 0; result < vinArr.length; ++result) {
                amount += (Integer)VIN_MAP_VALUE.get(vinArr[result]) * (Integer)VIN_MAP_WEIGHTING.get(result + 1);
            }

            result = amount % 11;
            if (result == 10) {
                return vinArr[8] == 'X';
            } else {
                return result == (Integer)VIN_MAP_VALUE.get(vinArr[8]);
            }
        }
    }

    /** Registration date must exist and lie in the past. */
    public static boolean isValidRegisterDate(Date registerDate) {
        return registerDate != null && registerDate.getTime() < System.currentTimeMillis();
    }

    /**
     * Precondition helper: throws InvalidParameterException when {@code object}
     * is null, using {@code description} (or a default) in the message.
     */
    public static void notNull(Object object, String description) {
        if (object == null) {
            description = StringUtils.isBlank(description) ? "参数" : description;
            throw new InvalidParameterException(description + "为空");
        }
    }

    /** Precondition helper: like {@link #notNull} but for blank char sequences. */
    public static void notBlank(CharSequence text, String description) {
        if (StringUtils.isBlank(text)) {
            description = StringUtils.isBlank(description) ? "参数" : description;
            throw new InvalidParameterException(description + "为空");
        }
    }

    /**
     * Validates an 18-character unified social credit code: every payload
     * character must be in the code alphabet, and the final character must
     * match the weighted mod-31 check character.
     */
    public static boolean isValidCompanyCreditCode(String creditCode) {
        if (StringUtils.length(creditCode) != 18) {
            return false;
        } else {
            // Reverse lookup: code character -> its index in the alphabet.
            Map<String, Integer> datas = new HashMap(20);

            for(int i = 0; i < SOCIAL_CREDIT_CODE.length; ++i) {
                datas.put(SOCIAL_CREDIT_CODE[i] + "", i);
            }

            char[] chars = creditCode.substring(0, 17).toCharArray();
            int sum = 0;

            int i;
            for(i = 0; i < chars.length; ++i) {
                Integer code = (Integer)datas.get(chars[i] + "");
                if (code == null) {
                    return false;
                }

                sum += POWER[i] * code;
            }

            i = sum % 31;
            i = i == 0 ? 31 : i;
            return creditCode.substring(17, 18).equals(SOCIAL_CREDIT_CODE[31 - i] + "");
        }
    }

    /** Vehicle owner name: 2-5 Chinese characters. */
    public static boolean isValidVehicleOwnerName(String name) {
        return StringUtils.isNotBlank(name) && OWNER_NAME.matcher(name).matches();
    }

    /** Company name: 3 or more Chinese characters. */
    public static boolean isValidCompanyName(String companyName) {
        return StringUtils.isNotBlank(companyName) && COMPANY_NAME.matcher(companyName).matches();
    }

    /** Bank card number: 13-19 digits. */
    public static boolean isValidBankCard(String bankCard) {
        return StringUtils.isNotBlank(bankCard) && BANK_CARD.matcher(bankCard).matches();
    }

    /** Company landline number: area code plus 7-8 digit local number. */
    public static boolean isValidCompanyPhone(String companyPhone) {
        return StringUtils.isNotBlank(companyPhone) && COMPANY_PHONE.matcher(companyPhone).matches();
    }

    /** Mobile number: 11 digits starting with 1. */
    public static boolean isValidMobile(String mobile) {
        return StringUtils.isNotBlank(mobile) && MOBILE.matcher(mobile).matches();
    }

    // Populate the region-code table and the VIN weighting/value tables.
    static {
        ZONE_NUM.put(11, "北京");
        ZONE_NUM.put(12, "天津");
        ZONE_NUM.put(13, "河北");
        ZONE_NUM.put(14, "山西");
        ZONE_NUM.put(15, "内蒙古");
        ZONE_NUM.put(21, "辽宁");
        ZONE_NUM.put(22, "吉林");
        ZONE_NUM.put(23, "黑龙江");
        ZONE_NUM.put(31, "上海");
        ZONE_NUM.put(32, "江苏");
        ZONE_NUM.put(33, "浙江");
        ZONE_NUM.put(34, "安徽");
        ZONE_NUM.put(35, "福建");
        ZONE_NUM.put(36, "江西");
        ZONE_NUM.put(37, "山东");
        ZONE_NUM.put(41, "河南");
        ZONE_NUM.put(42, "湖北");
        ZONE_NUM.put(43, "湖南");
        ZONE_NUM.put(44, "广东");
        ZONE_NUM.put(45, "广西");
        ZONE_NUM.put(46, "海南");
        ZONE_NUM.put(50, "重庆");
        ZONE_NUM.put(51, "四川");
        ZONE_NUM.put(52, "贵州");
        ZONE_NUM.put(53, "云南");
        ZONE_NUM.put(54, "西藏");
        ZONE_NUM.put(61, "陕西");
        ZONE_NUM.put(62, "甘肃");
        ZONE_NUM.put(63, "青海");
        ZONE_NUM.put(64, "宁夏");
        ZONE_NUM.put(65, "新疆");
        ZONE_NUM.put(71, "台湾");
        ZONE_NUM.put(81, "香港");
        ZONE_NUM.put(82, "澳门");
        ZONE_NUM.put(91, "外国");
        // VIN check-digit weights by 1-based character position.
        VIN_MAP_WEIGHTING.put(1, 8);
        VIN_MAP_WEIGHTING.put(2, 7);
        VIN_MAP_WEIGHTING.put(3, 6);
        VIN_MAP_WEIGHTING.put(4, 5);
        VIN_MAP_WEIGHTING.put(5, 4);
        VIN_MAP_WEIGHTING.put(6, 3);
        VIN_MAP_WEIGHTING.put(7, 2);
        VIN_MAP_WEIGHTING.put(8, 10);
        VIN_MAP_WEIGHTING.put(9, 0);
        VIN_MAP_WEIGHTING.put(10, 9);
        VIN_MAP_WEIGHTING.put(11, 8);
        VIN_MAP_WEIGHTING.put(12, 7);
        VIN_MAP_WEIGHTING.put(13, 6);
        VIN_MAP_WEIGHTING.put(14, 5);
        VIN_MAP_WEIGHTING.put(15, 4);
        VIN_MAP_WEIGHTING.put(16, 3);
        VIN_MAP_WEIGHTING.put(17, 2);
        // VIN character values (letters I, O, Q are never valid in a VIN).
        VIN_MAP_VALUE.put('0', 0);
        VIN_MAP_VALUE.put('1', 1);
        VIN_MAP_VALUE.put('2', 2);
        VIN_MAP_VALUE.put('3', 3);
        VIN_MAP_VALUE.put('4', 4);
        VIN_MAP_VALUE.put('5', 5);
        VIN_MAP_VALUE.put('6', 6);
        VIN_MAP_VALUE.put('7', 7);
        VIN_MAP_VALUE.put('8', 8);
        VIN_MAP_VALUE.put('9', 9);
        VIN_MAP_VALUE.put('A', 1);
        VIN_MAP_VALUE.put('B', 2);
        VIN_MAP_VALUE.put('C', 3);
        VIN_MAP_VALUE.put('D', 4);
        VIN_MAP_VALUE.put('E', 5);
        VIN_MAP_VALUE.put('F', 6);
        VIN_MAP_VALUE.put('G', 7);
        VIN_MAP_VALUE.put('H', 8);
        VIN_MAP_VALUE.put('J', 1);
        VIN_MAP_VALUE.put('K', 2);
        VIN_MAP_VALUE.put('M', 4);
        VIN_MAP_VALUE.put('L', 3);
        VIN_MAP_VALUE.put('N', 5);
        VIN_MAP_VALUE.put('P', 7);
        VIN_MAP_VALUE.put('R', 9);
        VIN_MAP_VALUE.put('S', 2);
        VIN_MAP_VALUE.put('T', 3);
        VIN_MAP_VALUE.put('U', 4);
        VIN_MAP_VALUE.put('V', 5);
        VIN_MAP_VALUE.put('W', 6);
        VIN_MAP_VALUE.put('X', 7);
        VIN_MAP_VALUE.put('Y', 8);
        VIN_MAP_VALUE.put('Z', 9);
    }
}
|
<reponame>gszhuang/mpush_study<filename>mpush-cache/src/main/java/com/mpush/cache/redis/RedisKey.java<gh_stars>0
/*
* (C) Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* <EMAIL> (夜色)
*/
package com.mpush.cache.redis;
/**
 * Builders for the Redis key namespaces used by mpush. Each method simply
 * prefixes the caller-supplied identifier with the namespace constant.
 */
public final class RedisKey {

    private static final String USER_PREFIX = "mp_uc_";
    private static final String SESSION_PREFIX = "mp_s_";
    private static final String FAST_CONNECTION_DEVICE_PREFIX = "mp_f_c_d_";
    private static final String USER_ONLINE_KEY = "mp_u_ol_";
    private static final String CONN_NUM_ = "mp_cn_";

    /** Utility class: not instantiable (was implicitly public before). */
    private RedisKey() {
    }

    // NOTE: `final` dropped from the static methods below — it is redundant
    // (static methods cannot be overridden, and the class is final anyway).

    /** Key for a user's cached data. */
    public static String getUserKey(String userId) {
        return USER_PREFIX + userId;
    }

    /** Key for a session entry. */
    public static String getSessionKey(String sessionId) {
        return SESSION_PREFIX + sessionId;
    }

    /** Key for a device id — used by the fast-reconnect test. */
    //for fast connection test
    public static String getDeviceIdKey(String deviceId) {
        return FAST_CONNECTION_DEVICE_PREFIX + deviceId;
    }

    /** Key for the online-user set of one gateway, by its external address. */
    public static String getUserOnlineKey(String extranetAddress) {
        return USER_ONLINE_KEY + extranetAddress;
    }

    // Kept for reference: CONN_NUM_ is only used by this commented-out builder.
    //    public static final String getConnNum(String extranetAddress) {
    //        return CONN_NUM_ + extranetAddress;
    //    }
}
|
<reponame>calamus-fr/calamus-parent
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package fr.calamus.common.model;
import java.util.List;
import java.util.Map;
/**
 * Entity map whose first column — the id — is a {@link String}.
 * All constructors delegate straight to {@code EntityMapWithId}; this class
 * only fixes the id type parameter and implements typed id access.
 *
 * @author haerwynn
 */
public class EntityMapWithStringId extends EntityMapWithId<String>{

    /** @param cols column definition string; the first column is the id */
    public EntityMapWithStringId(String cols) {
        super(cols);
    }

    /** @param labels display labels matching {@code cols} */
    public EntityMapWithStringId(String cols, String labels) {
        super(cols, labels);
    }

    /** @param m initial key/value content copied into the map */
    public EntityMapWithStringId(Map<? extends String, ? extends Object> m, String cols) {
        super(m, cols);
    }

    public EntityMapWithStringId(Map<? extends String, ? extends Object> m, String cols, String labels) {
        super(m, cols, labels);
    }

    /** List-based variants of the constructors above. */
    public EntityMapWithStringId(List<String> cols) {
        super(cols);
    }

    public EntityMapWithStringId(List<String> cols, List<String> labels) {
        super(cols, labels);
    }

    public EntityMapWithStringId(Map<? extends String, ? extends Object> m, List<String> cols) {
        super(m, cols);
    }

    public EntityMapWithStringId(Map<? extends String, ? extends Object> m, List<String> cols, List<String> labels) {
        super(m, cols, labels);
    }

    /** Returns the id value stored under the id column key. */
    @Override
    public String getId() {
        return (String) get(getIdKey());
    }

    /** Stores {@code id} under the id column key. */
    @Override
    public void setId(String id) {
        put(getIdKey(), id);
    }
}
|
# Fetch the latest BTC/USD quote from the m3o crypto API.
# Requires M3O_API_TOKEN to be set in the environment.
curl "https://api.m3o.com/v1/crypto/Quote" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $M3O_API_TOKEN" \
-d '{
  "symbol": "BTCUSD"
}'
|
#!/bin/bash
# Thin wrapper: invoke the shared _run.sh driver for the "kolla" target,
# forwarding the first CLI argument (note: $1 is intentionally unquoted here,
# so an absent argument expands to nothing rather than an empty string).
./_run.sh kolla $1
|
<filename>features/FEATURE_BLE/targets/TARGET_NORDIC/TARGET_MCU_NRF51822/sdk/source/ble/peer_manager/pm_mutex.c
/*
* Copyright (c) Nordic Semiconductor ASA
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* 3. Neither the name of Nordic Semiconductor ASA nor the names of other
* contributors to this software may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "pm_mutex.h"
#include <stdbool.h>
#include <string.h>
#include "nrf_error.h"
#include "app_util_platform.h"
/**@brief Locks the mutex defined by the mask.
 *
 * Fast path: test the bit outside the critical region; only if it looks free,
 * enter the critical region and re-check before setting it (the cheap check
 * may race, the re-check under CRITICAL_REGION is authoritative).
 *
 * @param p_mutex    pointer to the mutex storage.
 * @param mutex_mask the mask identifying the mutex position (single set bit).
 *
 * @retval true  if the mutex could be locked.
 * @retval false if the mutex was already locked.
 */
static bool lock_by_mask(uint8_t * p_mutex, uint8_t mutex_mask)
{
    bool success = false;

    if ( (*p_mutex & mutex_mask) == 0 )
    {
        CRITICAL_REGION_ENTER();
        if ( (*p_mutex & mutex_mask) == 0 )
        {
            *p_mutex |= mutex_mask;
            success = true;
        }
        CRITICAL_REGION_EXIT();
    }

    return ( success );
}
/**@brief Initializes (clears) a mutex storage area of mutex_size mutexes.
 *        Safe to call with p_mutex == NULL (no-op).
 */
void pm_mutex_init(uint8_t * p_mutex, uint16_t mutex_size)
{
    if (p_mutex != NULL)
    {
        // One bit per mutex; MUTEX_STORAGE_SIZE converts count to bytes.
        memset(&p_mutex[0], 0, MUTEX_STORAGE_SIZE(mutex_size));
    }
}
/**@brief Attempts to lock mutex number mutex_id.
 *
 * @retval true  the mutex was free and is now locked.
 * @retval false the mutex was already locked, or p_mutex is NULL.
 */
bool pm_mutex_lock(uint8_t * p_mutex, uint16_t mutex_id)
{
    if (p_mutex != NULL)
    {
        // Byte index = id / 8, bit mask = 1 << (id % 8).
        return ( lock_by_mask(&(p_mutex[mutex_id >> 3]), (1 << (mutex_id & 0x07))) );
    }
    else
    {
        return false;
    }
}
/**@brief Unlocks mutex number mutex_id. No-op if p_mutex is NULL or the
 *        mutex is not currently locked.
 */
void pm_mutex_unlock(uint8_t * p_mutex, uint16_t mutex_id)
{
    uint8_t mutex_base = mutex_id >> 3;          // byte holding this mutex bit
    uint8_t mutex_mask = (1 << (mutex_id & 0x07)); // bit within that byte

    if ((p_mutex != NULL)
    && (p_mutex[mutex_base] & mutex_mask))
    {
        // Clearing the bit is done under the critical region to avoid losing
        // concurrent updates to neighbouring bits in the same byte.
        CRITICAL_REGION_ENTER();
        p_mutex[mutex_base] &= ~mutex_mask;
        CRITICAL_REGION_EXIT();
    }
}
/**@brief Scans the mutex group and locks the first free mutex.
 *
 * @return the index of the mutex that was locked, or mutex_size if none was
 *         free (or p_mutex is NULL) — callers must treat mutex_size as failure.
 */
uint16_t pm_mutex_lock_first_available(uint8_t * p_mutex, uint16_t mutex_size)
{
    if (p_mutex != NULL)
    {
        for ( uint16_t i = 0; i < mutex_size; i++ )
        {
            if ( lock_by_mask(&(p_mutex[i >> 3]), 1 << (i & 0x07)) )
            {
                return ( i );
            }
        }
    }

    return ( mutex_size );
}
/**@brief Reads the lock state of mutex number mutex_id, without locking.
 *
 * @retval true  the mutex is locked — also returned when p_mutex is NULL
 *               (a missing mutex is reported as "locked"/unavailable).
 * @retval false the mutex is free.
 */
bool pm_mutex_lock_status_get(uint8_t * p_mutex, uint16_t mutex_id)
{
    if (p_mutex != NULL)
    {
        // Nonzero masked value converts to true.
        return ( (p_mutex[mutex_id >> 3] & (1 << (mutex_id & 0x07))) );
    }
    else
    {
        return true;
    }
}
|
<gh_stars>1-10
package com.krailis.scala_99_problems.Logic_and_Codes
import org.scalatest.FunSuite
// Tests for S-99 problem P50: P50.grayCode(n) must return the n-bit Gray code
// sequence, in order, as binary strings.
class P50Test extends FunSuite {
  // Base case: a single bit.
  test("n = 1") {
    assert(P50.grayCode(1) === List("0", "1"))
  }

  test("n = 2") {
    assert(P50.grayCode(2) === List("00", "01", "11", "10"))
  }

  // Consecutive codes differ in exactly one bit.
  test("n = 3") {
    assert(P50.grayCode(3) === List("000", "001", "011", "010", "110", "111", "101", "100"))
  }
}
|
#!/bin/bash
# Revoke every share/permission on every Google Drive file you own,
# using the `gdrive` CLI. Unquoted expansions below are intentional:
# the id lists are whitespace-separated and rely on word splitting.

#List all files owned by you (awk FNR > 1 skips gdrive's header row; $1 is the file id)
ALLFID=$(gdrive list --query "'me' in owners"| awk 'FNR > 1 {print $1}')
#Loop through all your files
for FID in $ALLFID
do
	#List all share ID (USER) in your file (FNR > 2 skips the two header lines)
	ALLPID=$(gdrive share list $FID | awk 'FNR > 2 {print $1}')
	for PID in $ALLPID
	do
		echo "Processing FID: $FID with PID: $PID"
		#Remove share ID (USER) from the file
		gdrive share revoke $FID $PID
	done
done
|
// Starting from 8, add 6*i once for every pair (i, j) with i < j <= n.
// The inner loop therefore runs (n - i) times for each i.
let n = 1000;
let a = 8;
for (let i = 1; i <= n; i += 1) {
  for (let remaining = n - i; remaining > 0; remaining -= 1) {
    a += 6 * i;
  }
}
console.log(a);
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Resolve this script's own directory; quoting protects paths containing
# spaces (the original `cd $(dirname "$0")` / `cd $HOMEDIR` broke on them).
SCRIPTDIR=$(cd "$(dirname "$0")" && pwd)
HOMEDIR="$SCRIPTDIR/../../../"

# clone OpenWhisk utilities repo. in order to run scanCode
cd "$HOMEDIR"
git clone https://github.com/apache/incubator-openwhisk-utilities.git
|
#!/usr/bin/env bash
# Rebuild every spec under ~/rpmbuild, log results, then sign the packages.

# Build one spec file and append the outcome to the status log.
#   $1 - path to the .spec file
#   $2 - package name used in the log line
#        (was read from the global $basename; now passed explicitly)
__build() {
    local spec="$1"
    local name="$2"
    local exitCode=0
    local docBuild="$HOME/Documents/rpmbuild"
    rpmbuild -ba "$spec" >>"$docBuild/status.txt" 2>>"$docBuild/errors.txt" && exitCode=0 || exitCode=1
    echo "$name finished with exit code $exitCode" >>"$docBuild/status.txt"
    return $exitCode
}

# Clean previous build
rm -Rf /var/tmp/BUILD*
rm -Rf "$HOME/Documents"/{rpmbuild,sourceforge}
mkdir -p "$HOME/Documents"/{rpmbuild,sourceforge}

# Clear previous status
echo >"$HOME/Documents/rpmbuild/status.txt"
echo >"$HOME/Documents/rpmbuild/errors.txt"

# Finally run rpmbuild
# Create spec list (one spec path per line; paths must not contain whitespace)
ls "$HOME"/rpmbuild/*/*.spec >"$HOME/Documents/rpmbuild/build.txt"
packages="$(<"$HOME/Documents/rpmbuild/build.txt")"
for i in ${packages}; do
    basename="$(basename "$(dirname "$i")")"
    # BUGFIX: was `printf "Building RPM for %s" "$basename: "` — the colon
    # belonged in the format string, not the argument.
    printf "Building RPM for %s: " "$basename"
    if __build "$i" "$basename"; then
        printf "rpmbuild of %s has succeeded\n" "$basename"
    else
        printf "rpmbuild of %s has failed\n" "$basename"
    fi
done

# Fix permissions
find "$HOME"/.gnupg "$HOME"/.ssh -type f -exec chmod 600 {} \;
find "$HOME"/.gnupg "$HOME"/.ssh -type d -exec chmod 700 {} \;

# Sign rpm packages.
# BUGFIX: `rpmsign --addsign "${addsign}"` passed ALL package paths as one
# quoted argument (newlines included); xargs hands each path over separately.
find "$HOME/Documents/rpmbuild" -iname "*.rpm" >"$HOME/Documents/rpmbuild/pkgs.txt"
xargs rpmsign --addsign <"$HOME/Documents/rpmbuild/pkgs.txt"
|
import { Component, Output, EventEmitter,OnInit } from '@angular/core';
import {NgForm} from '@angular/forms';
import { TitleCasePipe } from '@angular/common';
import {ServicingService } from '../../../services/addServicing.service';
import {NgbModal,NgbActiveModal, ModalDismissReasons} from '@ng-bootstrap/ng-bootstrap';
@Component({
  selector: 'app-modal2',
  templateUrl: './AddEmployee.component.html',
  styleUrls: ['./AddEmployee.component.scss']
})
// Modal component that creates a service-centre employee (CRE, Service
// Advisor, Coordinator or Sales Executive, selected by `modalContent`).
export class AddEmployee implements OnInit {
  // Max mobile-number length; derived from loginCountryFlag in ngOnInit.
  maxLen: any;

  constructor(private titlecasePipe:TitleCasePipe,private activeModal: NgbActiveModal,private _data:ServicingService) {
    // this.modalContent.permission="Coordinator";
  }

  message: string = "Hola Mundo!";
  @Output() messageEvent = new EventEmitter<string>();
  modalHeader: string;
  // NOTE(review): looks like a numeric role code 1..4 set by the opener — confirm.
  modalContent:any;
  modalId:number;
  desingnation:string;
  // Service-centre id, read from session storage.
  public svcid:string;
  public salutation:any;
  showResult:boolean = false;
  successMsg:boolean;

  ngOnInit() {
    // console.log(this. modalContent);
    // Prefer the explicitly selected service centre; fall back to the global one.
    if(sessionStorage.getItem('selectedsvc')){
      // console.log(sessionStorage.getItem('selectedsvc'));
      this.svcid = sessionStorage.getItem('selectedsvc');
      // console.log(this.svcid);
    }
    else{
      this.svcid = JSON.parse(sessionStorage.getItem('globalsvcid'));
    }
    // Country-specific mobile length: flag '2' -> 8 digits, flag '1' -> 10.
    if(sessionStorage.getItem('loginCountryFlag') === '2') {
      this.maxLen = '8';
      console.log("this.maxLen ", this.maxLen);
    }
    // NOTE(review): the `else` below pairs with this '1' check, so when the
    // flag is '2' this branch runs too and falls into the empty else — the
    // chain probably wanted `else if`; confirm intent.
    if(sessionStorage.getItem('loginCountryFlag') === '1') {
      this.maxLen = '10';
      console.log("this.maxLen ", this.maxLen);
    }
    else{
      // this.svcid = JSON.parse(sessionStorage.getItem('globalsvcid'));
      // console.log(this.svcid);
    }
    this.salutation = [
      { id: 1, type: 'Mr' },
      { id: 2, type: 'Mrs' },
      { id: 3, type: 'Ms' },
    ];
    // this.modalContent.salutation = this.salutation[0].type;
    // Map the numeric role code to its display designation.
    if(this. modalContent == 1){
      this.desingnation="Cre";
    }
    else if (this. modalContent == 2){
      this.desingnation="Service Advisor";
    }
    else if (this. modalContent == 3){
      this.desingnation="Coordinator";
    }
    else {
      this.desingnation="Sales Executive";
    }
    this.sendMessage();
  }

  closeModal() {
    this.activeModal.close();
  }

  // Emit `message` to the parent component.
  sendMessage() {
    this.messageEvent.emit(this.message)
  }

  // Build the createuser request for the selected role and submit it.
  // Only `usertype` differs between the four branches
  // (4 -> 7, 3 -> 13, 2 -> 3, default -> 2).
  onSubmit(f: NgForm) {
    // console.log(f.value.id);
    if(this.modalContent == 4 ){
      // console.log(this.modalContent);
      var reqpara3 = {
        requesttype:'createuser',
        servicecentreid:JSON.parse(this.svcid),
        usertype:7,
        username:f.value.salutation1+'.'+f.value.name,
        mobilenumber:f.value.mobile1,
        email:f.value.email
      }
    }
    else if (this.modalContent == 3 ){
      // console.log(this.modalContent);
      var reqpara3 = {
        requesttype:'createuser',
        servicecentreid:JSON.parse(this.svcid),
        usertype:13,
        username:f.value.salutation1+'.'+f.value.name,
        mobilenumber:f.value.mobile1,
        email:f.value.email
      }
    }
    else if (this.modalContent == 2 ){
      // console.log(this.modalContent);
      var reqpara3 = {
        requesttype:'createuser',
        servicecentreid:JSON.parse(this.svcid),
        usertype:3,
        username:f.value.salutation1+'.'+f.value.name,
        mobilenumber:f.value.mobile1,
        email:f.value.email
      }
    }
    else {
      // console.log(this.modalContent);
      var reqpara3 = {
        requesttype:'createuser',
        servicecentreid:JSON.parse(this.svcid),
        usertype:2,
        username:f.value.salutation1+'.'+f.value.name,
        mobilenumber:f.value.mobile1,
        email:f.value.email
      }
    }
    const as3 = JSON.stringify(reqpara3);
    this._data.webServiceCall(as3).subscribe(res =>{
      // console.log(res);
      // does_exist === "0" means the user was new and creation succeeded.
      if(res[0].userexists[0].does_exist === "0"){
        this.successMsg = true;
        this.showResult = true;
      }
      else{
        this.successMsg = false;
        this.showResult = true;
      }
      f.reset();
      // this.activeModal.close();
    });
  }
}
|
<gh_stars>0
package softuni.exam.config;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.modelmapper.ModelMapper;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import softuni.exam.adapters.LocalDateAdapterJSON;
import softuni.exam.adapters.LocalDateTimeAdapterJSON;
import softuni.exam.utils.ValidationUtil;
import softuni.exam.utils.XmlParser;
import softuni.exam.utils.impl.ValidationUtilImpl;
import softuni.exam.utils.impl.XmlParserImpl;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Random;
@Configuration
// Central Spring bean definitions: console reader, random source, model
// mapper, validation helper, Gson (with date adapters) and the XML parser.
public class ApplicationBeanConfiguration {

    /** Reader over stdin for interactive console input. */
    @Bean
    public BufferedReader bufferedReader() {
        return new BufferedReader(new InputStreamReader(System.in));
    }

    @Bean
    public Random random() {
        return new Random();
    }

    @Bean
    public ModelMapper modelMapper() {
        return new ModelMapper();
    }

    @Bean
    public ValidationUtil validationUtil() {
        return new ValidationUtilImpl();
    }

    /**
     * Gson configured to serialize only @Expose-annotated fields, pretty-print,
     * and handle LocalDate/LocalDateTime via null-safe custom adapters.
     */
    @Bean
    public Gson gson() {
        return new GsonBuilder()
                .excludeFieldsWithoutExposeAnnotation()
                .setPrettyPrinting()
                .registerTypeAdapter(LocalDate.class, new LocalDateAdapterJSON().nullSafe())
                .registerTypeAdapter(LocalDateTime.class, new LocalDateTimeAdapterJSON().nullSafe())
                .create();
    }

    @Bean
    public XmlParser xmlParser() {
        return new XmlParserImpl();
    }
}
|
<gh_stars>0
// Toggle the "add user / add course" modal (depending on the current page)
// when the plus button is clicked; the icon rotates 45° while open.
const modal = document.querySelector('#modal');
const openModal = document.querySelector('#modal-open');
// NOTE(review): closeModal is queried but never wired to a listener — should
// it close the modal? Kept as-is to avoid changing behaviour.
const closeModal = document.querySelector('#modal-close');
const icon = document.querySelector('.fa-plus');

openModal.addEventListener('click', () => {
  // Strict equality (the original used loose ==) and consistent semicolons.
  if (modal.style.display === "block") {
    modal.style.display = "none";
    icon.style.transform = 'none';
  } else {
    modal.style.display = "block";
    icon.style.transform = 'rotate(45deg)';
  }
});
|
package cn.crabapples.spring.form;
import cn.crabapples.spring.common.groups.IsNotNull;
import cn.crabapples.spring.common.groups.IsNull;
import com.alibaba.fastjson.JSONObject;
import lombok.Setter;
import javax.validation.constraints.*;
/**
 * DTO exercising Bean Validation on request parameters.
 *
 * @author Mr.He
 * @date 2019/9/21 18:46
 * e-mail <EMAIL>
 * qq 294046317
 * pc-name 29404
 */
@Setter
//@Api
public class DemoPostForm2 {
    // id must be absent for "create" requests (IsNull group) and present
    // for "update" requests (IsNotNull group).
    @Null(message = "ID必须为空",groups = {IsNull.class})
    @NotNull(message = "ID不能为空",groups = {IsNotNull.class})
    private String id;
    // Name length restricted to 2..5 characters.
    @Size(min = 2,max = 5,message = "姓名有误")
    private String name;
    // Fix: @NotEmpty was declared on this primitive int. @NotEmpty only
    // applies to CharSequence/Collection/Map/array targets, so the validator
    // threw UnexpectedTypeException at runtime; a primitive int can never be
    // "empty", and the 0..1 range is already enforced by @Min/@Max below.
    @Min(value = 0, message = "类型错误")
    @Max(value = 1, message = "类型错误")
    private int type;
    public String getId() {
        return id;
    }
    public String getName() {
        return name;
    }
    public int getType() {
        return type;
    }
    /** JSON representation of this form (fastjson). */
    @Override
    public String toString() {
        return JSONObject.toJSONString(this);
    }
}
|
<filename>order/src/main/java/epizza/order/status/OrderStatus.java<gh_stars>0
package epizza.order.status;
/**
 * Lifecycle states of a pizza order. Constant order matters to any code
 * relying on {@code ordinal()}/persistence by index, so do not reorder.
 */
public enum OrderStatus {
    /** Order received, not yet in the oven. */
    NEW,
    /** Order currently being baked. */
    BAKING,
    /** Order out for delivery. */
    DELIVERING
}
|
package com.opalfire.foodorder.adapter;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.support.v7.widget.RecyclerView.Adapter;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.RequestOptions;
import com.opalfire.foodorder.R;
import com.opalfire.foodorder.activities.HotelViewActivity;
import com.opalfire.foodorder.helper.GlobalData;
import com.opalfire.foodorder.models.Shop;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
public class RestaurantsAdapter extends Adapter<MyViewHolder> {
private Activity activity;
private Context context;
private List<Shop> list;
public RestaurantsAdapter(List<Shop> list, Context context, Activity activity) {
this.list = list;
this.context = context;
this.activity = activity;
}
public MyViewHolder onCreateViewHolder(ViewGroup viewGroup, int i) {
return new MyViewHolder(LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.restaurant_list_item, viewGroup, false));
}
public void add(Shop shop, int i) {
this.list.add(i, shop);
notifyItemInserted(i);
}
public void remove(Shop shop) {
shop = this.list.indexOf(shop);
this.list.remove(shop);
notifyItemRemoved(shop);
}
public void onBindViewHolder(MyViewHolder myViewHolder, int i) {
Shop shop = (Shop) this.list.get(i);
Glide.with(this.context).load(shop.getAvatar()).apply(new RequestOptions().diskCacheStrategy(DiskCacheStrategy.ALL).placeholder((int) R.drawable.ic_restaurant_place_holder).error((int) R.drawable.ic_restaurant_place_holder)).into(myViewHolder.dishImg);
myViewHolder.restaurantName.setText(shop.getName());
myViewHolder.category.setText(shop.getDescription());
int i2 = 0;
if (shop.getOfferPercent() == null) {
myViewHolder.offer.setVisibility(View.GONE);
} else {
myViewHolder.offer.setVisibility(View.VISIBLE);
TextView access$400 = myViewHolder.offer;
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("Flat ");
stringBuilder.append(shop.getOfferPercent().toString());
stringBuilder.append("% offer on all Orders");
access$400.setText(stringBuilder.toString());
}
if (shop.getShopstatus() != null) {
RelativeLayout relativeLayout = myViewHolder.closedLay;
if (!shop.getShopstatus().equalsIgnoreCase("CLOSED")) {
i2 = 8;
}
relativeLayout.setVisibility(i2);
}
if (shop.getRating() != null) {
Double valueOf = Double.valueOf(new BigDecimal(shop.getRating().doubleValue()).setScale(1, RoundingMode.HALF_UP).doubleValue());
TextView access$500 = myViewHolder.rating;
StringBuilder stringBuilder2 = new StringBuilder();
stringBuilder2.append("");
stringBuilder2.append(valueOf);
access$500.setText(stringBuilder2.toString());
} else {
myViewHolder.rating.setText("No Rating");
}
myViewHolder = myViewHolder.distanceTime;
StringBuilder stringBuilder3 = new StringBuilder();
stringBuilder3.append(shop.getEstimatedDeliveryTime().toString());
stringBuilder3.append(" Mins");
myViewHolder.setText(stringBuilder3.toString());
}
public int getItemCount() {
return this.list.size();
}
public class MyViewHolder extends ViewHolder implements OnClickListener {
RelativeLayout closedLay;
private TextView category;
private ImageView dishImg;
private TextView distanceTime;
private LinearLayout itemView;
private TextView offer;
private TextView price;
private TextView rating;
private TextView restaurantInfo;
private TextView restaurantName;
private MyViewHolder(View view) {
super(view);
this.itemView = (LinearLayout) view.findViewById(R.id.item_view);
this.closedLay = (RelativeLayout) view.findViewById(R.id.closed_lay);
this.dishImg = (ImageView) view.findViewById(R.id.dish_img);
this.restaurantName = (TextView) view.findViewById(R.id.restaurant_name);
this.category = (TextView) view.findViewById(R.id.category);
this.offer = (TextView) view.findViewById(R.id.offer);
this.rating = (TextView) view.findViewById(R.id.rating);
this.restaurantInfo = (TextView) view.findViewById(R.id.restaurant_info);
this.distanceTime = (TextView) view.findViewById(R.id.distance_time);
this.price = (TextView) view.findViewById(R.id.price);
this.itemView.setOnClickListener(this);
}
public void onClick(View view) {
if (view.getId() == this.itemView.getId()) {
GlobalData.selectedShop = (Shop) RestaurantsAdapter.this.list.get(getAdapterPosition());
if (GlobalData.selectedShop.getShopstatus().equalsIgnoreCase("CLOSED") == null) {
RestaurantsAdapter.this.context.startActivity(new Intent(RestaurantsAdapter.this.context, HotelViewActivity.class).putExtra("position", getAdapterPosition()).addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP));
RestaurantsAdapter.this.activity.overridePendingTransition(R.anim.slide_in_right, R.anim.anim_nothing);
((Shop) RestaurantsAdapter.this.list.get(getAdapterPosition())).getCuisines();
return;
}
Toast.makeText(RestaurantsAdapter.this.context, "The Shop is closed", 0).show();
}
}
}
}
|
# -*- coding: utf-8 -*-
import os
from calculator.tests.base import (
LgfsCalculatorTestCase, EvidenceProvisionFeeTestMixin, LgfsWarrantFeeTestMixin
)
class Lgfs2016CalculatorTestCase(
    LgfsCalculatorTestCase, EvidenceProvisionFeeTestMixin, LgfsWarrantFeeTestMixin
):
    """Data-driven tests for the 2016 LGFS fee scheme (scheme_id 2)."""
    # Fee scheme under test.
    scheme_id = 2
    # CSV dataset of expected calculations, relative to this module.
    csv_path = os.path.join(
        os.path.dirname(__file__),
        'data/test_dataset_lgfs_2016.csv'
    )
# Generates one test method per dataset row.
# NOTE(review): assumes create_tests() consumes csv_path — confirm in the
# base class in calculator.tests.base.
Lgfs2016CalculatorTestCase.create_tests()
|
require File.expand_path('../../../spec_helper', __FILE__)
require 'stringio'
require File.expand_path('../shared/each_char', __FILE__)
# StringIO#chars behaves like #each_char; reuse the shared each_char examples
# for both the readable and the not-readable cases.
describe "StringIO#chars" do
  it_behaves_like :stringio_each_char, :chars
end
describe "StringIO#chars when self is not readable" do
  it_behaves_like :stringio_each_char_not_readable, :chars
end
|
#!/bin/bash
# SLURM batch script: runs one PE-my.py experiment (cosper activation) on a
# single core with 6 GB RAM for just under 24 hours.
#SBATCH -J Act_cosper_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation, seed, optimizer, depth, dropout, lr, init, clip
# NOTE(review): argument meanings inferred from values — confirm against PE-my.py.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py cosper 329 Adamax 3 0.29612397620763653 0.0021147916889991463 glorot_normal 0.05
|
#!/bin/bash
# Report the iCloud account address(es) configured for each local user,
# wrapped in a <result> tag (Jamf extension-attribute style output).
for user in $(ls /Users/ | grep -v Shared); do
    if [ -d "/Users/$user/Library/Application Support/iCloud/Accounts" ]; then
        # Account files are named after the signed-in address (contain '@').
        Accts=$(find "/Users/$user/Library/Application Support/iCloud/Accounts" | grep '@' | awk -F'/' '{print $NF}')
        # Fix: quote the appended element — the unquoted expansion was subject
        # to word splitting and glob expansion, scattering one user's data
        # across several array elements.
        iCloudAccts+=("${user}: ${Accts}")
    fi
done
echo "<result>$(printf '%s\n' "${iCloudAccts[@]}")</result>"
|
set -e
set -x
export LC_ALL=C
# Derive this node's management/data IPs and gateway from eth0.
interface=eth0
myip=`ifconfig $interface | grep "inet addr" | awk '{split($2, a, ":"); print a[2];}'`
interface=eth0
dataip=`ifconfig $interface | grep "inet addr" | awk '{split($2, a, ":"); print a[2];}'`
data_ip_pref=`ifconfig $interface | grep "inet addr" | awk '{split($2, a, ":"); split(a[2], b, "."); printf("%s.%s.%s.", b[1], b[2], b[3]);}'`
# Gateway assumed to be .1 on the data subnet — confirm for this deployment.
GW=${data_ip_pref}1
CONTRAIL_PKG_LOC="$PWD/contrail-install-packages_3.2.7.0-63_ubuntu-14-04mitaka_all.deb"
dpkg -i $CONTRAIL_PKG_LOC
cd /opt/contrail/contrail_packages
./setup.sh
cd -
# Render the fabric testbed file from the template with this host's addresses.
cp oc-testbed.py testbed.py
sed -i "s/DATA_PREF/$data_ip_pref/g" testbed.py
sed -i "s/MY_IP/$myip/g" testbed.py
sed -i "s/DATA_IP/$dataip/g" testbed.py
sed -i "s/DATA_GW/$GW/g" testbed.py
cp testbed.py /opt/contrail/utils/fabfile/testbeds/
# Install and provision Contrail via fabric.
cd /opt/contrail/utils/ && fab install_pkg_all:$CONTRAIL_PKG_LOC && cd -
cd /opt/contrail/utils/ && fab install_contrail && cd -
# this one reboots the VM; so no use writing any commands after this one
cd /opt/contrail/utils/ && fab setup_all && cd -
|
def reverse_array(arr):
    """Reverse ``arr`` in place and return the same list object."""
    # Mirror-swap symmetric positions from both ends toward the middle.
    for left in range(len(arr) // 2):
        right = len(arr) - 1 - left
        arr[left], arr[right] = arr[right], arr[left]
    return arr
# Demo: reverse a sample array and report the result.
sample = [1, 8, 9, 11, 2]
reversed_sample = reverse_array(sample)
print("The reversed array is {}".format(reversed_sample))
import { ApiProperty } from "@nestjs/swagger";
import { IsAlphanumeric, IsMobilePhone, IsNumber, IsOptional } from "class-validator";
/** Partial-update payload for an employee record; every field is optional. */
export class UpdateEmployeeDTO {
    // Bank account number; validated as numeric when present.
    @ApiProperty({ required: false })
    @IsOptional()
    @IsNumber()
    accountNumber: number;
    // Surname; letters and digits only when present.
    @ApiProperty({ required: false })
    @IsOptional()
    @IsAlphanumeric()
    lastName: string;
    // Nigerian mobile number; strictMode requires the +234 country code.
    @ApiProperty({ required: false })
    @IsOptional()
    @IsMobilePhone('en-NG', { strictMode: true },
        {
            message: 'Not a valid phone number. \
Ensure the country code is supplied or it\'s in format +2348*********'
        }
    )
    phone: string;
}
|
<reponame>zanganken/pokemon-showdown-bot
const colors = require('colors')
const {differenceInMilliseconds} = require('date-fns')
const {dbLvl, server, port, MESSAGE_THROTTLE} = require('./conf')
var Utils = {
  // Leveled console logging: a message is printed only when the configured
  // dbLvl admits its level (lower dbLvl = more verbose).
  console: {
    // received messages (raw) — only at dbLvl 0
    recv: text => {
      if (dbLvl === 0) console.log('recv'.grey + ' ' + text)
    },
    // received commands — only at exactly dbLvl 1
    cmdr: text => {
      if (dbLvl === 1) console.log('cmdr'.grey + ' ' + text)
    },
    // data sent to the server
    dsend: text => {
      if (dbLvl <= 1) console.log('send'.grey + ' ' + text)
    },
    debug: text => {
      if(dbLvl <= 2) console.log('debug'.blue + ' ' + text)
    },
    info: text => {
      if(dbLvl <= 3) console.log('info'.cyan + ' ' + text)
    },
    ok: text => {
      if (dbLvl <= 4) console.log('ok'.green + ' ' + text)
    },
    // errors are always printed
    error: text => {
      console.log('error'.red + ' ' + text)
    }
  },
  Connection: {
    con: null,
    // Outgoing messages are queued and drained one per MESSAGE_THROTTLE ms
    // to respect the server's rate limit.
    queue: [],
    queueTimeout: new Date(),
    send: function(data) {
      if(data) {
        this.queue.push(data)
        let self = this
        // Delay between messages: schedule this message after the whole
        // queue ahead of it, minus time already elapsed since the last send.
        setTimeout(() => {
          let data = self.queue.shift()
          if(self.con && self.con.connected) {
            if (!Array.isArray(data)) data = [data.toString()]
            data = JSON.stringify(data)
            Utils.console.dsend(data)
            self.con.send(data)
          }
          self.queueTimeout = new Date()
        }, this.queue.length * MESSAGE_THROTTLE - differenceInMilliseconds(new Date(), this.queueTimeout))
      }
    },
    // Randomly generated websocket connection string (Showdown handshake URL).
    get str() {
      let chars = 'abcdefghijklmnopqrstuvwxyz0123456789_'
      let str = ''
      for(let i = 0, l = chars.length; i < 8; i++) {
        str += chars.charAt(~~(Math.random() * l))
      }
      return `ws://${server}:${port}/showdown/${~~(Math.random() * 1000)}/${str}/websocket`
    }
  },
  // Normalize text to a Showdown id: lowercase alphanumerics only.
  toId: text => {
    return text?.toLowerCase().replace(/[^a-z0-9]/g, '')
  }
}
module.exports = Utils
|
<reponame>yarntime/analysis-server<gh_stars>0
package controller
import (
"github.com/golang/glog"
"k8s.io/apimachinery/pkg/api/resource"
meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
k8s "k8s.io/client-go/kubernetes"
"k8s.io/client-go/pkg/api/v1"
batchv1 "k8s.io/client-go/pkg/apis/batch/v1"
batch "k8s.io/client-go/pkg/apis/batch/v2alpha1"
"k8s.io/client-go/pkg/util"
)
const (
	// ContainerNamePrefix names the container used for predict jobs.
	ContainerNamePrefix = "predict-job"
)
// JobController creates training CronJobs in a Kubernetes cluster.
type JobController struct {
	K8sClient *k8s.Clientset
	JobNamespace string
	BaseImage string
}
// NewJobController builds a JobController from the shared Config.
func NewJobController(c *Config) *JobController {
	return &JobController{
		K8sClient:    c.K8sClient,
		JobNamespace: c.JobNamespace,
		BaseImage:    c.BaseImage,
	}
}
// componentCronJob wraps a single container in a CronJob labelled as a
// training-job component: one pod per run, no concurrency, restart on
// failure, and only failed runs kept in history.
// NOTE(review): Schedule is left empty here — an empty cron spec is not a
// valid schedule, so presumably a caller fills it in before creation;
// confirm.
func componentCronJob(container v1.Container, namespace string) *batch.CronJob {
	return &batch.CronJob{
		ObjectMeta: meta_v1.ObjectMeta{
			Name:      container.Name,
			Namespace: namespace,
			Labels:    map[string]string{"component": container.Name, "tier": "training-job"},
		},
		Spec: batch.CronJobSpec{
			Schedule: "",
			ConcurrencyPolicy: batch.ForbidConcurrent,
			// Keep no successful-run history, but the last 10 failures.
			SuccessfulJobsHistoryLimit: util.Int32Ptr(0),
			FailedJobsHistoryLimit: util.Int32Ptr(10),
			JobTemplate: batch.JobTemplateSpec{
				Spec: batchv1.JobSpec{
					Parallelism: util.Int32Ptr(1),
					Completions: util.Int32Ptr(1),
					Template: v1.PodTemplateSpec{
						Spec: v1.PodSpec{
							Containers:    []v1.Container{container},
							RestartPolicy: v1.RestartPolicyOnFailure,
						},
					},
				},
			},
		},
	}
}
// componentResources builds resource requirements that request the given
// CPU quantity (e.g. "500m"); no limits and no memory request are set.
func componentResources(cpu string) v1.ResourceRequirements {
	requests := v1.ResourceList{
		v1.ResourceName(v1.ResourceCPU): resource.MustParse(cpu),
	}
	return v1.ResourceRequirements{Requests: requests}
}
// StartTrainingJob creates the predict-job CronJob in the configured
// namespace using the controller's base image.
// NOTE(review): the node argument is currently unused — confirm whether the
// job was meant to be pinned to that node (e.g. via a node selector).
func (jc *JobController) StartTrainingJob(node string) {
	job := componentCronJob(v1.Container{
		Name:      ContainerNamePrefix,
		Image:     jc.BaseImage,
		Command:   []string{"/training"},
		Args:      []string{},
		Resources: componentResources("500m")}, jc.JobNamespace)
	_, err := jc.K8sClient.BatchV2alpha1().CronJobs(jc.JobNamespace).Create(job)
	if err != nil {
		// Fix: include the underlying error — the original log dropped err,
		// making failures impossible to diagnose.
		glog.Errorf("Failed to create training job %s/%s: %v", job.Namespace, job.Name, err)
	}
}
|
#!/bin/sh
# Present hidden (minimized) X windows in dmenu and un-hide the chosen one.
# List all client window ids from the root window's _NET_CLIENT_LIST.
lsw() {
	xprop -notype -f "_NET_CLIENT_LIST" 0x ' $0+\n' -root "_NET_CLIENT_LIST" |\
		cut -d' ' -f2- |\
		sed 's/, */\
/g'
}
# Succeed when the window's _NET_WM_STATE includes _NET_WM_STATE_HIDDEN.
ishidden() {
	xprop -notype -f "_NET_WM_STATE" 32a ' $0+\n' -id "$1" "_NET_WM_STATE" |\
		cut -d' ' -f2- |\
		sed 's/, */\
/g' | grep -q "_NET_WM_STATE_HIDDEN"
}
# Print a window's title, preferring _NET_WM_NAME and falling back to WM_NAME.
printname() {
	name="$(xprop -notype -f "_NET_WM_NAME" 8s ' $0+\n' -id "$1" "_NET_WM_NAME" 2>/dev/null)"
	[ "$(echo $name)" = "_NET_WM_NAME: not found." ] && name="$(xprop -notype -f "WM_NAME" 8s ' $0+\n' -id "$1" "WM_NAME" 2>/dev/null)"
	echo $name |\
		cut -d' ' -f2- |\
		sed 's/, */\
/g'
}
# Offer "id: title" lines for every hidden window, then toggle the hidden
# state of the selected id with wmctrl.
for win in $(lsw)
do
	ishidden $win && printf "%s: " $win && printname $win
done |\
	dmenu -i -l 8 -p "unhide window:" |\
	cut -d: -f1 |\
	xargs wmctrl -b toggle,hidden -ir
|
<filename>packages/axyz-js/src/solana/setupEventListeners.ts
import type AxyzSolanaContext from './context';
import { clearStoredSignature, createOrLoadMessageSignature } from './signature';
/**
 * Wires the connect/disconnect lifecycle onto the context.
 * On connect: creates (or loads a cached) message signature for the wallet
 * and stores the connection state. On disconnect: clears the stored
 * signature and resets the connection state. Each handler finally awaits
 * the caller-supplied callback, if any.
 */
const setupEventListeners = (context: AxyzSolanaContext) => {
  context.on('connect', async (wallet, callback) => {
    const { signature, message } = await createOrLoadMessageSignature(context, wallet);
    context.setMany({
      isConnected: true,
      wallet,
      publicKey: wallet.publicKey,
      signaturePublicKey: wallet.publicKey,
      signature,
      signatureMessage: message,
    });
    await callback?.();
  });
  context.on('disconnect', async (callback) => {
    clearStoredSignature(context);
    // Note: signature fields reset to undefined, wallet/publicKey to null —
    // mirrors their respective unset conventions above.
    context.setMany({
      isConnected: false,
      wallet: null,
      publicKey: null,
      signature: undefined,
      signaturePublicKey: undefined,
      signatureMessage: undefined,
    });
    await callback?.();
  });
};
export default setupEventListeners;
// Doxygen-generated member index for AngularVelocityEvaluation.
// Regenerate with Doxygen rather than editing by hand.
var classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_angular_velocity_evaluation =
[
    [ "InternalEvaluate", "classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_angular_velocity_evaluation.html#a996def7897b968eeb7685966b68bff51", null ],
    [ "InternalReset", "classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_angular_velocity_evaluation.html#af830fedabc96a81d1f2d0ae1b93427d3", null ],
    [ "PostSetup", "classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_angular_velocity_evaluation.html#a20d466fae2cc9c203e2e68aa25887b5d", null ]
];
package bot
import (
"bytes"
"context"
"crypto/aes"
"crypto/cipher"
"crypto/ed25519"
"crypto/rand"
"crypto/rsa"
"crypto/sha256"
"crypto/sha512"
"crypto/x509"
"encoding/base64"
"encoding/binary"
"encoding/json"
"encoding/pem"
"errors"
"fmt"
"io"
"time"
"github.com/MixinNetwork/mixin/crypto/edwards25519"
"golang.org/x/crypto/curve25519"
)
// EncryptPIN encrypts a user's PIN for the API.
//
// Key handling: when privateKey decodes as raw-URL base64 it is treated as
// an Ed25519 key and the call is delegated to EncryptEd25519PIN; otherwise
// it is parsed as a PEM-encoded PKCS#1 RSA key and pinToken is decrypted
// with RSA-OAEP (label = sessionId) to recover the shared AES key.
//
// Plaintext layout: pin bytes || unix time (8 bytes LE) || iterator
// (8 bytes LE) || PKCS#7 padding. Encrypted with AES-CBC under a random IV
// that is prepended to the ciphertext; the result is standard base64.
func EncryptPIN(ctx context.Context, pin, pinToken, sessionId, privateKey string, iterator uint64) (string, error) {
	_, err := base64.RawURLEncoding.DecodeString(privateKey)
	if err == nil {
		// Raw base64 key — Ed25519 flow.
		return EncryptEd25519PIN(ctx, pin, pinToken, sessionId, privateKey, iterator)
	}
	privBlock, _ := pem.Decode([]byte(privateKey))
	if privBlock == nil {
		return "", errors.New("invalid pem private key")
	}
	priv, err := x509.ParsePKCS1PrivateKey(privBlock.Bytes)
	if err != nil {
		return "", err
	}
	// Recover the shared AES key from the pin token.
	token, _ := base64.StdEncoding.DecodeString(pinToken)
	keyBytes, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, priv, token, []byte(sessionId))
	if err != nil {
		return "", err
	}
	pinByte := []byte(pin)
	timeBytes := make([]byte, 8)
	binary.LittleEndian.PutUint64(timeBytes, uint64(time.Now().Unix()))
	pinByte = append(pinByte, timeBytes...)
	iteratorBytes := make([]byte, 8)
	binary.LittleEndian.PutUint64(iteratorBytes, iterator)
	pinByte = append(pinByte, iteratorBytes...)
	// PKCS#7 padding up to the AES block size.
	padding := aes.BlockSize - len(pinByte)%aes.BlockSize
	padtext := bytes.Repeat([]byte{byte(padding)}, padding)
	pinByte = append(pinByte, padtext...)
	block, err := aes.NewCipher(keyBytes)
	if err != nil {
		return "", err
	}
	// Random IV prepended to the ciphertext.
	ciphertext := make([]byte, aes.BlockSize+len(pinByte))
	iv := ciphertext[:aes.BlockSize]
	_, err = io.ReadFull(rand.Reader, iv)
	if err != nil {
		return "", err
	}
	mode := cipher.NewCBCEncrypter(block, iv)
	mode.CryptBlocks(ciphertext[aes.BlockSize:], pinByte)
	return base64.StdEncoding.EncodeToString(ciphertext), nil
}
// EncryptEd25519PIN encrypts a PIN using an X25519 shared secret: the
// Ed25519 private key is converted to a Curve25519 scalar, multiplied with
// the server public key carried in pinTokenBase64, and the product is used
// directly as the AES key.
//
// Plaintext layout matches EncryptPIN: pin || unix time (8 LE) || iterator
// (8 LE) || PKCS#7 padding; AES-CBC with a random IV prepended. Unlike the
// RSA flow, the result is raw-URL base64.
func EncryptEd25519PIN(ctx context.Context, pin, pinTokenBase64, sessionId, privateKey string, iterator uint64) (string, error) {
	privateBytes, err := base64.RawURLEncoding.DecodeString(privateKey)
	if err != nil {
		return "", err
	}
	private := ed25519.PrivateKey(privateBytes)
	public, err := base64.RawURLEncoding.DecodeString(pinTokenBase64)
	if err != nil {
		return "", err
	}
	// X25519 key agreement: shared key = scalar_mult(curve(private), server pub).
	var keyBytes, curvePriv, pub [32]byte
	PrivateKeyToCurve25519(&curvePriv, private)
	copy(pub[:], public[:])
	curve25519.ScalarMult(&keyBytes, &curvePriv, &pub)
	pinByte := []byte(pin)
	timeBytes := make([]byte, 8)
	binary.LittleEndian.PutUint64(timeBytes, uint64(time.Now().Unix()))
	pinByte = append(pinByte, timeBytes...)
	iteratorBytes := make([]byte, 8)
	binary.LittleEndian.PutUint64(iteratorBytes, iterator)
	pinByte = append(pinByte, iteratorBytes...)
	// PKCS#7 padding up to the AES block size.
	padding := aes.BlockSize - len(pinByte)%aes.BlockSize
	padtext := bytes.Repeat([]byte{byte(padding)}, padding)
	pinByte = append(pinByte, padtext...)
	block, err := aes.NewCipher(keyBytes[:])
	if err != nil {
		return "", err
	}
	// Random IV prepended to the ciphertext.
	ciphertext := make([]byte, aes.BlockSize+len(pinByte))
	iv := ciphertext[:aes.BlockSize]
	_, err = io.ReadFull(rand.Reader, iv)
	if err != nil {
		return "", err
	}
	mode := cipher.NewCBCEncrypter(block, iv)
	mode.CryptBlocks(ciphertext[aes.BlockSize:], pinByte)
	return base64.RawURLEncoding.EncodeToString(ciphertext), nil
}
// VerifyPIN encrypts the PIN (choosing the Ed25519 flow when pinToken
// decodes to a 32-byte raw-URL value, the RSA flow otherwise), signs an
// authentication token and POSTs to /pin/verify. Returns the verified User
// or the API error.
func VerifyPIN(ctx context.Context, uid, pin, pinToken, sessionId, privateKey string) (*User, error) {
	var err error
	var encryptedPIN string
	// A 32-byte raw-URL token is an X25519 server public key (Ed25519 flow).
	pt, err := base64.RawURLEncoding.DecodeString(pinToken)
	if err == nil && len(pt) == 32 {
		encryptedPIN, err = EncryptEd25519PIN(ctx, pin, pinToken, sessionId, privateKey, uint64(time.Now().UnixNano()))
	} else {
		encryptedPIN, err = EncryptPIN(ctx, pin, pinToken, sessionId, privateKey, uint64(time.Now().UnixNano()))
	}
	if err != nil {
		return nil, err
	}
	data, err := json.Marshal(map[string]interface{}{
		"pin": encryptedPIN,
	})
	if err != nil {
		return nil, err
	}
	path := "/pin/verify"
	token, err := SignAuthenticationToken(uid, sessionId, privateKey, "POST", path, string(data))
	if err != nil {
		return nil, err
	}
	body, err := Request(ctx, "POST", path, data, token)
	if err != nil {
		return nil, err
	}
	var resp struct {
		Data  *User `json:"data"`
		Error Error `json:"error"`
	}
	err = json.Unmarshal(body, &resp)
	if err != nil {
		return nil, BadDataError(ctx)
	}
	if resp.Error.Code > 0 {
		return nil, resp.Error
	}
	return resp.Data, nil
}
func PrivateKeyToCurve25519(curve25519Private *[32]byte, privateKey ed25519.PrivateKey) {
h := sha512.New()
h.Write(privateKey.Seed())
digest := h.Sum(nil)
digest[0] &= 248
digest[31] &= 127
digest[31] |= 64
copy(curve25519Private[:], digest)
}
// PublicKeyToCurve25519 converts an Ed25519 public key (Edwards point) to
// the corresponding X25519 public key (Montgomery u-coordinate). Returns an
// error when the bytes do not decode to a valid Edwards point.
func PublicKeyToCurve25519(curve25519Public *[32]byte, publicKey ed25519.PublicKey) error {
	var k [32]byte
	copy(k[:], publicKey[:])
	var A edwards25519.ExtendedGroupElement
	if !A.FromBytes(&k) {
		return fmt.Errorf("Invalid public key %x", publicKey)
	}
	// A.Z = 1 as a postcondition of FromBytes.
	var x edwards25519.FieldElement
	edwardsToMontgomeryX(&x, &A.Y)
	edwards25519.FeToBytes(curve25519Public, &x)
	return nil
}
// edwardsToMontgomeryX maps an Edwards y-coordinate to the Montgomery
// u-coordinate via the birational map u = (1+y)/(1-y).
func edwardsToMontgomeryX(outX, y *edwards25519.FieldElement) {
	// We only need the x-coordinate of the curve25519 point, which I'll
	// call u. The isomorphism is u=(y+1)/(1-y), since y=Y/Z, this gives
	// u=(Y+Z)/(Z-Y). We know that Z=1, thus u=(Y+1)/(1-Y).
	var oneMinusY edwards25519.FieldElement
	edwards25519.FeOne(&oneMinusY)
	edwards25519.FeSub(&oneMinusY, &oneMinusY, y)
	edwards25519.FeInvert(&oneMinusY, &oneMinusY)
	edwards25519.FeOne(outX)
	edwards25519.FeAdd(outX, outX, y)
	edwards25519.FeMul(outX, outX, &oneMinusY)
}
|
<reponame>ice563102472/rpcCluster<filename>src/main/java/com/linda/framework/rpc/cluster/redis/SimpleJedisPubListener.java
package com.linda.framework.rpc.cluster.redis;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPubSub;
import com.fasterxml.jackson.core.type.TypeReference;
import com.linda.framework.rpc.Service;
import com.linda.framework.rpc.cluster.JSONUtils;
import com.linda.framework.rpc.cluster.MessageListener;
import com.linda.framework.rpc.cluster.RpcClusterConst;
import com.linda.framework.rpc.cluster.RpcHostAndPort;
import com.linda.framework.rpc.cluster.RpcMessage;
/**
 * Redis pub/sub subscriber that receives cluster heartbeat/host messages.
 * Runs the blocking {@code jedis.subscribe} call on its own thread and
 * dispatches each deserialized {@link RpcMessage} to registered listeners.
 */
public class SimpleJedisPubListener extends JedisPubSub implements Service,Runnable{
	private Jedis jedis;
	// Thread running the blocking subscribe loop.
	private Thread messageReceiveThread;
	// Redis channel name to subscribe to.
	private String channel;
	// NOTE(review): plain ArrayList — listeners are expected to be registered
	// before startService(); confirm no concurrent add during dispatch.
	private List<MessageListener> listeners = new ArrayList<MessageListener>();
	private Logger logger = Logger.getLogger(SimpleJedisPubListener.class);
	public Jedis getJedis() {
		return jedis;
	}
	public void setJedis(Jedis jedis) {
		this.jedis = jedis;
	}
	public String getChannel() {
		return channel;
	}
	public void setChannel(String channel) {
		this.channel = channel;
	}
	/** Dispatches a received message to every registered listener. */
	public void fireListeners(RpcMessage message){
		for(MessageListener listener:listeners){
			listener.onMessage(message);
		}
	}
	public void addListener(MessageListener listener){
		this.listeners.add(listener);
	}
	/** Deserializes the JSON payload and notifies listeners. */
	@Override
	public void onMessage(String channel, String message) {
		RpcMessage<RpcHostAndPort> rpcMessage = JSONUtils.fromJSON(message, new TypeReference<RpcMessage<RpcHostAndPort>>(){});
		this.fireListeners(rpcMessage);
	}
	// Pattern-based and lifecycle callbacks below are intentionally no-ops.
	@Override
	public void onPMessage(String pattern, String channel, String message) {
	}
	@Override
	public void onSubscribe(String channel, int subscribedChannels) {
	}
	@Override
	public void onUnsubscribe(String channel, int subscribedChannels) {
	}
	@Override
	public void onPUnsubscribe(String pattern, int subscribedChannels) {
	}
	@Override
	public void onPSubscribe(String pattern, int subscribedChannels) {
	}
	/** Starts the background thread running the blocking subscribe loop. */
	@Override
	public void startService() {
		messageReceiveThread = new Thread(this);
		messageReceiveThread.start();
	}
	/** Unsubscribes (unblocking the loop) and closes the connection. */
	@Override
	public void stopService() {
		this.unsubscribe();
		jedis.close();
	}
	@Override
	public void run() {
		logger.info("subscribe:"+channel);
		// Blocks until unsubscribe() is called from stopService().
		jedis.subscribe(this, channel);
	}
}
|
package com.atjl.dbtiming.domain.comparator;
import com.atjl.dbtiming.domain.biz.QueueWaitTask;
import java.util.Comparator;
/**
 * Orders {@link QueueWaitTask} instances by next-execution timestamp,
 * ascending (earliest first).
 */
public class QueueWaitTaskComparator implements Comparator<QueueWaitTask> {
    @Override
    public int compare(QueueWaitTask o1, QueueWaitTask o2) {
        // Fix: the original used Math.toIntExact(o1 - o2), which throws
        // ArithmeticException when the long difference overflows an int.
        // Long.compare is the safe, idiomatic comparison.
        return Long.compare(o1.getNextExecuteTs(), o2.getNextExecuteTs());
    }
}
|
<filename>server/stream_api.py
import os
import yaml
from flask import Blueprint, jsonify, abort
from dse.cluster import Cluster
from dse.auth import PlainTextAuthProvider
# Flask blueprint exposing read-only meetup stream endpoints.
stream_route = Blueprint('stream', __name__)
# Cassandra connection settings come from config.yaml next to this module.
with open(os.path.join(os.path.dirname(__file__), 'config.yaml')) as fd:
    config = yaml.full_load(fd)
auth_provider = PlainTextAuthProvider(config['CASSANDRA_USER'], config['CASSANDRA_PASSWORD'])
# NOTE(review): each route below opens a fresh session via cluster.connect();
# DataStax drivers recommend one long-lived session per process — confirm
# and consolidate.
cluster = Cluster(config['CASSANDRA_HOSTS'], auth_provider=auth_provider)
@stream_route.route('/countries')
def get_countries():
    """Return the distinct country codes that have event data."""
    session = cluster.connect()
    result = session.execute('select distinct country from meetups.event_cities')
    countries = [row.country for row in result]
    return jsonify(countries)
@stream_route.route('/cities/<country_code>')
def get_cities_by_coutry(country_code):
    """List cities with events for a country; 404 when none exist."""
    session = cluster.connect()
    result = session.execute('select city from meetups.event_cities where country = %s', (country_code,))
    cities = [row.city for row in result]
    if not cities:
        abort(404)
    return jsonify(cities)
@stream_route.route('/events/<event_id>')
def get_event_by_id(event_id):
    """Fetch one event by id; 404 when it does not exist."""
    fields = ['event_name', 'event_time', 'topics', 'group_name', 'country', 'city']
    session = cluster.connect()
    rows = session.execute("select * from meetups.events where event_id = %s", (str(event_id),))
    row = next(iter(rows), None)
    if row is None:
        abort(404)
    event = {field: getattr(row, field) for field in fields}
    # Topics are stored as a ';'-separated string; expose them as a list.
    event['topics'] = event['topics'].split(';')
    return jsonify(event)
@stream_route.route('/groups/<city_name>')
def get_groups_by_city(city_name):
    """List groups in a city; 404 when the city has no groups."""
    fields = ['city_name', 'group_name', 'group_id']
    session = cluster.connect()
    rows = session.execute('select * from meetups.cities_groups where city_name = %s', (city_name,))
    groups = [{field: getattr(row, field) for field in fields} for row in rows]
    if not groups:
        abort(404)
    return jsonify(groups)
@stream_route.route('/events_by_group/<int:group_id>')
def get_event_by_group(group_id):
    """List events hosted by a group; 404 when the group has no events."""
    fields = ['event_name', 'event_time', 'topics', 'group_name', 'country', 'city']
    session = cluster.connect()
    rows = session.execute('select * from meetups.groups_events where group_id = %s', (group_id,))
    events = [{field: getattr(row, field) for field in fields} for row in rows]
    if not events:
        abort(404)
    return jsonify(events)
|
#!/bin/bash
# Copyright (C) 2017 The LineageOS Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Regenerates the vendor makefiles for the Yulong CP3600I device tree from
# proprietary-files.txt, plus device-specific makefile additions.
set -e
DEVICE=CP3600I
VENDOR=yulong
INITIAL_COPYRIGHT_YEAR=2017
# Load extractutils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
if [[ ! -d "$MY_DIR" ]]; then MY_DIR="$PWD"; fi
CM_ROOT="$MY_DIR"/../../..
HELPER="$CM_ROOT"/vendor/cm/build/tools/extract_utils.sh
if [ ! -f "$HELPER" ]; then
    echo "Unable to find helper script at $HELPER"
    exit 1
fi
. "$HELPER"
# Initialize the helper
setup_vendor "$DEVICE" "$VENDOR" "$CM_ROOT"
# Copyright headers and guards
write_headers
# The standard blobs
write_makefiles "$MY_DIR"/proprietary-files.txt
# Create egl/libEGL_adreno.so symlinks at build time (32- and 64-bit).
cat << EOF >> "$ANDROIDMK"
\$(shell mkdir -p \$(PRODUCT_OUT)/system/vendor/lib/egl && pushd \$(PRODUCT_OUT)/system/vendor/lib > /dev/null && ln -s egl/libEGL_adreno.so libEGL_adreno.so && popd > /dev/null)
\$(shell mkdir -p \$(PRODUCT_OUT)/system/vendor/lib64/egl && pushd \$(PRODUCT_OUT)/system/vendor/lib64 > /dev/null && ln -s egl/libEGL_adreno.so libEGL_adreno.so && popd > /dev/null)
EOF
# Blobs for TWRP data decryption
cat << EOF >> "$BOARDMK"
ifeq (\$(RECOVERY_VARIANT),twrp)
TARGET_RECOVERY_DEVICE_DIRS += vendor/$VENDOR/$DEVICE/proprietary
endif
EOF
# We are done!
write_footers
|
# bash completion file for {{ prog }}
# Jinja template: rendered per-program with option lists, choice values and
# file/dir-completing options substituted in.
{{ prefix }}{{ prog }}()
{
    local cur prev words cword split
    _init_completion -s || return
    # Complete the argument of the option in $prev, when it takes one.
    case $prev in
        {{ no_comp }})
            # Options whose argument has no sensible completion.
            return
            ;;
        {% for c in choices -%}
        {{ c.opt}})
            COMPREPLY=( $( compgen -W '{{ c.choices }}' -- "$cur" ) )
            return
            ;;
        {% endfor -%}
        {{ file_comp }})
            _filedir
            return
            ;;
        {{ dir_comp }})
            _filedir -d
            return
            ;;
    esac
    $split && return
    # Otherwise offer the program's full option list.
    COMPREPLY=( $( compgen -W '{{ all_opts }}' -- "$cur" ) )
    [[ $COMPREPLY == *= ]] && compopt -o nospace
} &&
complete -F {{ prefix }}{{ prog }} {{ prog }}
|
<reponame>sori9088/mealplan-client
import React, { useState } from 'react';
import CssBaseline from '@material-ui/core/CssBaseline';
import TextField from '@material-ui/core/TextField';
import FormControlLabel from '@material-ui/core/FormControlLabel';
import Checkbox from '@material-ui/core/Checkbox';
import Link from '@material-ui/core/Link';
import Grid from '@material-ui/core/Grid';
import Box from '@material-ui/core/Box';
import LockOutlinedIcon from '@material-ui/icons/LockOutlined';
import Typography from '@material-ui/core/Typography';
import Container from '@material-ui/core/Container';
import { Avatar, Button } from '@material-ui/core';
import { useHistory } from 'react-router-dom';
import { Form } from 'react-bootstrap'
import { makeStyles, createMuiTheme, ThemeProvider } from '@material-ui/core/styles';
import { green, red, pink, orange } from '@material-ui/core/colors';
import { uploadFile } from 'react-s3';
import { store } from 'react-notifications-component';
// react-s3 upload target for user avatars.
// SECURITY NOTE(review): accessKeyId/secretAccessKey are baked into the
// client bundle via REACT_APP_* env vars, so they are visible to every
// visitor. Uploads should go through a backend-signed URL instead — confirm
// and migrate.
const config = {
    bucketName: 'mealplann',
    dirName: 'user', /* optional */
    region: 'ap-northeast-2',
    accessKeyId: process.env.REACT_APP_CLIENT,
    secretAccessKey: process.env.REACT_APP_KEY,
}
function Copyright() {
return (
<Typography variant="body2" color="textSecondary" align="center">
{'Copyright © '}
<Link color="inherit" href="https://material-ui.com/">
Meal Plan
</Link>{' '}
{new Date().getFullYear()}
{'.'}
</Typography>
);
}
// Layout styles for the sign-up form (centered column, themed avatar).
const useStyles = makeStyles(theme => ({
    paper: {
        marginTop: theme.spacing(8),
        display: 'flex',
        flexDirection: 'column',
        alignItems: 'center',
    },
    avatar: {
        margin: theme.spacing(1),
        backgroundColor: '#7aa557',
    },
    form: {
        width: '100%', // Fix IE 11 issue.
        marginTop: theme.spacing(3),
    },
    submit: {
        margin: theme.spacing(3, 0, 2),
    },
}));
// Page theme: orange primary palette.
const theme = createMuiTheme({
    palette: {
        primary: orange,
    },
});
export default function Signup() {
const [input, setInput] = useState({ seller: "false" })
const [file, setFile] = useState(null)
const [avatarurl, setAvatarUrl] = useState("")
const history = useHistory()
const hansol = e => {
setInput({
...input,
[e.target.name]: e.target.value
})
}
const onChangeHandler = (e) => {
setFile(e.target.files[0])
}
console.log(file)
const upload = () => {
uploadFile(file, config)
.then(data => {
store.addNotification({
message: "Completely uploaded :)",
type: "success",
insert: "top",
container: "bottom-center",
animationIn: ["animated", "fadeIn"],
animationOut: ["animated", "fadeOut"],
dismiss: {
duration: 3000,
onScreen: true
}
});
console.log(data)
setAvatarUrl(data.location)
})
.catch(err => console.error(err))
}
const register = async e => {
e.preventDefault()
const res = await fetch(process.env.REACT_APP_BURL + "/register", {
method: "POST",
headers: {
'Content-Type': "application/json"
},
body: JSON.stringify({
input,
"avatar_url" : avatarurl})
})
if (res.ok) {
const data = await res.json()
if (data.success) {
// window.location(process.env.REACT_APP_FURL+"/login") // redirect using window
history.push('/login')
} else {
alert(data.message)
}
}
}
const classes = useStyles();
return (
<ThemeProvider theme={theme}>
<Container component="main" maxWidth="xs" className="my-5">
<CssBaseline />
<div className={classes.paper}>
<Avatar className={classes.avatar}>
<LockOutlinedIcon />
</Avatar>
<Typography component="h1" variant="h5">
Sign up
</Typography>
<div className={classes.form}>
<Grid item xs={12}>
<label>Profile Image</label><br />
<input type="file" name="file" onChange={(e) => onChangeHandler(e)} />
<Button
type="submit"
onClick={()=> upload()}
color="primary"
>
Upload
</Button>
</Grid>
</div>
<form className={classes.form} noValidate onChange={e => hansol(e)} onSubmit={(e) => register(e)}>
<Grid container spacing={2}>
<Grid item xs={12}>
<TextField
variant="outlined"
required
fullWidth
id="name"
label="UserName / SellerName"
name="name"
autoComplete="name"
/>
</Grid>
<Grid item xs={12}>
<TextField
variant="outlined"
required
fullWidth
id="email"
label="Email Address"
name="email"
autoComplete="email"
/>
</Grid>
<Grid item xs={12}>
<TextField
variant="outlined"
required
fullWidth
name="password"
label="Password"
type="password"
id="password"
autoComplete="current-password"
/>
</Grid>
<Grid item xs={12}>
<Form.Group controlId="exampleForm.ControlSelect1">
<label>What you wanna be?</label>
<Form.Control as="select" name="seller">
<option value={false} >customer</option>
<option value={true}>seller</option>
</Form.Control>
</Form.Group>
</Grid>
</Grid>
<Button
type="submit"
fullWidth
variant="contained"
color="primary"
className={classes.submit}
>
Sign Up
</Button>
<Grid container justify="flex-end">
<Grid item>
<Link href="/login" variant="body2">
Already have an account? Sign in
</Link>
</Grid>
</Grid>
</form>
</div>
<Box mt={5}>
<Copyright />
</Box>
</Container>
</ThemeProvider>
);
} |
<reponame>sitewhere/sitewhere-java-api
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.model.device.request;
import java.util.HashMap;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.sitewhere.rest.model.common.request.PersistentEntityCreateRequest;
import com.sitewhere.spi.device.DeviceAssignmentStatus;
import com.sitewhere.spi.device.request.IDeviceAssignmentCreateRequest;
/**
* Holds fields needed to create a device assignment.
*/
@JsonInclude(Include.NON_NULL)
public class DeviceAssignmentCreateRequest extends PersistentEntityCreateRequest
implements IDeviceAssignmentCreateRequest {
/** Serialization version identifier */
private static final long serialVersionUID = -6880578458870122016L;
/** Device token */
private String deviceToken;
/** Customer token */
private String customerToken;
/** Area token */
private String areaToken;
/** Asset token */
private String assetToken;
/** Status */
private DeviceAssignmentStatus status;
/*
* @see com.sitewhere.spi.device.request.IDeviceAssignmentCreateRequest#
* getDeviceToken()
*/
@Override
public String getDeviceToken() {
return deviceToken;
}
public void setDeviceToken(String deviceToken) {
this.deviceToken = deviceToken;
}
/*
* @see com.sitewhere.spi.device.request.IDeviceAssignmentCreateRequest#
* getCustomerToken()
*/
@Override
public String getCustomerToken() {
return customerToken;
}
public void setCustomerToken(String customerToken) {
this.customerToken = customerToken;
}
/*
* @see
* com.sitewhere.spi.device.request.IDeviceAssignmentCreateRequest#getAreaToken(
* )
*/
@Override
public String getAreaToken() {
return areaToken;
}
public void setAreaToken(String areaToken) {
this.areaToken = areaToken;
}
/*
* @see
* com.sitewhere.spi.device.request.IDeviceAssignmentCreateRequest#getAssetToken
* ()
*/
@Override
public String getAssetToken() {
return assetToken;
}
public void setAssetToken(String assetToken) {
this.assetToken = assetToken;
}
/*
* @see
* com.sitewhere.spi.device.request.IDeviceAssignmentCreateRequest#getStatus()
*/
@Override
public DeviceAssignmentStatus getStatus() {
return status;
}
public void setStatus(DeviceAssignmentStatus status) {
this.status = status;
}
public static class Builder {
/** Request being built */
private DeviceAssignmentCreateRequest request = new DeviceAssignmentCreateRequest();
public Builder(String deviceToken, String customerToken, String areaToken, String assetToken) {
request.setDeviceToken(deviceToken);
request.setCustomerToken(customerToken);
request.setAreaToken(areaToken);
request.setAssetToken(assetToken);
}
public Builder metadata(String name, String value) {
if (request.getMetadata() == null) {
request.setMetadata(new HashMap<String, String>());
}
request.getMetadata().put(name, value);
return this;
}
public DeviceAssignmentCreateRequest build() {
return request;
}
}
} |
// (C) 2007-2020 GoodData Corporation
import {
getCellClassNames,
getCellStyleAndFormattedValue,
getMeasureCellFormattedValue,
getMeasureCellStyle,
} from "../tableCell";
import { IMappingHeader } from "@gooddata/sdk-ui";
import { IAttributeDescriptor, IMeasureDescriptor } from "@gooddata/sdk-backend-spi";
import { DataViewFirstPage } from "@gooddata/sdk-backend-mockingbird";
import { ReferenceRecordings } from "@gooddata/reference-workspace";
import { recordedDataFacade } from "../../../__mocks__/recordings";
// Recorded pivot-table scenario (first page only) used as a realistic source
// of attribute and measure descriptors for the tests below.
const fixtures = recordedDataFacade(
    ReferenceRecordings.Scenarios.PivotTable.SingleMeasureWithRowAndColumnAttributes,
    DataViewFirstPage,
);
// Row-dimension attribute descriptors and all measure descriptors.
const rowHeaders = fixtures.meta().dimensionItemDescriptors(0);
const measureHeaders = fixtures.meta().measureDescriptors();
//
// Cell formatting is done based on attribute or measure headers
//
// Returns a copy of the measure descriptor with its number format replaced;
// the input descriptor is left untouched.
function customizeFormat(desc: IMeasureDescriptor, format: string): IMeasureDescriptor {
    const measureHeaderItem = {
        ...desc.measureHeaderItem,
        format,
    };
    return { measureHeaderItem };
}
// Header fixtures: a plain attribute header plus measure headers whose number
// formats exercise fractions, a custom currency format, a font color, and a
// background color respectively.
const DefaultAttributeHeader: IMappingHeader = rowHeaders[0] as IAttributeDescriptor;
const HeaderWithFractions: IMappingHeader = measureHeaders[0];
const HeaderWithCustomNumberFormat: IMappingHeader = customizeFormat(
    measureHeaders[0],
    "[>=0]$#,##0;[<0]-$#,##0",
);
const HeaderWithCustomColor: IMappingHeader = customizeFormat(
    measureHeaders[0],
    "[red][>=0]$#,##0;[<0]-$#,##0",
);
const HeaderWithCustomBgColor: IMappingHeader = customizeFormat(
    measureHeaders[0],
    "[backgroundColor=ffff00][red][>=0]$#,##0;[<0]-$#,##0",
);
//
//
//
describe("Table utils - Cell", () => {
    // Class names encode the cell position (s-cell-row-col) plus a drillable marker.
    describe("getCellClassNames", () => {
        it("should get class names for non drillable cell", () => {
            expect(getCellClassNames(3, 9, false)).toEqual("gd-cell s-cell-3-9 s-table-cell");
        });
        it("should get class names for drillable cell", () => {
            expect(getCellClassNames(3, 9, true)).toEqual(
                "gd-cell-drillable gd-cell s-cell-3-9 s-table-cell",
            );
        });
    });
    // Covers the three inputs that drive cell rendering: the header (attribute vs
    // measure, with/without color directives in the number format), the optional
    // applyColor flag, and custom thousand/decimal separators.
    describe("getCellStyleAndFormattedValue", () => {
        it("should get style and formattedValue for attribute", () => {
            expect(
                getCellStyleAndFormattedValue(DefaultAttributeHeader, { uri: "foo", name: "Apple" }),
            ).toEqual({
                style: {},
                formattedValue: "Apple",
            });
        });
        // Null values render as a styled en dash regardless of the format.
        it("should get styled dash when the value is null", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomNumberFormat, null)).toEqual({
                style: {
                    color: "#94a1ad",
                    fontWeight: "bold",
                },
                formattedValue: "–",
            });
        });
        it("should get style and formattedValue for measure without color", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomNumberFormat, "1234567.89")).toEqual({
                style: {},
                formattedValue: "$1,234,568",
            });
        });
        it("should get style and formattedValue for measure with color", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomColor, "9876543.21")).toEqual({
                style: {
                    color: "#FF0000",
                },
                formattedValue: "$9,876,543",
            });
        });
        it("should get style and formattedValue for measure with color and backgroundColor", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomBgColor, "9876543.21")).toEqual({
                style: {
                    backgroundColor: "#ffff00",
                    color: "#FF0000",
                },
                formattedValue: "$9,876,543",
            });
        });
        it("should apply color when the argument applyColor is true", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomColor, "9876543.21", true)).toEqual({
                style: {
                    color: "#FF0000",
                },
                formattedValue: "$9,876,543",
            });
        });
        it("should apply color and backgroundColor when the argument applyColor is true", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomBgColor, "9876543.21", true)).toEqual({
                style: {
                    backgroundColor: "#ffff00",
                    color: "#FF0000",
                },
                formattedValue: "$9,876,543",
            });
        });
        it("should NOT apply color or backgroundColor whe the argument applyColor is false", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomBgColor, "9876543.21", false)).toEqual({
                style: {},
                formattedValue: "$9,876,543",
            });
        });
        // applyColor=false does not suppress the null-value styling.
        it("should get styled dash when the value is null even if the param applyColor is false", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomNumberFormat, null, false)).toEqual({
                style: {
                    color: "#94a1ad",
                    fontWeight: "bold",
                },
                formattedValue: "–",
            });
        });
        it("should get style and formattedValue if separators are not defined (integer number)", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomNumberFormat, "1234567")).toEqual({
                style: {},
                formattedValue: "$1,234,567",
            });
        });
        it("should get style and formattedValue if separators are not defined (float number)", () => {
            expect(getCellStyleAndFormattedValue(HeaderWithCustomNumberFormat, "1234567.49")).toEqual({
                style: {},
                formattedValue: "$1,234,567",
            });
        });
        it("should get style and formattedValue if separators are dot and comma (small integer number)", () => {
            expect(
                getCellStyleAndFormattedValue(HeaderWithFractions, "123", undefined, {
                    thousand: ".",
                    decimal: ",",
                }),
            ).toEqual({
                style: {},
                formattedValue: "$123,00",
            });
        });
        it("should get style and formattedValue if separators are dot and comma (big integer number)", () => {
            expect(
                getCellStyleAndFormattedValue(HeaderWithFractions, "1234567", undefined, {
                    thousand: ".",
                    decimal: ",",
                }),
            ).toEqual({
                style: {},
                formattedValue: "$1.234.567,00",
            });
        });
        it("should get style and formattedValue if separators are dot and comma (float number)", () => {
            expect(
                getCellStyleAndFormattedValue(HeaderWithFractions, "1234567.89", undefined, {
                    thousand: ".",
                    decimal: ",",
                }),
            ).toEqual({
                style: {},
                formattedValue: "$1.234.567,89",
            });
        });
        it("should get style and formattedValue if separators are empty strings", () => {
            expect(
                getCellStyleAndFormattedValue(HeaderWithFractions, "1234567.89", undefined, {
                    thousand: "",
                    decimal: "",
                }),
            ).toEqual({
                style: {},
                formattedValue: "$123456789",
            });
        });
        it("should get style and formattedValue if separators are spaces", () => {
            expect(
                getCellStyleAndFormattedValue(HeaderWithFractions, "1234567.89", undefined, {
                    thousand: " ",
                    decimal: " ",
                }),
            ).toEqual({
                style: {},
                formattedValue: "$1 234 567 89",
            });
        });
    });
describe("getMeasureCellFormattedValue", () => {
it("should get '-' when cellContent=null", () => {
expect(getMeasureCellFormattedValue(null, "[red]$#,##0.00", undefined)).toEqual("–");
});
it("should NOT get 'NaN' when cellContent=''", () => {
expect(getMeasureCellFormattedValue("", "[red]$#,##0.00", undefined)).toEqual("NaN");
});
it("should get formatted value for number", () => {
expect(
getMeasureCellFormattedValue("123456789", "[red]$#,##0.00", { thousand: ".", decimal: "," }),
).toEqual("$123.456.789,00");
});
});
    // Style extraction only: color/backgroundColor directives in the number
    // format are honored iff applyColor is true; null gets the empty-value style.
    describe("getMeasureCellStyle", () => {
        it("should get empty value style when cellContent=null", () => {
            expect(getMeasureCellStyle(null, "[red]$#,##0.00", undefined, true)).toEqual({
                color: "#94a1ad",
                fontWeight: "bold",
            });
        });
        it("should NOT get style when cellContent=''", () => {
            expect(getMeasureCellStyle("", "[red]$#,##0.00", undefined, true)).toEqual({});
        });
        it("should get style for number with color in format when applyColor=true", () => {
            expect(getMeasureCellStyle("123456789", "[red]$#,##0.00", undefined, true)).toEqual({
                color: "#FF0000",
            });
        });
        it("should get style for number with backgroundColor in format when applyColor=true", () => {
            expect(
                getMeasureCellStyle("123456789", "[backgroundColor=ffff00]$#,##0.00", undefined, true),
            ).toEqual({
                backgroundColor: "#ffff00",
            });
        });
        it("should get style for number with color and backgroundColor in format when applyColor=true", () => {
            expect(
                getMeasureCellStyle("123456789", "[backgroundColor=ffff00][red]$#,##0.00", undefined, true),
            ).toEqual({
                backgroundColor: "#ffff00",
                color: "#FF0000",
            });
        });
        it("should NOT get style for number with color in format when applyColor=false", () => {
            expect(getMeasureCellStyle("123456789", "[red]$#,##0.00", undefined, false)).toEqual({});
        });
        it("should NOT get style for number with backgroundColor in format when applyColor=false", () => {
            expect(
                getMeasureCellStyle("123456789", "[backgroundColor=ffff00]$#,##0.00", undefined, false),
            ).toEqual({});
        });
        it("should NOT get style for number without color or backgroundColor in format when applyColor=true", () => {
            expect(getMeasureCellStyle("123456789", "$#,##0.00", undefined, true)).toEqual({});
        });
    });
});
|
package cyclops.container.traversable;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import cyclops.stream.companion.Streams;
import cyclops.container.control.Trampoline;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.Test;
public class StreamTest {

    /** Convenience overload: nth Fibonacci number via a stack-safe Trampoline. */
    Trampoline<Long> fibonacci(int i) {
        return fibonacci(i, 1, 0);
    }

    /** Tail-recursive Fibonacci accumulator, trampolined to avoid deep stacks. */
    Trampoline<Long> fibonacci(int n, long a, long b) {
        return n == 0 ? Trampoline.done(b) : Trampoline.more(() -> fibonacci(n - 1, a + b, a));
    }

    @Test
    public void arrayStream() {
        List<String> res = Streams.forEach2(Stream.of("hello world", "hello"),
                                            a -> Stream.of("boo!"),
                                            (v1, v2) -> v1 + "1" + v2)
                                  .collect(Collectors.toList());
        List<String> expected = Arrays.asList("hello world1boo!", "hello1boo!");
        // assertThat takes (actual, matcher) — pass the computed result first so
        // failure messages report expected/actual the right way round.
        assertThat(res, equalTo(expected));
    }

    @Test
    public void stringStream() {
        // Cross the characters of "hello world" with themselves (identity inner
        // stream) and tag each with "1".
        List<String> res = Streams.forEach2("hello world".chars()
                                                         .boxed()
                                                         .map(i -> Character.toChars(i)[0]),
                                            i -> Stream.of(i),
                                            (a, b) -> "" + a + "1")
                                  .collect(Collectors.toList());
        List<String> expected = Arrays.asList("h1",
                                              "e1",
                                              "l1",
                                              "l1",
                                              "o1",
                                              " 1",
                                              "w1",
                                              "o1",
                                              "r1",
                                              "l1",
                                              "d1");
        assertThat(res, equalTo(expected));
    }
}
|
import torch
from mmdet.ops.iou3d.iou3d_utils import nms_gpu
def rotate_nms_torch(rbboxes,
                     scores,
                     pre_max_size=None,
                     post_max_size=None,
                     iou_threshold=0.5):
    """Rotated-box NMS (GPU) with optional pre/post truncation.

    Args:
        rbboxes: rotated boxes tensor, one row per box.
        scores: per-box scores, same length as ``rbboxes``.
        pre_max_size: if given, keep only the top-k scored boxes before NMS.
        post_max_size: if given, truncate the kept set after NMS.
        iou_threshold: IoU overlap threshold passed to ``nms_gpu``.

    Returns:
        Index tensor into the *original* ``rbboxes``, or ``None`` when
        nothing survives.
    """
    topk_indices = None
    if pre_max_size is not None:
        # Restrict NMS to the highest-scoring boxes; topk also sorts them.
        k = min(scores.shape[0], pre_max_size)
        scores, topk_indices = torch.topk(scores, k=k)
        rbboxes = rbboxes[topk_indices]

    if len(rbboxes) > 0:
        keep = nms_gpu(rbboxes, scores, iou_threshold)[:post_max_size]
    else:
        keep = torch.empty((0,), dtype=torch.int64)

    if keep.shape[0] == 0:
        return None
    # Map back through the topk selection when one was applied.
    return keep if topk_indices is None else topk_indices[keep]
<gh_stars>0
module Elibri
  module ONIX
    module Release_3_0

      class ExcerptInfo
        #Informacja o fragmencie publikacji (e-book)
        # Information about a sample/excerpt of an e-book publication.
        # Can be built either from a legacy <excerpt> element (attributes carry
        # all metadata) or from an ONIX <ResourceVersion> node (metadata spread
        # over ContentDate / ResourceVersionFeature children).

        ATTRIBUTES = [
          :file_type, :file_size, :md5, :updated_at, :link
        ]

        RELATIONS = [
          :inspect_include_fields
        ]

        attr_accessor :file_type, :file_size, :md5, :updated_at, :link, :eid, :to_xml

        # data - a Nokogiri node named either "excerpt" or "ResourceVersion";
        # raises ArgumentError for anything else.
        def initialize(data)
          @to_xml = data.to_s
          if data.name == "excerpt"
            @file_type = data.attributes['file_type'].value
            @file_size = data.attributes['file_size'].value.to_i
            @md5 = data.attributes['md5'].value
            @updated_at = Time.parse(data.attributes['updated_at'].value)
            @link = data.text
            @eid = data.attributes['id'].value.to_i
          elsif data.name == "ResourceVersion"
            # LAST_UPDATED content date is optional in ONIX.
            last_updated_node = data.css("ContentDate").find { |date|
              date.css("ContentDateRole").first.inner_text == Elibri::ONIX::Dict::Release_3_0::ContentDateRole::LAST_UPDATED }
            if last_updated_node
              @updated_at = Time.parse(last_updated_node.css("Date").first.inner_text)
            end
            @link = data.css("ResourceLink").first.text
            # NOTE(review): eid and file_type are derived from the link's URL
            # structure (5th path segment / file extension) — assumes Elibri's
            # canonical resource URLs.
            @eid = @link.split("/")[4].to_i
            @file_type = @link.split(".").last + "_excerpt"
            data.css("ResourceVersionFeature").each do |feature|
              feature_type = feature.css("ResourceVersionFeatureType").first.inner_text
              feature_value = feature.css("FeatureValue").first.inner_text
              if feature_type == Elibri::ONIX::Dict::Release_3_0::ResourceVersionFeatureType::MD5_HASH_VALUE
                @md5 = feature_value
              elsif feature_type == Elibri::ONIX::Dict::Release_3_0::ResourceVersionFeatureType::SIZE_IN_BYTES
                @file_size = feature_value.to_i
              end
            end
          else
            raise ArgumentError, "Unknow element for ExcerptInfo: #{data.name}"
          end
        end

        # Fields shown when the object is inspected.
        def inspect_include_fields
          [:link]
        end
      end
    end
  end
end
package chylex.hee.item;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.IIcon;
import net.minecraft.world.ChunkPosition;
import net.minecraft.world.World;
import chylex.hee.HardcoreEnderExpansion;
import chylex.hee.init.BlockList;
import chylex.hee.system.abstractions.Pos;
import chylex.hee.system.abstractions.nbt.NBT;
import chylex.hee.system.abstractions.nbt.NBTCompound;
import chylex.hee.system.collections.CollectionUtil;
import chylex.hee.system.util.ColorUtil;
import chylex.hee.system.util.MathUtil;
import chylex.hee.tileentity.TileEntityEnergyCluster;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Compass-like item that points at the nearest Energy Cluster block.
 *
 * Server side: periodically prunes the per-item "ignore list" of cluster
 * positions that no longer hold a cluster block. Client side: caches the
 * nearest cluster position/hue in static fields and tints the item's marker
 * render pass accordingly (color fades with distance).
 */
public class ItemEnergyOracle extends Item{
	@SideOnly(Side.CLIENT)
	private static boolean showColor;

	// Client-side cache shared by all oracle items held by the local player.
	private static int updateTimer;
	private static Pos clusterPos = null, lastPlayerPos = null;
	private static NBTCompound lastRootTag;
	private static float clusterHue;

	/** Decodes the item's NBT "ignoreList" (packed longs) into block positions. */
	private static Set<Pos> getIgnoredPositions(NBTCompound root){
		return root.getList("ignoreList").readLongs().mapToObj(Pos::at).collect(Collectors.toSet());
	}

	@SideOnly(Side.CLIENT)
	private IIcon iconMarker;

	@Override
	public void onUpdate(ItemStack is, World world, Entity entity, int slot, boolean isHeld){
		if (!world.isRemote){
			// Server: every 100 ticks, drop ignore-list entries whose chunk is
			// loaded but no longer contains a cluster block.
			if (world.getTotalWorldTime()%100 == 0 && is.hasTagCompound()){
				NBTCompound tag = NBT.wrap(is.getTagCompound());

				Set<Pos> ignored = getIgnoredPositions(tag);
				int prevSize = ignored.size();

				for(Iterator<Pos> iter = ignored.iterator(); iter.hasNext();){
					Pos pos = iter.next();

					if (world.blockExists(pos.getX(), pos.getY(), pos.getZ()) && pos.getBlock(world) != BlockList.energy_cluster){
						iter.remove();
					}
				}

				// Only rewrite NBT when something was actually removed.
				if (ignored.size() != prevSize)tag.writeList("ignoreList", ignored.stream().mapToLong(Pos::toLong));
			}
		}
		else if (entity == HardcoreEnderExpansion.proxy.getClientSidePlayer()){
			// Client: refresh the nearest-cluster cache when held, throttled to
			// every 4th update and gated on the player having moved > 1 block,
			// unless the item's NBT changed (ignore list edited).
			if (!isHeld)clusterPos = null;
			else if (((clusterPos == null || ++updateTimer >= 4) && (lastPlayerPos == null || lastPlayerPos.distance(entity) > 1D)) || !Objects.equals(lastRootTag, is.getTagCompound())){
				updateTimer = 0;
				lastPlayerPos = Pos.at(entity);
				lastRootTag = Optional.ofNullable(is.getTagCompound()).map(tag -> NBT.wrap((NBTTagCompound)tag.copy())).orElse(null);

				// Forget a cached cluster once it is out of the 80-block range.
				if (clusterPos != null && lastPlayerPos.distance(clusterPos) > 80D)clusterPos = null;

				final int chunkX = lastPlayerPos.getX()>>4, chunkZ = lastPlayerPos.getZ()>>4;

				List<TileEntityEnergyCluster> clusters = new ArrayList<>();
				Set<Pos> ignored = getIgnoredPositions(NBT.item(is, false));

				// Scan an 11x11 chunk square around the player for cluster tile
				// entities within range and not on the ignore list.
				Pos.forEachBlock(Pos.at(-5, 0, -5), Pos.at(5, 0, 5), offset -> {
					if (MathUtil.square(16*offset.x-8)+MathUtil.square(16*offset.z-8) > 6400)return; // 80 blocks

					((Map<ChunkPosition, TileEntity>)world.getChunkFromChunkCoords(chunkX+offset.getX(), chunkZ+offset.getZ()).chunkTileEntityMap).entrySet()
					.stream()
					.filter(entry -> entry.getValue().getClass() == TileEntityEnergyCluster.class && lastPlayerPos.distanceSquared(entry.getValue()) <= 6400D && !ignored.contains(Pos.at(entry.getValue())))
					.map(entry -> (TileEntityEnergyCluster)entry.getValue())
					.forEach(clusters::add);
				});

				// Cache the closest cluster and the hue used for tinting.
				CollectionUtil.min(clusters, cluster -> lastPlayerPos.distanceSquared(cluster)).ifPresent(cluster -> {
					clusterPos = Pos.at(cluster);
					clusterHue = ColorUtil.getHue(cluster.getColor(0), cluster.getColor(1), cluster.getColor(2));
				});
			}
		}
	}

	/**
	 * Right-clicking a cluster block toggles its presence on the ignore list.
	 * Returns false so normal block interaction still proceeds.
	 */
	@Override
	public boolean onItemUse(ItemStack is, EntityPlayer player, World world, int x, int y, int z, int side, float hitX, float hitY, float hitZ){
		Pos pos = Pos.at(x, y, z);

		if (!world.isRemote && pos.getBlock(world) == BlockList.energy_cluster){
			NBTCompound tag = NBT.item(is, true);

			Set<Pos> ignored = getIgnoredPositions(tag);
			if (!ignored.remove(pos))ignored.add(pos);

			tag.writeList("ignoreList", ignored.stream().mapToLong(Pos::toLong));
		}

		return false;
	}

	@Override
	@SideOnly(Side.CLIENT)
	public IIcon getIcon(ItemStack is, int pass){
		// Pass 1 renders the tinted marker overlay.
		return pass == 1 ? iconMarker : itemIcon;
	}

	@Override
	@SideOnly(Side.CLIENT)
	public IIcon getIcon(ItemStack is, int pass, EntityPlayer player, ItemStack usingItem, int useRemaining){
		// Only tint the stack actually held by the local player.
		showColor = player == HardcoreEnderExpansion.proxy.getClientSidePlayer() && is == player.getHeldItem();
		return super.getIcon(is, pass, player, usingItem, useRemaining);
	}

	@Override
	@SideOnly(Side.CLIENT)
	public int getColorFromItemStack(ItemStack is, int pass){
		if (pass == 1 && showColor && clusterPos != null && lastPlayerPos != null){
			// Fade saturation/brightness with distance to the cached cluster.
			float mp = (float)lastPlayerPos.distance(clusterPos)/80F;
			float[] color = ColorUtil.hsvToRgb(clusterHue, 0.35F+mp*0.15F, 1F-mp*0.75F);
			return ((int)(color[0]*255F)<<16)|((int)(color[1]*255F)<<8)|(int)(color[2]*255F);
		}
		// Dark gray marker when no cluster is tracked; plain white base pass.
		else return pass == 1 ? (32<<16)|(32<<8)|32 : 16777215;
	}

	@Override
	@SideOnly(Side.CLIENT)
	public boolean requiresMultipleRenderPasses(){
		return true;
	}

	@Override
	@SideOnly(Side.CLIENT)
	public int getRenderPasses(int metadata){
		return 2;
	}

	@Override
	@SideOnly(Side.CLIENT)
	public void registerIcons(IIconRegister iconRegister){
		super.registerIcons(iconRegister);
		iconMarker = iconRegister.registerIcon(getIconString()+"_marker");
	}
}
|
'use strict'
const { TABLE_PREFIX } = process.env
// Thin async CRUD wrapper around a DynamoDB DocumentClient for one table
// (named by the TABLE_PREFIX environment variable).
class Repository {
  constructor(documentClient) {
    this._documentClient = documentClient
  }

  // Base request parameters shared by every operation.
  get _baseParams() {
    return { TableName: TABLE_PREFIX }
  }

  // Returns every item in the table (single scan page).
  async list() {
    const { Items } = await this._documentClient.scan(this._createParamObject()).promise()
    return Items || []
  }

  // Fetches one item by id; resolves to undefined when absent.
  async get(id) {
    const { Item } = await this._documentClient.get(this._createParamObject({ Key: { id } })).promise()
    return Item
  }

  // Creates or replaces an item, resolving with the stored item.
  async put(item) {
    await this._documentClient.put(this._createParamObject({ Item: item })).promise()
    return item
  }

  // Removes an item by id, resolving with the id.
  async delete(id) {
    await this._documentClient.delete(this._createParamObject({ Key: { id } })).promise()
    return id
  }

  // Merges operation-specific arguments over the base parameters.
  _createParamObject(additionalArgs = {}) {
    return { ...this._baseParams, ...additionalArgs }
  }
}

exports.Repository = Repository
<filename>src/listeners/commandHandler/commandStart.js
const { Listener } = require("discord-akairo")
// Akairo listener: fires whenever the command handler starts executing a
// command and forwards the usage to statcord for statistics.
module.exports = class extends Listener {
  constructor() {
    super('commandStart', {
      event: 'commandStarted',
      emitter: 'commandHandler'
    });
  }

  exec(message, command) {
    // This logic assumes that the name of the command is the command ID
    this.client.statCord.postCommand(command.id, message.author.id);
  }
}
|
#!/bin/sh
#
# Copyright © 2008 IBM Corporation
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version
# 2 of the License, or (at your option) any later version.

# This script checks prom_init.o to see what external symbols it
# is using, if it finds symbols not in the whitelist it returns
# an error. The point of this is to discourage people from
# intentionally or accidentally adding new code to prom_init.c
# which has side effects on other parts of the kernel.

# If you really need to reference something from prom_init.o add
# it to the list below:

WHITELIST="add_reloc_offset __bss_start __bss_stop copy_and_flush
_end enter_prom memcpy memset reloc_offset __secondary_hold
__secondary_hold_acknowledge __secondary_hold_spinloop __start
strcmp strcpy strlcpy strlen strncmp strstr kstrtobool logo_linux_clut224
reloc_got2 kernstart_addr memstart_addr linux_banner _stext
__prom_init_toc_start __prom_init_toc_end btext_setup_display TOC."

NM="$1"
OBJ="$2"

ERROR=0

# Walk every undefined symbol in the object file.
for UNDEF in $($NM -u "$OBJ" | awk '{print $2}')
do
	# On 64-bit nm gives us the function descriptors, which have
	# a leading . on the name, so strip it off here.
	UNDEF="${UNDEF#.}"

	# Quote and guard against an unset KBUILD_VERBOSE so the numeric
	# comparison never sees an empty operand.
	if [ -n "$KBUILD_VERBOSE" ] && [ "$KBUILD_VERBOSE" -ne 0 ]; then
		echo "Checking prom_init.o symbol '$UNDEF'"
	fi

	OK=0
	for WHITE in $WHITELIST
	do
		if [ "$UNDEF" = "$WHITE" ]; then
			OK=1
			break
		fi
	done

	# ignore register save/restore funcitons
	case $UNDEF in
	_restgpr_*|_restgpr0_*|_rest32gpr_*)
		OK=1
		;;
	_savegpr_*|_savegpr0_*|_save32gpr_*)
		OK=1
		;;
	esac

	if [ "$OK" -eq 0 ]; then
		ERROR=1
		echo "Error: External symbol '$UNDEF' referenced" \
		     "from prom_init.c" >&2
	fi
done

exit $ERROR
|
package com.github.chen0040.leetcode.day11.medium;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by xschen on 6/8/2017.
*
*
*/
/**
 * LeetCode 655 "Print Binary Tree": render a tree into a (depth x width) grid
 * of strings where width = 2^depth - 1 and each node sits centred over its
 * subtree; empty cells are "".
 */
public class PrintBinaryTree {

    public static class TreeNode {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x) { val = x; }
    }

    public static void main(String[] args) {
        Solution s = new Solution();
        TreeNode root = new TreeNode(1);
        root.left = new TreeNode(2);
        List<List<String>> result = s.printTree(root);
        for(int i=0; i < result.size(); ++i) {
            for(int j=0; j < result.get(i).size(); ++j) {
                System.out.print(" \"" + result.get(i).get(j) + "\"");
            }
            System.out.println();
        }
    }

    public static class Solution {

        /** Sparse grid collected by the DFS: row -> (offset from centre -> value). */
        private Map<Integer, Map<Integer, Integer>> states;

        /**
         * Builds the string grid for the given tree. Returns an empty list for a
         * null root.
         */
        public List<List<String>> printTree(TreeNode root) {
            states = new HashMap<Integer, Map<Integer, Integer>>();
            int depth = print(root, 0, 0);

            List<List<String>> result = new ArrayList<List<String>>();

            // Width of a full binary tree of this depth: 2^depth - 1.
            int rowLen = 0;
            for (int i = 0; i < depth; ++i) {
                rowLen += 1 << i;
            }

            for (int i = 0; i < depth; ++i) {
                Map<Integer, Integer> row = states.getOrDefault(i, new HashMap<Integer, Integer>());
                List<String> cells = new ArrayList<String>();
                for (int j = 0; j < rowLen; ++j) {
                    // Column j expressed as an offset from the grid centre.
                    int pos = j - rowLen / 2;
                    cells.add(row.containsKey(pos) ? String.valueOf(row.get(pos)) : "");
                }
                result.add(cells);
            }
            return result;
        }

        /**
         * DFS that records each node's value at (depth d, horizontal offset pos)
         * and returns the depth of the deepest node visited.
         */
        private int print(TreeNode x, int d, int pos) {
            if (x == null) {
                return d;
            }
            int depthLeft = print(x.left, d + 1, pos - 1);
            // computeIfAbsent replaces the previous containsKey/get/put dance.
            states.computeIfAbsent(d, k -> new HashMap<Integer, Integer>()).put(pos, x.val);
            int depthRight = print(x.right, d + 1, pos + 1);
            return Math.max(depthLeft, depthRight);
        }
    }
}
|
# Standard CRUD controller for users.
# NOTE(review): there is no `new` action here even though `create` renders the
# 'new' template on validation failure — confirm routing/views cover that.
class UsersController < ApplicationController
  def index
    @users = User.all
  end

  def create
    @user = User.new(user_params)
    if @user.save
      redirect_to users_path
    else
      # Re-render the signup form with validation errors on @user.
      render 'new'
    end
  end

  def edit
    @user = User.find(params[:id])
  end

  def update
    @user = User.find(params[:id])
    if @user.update(user_params)
      redirect_to users_path
    else
      render :edit
    end
  end

  def destroy
    User.find(params[:id]).destroy
    redirect_to users_path
  end

  private

  # Strong parameters: only these attributes may be mass-assigned.
  def user_params
    params.require(:user).permit(:name, :email, :password, :password_confirmation)
  end
end
#!/bin/bash
# CI bootstrap: installs gimme (Go version manager), Python tooling, and a
# locally-compiled SWIG 3.0.12. `set -ev` aborts on the first failure and
# echoes each command.
set -ev

# Install gimme
curl -sL -o ~/bin/gimme https://raw.githubusercontent.com/travis-ci/gimme/master/gimme
chmod +x ~/bin/gimme

#Install Python libraries
python -m pip install --upgrade pip setuptools wheel tox tox-travis tox-pyenv pytest pytest-runner

# Compile SWIG
# Built from source because the distro package is too old; the build tree is
# removed afterwards.
mkdir swig_build && \
	cd swig_build && \
	wget http://prdownloads.sourceforge.net/swig/swig-3.0.12.tar.gz && \
	tar -zxf swig-3.0.12.tar.gz && \
	cd swig-3.0.12 && \
	sudo ./configure --prefix=/usr && \
	sudo make && \
	sudo make install && \
	cd ../../ && \
	sudo rm -rf swig_build
|
#**********************************************************************************#
# Copyright by @bkozdras <b.kozdras@gmail.com>                                     #
# Purpose: To build and run unit tests for BeeHive local controllers project.      #
# Version: 1.0                                                                     #
# Licence: MIT                                                                     #
#**********************************************************************************#

#!/bin/bash

# Guard: must run inside the dedicated build container.
if [[ "$RASPBERRY_PI_PICO_BUILD_ENVIRONMENT_CONTAINER" != "1" ]]
then
    echo "This script should be called from docker container - see dockerimage submodule!"
    # 'exit -1' is not a valid POSIX exit status; use 1.
    exit 1
fi

if ! cmake --version &> /dev/null
then
    echo "CMake is not found!"
    exit 1
fi

if ! make --version &> /dev/null
then
    echo "Make is not found!"
    exit 1
fi

# Must be invoked from the repository root so relative paths resolve.
THIS_DIR=$(pwd)
EXPECTED_DIR=$(git rev-parse --show-toplevel)
if [[ "$THIS_DIR" != "$EXPECTED_DIR" ]]
then
    echo "This script should be called from git repo root directory!"
    exit 1
fi

# Out-of-source build directory, namespaced by host architecture.
mkdir -p build
cd build
THIS_ARCH=$(uname -m | tr -d '\n')
BUILD_DIR=ut_$THIS_ARCH
mkdir -p "$BUILD_DIR"
cd "$BUILD_DIR"

cmake \
    -DCMAKE_BUILD_TYPE=Debug \
    -DCMAKE_C_COMPILER=gcc-$GCC_VERSION \
    -DBEEHIVEIOT_LC_BUILD_UNIT_TESTS=ON ../.. \
    && make -j$(nproc --all) \
    && ctest -j$(nproc --all) --output-on-failure --timeout 5

# Propagate the build/test result instead of the previous unconditional
# 'exit 0', which made CI report success even when tests failed.
exit $?
|
#ifndef _z2clibex_ErrorReporter_h_
#define _z2clibex_ErrorReporter_h_
#include <z2clib/Source.h>
class Overload;
class Constant;
class Def;
class Node;
class ObjectType;
class Assembly;
// Static-only facade for emitting z2c compiler diagnostics (errors, access
// violations, duplicate definitions, call-resolution failures). The \f escapes
// in messages delimit type/identifier spans for the IDE's rich error output.
class ErrorReporter {
public:
	static void CantAccess(const ZSource& src, const Point& p, Overload& over, const String& cls);
	static void CantAccess(const ZSource& src, const Point& p, Constant& over);

	static void Error(const ZSource& src, const Point& p, const String& text);
	static void Error(const ZClass& cls, const Point& p, const String& text);

	// Reports a duplicate definition at p, referencing the first occurrence p2.
	static void Dup(const ZSource& src, const Point& p, const Point& p2, const String& text, const String& text2 = "");

	// Builds a detailed "no matching overload" style diagnostic for a call site.
	static void CallError(const ZClass& cls, Point& p, Assembly& ass, ObjectType* ci, Def* def, Vector<Node*>& params, bool cons = false);

	// Diagnostics for sized-array (vector literal) instantiation errors.
	static void ErrItemCountMissing(const ZClass& cls, const Point& p, const String& text) {
		Error(cls, p, "\f'" + text + "'\f" + " can't be instantiated without an item count.");
	}

	static void ErrItemCountNegative(const ZClass& cls, const Point& p, const String& text) {
		Error(cls, p, "\f'" + text + "'\f" + " can only be instantiated with a greater than 0 item count.");
	}

	static void ErrItemCountNotInteger(const ZClass& cls, const Point& p, const String& text) {
		Error(cls, p, "\f'" + text + "'\f" + " can only be instantiated with a integer item count.");
	}

	static void ErrNotCompileTime(const ZClass& cls, const Point& p, const String& text) {
		Error(cls, p, "Compile time expression expected.");
	}

	static void ErrCArrayLiteralQualified(const ZClass& cls, const Point& p, const String& text, const String& text2) {
		Error(cls, p, "Fully qualified class '\f" + text + "\f' used as a vector literal. Remove the numeric qualifier: '\f" + text2 + "\f'.");
	}

	static void ErrCArrayNoQual(const ZClass& cls, const Point& p, const String& text) {
		Error(cls, p, "Explicitly specified type of \f'" + text + "'\f" + " must include an item count.");
	}

	static void ErrCArrayMoreElements(const ZClass& cls, const Point& p, const String& text, int c1, int c2) {
		Error(cls, p, "Literal '\f" + text + "\f' declared with an element count of '" + IntStr(c1) + "' initialized with '" + IntStr(c2) + "' (more) elements.");
	}

	static void ErrCArrayLessElements(const ZClass& cls, const Point& p, const String& text, int c1, int c2) {
		Error(cls, p, "Literal '\f" + text + "\f' declared with an element count of '" + IntStr(c1) + "' initialized with '" + IntStr(c2) + "' (less) elements.");
	}

	static void ErrEllipsisNocount(const ZClass& cls, const Point& p, const String& text) {
		Error(cls, p, "Vector literal '\f" + text + "\f' has ellipsis but no explicit item count.");
	}

	static void ErrIncompatOp(const ZClass& cls, const Point& p, const String& op, const String& text, const String& text2) {
		Error(cls, p, "Can't apply operator '" + op + "' on types: \n\t\t'\f" + text + "\f' and \n\t\t'\f" + text2 + "\f'");
	}
};
#endif
|
# Register autoreconf as an extra phase to run before configure so generated
# build files (configure, Makefile.in, ...) are regenerated fresh.
preConfigurePhases+=" autoreconfPhase"

# Make autoconf, automake and libtool (the @...@ tokens are substituted by
# Nix at build time) available as native build inputs.
for i in @autoconf@ @automake@ @libtool@; do
    findInputs $i nativePkgs propagated-native-build-inputs
done

autoreconfPhase() {
    runHook preAutoreconf
    # Defaults install missing auxiliary files and force regeneration;
    # callers may override by setting autoreconfFlags.
    autoreconf ${autoreconfFlags:---install --force}
    runHook postAutoreconf
}
|
import { client, q } from '@config/db'

/**
 * Delete the Habit document with the given FaunaDB ref id.
 *
 * Best-effort: on failure the error message is logged and the promise
 * resolves to undefined rather than rejecting.
 *
 * @param {string} id - FaunaDB document id within the "Habit" collection.
 * @returns {Promise<object|undefined>} FaunaDB delete response, or undefined on error.
 */
const deleteHabit = id =>
  client
    .query(q.Delete(q.Ref(q.Collection('Habit'), id)))
    // Removed a redundant `.then(res => res)` pass-through; the resolved
    // value is forwarded unchanged without it.
    .catch(err => console.warn(err.message))

export default deleteHabit
|
<reponame>kiwicom/orb
const globby = require("globby");
const fs = require("fs-extra");
const path = require("path");
const { format } = require("prettier");
const parserTypeScript = require("prettier/parser-typescript");
const { getScope, getByName, getAst } = require("./helpers");

// Gatsby node-creation helper: scans `folder` for *.tsx example files and
// creates one "Example" node per file containing the prettier-formatted
// example source, its knobs, variants and import scope.
module.exports = async (
  { actions, createNodeId, createContentDigest, reporter },
  { path: folder },
) => {
  try {
    const { createNode } = actions;
    const files = await globby(`${folder}/**/*.tsx`);
    files.forEach(file => {
      const { name } = path.parse(file);
      // Extract the source text of a named top-level property from the AST.
      const getProperty = (content, prop) => getByName(getAst(content), prop);
      // Parent folder name, lower-cased, used to namespace the example id.
      const exampleFolder = path.dirname(file).split("/").slice(-1).join("").toLowerCase();
      const content = fs.readFileSync(file, "utf-8");
      const id = [exampleFolder, "-", name.toLowerCase()].join("");
      const scope = getScope(content);
      const example = getProperty(content, "Example");
      const knobCode = getProperty(content, "exampleKnobs");
      const variants = getProperty(content, "exampleVariants");
      // Normalize the extracted snippet so eval below sees consistent quoting.
      const formatSource = source =>
        format(source, { parser: "json-stringify", quoteProps: "consistent" });
      // NOTE(review): eval runs at build time on repository-local example
      // files only; do not feed it untrusted input.
      // eslint-disable-next-line no-eval
      const knobs = knobCode ? eval(formatSource(knobCode)) : [];
      const code = format(example, {
        parser: "typescript",
        plugins: [parserTypeScript],
      });
      const data = {
        absolutePath: file,
        example_id: id,
        example: code,
        exampleKnobs: knobs,
        // eslint-disable-next-line no-eval
        exampleVariants: variants ? eval(variants) : [],
        scope,
      };
      createNode({
        ...data,
        id: createNodeId(`example-${data.example_id}`),
        parent: null,
        children: [],
        internal: {
          type: `Example`,
          content: JSON.stringify(data),
          contentDigest: createContentDigest(data),
        },
      });
    });
  } catch (err) {
    console.error(err);
    reporter.panicOnBuild(err);
  }
};
|
/*
* Copyright 2014 Groupon.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arpnetworking.configuration.jackson;
import com.arpnetworking.configuration.Listener;
import com.arpnetworking.configuration.Trigger;
import com.arpnetworking.logback.annotations.LogValue;
import com.arpnetworking.steno.LogValueMapFactory;
import com.arpnetworking.steno.Logger;
import com.arpnetworking.steno.LoggerFactory;
import com.arpnetworking.utility.Launchable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import net.sf.oval.constraint.NotNull;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
* Dynamic configuration implementation of {@link com.arpnetworking.configuration.Configuration}.
*
* @author <NAME> (ville dot koskela at inscopemetrics dot io)
*/
public final class DynamicConfiguration extends BaseJacksonConfiguration implements Launchable {

    /**
     * Generate a Steno log compatible representation.
     *
     * @return Steno log compatible representation.
     */
    @LogValue
    @Override
    public Object toLogValue() {
        return LogValueMapFactory.builder(this)
                .put("super", super.toLogValue())
                .put("snapshot", _snapshot)
                .put("sourceBuilders", _sourceBuilders)
                .put("listeners", _listeners)
                .put("triggerEvaluator", _triggerEvaluator)
                .build();
    }

    @Override
    protected JsonNodeSource getJsonSource() {
        // The most recently validated snapshot backs all configuration reads.
        return _snapshot.get().getJsonSource();
    }

    @Override
    public synchronized void launch() {
        LOGGER.debug()
                .setMessage("Launching")
                .addData("component", this)
                .log();
        _triggerEvaluatorExecutor = Executors.newSingleThreadExecutor(
                runnable -> new Thread(runnable, "DynamicConfigTriggerEvaluator"));
        _triggerEvaluatorExecutor.execute(_triggerEvaluator);
    }

    @Override
    public synchronized void shutdown() {
        LOGGER.debug()
                .setMessage("Stopping")
                .addData("component", this)
                .log();
        try {
            _triggerEvaluator.stop();
            // CHECKSTYLE.OFF: IllegalCatch - Prevent dynamic configuration from shutting down.
        } catch (final Exception e) {
            // CHECKSTYLE.ON: IllegalCatch
            LOGGER.error()
                    .setMessage("Stop failed")
                    .addData("component", this)
                    .addData("reason", "trigger evaluator failed to stop")
                    .setThrowable(e)
                    .log();
        }
        _triggerEvaluatorExecutor.shutdown();
        try {
            _triggerEvaluatorExecutor.awaitTermination(10, TimeUnit.SECONDS);
        } catch (final InterruptedException e) {
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
            LOGGER.warn()
                    .setMessage("Stop failed")
                    .addData("component", this)
                    .addData("reason", "trigger evaluator executor failed to stop")
                    .setThrowable(e)
                    .log();
        }
    }

    /**
     * Build a new snapshot from all configured sources and offer it to every
     * listener. The snapshot is installed and applied only if every listener
     * accepts it; otherwise the previous configuration remains in effect.
     */
    private void loadConfiguration() {
        final List<JsonNodeSource> sources =
                Lists.<com.arpnetworking.commons.builder.Builder<? extends JsonNodeSource>, JsonNodeSource>transform(
                        _sourceBuilders,
                        com.arpnetworking.commons.builder.Builder::build);

        final StaticConfiguration snapshot = new StaticConfiguration.Builder()
                .setObjectMapper(_objectMapper)
                .setSources(sources)
                .build();

        for (final Listener listener : _listeners) {
            try {
                LOGGER.debug()
                        .setMessage("Offering configuration")
                        .addData("listener", listener)
                        .log();
                listener.offerConfiguration(snapshot);
                // CHECKSTYLE.OFF: IllegalCatch - Any exception is considered validation failure.
            } catch (final Exception e) {
                // CHECKSTYLE.ON: IllegalCatch
                LOGGER.error()
                        .setMessage("Validation of offered configuration failed")
                        .addData("listener", listener)
                        .addData("configuration", snapshot)
                        .setThrowable(e)
                        .log();
                // TODO(vkoskela): Persist "good" configuration across restarts [MAI-?]
                // The code will leave the good configuration in the running instance
                // but the configuration sources may be in a state such that the next
                // restart will only have the latest (currently bad) configuration
                // available.
                return;
            }
        }

        _snapshot.set(snapshot);

        for (final Listener listener : _listeners) {
            try {
                LOGGER.debug()
                        .setMessage("Applying configuration")
                        .addData("listener", listener)
                        .log();
                listener.applyConfiguration();
                // CHECKSTYLE.OFF: IllegalCatch - Apply configuration to all instances.
            } catch (final Exception e) {
                // CHECKSTYLE.ON: IllegalCatch
                LOGGER.warn()
                        .setMessage("Application of new configuration failed")
                        .addData("listener", listener)
                        // Log the snapshot itself rather than the AtomicReference wrapper.
                        .addData("configuration", snapshot)
                        .setThrowable(e)
                        .log();
            }
        }
    }

    private DynamicConfiguration(final Builder builder) {
        super(builder);
        _sourceBuilders = ImmutableList.copyOf(builder._sourceBuilders);
        _listeners = ImmutableList.copyOf(builder._listeners);
        _triggerEvaluator = new TriggerEvaluator(Lists.newArrayList(builder._triggers));
    }

    private final AtomicReference<StaticConfiguration> _snapshot = new AtomicReference<>();
    private final List<com.arpnetworking.commons.builder.Builder<? extends JsonNodeSource>> _sourceBuilders;
    private final List<Listener> _listeners;
    private final TriggerEvaluator _triggerEvaluator;
    private ExecutorService _triggerEvaluatorExecutor;

    private static final Duration TRIGGER_EVALUATION_INTERVAL = Duration.ofSeconds(60);
    private static final Logger LOGGER = LoggerFactory.getLogger(DynamicConfiguration.class);

    /**
     * Periodically evaluates all triggers and reloads the configuration when
     * any trigger fires. Stopped cooperatively via {@link #stop()}.
     */
    private final class TriggerEvaluator implements Runnable {

        private TriggerEvaluator(final List<Trigger> triggers) {
            _triggers = triggers;
            _isRunning = true;
        }

        public void stop() {
            _isRunning = false;
        }

        @Override
        public void run() {
            Thread.currentThread().setUncaughtExceptionHandler(
                    (thread, throwable) -> LOGGER.error()
                            .setMessage("Unhandled exception")
                            .setThrowable(throwable)
                            .log());

            while (_isRunning) {
                // Evaluate all the triggers to ensure all triggers are reset
                // before loading the configuration.
                boolean reload = false;
                for (final Trigger trigger : _triggers) {
                    try {
                        // Evaluate unconditionally. Writing "reload || evaluateAndReset()"
                        // would short-circuit once one trigger fires, skipping the reset
                        // of the remaining triggers and causing a spurious reload on the
                        // next cycle.
                        final boolean triggered = trigger.evaluateAndReset();
                        reload = reload || triggered;
                        // CHECKSTYLE.OFF: IllegalCatch - Evaluate and reset all triggers
                    } catch (final Throwable t) {
                        // CHECKSTYLE.ON: IllegalCatch
                        LOGGER.warn()
                                .setMessage("Failed to evaluate and reset trigger")
                                .addData("trigger", trigger)
                                .setThrowable(t)
                                .log();
                    }
                }

                // Reload the configuration
                if (reload) {
                    try {
                        loadConfiguration();
                        // CHECKSTYLE.OFF: IllegalCatch - Prevent thread from being killed
                    } catch (final Exception e) {
                        // CHECKSTYLE.ON: IllegalCatch
                        LOGGER.error()
                                .setMessage("Failed to load configuration")
                                .setThrowable(e)
                                .log();
                    }
                }

                // Wait for the next evaluation period; poll _isRunning so stop()
                // takes effect within ~100ms instead of a full interval.
                try {
                    final ZonedDateTime sleepTimeout = ZonedDateTime.now().plus(TRIGGER_EVALUATION_INTERVAL);
                    while (ZonedDateTime.now().isBefore(sleepTimeout) && _isRunning) {
                        Thread.sleep(100);
                    }
                } catch (final InterruptedException e) {
                    LOGGER.debug()
                            .setMessage("Interrupted")
                            .addData("isRunning", _isRunning)
                            .setThrowable(e)
                            .log();
                }
            }
        }

        @LogValue
        public Object toLogValue() {
            return LogValueMapFactory.builder(this)
                    .put("isRunning", _isRunning)
                    .put("triggers", _triggers)
                    .build();
        }

        @Override
        public String toString() {
            return toLogValue().toString();
        }

        private final List<Trigger> _triggers;
        private volatile boolean _isRunning;
    }

    /**
     * Builder for {@link DynamicConfiguration}.
     */
    public static final class Builder extends BaseJacksonConfiguration.Builder<Builder, DynamicConfiguration> {

        /**
         * Public constructor.
         */
        public Builder() {
            super(DynamicConfiguration::new);
        }

        /**
         * Set the {@link List} of {@link JsonNodeSource} instance
         * {@link Builder} instances. Cannot be null.
         *
         * @param value The {@link List} of {@link JsonNodeSource}
         * instance {@link Builder} instances.
         * @return This {@link Builder} instance.
         */
        public Builder setSourceBuilders(final List<com.arpnetworking.commons.builder.Builder<? extends JsonNodeSource>> value) {
            _sourceBuilders = Lists.newArrayList(value);
            return self();
        }

        /**
         * Add a {@link JsonNodeSource} {@link Builder} instance.
         *
         * @param value The {@link JsonNodeSource} {@link Builder} instance.
         * @return This {@link Builder} instance.
         */
        public Builder addSourceBuilder(final com.arpnetworking.commons.builder.Builder<? extends JsonNodeSource> value) {
            if (_sourceBuilders == null) {
                _sourceBuilders = Lists.newArrayList();
            }
            _sourceBuilders.add(value);
            return self();
        }

        /**
         * Set the {@link List} of {@link Trigger} instances. Cannot
         * be null.
         *
         * @param value The {@link List} of {@link Trigger} instances.
         * @return This {@link Builder} instance.
         */
        public Builder setTriggers(final List<Trigger> value) {
            _triggers = Lists.newArrayList(value);
            return self();
        }

        /**
         * Add a {@link Trigger} instance.
         *
         * @param value The {@link Trigger} instance.
         * @return This {@link Builder} instance.
         */
        public Builder addTrigger(final Trigger value) {
            if (_triggers == null) {
                _triggers = Lists.newArrayList();
            }
            _triggers.add(value);
            return self();
        }

        /**
         * Set the {@link List} of {@link Listener} instances. Cannot
         * be null.
         *
         * @param value The {@link List} of {@link Listener} instances.
         * @return This {@link Builder} instance.
         */
        public Builder setListeners(final List<Listener> value) {
            _listeners = Lists.newArrayList(value);
            return self();
        }

        /**
         * Add a {@link Listener} instance.
         *
         * @param value The {@link Listener} instance.
         * @return This {@link Builder} instance.
         */
        public Builder addListener(final Listener value) {
            if (_listeners == null) {
                _listeners = Lists.newArrayList();
            }
            _listeners.add(value);
            return self();
        }

        @Override
        protected Builder self() {
            return this;
        }

        @NotNull
        private List<com.arpnetworking.commons.builder.Builder<? extends JsonNodeSource>> _sourceBuilders;
        @NotNull
        private List<Trigger> _triggers = Lists.newArrayList();
        @NotNull
        private List<Listener> _listeners;
    }
}
|
<reponame>dadanan/windowsHt
import request from '@/utils/request'
// Thin wrappers around the shared request helper for the user endpoints.

// Authenticate; credentials are sent as query parameters on a POST.
export const login = (userName, pwd) =>
  request({
    url: '/server/user/login',
    method: 'post',
    params: { userName, pwd }
  })

// Update an existing user record.
export const updateUser = data =>
  request({
    url: '/server/user/updateUser',
    method: 'put',
    data
  })

// Create a new user record.
export const createUser = data =>
  request({
    url: '/server/user/createUser',
    method: 'post',
    data
  })

// Delete the user with the given id.
export const delUser = id =>
  request({
    url: `/server/user/delUser/${id}`,
    method: 'delete'
  })

// Fetch the full user list.
export const getUserList = () =>
  request({
    url: '/server/user/getUserList',
    method: 'get'
  })

// Fetch the currently authenticated user.
export const getCurrentUser = () =>
  request({
    url: '/server/user/getCurrentUser',
    method: 'get'
  })

// End the current session.
export const logout = () =>
  request({
    url: '/server/user/logout',
    method: 'delete'
  })

// Fetch a single user by id.
export const getUserById = id =>
  request({
    url: `/server/user/getUserById/${id}`,
    method: 'get'
  })
|
<gh_stars>1-10
/*
* JobGenHashesToken.h
*
* Created on: Apr 10, 2013
* Author: johnchronis
*/
#ifndef JOBGENHASHESTOKEN_H_
#define JOBGENHASHESTOKEN_H_
#include "job.h"
namespace mple {
// Job that generates hashes for tokens of a fixed length.
class JobGenHashesToken: public mple::Job {
	int length;  // token length to generate hashes for
	int sec;     // presumably a time budget in seconds -- TODO confirm semantics

public:
	// Initialize members via an initializer list instead of assignment
	// in the constructor body.
	JobGenHashesToken(int l, int s) : length(l), sec(s) {}
	virtual ~JobGenHashesToken();

	// Execute the job (defined elsewhere).
	int Run();

	// Relative scheduling weight: lengths outside [7, 13] are cheap (10),
	// in-range lengths are heavier (50).
	int getWeight() {
		if (length < 7 || length > 13) {
			return 10;
		}
		return 50;
	}
};
} /* namespace mple */
#endif /* JOBGENHASHESTOKEN_H_ */
|
<filename>src/include/concurrency/transaction_manager.h
/**
* transaction_manager.h
*
*/
#pragma once
#include <atomic>
#include <unordered_set>
#include "common/config.h"
#include "concurrency/lock_manager.h"
#include "logging/log_manager.h"
namespace cmudb {
// Coordinates the transaction lifecycle (begin/commit/abort) and hands out
// monotonically increasing transaction ids.
class TransactionManager {
public:
// The log manager is optional (defaults to nullptr). Both pointers are stored
// raw; ownership is not transferred here -- confirm lifetimes at the call site.
TransactionManager(LockManager *lock_manager,
LogManager *log_manager = nullptr)
: next_txn_id_(0), lock_manager_(lock_manager),
log_manager_(log_manager) {}
// Start a new transaction (defined elsewhere).
Transaction *Begin();
// Commit the given transaction (defined elsewhere).
void Commit(Transaction *txn);
// Abort and roll back the given transaction (defined elsewhere).
void Abort(Transaction *txn);
private:
// Atomic counter that supplies unique, increasing transaction ids.
std::atomic<txn_id_t> next_txn_id_;
LockManager *lock_manager_;
LogManager *log_manager_;  // may be nullptr (no logging)
};
} // namespace cmudb
|
package org.hisp.dhis.program.comparator;
import java.util.Comparator;
import org.hisp.dhis.program.ProgramStage;
/**
 * Orders {@link ProgramStage} instances by their position in the program
 * ({@code getStageInProgram()}), ascending.
 */
public class ProgramStageOrderComparator implements Comparator<ProgramStage>
{
    @Override
    public int compare( ProgramStage programStage1, ProgramStage programStage2 )
    {
        // Integer.compare avoids the overflow that subtraction-based
        // comparison can produce for large magnitude values.
        return Integer.compare( programStage1.getStageInProgram(), programStage2.getStageInProgram() );
    }
}
|
#!/bin/bash
#SBATCH -o ./cluster_logs/evaluate/%A-%a.o
#SBATCH -e ./cluster_logs/evaluate/%A-%a.e
#SBATCH --gres=gpu:1 # reserves GPUs
#SBATCH -p alldlc_gpu-rtx2080
#SBATCH -x dlcgpu15,dlcgpu02,dlcgpu42
#SBATCH -a 1
#SBATCH -J ens_from_pool # sets the job name. If not specified, the file name will be used as job name

# Activate virtual environment
source activate python37

# Select ensembles of size $2 from the $1 pool of base learners on CIFAR-10
# using beam search. Positional args: $1 = pool name (also names the
# base-learner load directory), $2 = ensemble size M.
PYTHONPATH=$PWD python nes/ensemble_selection/ensembles_from_pools.py \
--M $2 \
--pool_name $1 \
--save_dir experiments-49k/cifar10/ensembles_selected/run_$SLURM_ARRAY_TASK_ID \
--load_bsls_dir experiments-49k/cifar10/baselearners/$1/run_$SLURM_ARRAY_TASK_ID \
--dataset cifar10 \
--esa beam_search \
--arch_id 0 # used only for DeepEns (RS) + ESA
|
<filename>src/app/pages/orders/services/orders.service.ts<gh_stars>10-100
import { Injectable } from '@angular/core';
import { HttpClient, HttpParams } from '@angular/common/http';
import { environment } from '../../../../environments/environment';
import { Observable } from 'rxjs';
import { CrudService } from '../../shared/services/crud.service';
/**
 * Data-access layer for the order endpoints; every call is delegated to the
 * shared CrudService.
 */
@Injectable({
  providedIn: 'root'
})
export class OrdersService {

  constructor(private crudService: CrudService) {
  }

  /** List orders; `params` carries query options (e.g. count/start for paging). */
  getOrders(params): Observable<any> {
    return this.crudService.get('/v1/private/orders', params);
  }

  /** Fetch the full details of a single order. */
  getOrderDetails(orderID): Observable<any> {
    return this.crudService.get(`/v1/private/orders/${orderID}`);
  }

  /** List all countries. */
  getCountry(): Observable<any> {
    return this.crudService.get('/v1/country');
  }

  /** Look up zones for the billing-address country code. */
  getBillingZone(value): Observable<any> {
    return this.crudService.get(`/v1/zones?code=${value}`);
  }

  /** Look up zones for the shipping-address country code. */
  getShippingZone(value): Observable<any> {
    return this.crudService.get(`/v1/zones?code=${value}`);
  }

  /** Fetch the status-change history of an order. */
  getHistory(orderID): Observable<any> {
    return this.crudService.get(`/v1/private/orders/${orderID}/history`);
  }

  /** Append an entry to an order's history. */
  addHistory(orderID, param): Observable<any> {
    return this.crudService.post(`/v1/private/orders/${orderID}/history`, param);
  }

  /** Patch the customer section of an order. */
  updateOrder(orderID, param): Observable<any> {
    return this.crudService.patch(`/v1/private/orders/${orderID}/customer`, param);
  }

  /** Fetch the next allowed payment transaction for an order. */
  getNextTransaction(orderID): Observable<any> {
    return this.crudService.get(`/v1/private/orders/${orderID}/payment/nextTransaction`);
  }

  /** Refund an order. */
  refundOrder(orderID): Observable<any> {
    return this.crudService.post(`/v1/private/orders/${orderID}/refund`, {});
  }

  /** Capture a previously authorized payment. */
  captureOrder(orderID): Observable<any> {
    return this.crudService.post(`/v1/private/orders/${orderID}/capture`, {});
  }

  /** Fetch all payment transactions recorded for an order. */
  getTransactions(orderID): Observable<any> {
    return this.crudService.get(`/v1/private/orders/${orderID}/payment/transactions`);
  }
}
|
// --- Bank account panel ---

// Default view: preselect the "own bank accounts" radio and load that view.
$(document).ready(function () {
    $('#ownBankAccountsRadio').attr('checked', true);
    $('#ba-headline').text("Own bank accounts");
    $('#ba-placeholder').empty().load('own');
});

// Switch the panel to the "create account" form.
$(document).ready(() =>
    $('#newAccountRadio').click(() => {
        $('#ba-headline').text("Create account");
        $('#ba-placeholder').empty().load('create');
    }));

// Switch the panel back to the current user's accounts.
$(document).ready(() =>
    $('#ownBankAccountsRadio').click(() => {
        $('#ba-headline').text("Own bank accounts");
        $('#ba-placeholder').empty().load('own');
    }));

// Switch the panel to the all-accounts listing.
$(document).ready(() =>
    $('#allBankAccountsRadio').click(() => {
        $('#ba-headline').text("All bank accounts");
        $('#ba-placeholder').empty().load('all');
    }));
// --- Cards panel ---

// Default view: preselect the "own cards" radio and load that view.
$(document).ready(function () {
    $('#ownCardsRadio').attr('checked', true);
    $('#c-headline').text("Own cards");
    $('#c-placeholder').empty().load('own');
});

// Switch the panel to the debit-card request form.
$(document).ready(() =>
    $('#newDebitCardRadio').click(() => {
        $('#c-headline').text("Debit card request");
        $('#c-placeholder').empty().load('debit');
    }));

// Switch the panel to the credit-card request form.
$(document).ready(() =>
    $('#newCreditCardRadio').click(() => {
        $('#c-headline').text("Credit card request");
        $('#c-placeholder').empty().load('credit');
    }));

// Switch the panel back to the current user's cards.
$(document).ready(() =>
    $('#ownCardsRadio').click(() => {
        $('#c-headline').text("Own cards");
        $('#c-placeholder').empty().load('own');
    }));

// Switch the panel to the all-users cards listing.
$(document).ready(() =>
    $('#allUsersCardsRadio').click(() => {
        $('#c-headline').text("All users' cards");
        $('#c-placeholder').empty().load('all');
    }));

// Show the credit-limit input only while the CREDIT_CARD option is checked;
// otherwise hide it and clear any entered value.
function showCreditLimit(currentObject) {
    if ($(currentObject).is(":checked") && $(currentObject).val() === "CREDIT_CARD") {
        $('#inputMaxCredit').show().focus();
    } else {
        $('#inputMaxCredit').val('').hide();
    }
}
// --- Transactions panel ---

// Default view: preselect the "own transactions" radio and load that view.
$(document).ready(function () {
    $('#ownTransactionsRadio').attr('checked', true);
    $('#tr-headline').text("Transaction");
    $('#tr-placeholder').empty().load('own');
});

// Switch the panel to the new-transaction form.
$(document).ready(() =>
    $('#newTransactionRadio').click(() => {
        $('#tr-headline').text("New transaction");
        $('#tr-placeholder').empty().load('initiate');
    }));

// Switch the panel back to the current user's transactions.
$(document).ready(() =>
    $('#ownTransactionsRadio').click(() => {
        $('#tr-headline').text("Own transactions");
        $('#tr-placeholder').empty().load('own');
    }));

// Switch the panel to the all-transactions listing.
$(document).ready(() =>
    $('#allTransactionsRadio').click(() => {
        $('#tr-headline').text("All transactions");
        $('#tr-placeholder').empty().load('all');
    }));

// Show the recipient fields only while the TRANSFER option is checked.
function showRecipient(currentObject) {
    if ($(currentObject).is(":checked") && $(currentObject).val() === "TRANSFER") {
        $('#recipientIbanInput').show().focus();
        $('#recipientFirstNameInput').show().focus();
        $('#recipientLastNameInput').show().focus();
    } else {
        $('#recipientIbanInput').hide();
        $('#recipientFirstNameInput').hide();
        $('#recipientLastNameInput').hide();
    }
}

// Show the regularity input while the checkbox is checked; otherwise hide it
// and clear its value.
function showRegularities(currentObject) {
    if ($(currentObject).is(":checked")) {
        $('#regularityInput').show().focus();
    } else {
        $('#regularityInput').hide();
        $('#inputRegularity').val('');
    }
}
// --- Miscellaneous ---

// Any element with id "cancel" navigates back one page instead of submitting.
$(document).on('click', '#cancel', function (e) {
    e.preventDefault();
    parent.history.back();
});

// Retained earlier alternatives for reference (not executed):
// $('button#cancel').on('click', function(e){
//     e.preventDefault();
//     window.history.back();
// });

//Using .ajax
// $(document).ready(() =>
//     $('#ownBankAccountsRadio').click(function () {
//         $.ajax({
//             method: "GET",
//             url: "home",
//             cache: false,
//             success: () => {
//                 $('#ba-headline').text("Own bank accounts");
//                 $('#ba-placeholder').empty().load("own-accounts");
//             },
//             error: () => $('#ba-placeholder').text("Loading failed....")
//         })
//     }));
<gh_stars>10-100
'use strict'

const fp = require('fastify-plugin')
const { createClient } = require('@supabase/supabase-js')

// Fastify plugin that decorates the instance with a Supabase client.
// With `namespace` set, clients are exposed as fastify.supabase[namespace]
// so several independently configured clients can coexist; without it a
// single client is exposed directly as fastify.supabase.
function fastifySupabase (fastify, options, next) {
  const { namespace, supabaseKey, supabaseUrl, ...supabaseOptions } = options

  // Both credentials are mandatory; fail registration with a descriptive error.
  if (!supabaseKey) {
    return next(new Error('You must provide a Supabase API key'))
  }

  if (!supabaseUrl) {
    return next(new Error('You must provide a Supabase Project URL'))
  }

  // Remaining options are forwarded verbatim to the Supabase client.
  const supabase = createClient(supabaseUrl, supabaseKey, supabaseOptions)

  if (namespace) {
    // Guard against clobbering a property of the client object itself.
    if (supabase[namespace]) {
      return next(new Error(`fastify-supabase '${namespace}' is a reserved keyword`))
    } else if (!fastify.supabase) {
      fastify.decorate('supabase', Object.create(null))
    } else if (Object.prototype.hasOwnProperty.call(fastify.supabase, namespace)) {
      return next(new Error(`Supabase client '${namespace}' instance name has already been registered`))
    }

    fastify.supabase[namespace] = supabase
  } else {
    // Un-namespaced registration is only allowed once.
    if (fastify.supabase) {
      return next(new Error('fastify-supabase has already been registered'))
    } else {
      fastify.decorate('supabase', supabase)
    }
  }

  next()
}

module.exports = fp(fastifySupabase, {
  fastify: '>=3.0.0',
  name: 'fastify-supabase'
})
|
#!/usr/bin/env bash

# Test that get_gh_repo_name extracts the repository name from the common
# GitHub URL shapes (SSH, HTTPS with/without .git, and API URLs).

# --- begin runfiles.bash initialization v2 ---
# Copy-pasted from the Bazel Bash runfiles library v2.
set -uo pipefail; f=bazel_tools/tools/bash/runfiles/runfiles.bash
source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
source "$0.runfiles/$f" 2>/dev/null || \
source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
{ echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
# --- end runfiles.bash initialization v2 ---

# MARK - Locate Dependencies

# fail.sh provides fail(); github.sh provides get_gh_repo_name().
fail_sh_location=cgrindel_bazel_starlib/shlib/lib/fail.sh
fail_sh="$(rlocation "${fail_sh_location}")" || \
(echo >&2 "Failed to locate ${fail_sh_location}" && exit 1)
source "${fail_sh}"

github_sh_location=cgrindel_bazel_starlib/shlib/lib/github.sh
github_sh="$(rlocation "${github_sh_location}")" || \
(echo >&2 "Failed to locate ${github_sh_location}" && exit 1)
source "${github_sh}"

# MARK - Test

# Every URL variant below should resolve to the same repository name.
urls=()
urls+=(git@github.com:cgrindel/bazel-starlib.git)
urls+=(git@github.com:cgrindel/bazel-starlib)
urls+=(https://github.com/foo_bar/bazel-starlib.git)
urls+=(https://github.com/chicken-smidgen/bazel-starlib)
urls+=(https://api.github.com/repos/chicken-smidgen/bazel-starlib)

expected=bazel-starlib
for (( i = 0; i < ${#urls[@]}; i++ )); do
url="${urls[$i]}"
actual="$( get_gh_repo_name "${url}" )"
[[ "${actual}" == "${expected}" ]] || \
fail "Expected name not found. url: ${url}, expected: ${expected}, actual: ${actual}"
done
|
import os
import subprocess
def get_desktop_environment():
    """Return the current desktop environment name in lower case.

    Derived from the DESKTOP_SESSION environment variable (for example
    "/usr/share/xsessions/gnome" -> "gnome"). Returns "unknown" when the
    variable is not set or empty.
    """
    desktop_session = os.environ.get("DESKTOP_SESSION")
    if desktop_session:
        return desktop_session.split('/')[-1].lower()
    return "unknown"


def set_wallpaper(wallpaper_path):
    """Set the desktop wallpaper for the detected desktop environment.

    Supports GNOME, MATE, Cinnamon and Xfce. Returns a human-readable
    status string; never raises on command failure.
    """
    desktop_environment = get_desktop_environment()

    # Build each command as an argument list (no shell) so paths containing
    # spaces, quotes or shell metacharacters cannot break or inject into the
    # command line.
    if desktop_environment == "gnome":
        command = ['gsettings', 'set', 'org.gnome.desktop.background',
                   'picture-uri', f'file://{wallpaper_path}']
    elif desktop_environment == "mate":
        command = ['gsettings', 'set', 'org.mate.background',
                   'picture-filename', wallpaper_path]
    elif desktop_environment == "cinnamon":
        command = ['gsettings', 'set', 'org.cinnamon.desktop.background',
                   'picture-uri', f'file://{wallpaper_path}']
    elif desktop_environment == "xfce":
        # NOTE(review): the monitor/workspace property path is hard-coded and
        # may differ per machine -- confirm for the target setup.
        command = ['xfconf-query', '-c', 'xfce4-desktop',
                   '-p', '/backdrop/screen0/monitorDisplayPort-1/workspace0/last-image',
                   '--set', wallpaper_path]
    else:
        return f"Unsupported desktop environment: {desktop_environment}"

    try:
        # FileNotFoundError covers the settings tool itself being absent,
        # preserving the original "return a failure string" behavior.
        subprocess.run(command, check=True)
        return f"Wallpaper set successfully for {desktop_environment.capitalize()}"
    except (subprocess.CalledProcessError, FileNotFoundError):
        return f"Failed to set wallpaper for {desktop_environment.capitalize()}"


if __name__ == '__main__':
    # Manual smoke test; guarded so importing this module has no side effects.
    print(set_wallpaper('/path/to/wallpaper.jpg'))
#!/usr/bin/env bash

# Resolve the bundle id of whichever Eudic variant is installed, trying the
# free English build, the paid English build, then the generic app name.
Eudic_ID=$(osascript -e 'id of app "Eudb_en_free"' 2>/dev/null) || \
Eudic_ID=$(osascript -e 'id of app "Eudb_en"' 2>/dev/null) || \
Eudic_ID=$(osascript -e 'id of app "Eudic"' 2>/dev/null)

# No variant found: prompt the user to install it and bail out.
if [[ -z "$Eudic_ID" ]]; then
osascript <<EOF
display dialog "Please install EuDic"
EOF
exit
fi

# Ask Eudic to pronounce the word passed as the first argument.
osascript <<EOF
tell application id "$Eudic_ID"
speak word with word "$1"
end tell
EOF
|
#include <ctype.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include "../../lib/common.h"
#define ROW_COUNT 10
#define COL_COUNT 10
typedef uint8_t Grid[ROW_COUNT][COL_COUNT];
/*
 * Recursively flash every octopus whose energy exceeds 9: each new flasher is
 * recorded in `flashed` (so it flashes at most once per step) and all of its
 * in-bounds neighbours, including diagonals, gain one energy. Repeats until a
 * full pass produces no new flashes.
 */
static void propagateFlash(Grid rows, Grid flashed) {
  bool loopAgain = false;
  for (int i = 0; i < ROW_COUNT; i++) {
    for (int j = 0; j < COL_COUNT; j++) {
      if (rows[i][j] > 9 && flashed[i][j] != 1) {
        flashed[i][j] = 1;
        loopAgain = true;
        /* Bump every in-bounds neighbour. Replaces eight hand-written
         * branches with one bounds-checked loop -- same effect, less
         * room for a copy/paste mistake. */
        for (int di = -1; di <= 1; di++) {
          for (int dj = -1; dj <= 1; dj++) {
            if (di == 0 && dj == 0) continue;
            int ni = i + di;
            int nj = j + dj;
            if (ni >= 0 && ni < ROW_COUNT && nj >= 0 && nj < COL_COUNT) {
              rows[ni][nj]++;
            }
          }
        }
      }
    }
  }
  if (loopAgain) propagateFlash(rows, flashed);
}
/*
 * Advance the simulation by one step: raise every octopus's energy by one,
 * cascade flashes, then zero out and count every octopus that flashed.
 * Returns the number of flashes in this step.
 */
static uint8_t step(Grid rows) {
  Grid flashed = {0};

  /* Energy gain phase. */
  for (int r = 0; r < ROW_COUNT; r++) {
    for (int c = 0; c < COL_COUNT; c++) {
      rows[r][c] += 1;
    }
  }

  propagateFlash(rows, flashed);

  /* Reset phase: flashed cells drop back to zero energy. */
  uint8_t count = 0;
  for (int r = 0; r < ROW_COUNT; r++) {
    for (int c = 0; c < COL_COUNT; c++) {
      if (flashed[r][c]) {
        rows[r][c] = 0;
        count++;
      }
    }
  }
  return count;
}
int main(int argc, char* argv[]) {
if (argc != 2) {
fprintf(stderr, "\033[1;31mExactly one argument expected: path to input\n");
return EXIT_FAILURE;
}
char* file = readFile(argv[1]);
Grid rows = {0};
uint8_t charCount = 0;
uint8_t lineCount = 0;
uint8_t colCount = 0;
char prev;
for (;;) {
char c = file[charCount];
charCount++;
if (isdigit(c)) {
rows[lineCount][colCount] = (uint8_t)(c - 48);
colCount++;
} else if (c == '\n') {
if (prev == '\n') continue;
lineCount++;
colCount = 0;
} else if (c == '\0') {
break;
}
prev = c;
}
free(file);
// Model steps
uint16_t flashCount = 0;
uint16_t allFlashedInStep;
for (int i = 0;; i++) {
uint8_t flashesInStep = step(rows);
if (i < 100) flashCount += flashesInStep;
if (flashesInStep == 100) {
allFlashedInStep = i + 1;
if (i >= 100) break;
}
}
printf("Part 1: Total flashed after 100 steps: %d\n", flashCount);
printf("Part 2: All flash in step: %d\n", allFlashedInStep);
return EXIT_SUCCESS;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.